From b35276f351fb68a2251ae117d3e9bdaecacd8782 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Tue, 14 Oct 2014 10:46:58 -0600 Subject: [PATCH 01/71] changed versions --- setup.py | 2 +- src/ocgis/__init__.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index 008d6cb51..3f79be007 100644 --- a/setup.py +++ b/setup.py @@ -5,7 +5,7 @@ import tempfile -VERSION = '1.0.0' +VERSION = '1.0.0-next' class UninstallCommand(Command): diff --git a/src/ocgis/__init__.py b/src/ocgis/__init__.py index 9416ced2e..564d9a2e3 100644 --- a/src/ocgis/__init__.py +++ b/src/ocgis/__init__.py @@ -1,5 +1,5 @@ __VER__ = '1.0.0' -__RELEASE__ = '1.0.0' +__RELEASE__ = '1.0.0-next' from util.environment import env from api.operations import OcgOperations From 396668f399d157de9f2545371520b70d16ef9596 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Tue, 14 Oct 2014 11:09:06 -0600 Subject: [PATCH 02/71] added and reorganized imports --- .../v0.06b/fragments/test_process_manager.py | 57 ------------------- src/ocgis/__init__.py | 28 +++++---- 2 files changed, 16 insertions(+), 69 deletions(-) delete mode 100644 examples/filed/v0.06b/test_scripts/v0.06b/fragments/test_process_manager.py diff --git a/examples/filed/v0.06b/test_scripts/v0.06b/fragments/test_process_manager.py b/examples/filed/v0.06b/test_scripts/v0.06b/fragments/test_process_manager.py deleted file mode 100644 index 923e0db33..000000000 --- a/examples/filed/v0.06b/test_scripts/v0.06b/fragments/test_process_manager.py +++ /dev/null @@ -1,57 +0,0 @@ -import unittest -import time -from multiprocessing import Pool -import numpy as np -import itertools - - -def iter_proc_args(): - for ii in range(0,10): - yield(ii) - -def operation(ii): - time.sleep(np.random.randint(0,3)) - return(ii*2) - - -class SubsetOperation(object): - - def __init__(self,it_procs,serial=True,nprocs=1): - self.it_procs = it_procs - self.serial = serial - self.nprocs = nprocs - - def __iter__(self): - if self.serial: - it = 
itertools.imap(operation,self.it_procs()) - else: - pool = Pool(processes=self.nprocs) - it = pool.imap_unordered(operation,self.it_procs()) - while True: - try: - yield(it.next()) - except StopIteration: - break - - def run(self): - path = '/tmp/foo.txt' - with open(path,'w') as f: - for value in self: - f.write(str(value)) - return(path) - - -class TestProcessManager(unittest.TestCase): - - def test(self): - serial = False - conv = SubsetOperation(iter_proc_args,serial=serial,nprocs=4) - ret = conv.run() - print ret - - - - -if __name__ == "__main__": - #import sys;sys.argv = ['', 'Test.testName'] - unittest.main() \ No newline at end of file diff --git a/src/ocgis/__init__.py b/src/ocgis/__init__.py index 564d9a2e3..e8d5ffaf3 100644 --- a/src/ocgis/__init__.py +++ b/src/ocgis/__init__.py @@ -1,18 +1,22 @@ +from osgeo import ogr, osr + +from ocgis.api.collection import SpatialCollection +from ocgis.api.operations import OcgOperations +from ocgis.api.request.base import RequestDataset, RequestDatasetCollection +from ocgis.calc.library.register import FunctionRegistry +from ocgis.interface.base import crs +from ocgis.interface.base.crs import CoordinateReferenceSystem +from ocgis.interface.base.dimension.spatial import SpatialDimension +from ocgis.interface.base.field import Field +from ocgis.util.environment import env +from ocgis.util.inspect import Inspect +from ocgis.util.shp_cabinet import ShpCabinet, ShpCabinetIterator +from ocgis.util.zipper import format_return + + __VER__ = '1.0.0' __RELEASE__ = '1.0.0-next' -from util.environment import env -from api.operations import OcgOperations -from util.shp_cabinet import ShpCabinet, ShpCabinetIterator -from util.inspect import Inspect -from api.request.base import RequestDataset, RequestDatasetCollection -from util.zipper import format_return -from interface.base import crs -from calc.library.register import FunctionRegistry -from api.collection import SpatialCollection -from interface.base import crs -from 
ocgis.interface.base.crs import CoordinateReferenceSystem -from osgeo import ogr, osr # tell ogr/osr to raise exceptions ogr.UseExceptions() From 01888995fd74911b5b9f477b8f54b22a782812dc Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Wed, 15 Oct 2014 10:27:39 -0600 Subject: [PATCH 03/71] Added documentation to TestBase and renamed some attributes. Also, some attributes were made public. Moved cfunits specific tests to own module in test simple package. --- .../v0.06b/combinations/parameters.py | 2 +- src/ocgis/conv/fiona_.py | 6 +- src/ocgis/test/base.py | 361 ++++++++++++------ src/ocgis/test/test_base.py | 18 +- src/ocgis/test/test_misc/test_conversion.py | 18 +- .../test_ocgis/test_api/test_collection.py | 2 +- .../test_ocgis/test_api/test_interpreter.py | 4 +- .../test_ocgis/test_api/test_operations.py | 78 ++-- .../test_api/test_parms/test_definition.py | 16 +- .../test_api/test_request/test_base.py | 68 ++-- .../test_request/test_driver/test_base.py | 8 +- .../test_request/test_driver/test_nc.py | 66 ++-- .../test/test_ocgis/test_api/test_subset.py | 52 +-- .../test_ocgis/test_calc/test_calc_general.py | 16 +- .../test/test_ocgis/test_calc/test_engine.py | 12 +- .../test_calc/test_eval_function.py | 14 +- .../test_dynamic_kernel_percentile.py | 8 +- .../test_index/test_heat_index.py | 4 +- .../test_calc/test_library/test_math.py | 2 +- .../test_calc/test_library/test_statistics.py | 8 +- .../test_contrib/test_library_icclim.py | 36 +- .../test/test_ocgis/test_conv/test_base.py | 18 +- .../test/test_ocgis/test_conv/test_csv_shp.py | 16 +- .../test/test_ocgis/test_conv/test_fiona_.py | 12 +- .../test/test_ocgis/test_conv/test_meta.py | 2 +- .../test/test_ocgis/test_conv/test_nc.py | 4 +- .../test_interface/test_base/test_crs.py | 14 +- .../test_base/test_dimension/test_spatial.py | 2 +- .../test_base/test_dimension/test_temporal.py | 2 +- .../test_interface/test_base/test_field.py | 2 +- .../test_interface/test_base/test_variable.py | 2 +- 
.../test_interface/test_metadata.py | 2 +- .../test/test_ocgis/test_regrid/test_base.py | 18 +- .../test_ocgis/test_util/test_environment.py | 6 +- .../test/test_ocgis/test_util/test_helpers.py | 4 +- .../test/test_ocgis/test_util/test_inspect.py | 4 +- .../test_ocgis/test_util/test_large_array.py | 16 +- .../test_util/test_logging_ocgis.py | 4 +- .../test_ocgis/test_util/test_shp_cabinet.py | 12 +- .../test_ocgis/test_util/test_shp_process.py | 6 +- .../test_shp_scanner/test_shp_scanner.py | 6 +- .../test_spatial/test_spatial_subset.py | 18 +- .../test/test_ocgis/test_util/test_units.py | 6 +- .../test/test_ocgis/test_util/test_zipper.py | 2 +- src/ocgis/test/test_real_data/test_cf.py | 4 +- .../test/test_real_data/test_cf_exceptions.py | 10 +- .../test_real_data/test_multiple_datasets.py | 14 +- src/ocgis/test/test_real_data/test_narccap.py | 20 +- src/ocgis/test/test_real_data/test_package.py | 4 +- .../test_real_data/test_random_datasets.py | 52 +-- .../test/test_simple/make_test_data_subset.py | 93 ----- src/ocgis/test/test_simple/test_cfunits.py | 22 ++ src/ocgis/test/test_simple/test_simple.py | 116 +++--- 53 files changed, 669 insertions(+), 643 deletions(-) delete mode 100644 src/ocgis/test/test_simple/make_test_data_subset.py create mode 100644 src/ocgis/test/test_simple/test_cfunits.py diff --git a/examples/filed/v0.06b/test_scripts/v0.06b/combinations/parameters.py b/examples/filed/v0.06b/test_scripts/v0.06b/combinations/parameters.py index 39ee50ef0..f505248e9 100644 --- a/examples/filed/v0.06b/test_scripts/v0.06b/combinations/parameters.py +++ b/examples/filed/v0.06b/test_scripts/v0.06b/combinations/parameters.py @@ -93,7 +93,7 @@ def values(self): # values = [] # -# tdata = TestBase.get_tdata() +# tdata = TestBase.get_tst_data_nc() # # rd = tdata.get_rd('cancm4_tasmax_2001') # diff --git a/src/ocgis/conv/fiona_.py b/src/ocgis/conv/fiona_.py index d482028d7..70a3ff871 100644 --- a/src/ocgis/conv/fiona_.py +++ b/src/ocgis/conv/fiona_.py @@ -119,10 
+119,10 @@ def _build_(self,coll): return(ret) - def _write_coll_(self,f,coll): + def _write_coll_(self, f, coll): fiona_object = f['fiona_object'] - for geom,properties in coll.get_iter_dict(use_upper_keys=True,conversion_map=f['fiona_conversion']): - to_write = {'geometry':mapping(geom),'properties':properties} + for geom, properties in coll.get_iter_dict(use_upper_keys=True, conversion_map=f['fiona_conversion']): + to_write = {'geometry': mapping(geom), 'properties': properties} fiona_object.write(to_write) diff --git a/src/ocgis/test/base.py b/src/ocgis/test/base.py index 72c285953..3e583fd7d 100644 --- a/src/ocgis/test/base.py +++ b/src/ocgis/test/base.py @@ -15,96 +15,68 @@ import netCDF4 as nc +class ToTest(Exception): + """ + Useful when wanting to flag things as not tested. + """ + class TestBase(unittest.TestCase): + """ + All tests should inherit from this. It allows test data to be written to a temporary folder and removed easily. + Also simplifies access to static files. + """ - '''All tests should inherit from this. 
It allows test data to be written to - a temporary folder and removed easily.''' __metaclass__ = abc.ABCMeta - _reset_env = True - _create_dir = True + # set to false to not resent the environment before each test + reset_env = True + # set to false to not create and destroy a temporary directory before each test + create_dir = True + # prefix for the temporary test directories + _prefix_path_test = 'ocgis_test_' - def __init__(self, *args, **kwds): - self.test_data = self.get_tdata() - super(TestBase, self).__init__(*args, **kwds) + def __init__(self, *args, **kwargs): + self.test_data_nc = self.get_tst_data_nc() + self.current_dir_output = None + self.ToTest = ToTest + super(TestBase, self).__init__(*args, **kwargs) @property - def _test_bin_dir(self): + def path_bin(self): + """Path to binary test file directory.""" + base_dir = os.path.split(__file__)[0] ret = os.path.join(base_dir, 'bin') - return (ret) - - @staticmethod - def get_tdata(): - test_data = TestData() - test_data.update(['daymet'], 'tmax', 'tmax.nc', key='daymet_tmax') - test_data.update(['CanCM4'], 'tas', 'tas_day_CanCM4_decadal2000_r2i1p1_20010101-20101231.nc', key='cancm4_tas') - test_data.update(['CanCM4'], 'tasmax', 'tasmax_day_CanCM4_decadal2010_r2i1p1_20110101-20201231.nc', - key='cancm4_tasmax_2011') - test_data.update(['CanCM4'], 'tasmax', 'tasmax_day_CanCM4_decadal2000_r2i1p1_20010101-20101231.nc', - key='cancm4_tasmax_2001') - test_data.update(['CanCM4'], 'tasmin', 'tasmin_day_CanCM4_decadal2000_r2i1p1_20010101-20101231.nc', - key='cancm4_tasmin_2001') - test_data.update(['CanCM4'], 'rhs', 'rhs_day_CanCM4_decadal2010_r2i1p1_20110101-20201231.nc', key='cancm4_rhs') - test_data.update(['CanCM4'], 'rhsmax', 'rhsmax_day_CanCM4_decadal2010_r2i1p1_20110101-20201231.nc', - key='cancm4_rhsmax') - test_data.update(['maurer', 'bccr'], 'Prcp', 'bccr_bcm2_0.1.sresa1b.monthly.Prcp.1950.nc', - key='maurer_bccr_1950') - test_data.update(['narccap'], 'pr', 'pr_CRCM_ccsm_1981010103.nc', 
key='narccap_crcm') - test_data.update(['narccap'], 'pr', 'pr_RCM3_gfdl_1981010103.nc', key='narccap_rcm3') - test_data.update(['narccap'], 'pr', 'pr_HRM3_gfdl_1981010103.nc', key='narccap_hrm3') - test_data.update(['narccap'], 'pr', 'pr_WRFG_ccsm_1986010103.nc', key='narccap_wrfg') - # test_data.update(['CCSM4'],'albisccp','albisccp_cfDay_CCSM4_1pctCO2_r2i1p1_00200101-00391231.nc',key='ccsm4') - # test_data.update(['hostetler'],'TG','RegCM3_Daily_srm_GFDL.ncml.nc',key='hostetler') - test_data.update(['maurer', '2010'], 'pr', - ['nldas_met_update.obs.daily.pr.1990.nc', 'nldas_met_update.obs.daily.pr.1991.nc'], - key='maurer_2010_pr') - test_data.update(['maurer', '2010'], 'tas', - ['nldas_met_update.obs.daily.tas.1990.nc', 'nldas_met_update.obs.daily.tas.1991.nc'], - key='maurer_2010_tas') - test_data.update(['maurer', '2010'], 'tasmin', - ['nldas_met_update.obs.daily.tasmin.1990.nc', 'nldas_met_update.obs.daily.tasmin.1991.nc'], - key='maurer_2010_tasmin') - test_data.update(['maurer', '2010'], 'tasmax', - ['nldas_met_update.obs.daily.tasmax.1990.nc', 'nldas_met_update.obs.daily.tasmax.1991.nc'], - key='maurer_2010_tasmax') - test_data.update(['narccap'], 'pr', ['pr_WRFG_ncep_1981010103.nc', 'pr_WRFG_ncep_1986010103.nc'], - key='narccap_pr_wrfg_ncep') - test_data.update(['narccap'], 'tas', 'tas_HRM3_gfdl_1981010103.nc', key='narccap_rotated_pole') - test_data.update(['narccap'], 'pr', 'pr_WRFG_ccsm_1986010103.nc', key='narccap_lambert_conformal') - test_data.update(['narccap'], 'pr', 'pr_CRCM_ccsm_1981010103.nc', key='narccap_polar_stereographic') - test_data.update(['narccap'], 'tas', 'tas_RCM3_gfdl_1981010103.nc', key='narccap_tas_rcm3_gfdl') - test_data.update(['snippets'], 'dtr', 'snippet_Maurer02new_OBS_dtr_daily.1971-2000.nc', - key='snippet_maurer_dtr') - test_data.update(['CMIP3'], 'Tavg', 'Extraction_Tavg.nc', key='cmip3_extraction') + return ret - test_data.update(['misc', 'subset_test'], 'Tavg', 'Tavg_bccr_bcm2_0.1.sresa2.nc', key='subset_test_Tavg') - 
test_data.update(['misc', 'subset_test'], 'Tavg', 'sresa2.bccr_bcm2_0.1.monthly.Tavg.RAW.1950-2099.nc', - key='subset_test_Tavg_sresa2') - test_data.update(['misc', 'subset_test'], 'Prcp', 'sresa2.ncar_pcm1.3.monthly.Prcp.RAW.1950-2099.nc', - key='subset_test_Prcp') + def assertDictEqual(self, d1, d2, msg=None): + """ + Asserts two dictionaries are equal. If they are not, identify the first key/value which are not equal. - test_data.update(['misc', 'month_in_time_units'], 'clt', 'clt.nc', key='clt_month_units') - test_data.update(['misc', 'rotated_pole'], 'tas', - 'tas_EUR-44_ICHEC-EC-EARTH_historical_r12i1p1_SMHI-RCA4_v1_day_19710101-19751231.nc', - key='rotated_pole_ichec') - test_data.update(['misc', 'rotated_pole'], 'tas', - 'tas_EUR-44_CCCma-CanESM2_rcp85_r1i1p1_SMHI-RCA4_v1_sem_209012-210011.nc', - key='rotated_pole_cccma') - test_data.update(['misc', 'rotated_pole'], 'pr', - 'pr_EUR-11_CNRM-CERFACS-CNRM-CM5_historical_r1i1p1_CLMcom-CCLM4-8-17_v1_mon_198101-199012.nc', - key='rotated_pole_cnrm_cerfacs') - return test_data + :param dict d1: A dictionary to test. + :param dict d2: A dictionary to test. + :param str msg: A message to attach to an assertion error. + :raises: AssertionError + """ - def shortDescription(self): - return None + try: + unittest.TestCase.assertDictEqual(self, d1, d2, msg=msg) + except AssertionError: + for k, v in d1.iteritems(): + self.assertEqual(v, d2[k]) + self.assertEqual(set(d1.keys()), set(d2.keys())) def assertNumpyAll(self, arr1, arr2, check_fill_value_dtype=True, check_arr_dtype=True): """ + Asserts arrays are equal according to the test criteria. + + :param arr1: An array to compare. :type arr1: :class:`numpy.ndarray` + :param arr2: An array to compare. :type arr2: :class:`numpy.ndarray` - :param check_fill_value_dtype: If ``True``, check that the data type for masked array fill values are equal. 
- :type check_fill_value_dtype: bool + :param bool check_fill_value_dtype: If ``True``, check that the data type for masked array fill values are equal. + :param bool check_arr_dtype: If ``True``, check the data types of the arrays are equal. + :raises: AssertionError """ self.assertEqual(type(arr1), type(arr2)) @@ -118,54 +90,28 @@ def assertNumpyAll(self, arr1, arr2, check_fill_value_dtype=True, check_arr_dtyp self.assertEqual(arr1.fill_value, arr2.fill_value) else: self.assertTrue(np.equal(arr1.fill_value, arr2.fill_value.astype(arr1.fill_value.dtype))) - return True - else: - return self.assertTrue(np.all(arr1 == arr2)) - - def assertNumpyAllClose(self, arr1, arr2): - self.assertEqual(type(arr1), type(arr2)) - self.assertEqual(arr1.shape, arr2.shape) - if isinstance(arr1, np.ma.MaskedArray) or isinstance(arr2, np.ma.MaskedArray): - self.assertTrue(np.allclose(arr1.data, arr2.data)) - self.assertTrue(np.all(arr1.mask == arr2.mask)) - self.assertEqual(arr1.fill_value, arr2.fill_value) - return True else: - return self.assertTrue(np.allclose(arr1, arr2)) - - def assertNumpyNotAll(self, arr1, arr2): - try: - self.assertNumpyAll(arr1, arr2) - except AssertionError: - ret = True - else: - raise AssertionError('Arrays are equivalent.') - return ret - - def assertNumpyNotAllClose(self, arr1, arr2): - try: - self.assertNumpyAllClose(arr1, arr2) - except AssertionError: - ret = True - else: - raise AssertionError('Arrays are equivalent within precision.') - return ret - - def assertDictEqual(self, d1, d2, msg=None): - try: - unittest.TestCase.assertDictEqual(self, d1, d2, msg=msg) - except AssertionError: - for k, v in d1.iteritems(): - self.assertEqual(v, d2[k]) - self.assertEqual(set(d1.keys()), set(d2.keys())) + self.assertTrue(np.all(arr1 == arr2)) def assertNcEqual(self, uri_src, uri_dest, check_types=True, close=False, metadata_only=False, ignore_attributes=None): """ - :param dict ignore_attributes: + Assert two netCDF files are equal according to the test 
criteria. + + :param str uri_src: A URI to a source file. + :param str uri_dest: A URI to a destination file. + :param bool check_types: If ``True``, check data types of variable arrays. + :param bool close: If ``False``, use exact value comparisons without a tolerance. + :param bool metadata_only: If ``False``, check array values associated with variables. If ``True``, only check + metadata values and not value arrays. + :param dict ignore_attributes: Select which attributes to ignore when testing. Keys are associated with variable + names. The exception is for dataset-level attributes which are selected with the key `'global'`. >>> ignore_attributes = {'global': ['history']} + + :raises: AssertionError """ + src = nc.Dataset(uri_src) dest = nc.Dataset(uri_dest) @@ -215,23 +161,147 @@ def assertNcEqual(self, uri_src, uri_dest, check_types=True, close=False, metada src.close() dest.close() - def setUp(self): - if self._reset_env: env.reset() - if self._create_dir: - self._test_dir = tempfile.mkdtemp(prefix='ocgis_test_') - env.DIR_OUTPUT = self._test_dir + def assertNumpyAllClose(self, arr1, arr2): + """ + Asserts arrays are close according to the test criteria. + + :param arr1: An array to compare. + :type arr1: :class:`numpy.ndarray` + :param arr2: An array to compare. + :type arr2: :class:`numpy.ndarray` + :raises: AssertionError + """ + + self.assertEqual(type(arr1), type(arr2)) + self.assertEqual(arr1.shape, arr2.shape) + if isinstance(arr1, np.ma.MaskedArray) or isinstance(arr2, np.ma.MaskedArray): + self.assertTrue(np.allclose(arr1.data, arr2.data)) + self.assertTrue(np.all(arr1.mask == arr2.mask)) + self.assertEqual(arr1.fill_value, arr2.fill_value) else: - self._create_dir = None + self.assertTrue(np.allclose(arr1, arr2)) + + def assertNumpyNotAll(self, arr1, arr2): + """ + Asserts arrays are not equal according to the test criteria. + + :param arr1: An array to compare. + :type arr1: :class:`numpy.ndarray` + :param arr2: An array to compare. 
+ :type arr2: :class:`numpy.ndarray` + :raises: AssertionError + """ + + try: + self.assertNumpyAll(arr1, arr2) + except AssertionError: + pass + else: + raise AssertionError('Arrays are equivalent.') + + def assertNumpyNotAllClose(self, arr1, arr2): + """ + Asserts arrays are not close according to the test criteria. + + :param arr1: An array to compare. + :type arr1: :class:`numpy.ndarray` + :param arr2: An array to compare. + :type arr2: :class:`numpy.ndarray` + :raises: AssertionError + """ + + try: + self.assertNumpyAllClose(arr1, arr2) + except AssertionError: + pass + else: + raise AssertionError('Arrays are equivalent within precision.') + + def get_temporary_output_directory(self): + """ + :returns: A path to a temporary directory with an appropriate prefix. + :rtype: str + """ + + return tempfile.mkdtemp(prefix=self._prefix_path_test) + + @staticmethod + def get_tst_data_nc(): + """ + :returns: A dictionary-like object with special access methods for test files. + :rtype: :class:`ocgis.test.base.TestData` + """ + + test_data = TestData() + + test_data.update(['CMIP3'], 'Tavg', 'Extraction_Tavg.nc', key='cmip3_extraction') + test_data.update(['CanCM4'], 'rhs', 'rhs_day_CanCM4_decadal2010_r2i1p1_20110101-20201231.nc', key='cancm4_rhs') + test_data.update(['CanCM4'], 'rhsmax', 'rhsmax_day_CanCM4_decadal2010_r2i1p1_20110101-20201231.nc', key='cancm4_rhsmax') + test_data.update(['CanCM4'], 'tas', 'tas_day_CanCM4_decadal2000_r2i1p1_20010101-20101231.nc', key='cancm4_tas') + test_data.update(['CanCM4'], 'tasmax', 'tasmax_day_CanCM4_decadal2000_r2i1p1_20010101-20101231.nc', key='cancm4_tasmax_2001') + test_data.update(['CanCM4'], 'tasmax', 'tasmax_day_CanCM4_decadal2010_r2i1p1_20110101-20201231.nc', key='cancm4_tasmax_2011') + test_data.update(['CanCM4'], 'tasmin', 'tasmin_day_CanCM4_decadal2000_r2i1p1_20010101-20101231.nc', key='cancm4_tasmin_2001') + test_data.update(['daymet'], 'tmax', 'tmax.nc', key='daymet_tmax') + test_data.update(['maurer', '2010'], 'pr', 
['nldas_met_update.obs.daily.pr.1990.nc', 'nldas_met_update.obs.daily.pr.1991.nc'], key='maurer_2010_pr') + test_data.update(['maurer', '2010'], 'tas', ['nldas_met_update.obs.daily.tas.1990.nc', 'nldas_met_update.obs.daily.tas.1991.nc'], key='maurer_2010_tas') + test_data.update(['maurer', '2010'], 'tasmax', ['nldas_met_update.obs.daily.tasmax.1990.nc', 'nldas_met_update.obs.daily.tasmax.1991.nc'], key='maurer_2010_tasmax') + test_data.update(['maurer', '2010'], 'tasmin', ['nldas_met_update.obs.daily.tasmin.1990.nc', 'nldas_met_update.obs.daily.tasmin.1991.nc'], key='maurer_2010_tasmin') + test_data.update(['maurer', 'bccr'], 'Prcp', 'bccr_bcm2_0.1.sresa1b.monthly.Prcp.1950.nc', key='maurer_bccr_1950') + test_data.update(['misc', 'month_in_time_units'], 'clt', 'clt.nc', key='clt_month_units') + test_data.update(['misc', 'rotated_pole'], 'pr', 'pr_EUR-11_CNRM-CERFACS-CNRM-CM5_historical_r1i1p1_CLMcom-CCLM4-8-17_v1_mon_198101-199012.nc', key='rotated_pole_cnrm_cerfacs') + test_data.update(['misc', 'rotated_pole'], 'tas', 'tas_EUR-44_CCCma-CanESM2_rcp85_r1i1p1_SMHI-RCA4_v1_sem_209012-210011.nc', key='rotated_pole_cccma') + test_data.update(['misc', 'rotated_pole'], 'tas', 'tas_EUR-44_ICHEC-EC-EARTH_historical_r12i1p1_SMHI-RCA4_v1_day_19710101-19751231.nc', key='rotated_pole_ichec') + test_data.update(['misc', 'subset_test'], 'Prcp', 'sresa2.ncar_pcm1.3.monthly.Prcp.RAW.1950-2099.nc', key='subset_test_Prcp') + test_data.update(['misc', 'subset_test'], 'Tavg', 'Tavg_bccr_bcm2_0.1.sresa2.nc', key='subset_test_Tavg') + test_data.update(['misc', 'subset_test'], 'Tavg', 'sresa2.bccr_bcm2_0.1.monthly.Tavg.RAW.1950-2099.nc', key='subset_test_Tavg_sresa2') + test_data.update(['narccap'], 'pr', 'pr_CRCM_ccsm_1981010103.nc', key='narccap_crcm') + test_data.update(['narccap'], 'pr', 'pr_CRCM_ccsm_1981010103.nc', key='narccap_polar_stereographic') + test_data.update(['narccap'], 'pr', 'pr_HRM3_gfdl_1981010103.nc', key='narccap_hrm3') + test_data.update(['narccap'], 'pr', 
'pr_RCM3_gfdl_1981010103.nc', key='narccap_rcm3') + test_data.update(['narccap'], 'pr', 'pr_WRFG_ccsm_1986010103.nc', key='narccap_lambert_conformal') + test_data.update(['narccap'], 'pr', 'pr_WRFG_ccsm_1986010103.nc', key='narccap_wrfg') + test_data.update(['narccap'], 'pr', ['pr_WRFG_ncep_1981010103.nc', 'pr_WRFG_ncep_1986010103.nc'], key='narccap_pr_wrfg_ncep') + test_data.update(['narccap'], 'tas', 'tas_HRM3_gfdl_1981010103.nc', key='narccap_rotated_pole') + test_data.update(['narccap'], 'tas', 'tas_RCM3_gfdl_1981010103.nc', key='narccap_tas_rcm3_gfdl') + test_data.update(['snippets'], 'dtr', 'snippet_Maurer02new_OBS_dtr_daily.1971-2000.nc', key='snippet_maurer_dtr') + + return test_data + + def setUp(self): + self.current_dir_output = None + if self.reset_env: + env.reset() + if self.create_dir: + self.current_dir_output = self.get_temporary_output_directory() + env.DIR_OUTPUT = self.current_dir_output + + def shortDescription(self): + """ + Overloaded method so ``nose`` will not print the docstring associated with a test. + """ + + return None def tearDown(self): try: - if self._create_dir: shutil.rmtree(self._test_dir) + if self.create_dir: + shutil.rmtree(self.current_dir_output) finally: - if self._reset_env: env.reset() + if self.reset_env: + env.reset() class TestData(OrderedDict): - def copy_files(self, dest): + + def copy_files(self, dest, verbose=False): + """ + Copy test files from their source to the base directory ``dest``. The folder hierarchy will be recreated under + ``dest``. + + :param str dest: The base directory. The directory must exist. 
+ :raises: IOError + """ + if not os.path.exists(dest): raise (IOError('Copy destination does not exist: {0}'.format(dest))) for k, v in self.iteritems(): @@ -245,28 +315,48 @@ def copy_files(self, dest): dst = os.path.join(dest_dir, os.path.split(to_copy_uri)[1]) if not os.path.exists(dest_dir): os.makedirs(dest_dir) - print('copying: {0}...'.format(dst)) + if verbose: + print 'copying: {0}...'.format(dst) shutil.copy2(to_copy_uri, dst) - print('copy completed.') + if verbose: + print 'copy completed' def copy_file(self, key, dest): + """ + Copy a single files with unique test key identifier ``key`` to the full path ``dest``. + + :param str key: The unique identifier key to a test dataset. + :param str dest: The full path for the test files to be copied to. + """ + src = self.get_uri(key) dest = os.path.join(dest, self[key]['filename']) shutil.copy2(src, dest) - return (dest) + return dest def get_rd(self, key, kwds=None): """ + :param str key: The unique identifier to the test dataset. + :param dict kwds: Any keyword arguments to pass to :class:`ocgis.RequestDataset` + :returns: A request dataset object to use for testing! :rtype: :class:`ocgis.RequestDataset` """ + ref = self[key] if kwds is None: kwds = {} kwds.update({'uri': self.get_uri(key), 'variable': ref['variable']}) rd = RequestDataset(**kwds) - return (rd) + return rd def get_uri(self, key): + """ + :param str key: The unique identifier to the test dataset. + :returns: The full URI to a dataset. + :rtype: str + :raises: OSError, ValueError + """ + ref = self[key] coll = deepcopy(ref['collection']) if env.DIR_TEST_DATA is None: @@ -324,8 +414,25 @@ def get_uri(self, key): raise (ValueError( '"wget" was unable to fetch the test data URL ({0}) to the destination location: {1}. The command list was: {2}'.format( wget_url, wget_dest, cmd))) - return (uri) + return uri def update(self, collection, variable, filename, key=None): + """ + Update storage with a new test dataset. 
+ + :param sequence collection: A sequence of strings that when appended to the base directory will yield the full + path to the directory containing the test dataset. + + >>> collection = ['climate_data'] + >>> collection = ['cmip', 'test_data'] + + :param str variable: The variable name to extract from the dataset. + :param str filename: The filename of the dataset. + + >>> filename = 'test_data.nc' + + :param str key: If provided, use for the unique key identifier. Otherwise, ``filename`` is used. + """ + OrderedDict.update(self, {key or filename: {'collection': collection, 'filename': filename, 'variable': variable}}) diff --git a/src/ocgis/test/test_base.py b/src/ocgis/test/test_base.py index 7132a74ab..d517fa98f 100644 --- a/src/ocgis/test/test_base.py +++ b/src/ocgis/test/test_base.py @@ -44,31 +44,31 @@ def test_assertNumpyAll_type_differs(self): @dev def test_data_download(self): - ocgis.env.DIR_TEST_DATA = self._test_dir - rd1 = self.test_data.get_rd('cancm4_tas') + ocgis.env.DIR_TEST_DATA = self.current_dir_output + rd1 = self.test_data_nc.get_rd('cancm4_tas') ocgis.env.reset() - rd2 = self.test_data.get_rd('cancm4_tas') + rd2 = self.test_data_nc.get_rd('cancm4_tas') self.assertEqual(rd1,rd2) @dev def test_multifile_data_download(self): - ocgis.env.DIR_TEST_DATA = self._test_dir + ocgis.env.DIR_TEST_DATA = self.current_dir_output ocgis.env.DEBUG = True constants.test_data_download_url_prefix = 'https://dl.dropboxusercontent.com/u/867854/test_data_download/' - rd = self.test_data.get_rd('narccap_pr_wrfg_ncep') + rd = self.test_data_nc.get_rd('narccap_pr_wrfg_ncep') @dev def test_entirely_bad_location(self): - ocgis.env.DIR_TEST_DATA = self._test_dir + ocgis.env.DIR_TEST_DATA = self.current_dir_output with self.assertRaises(ValueError): - self.test_data.get_rd('cancm4_tasmax_2011') + self.test_data_nc.get_rd('cancm4_tasmax_2011') @dev def test_copy_files(self): - self.test_data.copy_files('/home/local/WX/ben.koziol/htmp/transfer') + 
self.test_data_nc.copy_files('/home/local/WX/ben.koziol/htmp/transfer') def test_multifile(self): - rd = self.test_data.get_rd('narccap_pr_wrfg_ncep') + rd = self.test_data_nc.get_rd('narccap_pr_wrfg_ncep') self.assertEqual(len(rd.uri),2) diff --git a/src/ocgis/test/test_misc/test_conversion.py b/src/ocgis/test/test_misc/test_conversion.py index e5500db18..36630b243 100644 --- a/src/ocgis/test/test_misc/test_conversion.py +++ b/src/ocgis/test/test_misc/test_conversion.py @@ -12,15 +12,15 @@ class Test(TestBase): def test_nc_projection_writing(self): - rd = self.test_data.get_rd('daymet_tmax') + rd = self.test_data_nc.get_rd('daymet_tmax') ops = ocgis.OcgOperations(dataset=rd,snippet=True,output_format='nc') ret = ops.execute() ds = nc.Dataset(ret) self.assertTrue('lambert_conformal_conic' in ds.variables) def test_csv_plus(self): - rd1 = self.test_data.get_rd('cancm4_tasmax_2011') - rd2 = self.test_data.get_rd('maurer_bccr_1950') + rd1 = self.test_data_nc.get_rd('cancm4_tasmax_2011') + rd2 = self.test_data_nc.get_rd('maurer_bccr_1950') ops = ocgis.OcgOperations(dataset=[rd1,rd2],snippet=True,output_format='csv+', geom='state_boundaries',agg_selection=True, select_ugid=[32]) @@ -32,8 +32,8 @@ def test_csv_plus(self): self.assertTrue(len(lines) > 50) def test_csv_plus_custom_headers(self): - rd1 = self.test_data.get_rd('cancm4_tasmax_2011') - rd2 = self.test_data.get_rd('maurer_bccr_1950') + rd1 = self.test_data_nc.get_rd('cancm4_tasmax_2011') + rd2 = self.test_data_nc.get_rd('maurer_bccr_1950') headers = ['did','ugid','gid','alias','value','time'] ops = ocgis.OcgOperations(dataset=[rd1,rd2],snippet=True,output_format='csv+', geom='state_boundaries',agg_selection=True, @@ -46,8 +46,8 @@ def test_csv_plus_custom_headers(self): self.assertEqual(fheaders,[h.upper() for h in headers]) def test_shp_custom_headers(self): - rd1 = self.test_data.get_rd('cancm4_tasmax_2011') - rd2 = self.test_data.get_rd('maurer_bccr_1950') + rd1 = 
self.test_data_nc.get_rd('cancm4_tasmax_2011') + rd2 = self.test_data_nc.get_rd('maurer_bccr_1950') headers = ['did','ugid','gid','alias','value','time'] ops = ocgis.OcgOperations(dataset=[rd1,rd2],snippet=True,output_format='shp', geom='state_boundaries',agg_selection=True, @@ -58,14 +58,14 @@ def test_shp_custom_headers(self): self.assertEqual(f.meta['schema']['properties'].keys(),[h.upper() for h in headers]) def test_meta(self): - rd = self.test_data.get_rd('cancm4_tasmax_2011') + rd = self.test_data_nc.get_rd('cancm4_tasmax_2011') ops = ocgis.OcgOperations(dataset=rd,snippet=True,output_format='meta', geom='state_boundaries',agg_selection=True) ret = ops.execute() self.assertTrue(isinstance(ret,basestring)) def test_meta_with_source(self): - rd = self.test_data.get_rd('cancm4_tasmax_2011') + rd = self.test_data_nc.get_rd('cancm4_tasmax_2011') ops = ocgis.OcgOperations(dataset=rd,snippet=True,output_format='csv', geom='state_boundaries',agg_selection=True) ret = ops.execute() diff --git a/src/ocgis/test/test_ocgis/test_api/test_collection.py b/src/ocgis/test/test_ocgis/test_api/test_collection.py index 7c38dfeea..55fc0ba26 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_collection.py +++ b/src/ocgis/test/test_ocgis/test_api/test_collection.py @@ -17,7 +17,7 @@ class TestAbstractCollection(TestBase): - _create_dir = False + create_dir = False def get_coll(self): coll = AbstractCollection() diff --git a/src/ocgis/test/test_ocgis/test_api/test_interpreter.py b/src/ocgis/test/test_ocgis/test_api/test_interpreter.py index 8875d7b4d..8e9c91188 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_interpreter.py +++ b/src/ocgis/test/test_ocgis/test_api/test_interpreter.py @@ -11,7 +11,7 @@ def test_execute_directory(self): """Test that the output directory is removed appropriately following an operations failure.""" kwds = dict(add_auxiliary_files=[True, False]) - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') ## this 
geometry is outside the domain and will result in an exception geom = [1000, 1000, 1100, 1100] @@ -21,5 +21,5 @@ def test_execute_directory(self): try: ret = ops.execute() except ExtentError: - contents = os.listdir(self._test_dir) + contents = os.listdir(self.current_dir_output) self.assertEqual(len(contents), 0) diff --git a/src/ocgis/test/test_ocgis/test_api/test_operations.py b/src/ocgis/test/test_ocgis/test_api/test_operations.py index 1d31a0309..e4fc34b8a 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_operations.py +++ b/src/ocgis/test/test_ocgis/test_api/test_operations.py @@ -25,9 +25,9 @@ def setUp(self): env.DIR_DATA = os.path.join(env.DIR_TEST_DATA,'CanCM4') ## data may need to be pulled from remote repository - self.test_data.get_rd('cancm4_tasmin_2001') - self.test_data.get_rd('cancm4_tasmax_2011') - self.test_data.get_rd('cancm4_tas') + self.test_data_nc.get_rd('cancm4_tasmin_2001') + self.test_data_nc.get_rd('cancm4_tasmax_2011') + self.test_data_nc.get_rd('cancm4_tas') uris = [ 'tasmin_day_CanCM4_decadal2000_r2i1p1_20010101-20101231.nc', @@ -50,13 +50,13 @@ def test_init(self): def test_regrid_destination(self): """Test regridding not allowed with clip operation.""" - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') with self.assertRaises(DefinitionValidationError): OcgOperations(dataset=rd, regrid_destination=rd, spatial_operation='clip') def test_conform_units_to(self): - rd1 = self.test_data.get_rd('cancm4_tas') - rd2 = self.test_data.get_rd('cancm4_tas') + rd1 = self.test_data_nc.get_rd('cancm4_tas') + rd2 = self.test_data_nc.get_rd('cancm4_tas') rd2.alias = 'foo' ops = OcgOperations(dataset=[rd1, rd2], conform_units_to='celsius') for ds in ops.dataset.itervalues(): @@ -68,20 +68,20 @@ def test_conform_units_to(self): self.assertEqual(ds.conform_units_to, 'fahrenheit') def test_conform_units_to_bad_units(self): - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') 
with self.assertRaises(RequestValidationError): OcgOperations(dataset=rd, conform_units_to='crap') def test_no_calc_grouping_with_string_expression(self): calc = 'es=tas*3' calc_grouping = ['month'] - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') with self.assertRaises(DefinitionValidationError): OcgOperations(dataset=rd,calc=calc,calc_grouping=calc_grouping) def test_time_range(self): - rd = self.test_data.get_rd('cancm4_tas') - rd2 = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') + rd2 = self.test_data_nc.get_rd('cancm4_tas') rd.alias = 'foo' tr = [datetime.datetime(2002,1,1),datetime.datetime(2002,3,1)] ops = ocgis.OcgOperations(dataset=[rd,rd2],time_range=tr) @@ -96,8 +96,8 @@ def test_time_range(self): self.assertEqual(r.time_range,tuple(tr)) def test_time_region(self): - rd = self.test_data.get_rd('cancm4_tas') - rd2 = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') + rd2 = self.test_data_nc.get_rd('cancm4_tas') rd.alias = 'foo' tr = {'month':[6],'year':[2005]} ops = ocgis.OcgOperations(dataset=[rd,rd2],time_region=tr) @@ -112,8 +112,8 @@ def test_time_region(self): self.assertEqual(r.time_region,tr) def test_level_range(self): - rd = self.test_data.get_rd('cancm4_tas') - rd2 = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') + rd2 = self.test_data_nc.get_rd('cancm4_tas') rd.alias = 'foo' lr = [1,2] ops = ocgis.OcgOperations(dataset=[rd,rd2],level_range=lr) @@ -128,8 +128,8 @@ def test_level_range(self): self.assertEqual(r.level_range,tuple(lr)) def test_nc_package_validation_raised_first(self): - rd = self.test_data.get_rd('cancm4_tas') - rd2 = self.test_data.get_rd('rotated_pole_ichec',kwds={'alias':'tas2'}) + rd = self.test_data_nc.get_rd('cancm4_tas') + rd2 = self.test_data_nc.get_rd('rotated_pole_ichec',kwds={'alias':'tas2'}) try: ocgis.OcgOperations(dataset=[rd,rd2],output_format='nc') except 
DefinitionValidationError as e: @@ -144,8 +144,8 @@ def callback(perc,msg,app=app): app.append((perc,msg)) # print(perc,msg) - rd = self.test_data.get_rd('cancm4_tas') - rd2 = self.test_data.get_rd('cancm4_tasmax_2011') + rd = self.test_data_nc.get_rd('cancm4_tas') + rd2 = self.test_data_nc.get_rd('cancm4_tasmax_2011') dataset = [rd,rd2] for ds in dataset: ds.time_region = {'month':[6]} @@ -158,7 +158,7 @@ def callback(perc,msg,app=app): self.assertEqual(app[-1][0],100.0) def test_get_base_request_size(self): - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') ops = OcgOperations(dataset=rd) size = ops.get_base_request_size() self.assertEqual(size,{'variables': {'tas': {'level': {'kb': 0.0, 'shape': None, 'dtype': None}, 'temporal': {'kb': 28.515625, 'shape': (3650,), 'dtype': dtype('float64')}, 'value': {'kb': 116800.0, 'shape': (1, 3650, 1, 64, 128), 'dtype': dtype('float32')}, 'realization': {'kb': 0.0, 'shape': None, 'dtype': None}, 'col': {'kb': 1.0, 'shape': (128,), 'dtype': dtype('float64')}, 'row': {'kb': 0.5, 'shape': (64,), 'dtype': dtype('float64')}}}, 'total': 116830.015625}) @@ -167,49 +167,49 @@ def test_get_base_request_size(self): OcgOperations(dataset=rd, regrid_destination=rd).get_base_request_size() def test_get_base_request_size_with_geom(self): - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') ops = OcgOperations(dataset=rd,geom='state_boundaries',select_ugid=[23]) size = ops.get_base_request_size() self.assertEqual(size,{'variables': {'tas': {'level': {'kb': 0.0, 'shape': None, 'dtype': None}, 'temporal': {'kb': 28.515625, 'shape': (3650,), 'dtype': dtype('float64')}, 'value': {'kb': 171.09375, 'shape': (1, 3650, 1, 4, 3), 'dtype': dtype('float32')}, 'realization': {'kb': 0.0, 'shape': None, 'dtype': None}, 'col': {'kb': 0.0234375, 'shape': (3,), 'dtype': dtype('float64')}, 'row': {'kb': 0.03125, 'shape': (4,), 'dtype': dtype('float64')}}}, 'total': 199.6640625}) def 
test_get_base_request_size_multifile(self): - rd1 = self.test_data.get_rd('cancm4_tas') - rd2 = self.test_data.get_rd('narccap_pr_wrfg_ncep') + rd1 = self.test_data_nc.get_rd('cancm4_tas') + rd2 = self.test_data_nc.get_rd('narccap_pr_wrfg_ncep') rds = [rd1,rd2] ops = OcgOperations(dataset=rds) size = ops.get_base_request_size() self.assertEqual({'variables': {'pr': {'level': {'kb': 0.0, 'shape': None, 'dtype': None}, 'temporal': {'kb': 228.25, 'shape': (29216,), 'dtype': dtype('float64')}, 'value': {'kb': 1666909.75, 'shape': (1, 29216, 1, 109, 134), 'dtype': dtype('float32')}, 'realization': {'kb': 0.0, 'shape': None, 'dtype': None}, 'col': {'kb': 1.046875, 'shape': (134,), 'dtype': dtype('float64')}, 'row': {'kb': 0.8515625, 'shape': (109,), 'dtype': dtype('float64')}}, 'tas': {'level': {'kb': 0.0, 'shape': None, 'dtype': None}, 'temporal': {'kb': 28.515625, 'shape': (3650,), 'dtype': dtype('float64')}, 'value': {'kb': 116800.0, 'shape': (1, 3650, 1, 64, 128), 'dtype': dtype('float32')}, 'realization': {'kb': 0.0, 'shape': None, 'dtype': None}, 'col': {'kb': 1.0, 'shape': (128,), 'dtype': dtype('float64')}, 'row': {'kb': 0.5, 'shape': (64,), 'dtype': dtype('float64')}}}, 'total': 1783969.9140625},size) def test_get_base_request_size_multifile_with_geom(self): - rd1 = self.test_data.get_rd('cancm4_tas') - rd2 = self.test_data.get_rd('narccap_pr_wrfg_ncep') + rd1 = self.test_data_nc.get_rd('cancm4_tas') + rd2 = self.test_data_nc.get_rd('narccap_pr_wrfg_ncep') rds = [rd1,rd2] ops = OcgOperations(dataset=rds,geom='state_boundaries',select_ugid=[23]) size = ops.get_base_request_size() self.assertEqual(size,{'variables': {'pr': {'level': {'kb': 0.0, 'shape': None, 'dtype': None}, 'temporal': {'kb': 228.25, 'shape': (29216,), 'dtype': dtype('float64')}, 'value': {'kb': 21341.375, 'shape': (1, 29216, 1, 17, 11), 'dtype': dtype('float32')}, 'realization': {'kb': 0.0, 'shape': None, 'dtype': None}, 'col': {'kb': 0.0859375, 'shape': (11,), 'dtype': dtype('float64')}, 'row': 
{'kb': 0.1328125, 'shape': (17,), 'dtype': dtype('float64')}}, 'tas': {'level': {'kb': 0.0, 'shape': None, 'dtype': None}, 'temporal': {'kb': 28.515625, 'shape': (3650,), 'dtype': dtype('float64')}, 'value': {'kb': 171.09375, 'shape': (1, 3650, 1, 4, 3), 'dtype': dtype('float32')}, 'realization': {'kb': 0.0, 'shape': None, 'dtype': None}, 'col': {'kb': 0.0234375, 'shape': (3,), 'dtype': dtype('float64')}, 'row': {'kb': 0.03125, 'shape': (4,), 'dtype': dtype('float64')}}}, 'total': 21769.5078125}) def test_get_base_request_size_test_data(self): - for key in self.test_data.keys(): - rd = self.test_data.get_rd(key) + for key in self.test_data_nc.keys(): + rd = self.test_data_nc.get_rd(key) try: ops = OcgOperations(dataset=rd) ## the project cmip data may raise an exception since projection is ## not associated with a variable except DimensionNotFound: - rd = self.test_data.get_rd(key,kwds=dict(dimension_map={'R':'projection','T':'time','X':'longitude','Y':'latitude'})) + rd = self.test_data_nc.get_rd(key,kwds=dict(dimension_map={'R':'projection','T':'time','X':'longitude','Y':'latitude'})) ops = OcgOperations(dataset=rd) ret = ops.get_base_request_size() self.assertTrue(ret['total'] > 1) def test_get_base_request_size_with_calculation(self): - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') ops = OcgOperations(dataset=rd,calc=[{'func':'mean','name':'mean'}], calc_grouping=['month']) size = ops.get_base_request_size() self.assertEqual(size['variables']['tas']['temporal']['shape'][0],3650) def test_str(self): - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') ops = OcgOperations(dataset=rd) ret = str(ops) self.assertTrue(str(ret).startswith('OcgOperations')) @@ -277,7 +277,7 @@ def test_geom(self): self.assertEqual(g._shp_key,'mi_watersheds') def test_geom_having_changed_select_ugid(self): - ops = OcgOperations(dataset=self.test_data.get_rd('cancm4_tas'), + ops = 
OcgOperations(dataset=self.test_data_nc.get_rd('cancm4_tas'), geom='state_boundaries') self.assertEqual(len(list(ops.geom)),51) ops.select_ugid = [16,17] @@ -317,7 +317,7 @@ def test_calc_grouping_none_date_parts(self): self.assertEqual(obj.value,('day',)) ## only month, year, and day combinations are currently supported - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') calcs = [None,[{'func':'mean','name':'mean'}]] acceptable = ['day','month','year'] for calc in calcs: @@ -336,7 +336,7 @@ def test_calc_grouping_none_date_parts(self): def test_calc_grouping_seasonal_with_year(self): calc_grouping = [[1,2,3],'year'] calc = [{'func':'mean','name':'mean'}] - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') ops = OcgOperations(dataset=rd,calc=calc,calc_grouping=calc_grouping, geom='state_boundaries',select_ugid=[25]) ret = ops.execute() @@ -345,7 +345,7 @@ def test_calc_grouping_seasonal_with_year(self): def test_calc_grouping_seasonal_with_unique(self): calc_grouping = [[12,1,2],'unique'] calc = [{'func':'mean','name':'mean'}] - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') ops = ocgis.OcgOperations(dataset=rd,calc_grouping=calc_grouping,geom='state_boundaries', select_ugid=[27],output_format='nc',calc=calc) ret = ops.execute() @@ -357,7 +357,7 @@ def test_calc_grouping_seasonal_with_unique(self): def test_dataset(self): env.DIR_DATA = ocgis.env.DIR_TEST_DATA - reference_rd = self.test_data.get_rd('cancm4_tas') + reference_rd = self.test_data_nc.get_rd('cancm4_tas') rd = RequestDataset(reference_rd.uri,reference_rd.variable) ds = definition.Dataset(rd) self.assertEqual(ds.value,RequestDatasetCollection([rd])) @@ -365,7 +365,7 @@ def test_dataset(self): dsa = {'uri':reference_rd.uri,'variable':reference_rd.variable} ds = definition.Dataset(dsa) - reference_rd2 = self.test_data.get_rd('narccap_crcm') + reference_rd2 = 
self.test_data_nc.get_rd('narccap_crcm') dsb = [dsa,{'uri':reference_rd2.uri,'variable':reference_rd2.variable,'alias':'knight'}] ds = definition.Dataset(dsb) @@ -394,8 +394,8 @@ def test_spatial_operation(self): def test_regridding_to_nc(self): """Write regridded data to netCDF.""" - rd1 = self.test_data.get_rd('cancm4_tas') - rd2 = self.test_data.get_rd('cancm4_tas') + rd1 = self.test_data_nc.get_rd('cancm4_tas') + rd2 = self.test_data_nc.get_rd('cancm4_tas') ops = OcgOperations(dataset=rd1, regrid_destination=rd2, output_format='nc', snippet=True, geom='state_boundaries', select_ugid=[25]) @@ -408,8 +408,8 @@ def test_regridding_to_nc(self): def test_regridding_to_shp_vector_wrap(self): """Test writing to shapefile with different vector wrap options.""" - rd1 = self.test_data.get_rd('cancm4_tas') - rd2 = self.test_data.get_rd('cancm4_tas') + rd1 = self.test_data_nc.get_rd('cancm4_tas') + rd2 = self.test_data_nc.get_rd('cancm4_tas') for vector_wrap in [True, False]: ops = OcgOperations(dataset=rd1, regrid_destination=rd2, output_format='shp', snippet=True, diff --git a/src/ocgis/test/test_ocgis/test_api/test_parms/test_definition.py b/src/ocgis/test/test_ocgis/test_api/test_parms/test_definition.py index 99ab90473..1d7c5dec3 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_parms/test_definition.py +++ b/src/ocgis/test/test_ocgis/test_api/test_parms/test_definition.py @@ -177,7 +177,7 @@ def test_calc_grouping_seasonal_aggregation_with_bad_flag(self): CalcGrouping([[1,2,3],[4,5,6],'fod']) def test_dataset(self): - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') dd = Dataset(rd) with open('/tmp/dd.pkl','w') as f: @@ -189,7 +189,7 @@ def test_dataset(self): class TestCalc(TestBase): - _create_dir = False + create_dir = False def test_meta_attrs(self): """Test various forms for meta_attrs in the calculation definition dictionary.""" @@ -299,7 +299,7 @@ def test_bad_key(self): class TestConformUnitsTo(TestBase): - _create_dir 
= False + create_dir = False def test_constructor(self): cc = ConformUnitsTo() @@ -316,7 +316,7 @@ def test_constructor(self): class TestGeom(TestBase): - _create_dir = False + create_dir = False def test_init(self): geom = make_poly((37.762,38.222),(-102.281,-101.754)) @@ -466,7 +466,7 @@ def possible_datasets(self): return datasets def get_rd(self, **kwargs): - rd = self.test_data.get_rd('cancm4_tas', kwds=kwargs) + rd = self.test_data_nc.get_rd('cancm4_tas', kwds=kwargs) return rd def test_init(self): @@ -539,7 +539,7 @@ def test_get_meta(self): self.assertTrue('numpy.ndarray' in ro._get_meta_()) class TestLevelRange(TestBase): - _create_dir = False + create_dir = False def test_constructor(self): LevelRange() @@ -563,7 +563,7 @@ def test_bad_ordination(self): class TestTimeRange(TestBase): - _create_dir = False + create_dir = False def test_constructor(self): TimeRange() @@ -585,7 +585,7 @@ def test_incorrect_number_of_values(self): class TestTimeRegion(TestBase): - _create_dir = False + create_dir = False def test_constructor(self): TimeRegion() diff --git a/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py b/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py index 216149fa9..0279efc9e 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py +++ b/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py @@ -45,7 +45,7 @@ class TestRequestDataset(TestBase): def setUp(self): TestBase.setUp(self) ## download test data - self.test_data.get_rd('cancm4_rhs') + self.test_data_nc.get_rd('cancm4_rhs') self.uri = os.path.join(ocgis.env.DIR_TEST_DATA, 'CanCM4', 'rhs_day_CanCM4_decadal2010_r2i1p1_20110101-20201231.nc') self.variable = 'rhs' @@ -61,7 +61,7 @@ def test_init(self): self.assertTrue(rd._has_assigned_coordinate_system) def test_str(self): - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') ss = str(rd) self.assertTrue(ss.startswith('RequestDataset')) self.assertTrue('crs' in ss) @@ 
-69,19 +69,19 @@ def test_str(self): def test_crs_overload(self): kwds = {'crs': CoordinateReferenceSystem(epsg=4362)} - rd = self.test_data.get_rd('cancm4_tas', kwds=kwds) + rd = self.test_data_nc.get_rd('cancm4_tas', kwds=kwds) field = rd.get() self.assertDictEqual(kwds['crs'].value, field.spatial.crs.value) def test_uri_cannot_be_set(self): - rd = self.test_data.get_rd('cancm4_tas') - other_uri = self.test_data.get_uri('cancm4_rhs') + rd = self.test_data_nc.get_rd('cancm4_tas') + other_uri = self.test_data_nc.get_uri('cancm4_rhs') with self.assertRaises(AttributeError): rd.uri = other_uri def get_multiple_variable_request_dataset_dictionary(self): - rd_orig = self.test_data.get_rd('cancm4_tas') - dest_uri = os.path.join(self._test_dir, os.path.split(rd_orig.uri)[1]) + rd_orig = self.test_data_nc.get_rd('cancm4_tas') + dest_uri = os.path.join(self.current_dir_output, os.path.split(rd_orig.uri)[1]) shutil.copy2(rd_orig.uri, dest_uri) with nc_scope(dest_uri, 'a') as ds: var = ds.variables['tas'] @@ -93,7 +93,7 @@ def get_multiple_variable_request_dataset_dictionary(self): return {'uri': dest_uri, 'variable': ['tas', 'tasmax']} def test_alias_change_after_init_one_variable(self): - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') self.assertEqual(rd.name, 'tas') rd.alias = 'foo' self.assertEqual(rd.name, 'foo') @@ -141,8 +141,8 @@ def test_init_multiple_variables_with_alias_wrong_count(self): RequestDataset(uri=self.uri, variable=['tas', 'tasmax'], alias='tas_what') def test_init_combinations(self): - rd_orig = self.test_data.get_rd('cancm4_tas') - dest_uri = os.path.join(self._test_dir, os.path.split(rd_orig.uri)[1]) + rd_orig = self.test_data_nc.get_rd('cancm4_tas') + dest_uri = os.path.join(self.current_dir_output, os.path.split(rd_orig.uri)[1]) shutil.copy2(rd_orig.uri, dest_uri) with nc_scope(dest_uri, 'a') as ds: var = ds.variables['tas'] @@ -243,20 +243,20 @@ def itr_products_keywords(keywords): raise def 
test_variable_not_found(self): - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') rd_bad = RequestDataset(uri=rd.uri, variable='crap') with self.assertRaises(VariableNotFoundError): rd_bad.get() def test_level_subset_without_level(self): lr = [1, 2] - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') rd.level_range = lr with self.assertRaises(ValueError): rd.get() def test_source_dictionary_is_deepcopied(self): - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') field = rd.get() self.assertEqual(rd.source_metadata, field.meta) ## the source metadata dictionary should be deepcopied prior to passing @@ -265,41 +265,41 @@ def test_source_dictionary_is_deepcopied(self): self.assertNotEqual(rd.source_metadata, field.meta) def test_source_index_matches_constant_value(self): - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') field = rd.get() self.assertEqual(field.temporal._src_idx.dtype, constants.np_int) def test_with_units(self): units = 'celsius' - rd = self.test_data.get_rd('cancm4_tas', kwds={'units': units}) + rd = self.test_data_nc.get_rd('cancm4_tas', kwds={'units': units}) self.assertEqual(rd.units, 'celsius') def test_without_units_attempting_conform(self): ## this will work because the units read from the metadata are equivalent - self.test_data.get_rd('cancm4_tas', kwds={'conform_units_to': 'celsius'}) + self.test_data_nc.get_rd('cancm4_tas', kwds={'conform_units_to': 'celsius'}) ## this will not work because the units are not equivalent - rd = self.test_data.get_rd('cancm4_tas', kwds={'conform_units_to': 'coulomb'}) + rd = self.test_data_nc.get_rd('cancm4_tas', kwds={'conform_units_to': 'coulomb'}) with self.assertRaises(RequestValidationError): rd.get() def test_with_bad_units_attempting_conform(self): ## pass bad units to the init and an attempt a conform. 
values from ## the source dataset are not used for overload. - rd = self.test_data.get_rd('cancm4_tas', kwds={'conform_units_to': 'celsius', 'units': 'coulomb'}) + rd = self.test_data_nc.get_rd('cancm4_tas', kwds={'conform_units_to': 'celsius', 'units': 'coulomb'}) with self.assertRaises(RequestValidationError): rd.get() def test_nonsense_units(self): with self.assertRaises(RequestValidationError): - self.test_data.get_rd('cancm4_tas', kwds={'units': 'nonsense', 'conform_units_to': 'celsius'}) + self.test_data_nc.get_rd('cancm4_tas', kwds={'units': 'nonsense', 'conform_units_to': 'celsius'}) def test_with_bad_units_passing_to_field(self): - rd = self.test_data.get_rd('cancm4_tas', kwds={'units': 'celsius'}) + rd = self.test_data_nc.get_rd('cancm4_tas', kwds={'units': 'celsius'}) field = rd.get() self.assertEqual(field.variables['tas'].units, 'celsius') def test_get_field_with_overloaded_units(self): - rd = self.test_data.get_rd('cancm4_tas', kwds={'conform_units_to': 'celsius'}) + rd = self.test_data_nc.get_rd('cancm4_tas', kwds={'conform_units_to': 'celsius'}) preload = [False, True] for pre in preload: field = rd.get() @@ -321,7 +321,7 @@ def test_get_field_with_overloaded_units(self): self.assertNumpyAll(to_test, value) def test_get_field_nonequivalent_units_in_source_data(self): - new_path = self.test_data.copy_file('cancm4_tas', self._test_dir) + new_path = self.test_data_nc.copy_file('cancm4_tas', self.current_dir_output) ## put non-equivalent units on the source data and attempto to conform with nc_scope(new_path, 'a') as ds: @@ -380,7 +380,7 @@ def test_inspect_as_dct(self): def test_env_dir_data(self): ## test setting the var to a single directory env.DIR_DATA = ocgis.env.DIR_TEST_DATA - rd = self.test_data.get_rd('cancm4_rhs') + rd = self.test_data_nc.get_rd('cancm4_rhs') target = os.path.join(env.DIR_DATA, 'CanCM4', 'rhs_day_CanCM4_decadal2010_r2i1p1_20110101-20201231.nc') try: self.assertEqual(rd.uri, target) @@ -415,7 +415,7 @@ def 
test_level_range(self): self.assertEqual(rd.level_range, tuple([1, 1])) def test_multiple_uris(self): - rd = self.test_data.get_rd('narccap_pr_wrfg_ncep') + rd = self.test_data_nc.get_rd('narccap_pr_wrfg_ncep') self.assertEqual(len(rd.uri), 2) rd.inspect() @@ -432,8 +432,8 @@ def test_time_region(self): class TestRequestDatasetCollection(TestBase): def test_init(self): - rd1 = self.test_data.get_rd('cancm4_tas') - rd2 = self.test_data.get_rd('cancm4_rhs') + rd1 = self.test_data_nc.get_rd('cancm4_tas') + rd2 = self.test_data_nc.get_rd('cancm4_rhs') keywords = dict(request_datasets=[None, rd1, [rd1], [rd1, rd2], {'uri': rd1.uri, 'variable': rd1.variable}]) @@ -445,15 +445,15 @@ def test_init(self): self.assertEqual(len(rdc), 0) def test_str(self): - rd1 = self.test_data.get_rd('cancm4_tas') - rd2 = self.test_data.get_rd('cancm4_rhs') + rd1 = self.test_data_nc.get_rd('cancm4_tas') + rd2 = self.test_data_nc.get_rd('cancm4_rhs') rdc = RequestDatasetCollection(request_datasets=[rd1, rd2]) ss = str(rdc) self.assertTrue(ss.startswith('RequestDatasetCollection')) self.assertGreater(len(ss), 900) def test_name_attribute_used_for_keys(self): - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') rd.name = 'hi_there' rdc = RequestDatasetCollection(request_datasets=[rd]) self.assertEqual(rdc.keys(), ['hi_there']) @@ -461,8 +461,8 @@ def test_name_attribute_used_for_keys(self): def test(self): env.DIR_DATA = ocgis.env.DIR_TEST_DATA - daymet = self.test_data.get_rd('daymet_tmax') - tas = self.test_data.get_rd('cancm4_tas') + daymet = self.test_data_nc.get_rd('daymet_tmax') + tas = self.test_data_nc.get_rd('cancm4_tas') uris = [daymet.uri, tas.uri] @@ -493,7 +493,7 @@ def test(self): self.assertIsInstance(rdc['a2'], RequestDataset) def test_with_overloads(self): - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') field = rd.get() ## loaded calendar should match file metadata 
self.assertEqual(field.temporal.calendar, '365_day') @@ -529,9 +529,9 @@ def test_with_overloads(self): def test_with_overloads_real_data(self): ## copy the test file as the calendar attribute will be modified - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') filename = os.path.split(rd.uri)[1] - dest = os.path.join(self._test_dir, filename) + dest = os.path.join(self.current_dir_output, filename) shutil.copy2(rd.uri, dest) ## modify the calendar attribute with nc_scope(dest, 'a') as ds: diff --git a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_base.py b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_base.py index 6e864aec6..e120e4024 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_base.py +++ b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_base.py @@ -9,7 +9,7 @@ class TestAbstractDriver(TestBase): def test_get_field(self): # test updating of regrid source flag - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') driver = DriverNetcdf(rd) field = driver.get_field() self.assertTrue(field._should_regrid) @@ -19,17 +19,17 @@ def test_get_field(self): self.assertFalse(field._should_regrid) # test flag with an assigned coordinate system - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') driver = DriverNetcdf(rd) field = driver.get_field() self.assertFalse(field._has_assigned_coordinate_system) - rd = self.test_data.get_rd('cancm4_tas', kwds={'crs': CFWGS84()}) + rd = self.test_data_nc.get_rd('cancm4_tas', kwds={'crs': CFWGS84()}) driver = DriverNetcdf(rd) field = driver.get_field() self.assertTrue(field._has_assigned_coordinate_system) def test_eq(self): - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') d = DriverNetcdf(rd) d2 = deepcopy(d) self.assertEqual(d, deepcopy(d)) diff --git 
a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py index 859d3fe3c..e7372cceb 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py +++ b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py @@ -58,7 +58,7 @@ def get_2d_state_boundaries_sdim(self): return(sdim) def test_get_dimensioned_variables_one_variable_in_target_dataset(self): - uri = self.test_data.get_uri('cancm4_tas') + uri = self.test_data_nc.get_uri('cancm4_tas') rd = RequestDataset(uri=uri) driver = DriverNetcdf(rd) ret = driver.get_dimensioned_variables() @@ -66,8 +66,8 @@ def test_get_dimensioned_variables_one_variable_in_target_dataset(self): self.assertEqual(rd._variable, ('tas',)) def test_get_dimensioned_variables_two_variables_in_target_dataset(self): - rd_orig = self.test_data.get_rd('cancm4_tas') - dest_uri = os.path.join(self._test_dir, os.path.split(rd_orig.uri)[1]) + rd_orig = self.test_data_nc.get_rd('cancm4_tas') + dest_uri = os.path.join(self.current_dir_output, os.path.split(rd_orig.uri)[1]) shutil.copy2(rd_orig.uri, dest_uri) with nc_scope(dest_uri, 'a') as ds: var = ds.variables['tas'] @@ -79,7 +79,7 @@ def test_get_dimensioned_variables_two_variables_in_target_dataset(self): self.assertEqual(rd.variable, rd.alias) def test_load_dtype_on_dimensions(self): - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') field = rd.get() with nc_scope(rd.uri) as ds: test_dtype_temporal = ds.variables['time'].dtype @@ -89,8 +89,8 @@ def test_load_dtype_on_dimensions(self): self.assertEqual(field.temporal.dtype,np.float64) def test_load(self): - ref_test = self.test_data['cancm4_tas'] - uri = self.test_data.get_uri('cancm4_tas') + ref_test = self.test_data_nc['cancm4_tas'] + uri = self.test_data_nc.get_uri('cancm4_tas') rd = RequestDataset(variable=ref_test['variable'],uri=uri) field = rd.get() ds = nc.Dataset(uri,'r') @@ 
-116,14 +116,14 @@ def test_load(self): ds.close() def test_multifile_load(self): - uri = self.test_data.get_uri('narccap_pr_wrfg_ncep') + uri = self.test_data_nc.get_uri('narccap_pr_wrfg_ncep') rd = RequestDataset(uri,'pr') field = rd.get() self.assertEqual(field.temporal.extent_datetime,(datetime.datetime(1981, 1, 1, 0, 0), datetime.datetime(1991, 1, 1, 0, 0))) self.assertAlmostEqual(field.temporal.resolution,0.125) def test_load_dtype_fill_value(self): - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') field = rd.get() ## dtype and fill_value should be read from metadata. when accessed they ## should not load the value. @@ -132,8 +132,8 @@ def test_load_dtype_fill_value(self): self.assertEqual(field.variables['tas']._value,None) def test_load_datetime_slicing(self): - ref_test = self.test_data['cancm4_tas'] - uri = self.test_data.get_uri('cancm4_tas') + ref_test = self.test_data_nc['cancm4_tas'] + uri = self.test_data_nc.get_uri('cancm4_tas') rd = RequestDataset(variable=ref_test['variable'],uri=uri) field = rd.get() @@ -145,24 +145,24 @@ def test_load_datetime_slicing(self): self.assertNumpyAll(slced.temporal.bounds_datetime,np.array([dt(2001,8,28),dt(2001,8,29)]).reshape(1, 2)) def test_load_value_datetime_after_slicing(self): - ref_test = self.test_data['cancm4_tas'] - uri = self.test_data.get_uri('cancm4_tas') + ref_test = self.test_data_nc['cancm4_tas'] + uri = self.test_data_nc.get_uri('cancm4_tas') rd = RequestDataset(variable=ref_test['variable'],uri=uri) field = rd.get() slced = field[:,10:130,:,4:7,100:37] self.assertEqual(slced.temporal.value_datetime.shape,(120,)) def test_load_bounds_datetime_after_slicing(self): - ref_test = self.test_data['cancm4_tas'] - uri = self.test_data.get_uri('cancm4_tas') + ref_test = self.test_data_nc['cancm4_tas'] + uri = self.test_data_nc.get_uri('cancm4_tas') rd = RequestDataset(variable=ref_test['variable'],uri=uri) field = rd.get() slced = field[:,10:130,:,4:7,100:37] 
self.assertEqual(slced.temporal.bounds_datetime.shape,(120,2)) def test_load_slice(self): - ref_test = self.test_data['cancm4_tas'] - uri = self.test_data.get_uri('cancm4_tas') + ref_test = self.test_data_nc['cancm4_tas'] + uri = self.test_data_nc.get_uri('cancm4_tas') rd = RequestDataset(variable=ref_test['variable'],uri=uri) field = rd.get() ds = nc.Dataset(uri,'r') @@ -184,8 +184,8 @@ def test_load_slice(self): ds.close() def test_load_time_range(self): - ref_test = self.test_data['cancm4_tas'] - uri = self.test_data.get_uri('cancm4_tas') + ref_test = self.test_data_nc['cancm4_tas'] + uri = self.test_data_nc.get_uri('cancm4_tas') rd = RequestDataset(variable=ref_test['variable'],uri=uri,time_range=[dt(2005,2,15),dt(2007,4,18)]) field = rd.get() self.assertEqual(field.temporal.value_datetime[0],dt(2005, 2, 15, 12, 0)) @@ -193,8 +193,8 @@ def test_load_time_range(self): self.assertEqual(field.shape,(1,793,1,64,128)) def test_load_time_region(self): - ref_test = self.test_data['cancm4_tas'] - uri = self.test_data.get_uri('cancm4_tas') + ref_test = self.test_data_nc['cancm4_tas'] + uri = self.test_data_nc.get_uri('cancm4_tas') ds = nc.Dataset(uri,'r') rd = RequestDataset(variable=ref_test['variable'],uri=uri,time_region={'month':[8]}) field = rd.get() @@ -215,8 +215,8 @@ def test_load_time_region(self): ds.close() def test_load_time_region_with_years(self): - ref_test = self.test_data['cancm4_tas'] - uri = self.test_data.get_uri('cancm4_tas') + ref_test = self.test_data_nc['cancm4_tas'] + uri = self.test_data_nc.get_uri('cancm4_tas') ds = nc.Dataset(uri,'r') rd = RequestDataset(variable=ref_test['variable'],uri=uri,time_region={'month':[8],'year':[2008,2010]}) field = rd.get() @@ -237,8 +237,8 @@ def test_load_time_region_with_years(self): ds.close() def test_load_geometry_subset(self): - ref_test = self.test_data['cancm4_tas'] - uri = self.test_data.get_uri('cancm4_tas') + ref_test = self.test_data_nc['cancm4_tas'] + uri = self.test_data_nc.get_uri('cancm4_tas') 
states = self.get_2d_state_boundaries_sdim() ca = states[:,states.properties['STATE_NAME'] == 'California'] @@ -270,8 +270,8 @@ def test_load_geometry_subset(self): import_module('rtree') def test_load_time_region_slicing(self): - ref_test = self.test_data['cancm4_tas'] - uri = self.test_data.get_uri('cancm4_tas') + ref_test = self.test_data_nc['cancm4_tas'] + uri = self.test_data_nc.get_uri('cancm4_tas') rd = RequestDataset(variable=ref_test['variable'],uri=uri,alias='foo', time_region={'month':[1,10],'year':[2011,2013]}) @@ -311,7 +311,7 @@ def test_load_remote(self): ds.close() def test_load_with_projection(self): - uri = self.test_data.get_uri('narccap_wrfg') + uri = self.test_data_nc.get_uri('narccap_wrfg') rd = RequestDataset(uri,'pr') field = rd.get() self.assertIsInstance(field.spatial.crs,CFLambertConformal) @@ -327,7 +327,7 @@ def test_load_with_projection(self): self.assertAlmostEqual(field.spatial.geom.point.value[0,100].y,21.4615681252577) def test_load_projection_axes(self): - uri = self.test_data.get_uri('cmip3_extraction') + uri = self.test_data_nc.get_uri('cmip3_extraction') variable = 'Tavg' rd = RequestDataset(uri,variable) with self.assertRaises(DimensionNotFound): @@ -346,7 +346,7 @@ def test_load_projection_axes(self): ds.close() def test_load_projection_axes_slicing(self): - uri = self.test_data.get_uri('cmip3_extraction') + uri = self.test_data_nc.get_uri('cmip3_extraction') variable = 'Tavg' rd = RequestDataset(uri,variable,dimension_map={'R':'projection','T':'time','X':'longitude','Y':'latitude'}) field = rd.get() @@ -359,7 +359,7 @@ def test_load_projection_axes_slicing(self): ds.close() def test_load_climatology_bounds(self): - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') ops = ocgis.OcgOperations(dataset=rd,output_format='nc',geom='state_boundaries', select_ugid=[27],calc=[{'func':'mean','name':'mean'}], calc_grouping=['month']) @@ -374,7 +374,7 @@ class Test(TestBase): def 
test_get_dimension_map_1(self): """Test dimension dictionary returned correctly.""" - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') dim_map = get_dimension_map('tas', rd.source_metadata) self.assertDictEqual(dim_map, {'Y': {'variable': u'lat', 'bounds': u'lat_bnds', 'dimension': u'lat', 'pos': 1}, 'X': {'variable': u'lon', 'bounds': u'lon_bnds', 'dimension': u'lon', 'pos': 2}, @@ -395,12 +395,12 @@ def test_get_dimension_map_2(self): def test_get_dimension_map_3(self): """Test when bounds are found but the bounds variable is actually missing.""" - _, to_file = tempfile.mkstemp(dir=self._test_dir) + _, to_file = tempfile.mkstemp(dir=self.current_dir_output) ocgis_lh.configure(to_file=to_file) try: # remove the bounds variable from a standard metadata dictionary - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') metadata = deepcopy(rd.source_metadata) metadata['variables'].pop('lat_bnds') dim_map = get_dimension_map('tas', metadata) diff --git a/src/ocgis/test/test_ocgis/test_api/test_subset.py b/src/ocgis/test/test_ocgis/test_api/test_subset.py index cab2e3112..659148841 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_subset.py +++ b/src/ocgis/test/test_ocgis/test_api/test_subset.py @@ -21,14 +21,14 @@ class TestSubsetOperation(TestBase): def get_operations(self): - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') slc = [None, [0, 100], None, [0, 10], [0, 10]] ops = ocgis.OcgOperations(dataset=rd, slice=slc) return ops def get_subset_operation(self): geom = TestGeom.get_geometry_dictionaries() - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') ops = ocgis.OcgOperations(dataset=rd, geom=geom, select_nearest=True) subset = SubsetOperation(ops) return subset @@ -44,7 +44,7 @@ def test_bounding_box_wrapped(self): """Test subsetting with a wrapped bounding box with the target as a 0-360 global grid.""" bbox = 
[-104, 36, -95, 44] - rd_global = self.test_data.get_rd('cancm4_tas') + rd_global = self.test_data_nc.get_rd('cancm4_tas') uri = os.path.expanduser('~/climate_data/maurer/bcca/obs/tasmax/1_8deg/gridded_obs.tasmax.OBS_125deg.daily.1991.nc') rd_downscaled = ocgis.RequestDataset(uri=uri) ops = ocgis.OcgOperations(dataset=rd_global, regrid_destination=rd_downscaled, geom=bbox, output_format='nc', @@ -73,7 +73,7 @@ def test_regridding_same_field(self): #todo: what happens with multivariate calculations #todo: test with all masked values - rd_dest = self.test_data.get_rd('cancm4_tas') + rd_dest = self.test_data_nc.get_rd('cancm4_tas') keywords = dict(regrid_destination=[rd_dest, rd_dest.get().spatial, rd_dest.get()], geom=['state_boundaries']) @@ -82,8 +82,8 @@ def test_regridding_same_field(self): for ctr, k in enumerate(itr_products_keywords(keywords, as_namedtuple=True)): - rd1 = self.test_data.get_rd('cancm4_tas') - rd2 = self.test_data.get_rd('cancm4_tas', kwds={'alias': 'tas2'}) + rd1 = self.test_data_nc.get_rd('cancm4_tas') + rd2 = self.test_data_nc.get_rd('cancm4_tas', kwds={'alias': 'tas2'}) # print ctr # if ctr != 1: continue @@ -96,8 +96,8 @@ def test_regridding_same_field(self): # so the subsetting comes out okay. 
k = deepcopy(k) k.regrid_destination.crs = Spherical() - rd1 = self.test_data.get_rd('cancm4_tas', kwds={'crs': Spherical()}) - rd2 = self.test_data.get_rd('cancm4_tas', kwds={'alias': 'tas2', 'crs': Spherical()}) + rd1 = self.test_data_nc.get_rd('cancm4_tas', kwds={'crs': Spherical()}) + rd2 = self.test_data_nc.get_rd('cancm4_tas', kwds={'alias': 'tas2', 'crs': Spherical()}) ops = ocgis.OcgOperations(dataset=[rd1, rd2], geom=k.geom, regrid_destination=k.regrid_destination, time_region={'month': [1], 'year': [2002]}, select_ugid=select_ugid) @@ -121,7 +121,7 @@ def test_regridding_same_field(self): def test_regridding_same_field_bad_bounds_raises(self): """Test a regridding error is raised with bad bounds.""" - rd1 = self.test_data.get_rd('cancm4_tas') + rd1 = self.test_data_nc.get_rd('cancm4_tas') ops = ocgis.OcgOperations(dataset=rd1, regrid_destination=rd1, snippet=True) subset = SubsetOperation(ops) with self.assertRaises(ValueError): @@ -130,7 +130,7 @@ def test_regridding_same_field_bad_bounds_raises(self): def test_regridding_same_field_bad_bounds_without_corners(self): """Test bad bounds may be regridded with_corners as False.""" - rd1 = self.test_data.get_rd('cancm4_tas') + rd1 = self.test_data_nc.get_rd('cancm4_tas') ops = ocgis.OcgOperations(dataset=rd1, regrid_destination=rd1, snippet=True, regrid_options={'with_corners': False}) subset = SubsetOperation(ops) @@ -146,8 +146,8 @@ def test_regridding_same_field_bad_bounds_without_corners(self): def test_regridding_same_field_value_mask(self): """Test with a value_mask.""" - rd1 = self.test_data.get_rd('cancm4_tas') - rd2 = self.test_data.get_rd('cancm4_tas', kwds={'alias': 'tas2'}) + rd1 = self.test_data_nc.get_rd('cancm4_tas') + rd2 = self.test_data_nc.get_rd('cancm4_tas', kwds={'alias': 'tas2'}) value_mask = np.zeros(rd2.get().spatial.shape, dtype=bool) value_mask[30, 45] = True regrid_options = {'value_mask': value_mask, 'with_corners': False} @@ -158,8 +158,8 @@ def 
test_regridding_same_field_value_mask(self): def test_regridding_different_fields_requiring_wrapping(self): """Test with fields requiring wrapping.""" - rd1 = self.test_data.get_rd('cancm4_tas') - rd2 = self.test_data.get_rd('maurer_2010_tas') + rd1 = self.test_data_nc.get_rd('cancm4_tas') + rd2 = self.test_data_nc.get_rd('maurer_2010_tas') geom = 'state_boundaries' select_ugid = [25] @@ -176,8 +176,8 @@ def test_regridding_different_fields_requiring_wrapping(self): def test_regridding_different_fields_variable_regrid_targets(self): """Test with a request dataset having regrid_source as False.""" - rd1 = self.test_data.get_rd('cancm4_tas') - rd2 = self.test_data.get_rd('maurer_2010_tas', kwds={'time_region': {'year': [1990], 'month': [2]}}) + rd1 = self.test_data_nc.get_rd('cancm4_tas') + rd2 = self.test_data_nc.get_rd('maurer_2010_tas', kwds={'time_region': {'year': [1990], 'month': [2]}}) rd2.regrid_source = False rd3 = deepcopy(rd2) rd3.regrid_source = True @@ -216,16 +216,16 @@ def test_regridding_update_crs(self): if k.assign_source_crs: # assign the coordinate system changes some regridding behavior. in this case the value read from file # is the same as the assigned value. - rd1 = self.test_data.get_rd('narccap_lambert_conformal', kwds={'crs': rd1.crs}) + rd1 = self.test_data_nc.get_rd('narccap_lambert_conformal', kwds={'crs': rd1.crs}) else: - rd1 = self.test_data.get_rd('narccap_lambert_conformal') + rd1 = self.test_data_nc.get_rd('narccap_lambert_conformal') if k.assign_destination_crs: # assign the coordinate system changes some regridding behavior. in this case the value read from file # is the same as the assigned value. 
- rd2 = self.test_data.get_rd('cancm4_tas', kwds={'crs': CFWGS84()}) + rd2 = self.test_data_nc.get_rd('cancm4_tas', kwds={'crs': CFWGS84()}) else: - rd2 = self.test_data.get_rd('cancm4_tas') + rd2 = self.test_data_nc.get_rd('cancm4_tas') if k.destination_type == 'rd': destination = rd2 @@ -249,8 +249,8 @@ def test_regridding_update_crs(self): # swap source and destination grids # test regridding lambert conformal to 0 to 360 grid - rd1 = self.test_data.get_rd('cancm4_tas') - rd2 = self.test_data.get_rd('narccap_lambert_conformal') + rd1 = self.test_data_nc.get_rd('cancm4_tas') + rd2 = self.test_data_nc.get_rd('narccap_lambert_conformal') actual = np.ma.array([[[[[0.0, 0.0, 0.0, 0.0, 289.3017883300781, 288.7024230957031, 287.9030456542969, 287.1700439453125, 286.50152587890625, 285.89556884765625, 0.0, 0.0, 0.0], [0.0, 288.76837158203125, 288.6125793457031, 288.323486328125, 287.90484619140625, 287.3174743652344, 286.6184387207031, 285.9873046875, 285.422119140625, 284.9210510253906, 284.4820861816406, 0.0, 0.0], [288.41796875, 288.18145751953125, 287.804443359375, 287.2906799316406, 286.6439514160156, 285.8691101074219, 285.2718505859375, 284.7440185546875, 284.2837219238281, 283.88897705078125, 283.557861328125, 0.0, 0.0], [288.1981506347656, 287.7408142089844, 287.1394348144531, 286.3978271484375, 285.5198669433594, 284.5093994140625, 283.86285400390625, 283.4398193359375, 283.08587646484375, 282.79901123046875, 282.5773010253906, 0.0, 0.0], [288.0224914550781, 287.44732666015625, 286.61846923828125, 285.6458435058594, 284.5333557128906, 283.2848815917969, 282.39093017578125, 282.0742492675781, 281.82818603515625, 281.6507568359375, 281.5400390625, 0.0, 0.0], [287.8061218261719, 287.3020935058594, 286.2425231933594, 285.0356140136719, 283.6852722167969, 282.1954345703125, 280.85565185546875, 280.6468200683594, 280.51019287109375, 280.4438171386719, 280.4490966796875, 280.4698181152344, 0.0], [287.58941650390625, 287.3061218261719, 286.0126037597656, 
284.5680847167969, 282.9765319824219, 281.241943359375, 279.4717102050781, 279.3304443359375, 279.31854248046875, 279.322509765625, 279.3420715332031, 279.3770751953125, 0.0], [287.37237548828125, 287.3219299316406, 285.88140869140625, 284.1074523925781, 282.3339538574219, 280.56085205078125, 278.7880554199219, 278.173095703125, 278.17498779296875, 278.1929016113281, 278.2267150878906, 278.27618408203125, 0.0], [286.8515319824219, 286.482421875, 285.2543640136719, 283.4666442871094, 281.67938232421875, 279.8925476074219, 278.1060791015625, 277.00677490234375, 277.0225830078125, 277.0546875, 277.1029052734375, 277.1670227050781, 0.0], [286.04193115234375, 285.54132080078125, 284.6296691894531, 282.8280029296875, 281.0268249511719, 279.2261047363281, 277.4257507324219, 275.83135986328125, 275.86126708984375, 275.9077453613281, 275.9705505371094, 276.04949951171875, 0.0], [285.3255920410156, 284.691650390625, 283.970458984375, 282.1915588378906, 280.3763122558594, 278.5615234375, 276.74713134765625, 274.93310546875, 274.6908874511719, 274.75189208984375, 274.8294677734375, 274.9234619140625, 275.03363037109375], [284.70318603515625, 283.93414306640625, 283.0754699707031, 281.557373046875, 279.72784423828125, 277.8988037109375, 276.0702209472656, 274.2420349121094, 273.51129150390625, 273.5870056152344, 273.6817626953125, 273.9592590332031, 274.23809814453125], [284.17547607421875, 283.2694396972656, 282.2713928222656, 280.92547607421875, 279.08148193359375, 277.2380065917969, 275.3950500488281, 273.7021484375, 272.6849060058594, 272.9571533203125, 273.23040771484375, 273.5047607421875, 273.7804260253906], [283.7431945800781, 282.69830322265625, 281.5589904785156, 280.3041687011719, 278.539306640625, 276.8466491699219, 275.2239685058594, 273.6691589355469, 272.2420654296875, 272.5112609863281, 272.7813720703125, 273.05255126953125, 273.324951171875], [283.3773498535156, 282.1432189941406, 280.9111328125, 279.6810607910156, 278.1356201171875, 276.5419006347656, 
275.0201110839844, 273.5680236816406, 272.1833801269531, 272.0677185058594, 272.33465576171875, 272.6025695800781, 272.8716735839844], [282.7506408691406, 281.5077209472656, 280.2668762207031, 279.028076171875, 277.65899658203125, 276.1656799316406, 274.7462158203125, 273.3983459472656, 272.1197814941406, 271.6265563964844, 271.8902282714844, 272.15484619140625, 272.4205627441406], [282.1257629394531, 280.8739318847656, 279.62420654296875, 278.3765563964844, 277.1088562011719, 275.7173767089844, 274.40167236328125, 273.1595458984375, 271.9886474609375, 271.187744140625, 271.4480895996094, 271.7093505859375, 271.97161865234375], [281.5027160644531, 280.2418518066406, 278.9831237792969, 277.7264709472656, 276.4718933105469, 275.1964111328125, 273.98602294921875, 272.85113525390625, 271.7894592285156, 270.7986145019531, 271.00823974609375, 271.2593688964844, 271.4490966796875], [280.8815612792969, 279.61151123046875, 278.3436279296875, 277.077880859375, 275.8142395019531, 274.6021423339844, 273.4985656738281, 272.4725036621094, 271.57916259765625, 270.69873046875, 270.55328369140625, 270.6933898925781, 270.78143310546875], [280.2622985839844, 278.98291015625, 277.7057800292969, 276.4308166503906, 275.1949768066406, 274.1751708984375, 273.2945556640625, 272.4111633300781, 271.5249938964844, 270.635986328125, 270.1463928222656, 270.1847839355469, 270.1696472167969], [0.0, 278.40582275390625, 277.3563232421875, 276.3005676269531, 275.2385559082031, 274.17022705078125, 273.26031494140625, 272.3682861328125, 271.4734802246094, 270.5758056640625, 269.7988586425781, 269.7340087890625, 269.6141357421875], [0.0, 278.4913024902344, 277.4296569824219, 276.3616943359375, 275.2873840332031, 274.2066955566406, 273.2289733886719, 272.3282165527344, 271.4246520996094, 270.5182189941406, 269.6088562011719, 269.341552734375, 269.1153259277344], [0.0, 278.5826110839844, 277.5086975097656, 276.4283447265625, 275.3415832519531, 274.2483825683594, 273.2006530761719, 272.2910461425781, 
271.37860107421875, 270.4632568359375, 269.54498291015625, 269.00787353515625, 268.6737365722656], [0.0, 0.0, 277.593505859375, 276.5006408691406, 275.4012451171875, 274.29534912109375, 273.1828918457031, 272.25677490234375, 271.3353271484375, 270.4109802246094, 269.48370361328125, 268.7335205078125, 0.0]]]]], mask=[[[[[True, True, True, True, True, True, True, True, True, False, False, False, True], [True, True, True, True, True, True, True, False, False, False, False, False, True], [True, True, True, True, True, True, True, False, False, False, False, False, True], [True, True, True, True, True, True, False, False, False, False, False, False, False], [True, True, True, True, True, False, False, False, False, False, False, False, False], [True, True, True, True, False, False, False, False, False, False, False, False, True], [True, True, False, False, False, False, False, False, False, False, False, False, True], [True, True, False, False, False, False, False, False, False, False, False, True, True], [True, True, False, False, False, False, False, False, False, False, True, True, True], [True, True, False, False, False, False, False, False, False, False, True, True, True], [True, False, False, False, False, False, False, False, False, True, True, True, True], [True, False, False, False, False, False, False, False, True, True, True, True, True], [True, False, False, False, False, False, False, False, True, True, True, True, True], [True, False, False, False, False, False, False, True, True, True, True, True, True], [True, False, False, False, False, False, False, True, True, True, True, True, True], [False, False, False, False, False, False, True, True, True, True, True, True, True], [False, False, False, False, False, False, True, True, True, True, True, True, True], [False, False, False, False, False, False, True, True, True, True, True, True, True], [False, False, False, False, False, False, False, True, True, True, True, True, True], [False, False, False, False, 
False, False, False, True, True, True, True, True, True], [False, False, False, False, False, False, False, True, True, True, True, True, True], [False, False, False, False, False, False, False, False, True, True, True, True, True], [True, False, False, False, False, False, True, True, True, True, True, True, True], [True, False, False, True, True, True, True, True, True, True, True, True, True]]]]], @@ -275,8 +275,8 @@ def test_regridding_update_crs(self): def test_regridding_with_output_crs(self): """Test with an output coordinate system.""" - rd1 = self.test_data.get_rd('narccap_lambert_conformal') - rd2 = self.test_data.get_rd('cancm4_tas') + rd1 = self.test_data_nc.get_rd('narccap_lambert_conformal') + rd2 = self.test_data_nc.get_rd('cancm4_tas') ops = ocgis.OcgOperations(dataset=rd2, regrid_destination=rd1, output_crs=rd1.get().spatial.crs, geom='state_boundaries', select_ugid=[16, 25]) ret = ops.execute() @@ -291,8 +291,8 @@ def test_regridding_with_output_crs(self): def test_regridding_two_projected_coordinate_systems(self): """Test with two coordinate systems not in spherical coordinates.""" - rd1 = self.test_data.get_rd('narccap_lambert_conformal') - rd2 = self.test_data.get_rd('narccap_polar_stereographic') + rd1 = self.test_data_nc.get_rd('narccap_lambert_conformal') + rd2 = self.test_data_nc.get_rd('narccap_polar_stereographic') self.assertIsInstance(rd2.crs, CFPolarStereographic) diff --git a/src/ocgis/test/test_ocgis/test_calc/test_calc_general.py b/src/ocgis/test/test_ocgis/test_calc/test_calc_general.py index 6e8d90c64..f63265a73 100644 --- a/src/ocgis/test/test_ocgis/test_calc/test_calc_general.py +++ b/src/ocgis/test/test_ocgis/test_calc/test_calc_general.py @@ -29,7 +29,7 @@ def run_standard_operations(self,calc,capture=False,output_format=None): aggregate,calc_grouping,output_format = tup if aggregate is True and output_format == 'nc': continue - rd = self.test_data.get_rd('cancm4_tas',kwds={'time_region':{'year':[2001,2002]}}) + rd = 
self.test_data_nc.get_rd('cancm4_tas',kwds={'time_region':{'year':[2001,2002]}}) try: ops = OcgOperations(dataset=rd,geom='state_boundaries',select_ugid=[25], calc=calc,calc_grouping=calc_grouping,output_format=output_format, @@ -78,7 +78,7 @@ class Test(AbstractCalcBase): def test_date_groups_all(self): calc = [{'func':'mean','name':'mean'}] - rd = self.test_data.get_rd('cancm4_tasmax_2011') + rd = self.test_data_nc.get_rd('cancm4_tasmax_2011') calc_grouping = 'all' ops = OcgOperations(dataset=rd,calc=calc,calc_grouping=calc_grouping, @@ -92,7 +92,7 @@ def test_date_groups_all(self): def test_time_region(self): kwds = {'time_region':{'year':[2011]}} - rd = self.test_data.get_rd('cancm4_tasmax_2011',kwds=kwds) + rd = self.test_data_nc.get_rd('cancm4_tasmax_2011',kwds=kwds) calc = [{'func':'mean','name':'mean'}] calc_grouping = ['year','month'] @@ -105,7 +105,7 @@ def test_time_region(self): self.assertEqual(tgroup['month'][-1],12) kwds = {'time_region':{'year':[2011,2013],'month':[8]}} - rd = self.test_data.get_rd('cancm4_tasmax_2011',kwds=kwds) + rd = self.test_data_nc.get_rd('cancm4_tasmax_2011',kwds=kwds) calc = [{'func':'threshold','name':'threshold','kwds':{'threshold':0.0,'operation':'gte'}}] calc_grouping = ['month'] aggregate = True @@ -121,7 +121,7 @@ def test_time_region(self): self.assertEqual(threshold.flatten()[0],62) def test_computational_nc_output(self): - rd = self.test_data.get_rd('cancm4_tasmax_2011',kwds={'time_range':[datetime.datetime(2011,1,1),datetime.datetime(2011,12,31)]}) + rd = self.test_data_nc.get_rd('cancm4_tasmax_2011',kwds={'time_range':[datetime.datetime(2011,1,1),datetime.datetime(2011,12,31)]}) calc = [{'func':'mean','name':'tasmax_mean'}] calc_grouping = ['month','year'] @@ -187,7 +187,7 @@ def test_frequency_percentiles(self): def test_date_groups(self): calc = [{'func':'mean','name':'mean'}] - rd = self.test_data.get_rd('cancm4_tasmax_2011') + rd = self.test_data_nc.get_rd('cancm4_tasmax_2011') calc_grouping = ['month'] ops = 
OcgOperations(dataset=rd,calc=calc,calc_grouping=calc_grouping, @@ -238,7 +238,7 @@ def test_date_groups(self): rdt = ref.value_datetime self.assertEqual(rdt[0],dt(2011,1,1,12)) - rd = self.test_data.get_rd('cancm4_tasmax_2011',kwds={'time_region':{'month':[1],'year':[2011]}}) + rd = self.test_data_nc.get_rd('cancm4_tasmax_2011',kwds={'time_region':{'month':[1],'year':[2011]}}) field = rd.get() calc_grouping = ['month','day','year'] ops = OcgOperations(dataset=rd,calc=calc,calc_grouping=calc_grouping, @@ -257,7 +257,7 @@ def get_collection(self,aggregate=False): spatial_operation = 'clip' else: spatial_operation = 'intersects' - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') ops = OcgOperations(dataset=rd,geom='state_boundaries',select_ugid=[25], spatial_operation=spatial_operation,aggregate=aggregate) ret = ops.execute() diff --git a/src/ocgis/test/test_ocgis/test_calc/test_engine.py b/src/ocgis/test/test_ocgis/test_calc/test_engine.py index 4fbb5a948..f568f51a8 100644 --- a/src/ocgis/test/test_ocgis/test_calc/test_engine.py +++ b/src/ocgis/test/test_ocgis/test_calc/test_engine.py @@ -30,7 +30,7 @@ def get_engine(self,kwds=None,funcs=None,grouping="None"): def test_with_eval_function_one_variable(self): funcs = [{'func':'tas2=tas+4','ref':EvalFunction}] engine = self.get_engine(funcs=funcs,grouping=None) - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') coll = ocgis.OcgOperations(dataset=rd,slice=[None,[0,700],None,[0,10],[0,10]]).execute() to_test = deepcopy(coll) engine.execute(coll) @@ -39,8 +39,8 @@ def test_with_eval_function_one_variable(self): def test_with_eval_function_two_variables(self): funcs = [{'func':'tas_out=tas+tas2','ref':EvalFunction}] engine = self.get_engine(funcs=funcs,grouping=None) - rd = self.test_data.get_rd('cancm4_tas') - rd2 = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') + rd2 = self.test_data_nc.get_rd('cancm4_tas') 
rd2.alias = 'tas2' field = rd.get() field2 = rd2.get() @@ -59,14 +59,14 @@ def test_constructor(self): self.get_engine(kwds=kwds) def test_execute(self): - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') coll = ocgis.OcgOperations(dataset=rd,slice=[None,[0,700],None,[0,10],[0,10]]).execute() engine = self.get_engine() ret = engine.execute(coll) self.assertEqual(ret[1]['tas'].shape,(1, 12, 1, 10, 10)) def test_execute_tgd(self): - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') coll = ocgis.OcgOperations(dataset=rd,slice=[None,[0,700],None,[0,10],[0,10]], calc=self.funcs,calc_grouping=self.grouping).execute() coll_data = ocgis.OcgOperations(dataset=rd,slice=[None,[0,700],None,[0,10],[0,10]]).execute() @@ -78,7 +78,7 @@ def test_execute_tgd(self): self.assertFalse(np.may_share_memory(coll.gvu(1,'mean'),coll_engine.gvu(1,'mean'))) def test_execute_tgd_malformed(self): - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') coll = ocgis.OcgOperations(dataset=rd,slice=[None,[0,700],None,[0,10],[0,10]], calc=self.funcs,calc_grouping=['month','year']).execute() tgds = {'tas':coll[1]['tas'].temporal} diff --git a/src/ocgis/test/test_ocgis/test_calc/test_eval_function.py b/src/ocgis/test/test_ocgis/test_calc/test_eval_function.py index 556152a02..019191060 100644 --- a/src/ocgis/test/test_ocgis/test_calc/test_eval_function.py +++ b/src/ocgis/test/test_ocgis/test_calc/test_eval_function.py @@ -80,7 +80,7 @@ def test_get_eval_string_bad_string(self): EvalFunction._get_eval_string_(expr,{'tas':'var.value'}) def test_calculation_one_variable_exp_only(self): - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') field = rd.get() field = field[:,0:10,:,:,:] expr = 'es=6.1078*exp(17.08085*(tas-273.16)/(234.175+(tas-273.16)))' @@ -97,8 +97,8 @@ def test_calculation_one_variable_exp_only(self): 
self.assertNumpyAll(ret['es'].value,actual_value) def test_calculation_two_variables_exp_only(self): - rd = self.test_data.get_rd('cancm4_tas') - rd2 = self.test_data.get_rd('cancm4_tasmax_2001') + rd = self.test_data_nc.get_rd('cancm4_tas') + rd2 = self.test_data_nc.get_rd('cancm4_tasmax_2001') field = rd.get() field2 = rd2.get() field.variables.add_variable(field2.variables['tasmax'],assign_new_uid=True) @@ -115,7 +115,7 @@ def test_calculation_two_variables_exp_only(self): self.assertNumpyAll(ret['foo'].value,actual_value) def test_calculation_one_variable_exp_and_log(self): - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') field = rd.get() field = field[:,0:10,:,:,:] expr = 'es=6.1078*exp(log(17.08085)*(tas-273.16)/(234.175+(tas-273.16)))' @@ -126,7 +126,7 @@ def test_calculation_one_variable_exp_and_log(self): self.assertNumpyAll(ret['es'].value,actual_value) def test_calculation_file_only_one_variable(self): - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') field = rd.get() field = field[:,0:10,:,:,:] expr = 'es=6.1078*exp(17.08085*(tas-273.16)/(234.175+(tas-273.16)))' @@ -137,8 +137,8 @@ def test_calculation_file_only_one_variable(self): self.assertEqual(ret['es'].fill_value,field.variables['tas'].fill_value) def test_calculation_file_only_two_variables(self): - rd = self.test_data.get_rd('cancm4_tas') - rd2 = self.test_data.get_rd('cancm4_tasmax_2001') + rd = self.test_data_nc.get_rd('cancm4_tas') + rd2 = self.test_data_nc.get_rd('cancm4_tasmax_2001') field = rd.get() field2 = rd2.get() field.variables.add_variable(field2.variables['tasmax'],assign_new_uid=True) diff --git a/src/ocgis/test/test_ocgis/test_calc/test_library/test_index/test_dynamic_kernel_percentile.py b/src/ocgis/test/test_ocgis/test_calc/test_library/test_index/test_dynamic_kernel_percentile.py index e26f6ab8d..b37381990 100644 --- 
a/src/ocgis/test/test_ocgis/test_calc/test_library/test_index/test_dynamic_kernel_percentile.py +++ b/src/ocgis/test/test_ocgis/test_calc/test_library/test_index/test_dynamic_kernel_percentile.py @@ -20,7 +20,7 @@ def get_percentile_reference(self): for year,day in itertools.product(years,days): dates.append(datetime.datetime(year,6,day,12)) - ds = nc.Dataset(self.test_data.get_uri('cancm4_tas')) + ds = nc.Dataset(self.test_data_nc.get_uri('cancm4_tas')) try: calendar = ds.variables['time'].calendar units = ds.variables['time'].units @@ -59,7 +59,7 @@ def test_constructor(self): def test_calculate(self): ## daily data for three years is wanted for the test. subset a CMIP5 ## decadal simulation to use for input into the computation. - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') field = rd.get() field = field.get_between('temporal',datetime.datetime(2001,1,1),datetime.datetime(2003,12,31,23,59)) ## the calculation will be for months and years. set the temporal grouping. 
@@ -85,7 +85,7 @@ def test_calculate(self): @longrunning def test_operations(self): - uri = self.test_data.get_uri('cancm4_tas') + uri = self.test_data_nc.get_uri('cancm4_tas') rd = RequestDataset(uri=uri, variable='tas', # time_range=[datetime.datetime(2001,1,1),datetime.datetime(2003,12,31,23,59)] @@ -103,7 +103,7 @@ def test_operations(self): @longrunning def test_operations_two_steps(self): ## get the request dataset to use as the basis for the percentiles - uri = self.test_data.get_uri('cancm4_tas') + uri = self.test_data_nc.get_uri('cancm4_tas') variable = 'tas' rd = RequestDataset(uri=uri,variable=variable) ## this is the underly OCGIS dataset object diff --git a/src/ocgis/test/test_ocgis/test_calc/test_library/test_index/test_heat_index.py b/src/ocgis/test/test_ocgis/test_calc/test_library/test_index/test_heat_index.py index e987e456e..49f32aedf 100644 --- a/src/ocgis/test/test_ocgis/test_calc/test_library/test_index/test_heat_index.py +++ b/src/ocgis/test/test_ocgis/test_calc/test_library/test_index/test_heat_index.py @@ -10,7 +10,7 @@ class TestHeatIndex(AbstractTestField): def test_units_raise_exception(self): kwds = {'time_range':[dt(2011,1,1),dt(2011,12,31,23,59,59)]} - ds = [self.test_data.get_rd('cancm4_tasmax_2011',kwds=kwds),self.test_data.get_rd('cancm4_rhsmax',kwds=kwds)] + ds = [self.test_data_nc.get_rd('cancm4_tasmax_2011',kwds=kwds),self.test_data_nc.get_rd('cancm4_rhsmax',kwds=kwds)] calc = [{'func':'heat_index','name':'heat_index','kwds':{'tas':'tasmax','rhs':'rhsmax'}}] ops = ocgis.OcgOperations(dataset=ds,calc=calc,slice=[0,0,0,0,0]) self.assertEqual(ops.calc_grouping,None) @@ -20,7 +20,7 @@ def test_units_raise_exception(self): def test_units_conform_to(self): ocgis.env.OVERWRITE = True kwds = {'time_range':[dt(2011,1,1),dt(2011,12,31,23,59,59)]} - ds = [self.test_data.get_rd('cancm4_tasmax_2011',kwds=kwds),self.test_data.get_rd('cancm4_rhsmax',kwds=kwds)] + ds = 
[self.test_data_nc.get_rd('cancm4_tasmax_2011',kwds=kwds),self.test_data_nc.get_rd('cancm4_rhsmax',kwds=kwds)] ## set the conform to units ds[0].conform_units_to = 'fahrenheit' diff --git a/src/ocgis/test/test_ocgis/test_calc/test_library/test_math.py b/src/ocgis/test/test_ocgis/test_calc/test_library/test_math.py index 5f750b690..0e9677878 100644 --- a/src/ocgis/test/test_ocgis/test_calc/test_library/test_math.py +++ b/src/ocgis/test/test_ocgis/test_calc/test_library/test_math.py @@ -202,7 +202,7 @@ def test_execute_valid(self): def test_execute_valid_through_operations(self): """Test executing a "valid" convolution mode through operations ensuring the data is appropriately truncated.""" - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') calc = [{'func': 'convolve_1d', 'name': 'convolve', 'kwds': {'v': np.array([1, 1, 1, 1, 1]), 'mode': 'valid'}}] ops = ocgis.OcgOperations(dataset=rd, calc=calc, slice=[None, [0, 365], None, [0, 10], [0, 10]]) ret = ops.execute() diff --git a/src/ocgis/test/test_ocgis/test_calc/test_library/test_statistics.py b/src/ocgis/test/test_ocgis/test_calc/test_library/test_statistics.py index 5cb742451..7cb3de520 100644 --- a/src/ocgis/test/test_ocgis/test_calc/test_library/test_statistics.py +++ b/src/ocgis/test/test_ocgis/test_calc/test_library/test_statistics.py @@ -47,7 +47,7 @@ def test_execute(self): def test_execute_valid_through_operations(self): """Test executing a "valid" convolution mode through operations ensuring the data is appropriately truncated.""" - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') calc = [{'func': 'moving_window', 'name': 'ma', 'kwds': {'k': 5, 'mode': 'valid', 'operation': 'mean'}}] ops = ocgis.OcgOperations(dataset=rd, calc=calc, slice=[None, [0, 365], None, [0, 10], [0, 10]]) ret = ops.execute() @@ -200,7 +200,7 @@ def test_two_variables_sample_size(self): set(ret.keys())) def test_file_only(self): - rd = 
self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') field = rd.get() field = field[:,10:20,:,20:30,40:50] grouping = ['month'] @@ -219,7 +219,7 @@ def test_file_only(self): def test_output_datatype(self): ## ensure the output data type is the same as the input data type of ## the variable. - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') ops = ocgis.OcgOperations(dataset=rd,calc=[{'func':'mean','name':'mean'}], calc_grouping=['month'],geom='state_boundaries', select_ugid=[27]) @@ -229,7 +229,7 @@ def test_output_datatype(self): self.assertEqual(ret[27]['tas'].variables['mean'].dtype,var_dtype) def test_file_only_by_operations(self): - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') ops = ocgis.OcgOperations(dataset=rd,calc=[{'func':'mean','name':'mean'}], calc_grouping=['month'],geom='state_boundaries', select_ugid=[27],file_only=True,output_format='nc') diff --git a/src/ocgis/test/test_ocgis/test_contrib/test_library_icclim.py b/src/ocgis/test/test_ocgis/test_contrib/test_library_icclim.py index ef075762c..0f283b084 100644 --- a/src/ocgis/test/test_ocgis/test_contrib/test_library_icclim.py +++ b/src/ocgis/test/test_ocgis/test_contrib/test_library_icclim.py @@ -36,12 +36,12 @@ def test_standard_AbstractIcclimFunction(self): for cg in CalcGrouping.iter_possible(): calc = [{'func':subclass.key,'name':subclass.key.split('_')[1]}] if klass == AbstractIcclimUnivariateSetFunction: - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') rd.time_region = {'year':[2001,2002]} calc = [{'func':subclass.key,'name':subclass.key.split('_')[1]}] else: - tasmin = self.test_data.get_rd('cancm4_tasmin_2001') - tasmax = self.test_data.get_rd('cancm4_tasmax_2001') + tasmin = self.test_data_nc.get_rd('cancm4_tasmin_2001') + tasmax = self.test_data_nc.get_rd('cancm4_tasmax_2001') rd = [tasmin,tasmax] for r in rd: r.time_region = 
{'year':[2001,2002]} @@ -90,8 +90,8 @@ def test_bad_icclim_key_to_operations(self): class TestDTR(TestBase): def test_calculate(self): - tasmin = self.test_data.get_rd('cancm4_tasmin_2001') - tasmax = self.test_data.get_rd('cancm4_tasmax_2001') + tasmin = self.test_data_nc.get_rd('cancm4_tasmin_2001') + tasmax = self.test_data_nc.get_rd('cancm4_tasmax_2001') field = tasmin.get() field.variables.add_variable(deepcopy(tasmax.get().variables['tasmax']), assign_new_uid=True) field = field[:,0:600,:,25:50,25:50] @@ -101,8 +101,8 @@ def test_calculate(self): self.assertEqual(ret['icclim_DTR'].value.shape,(1, 12, 1, 25, 25)) def test_bad_keyword_mapping(self): - tasmin = self.test_data.get_rd('cancm4_tasmin_2001') - tas = self.test_data.get_rd('cancm4_tas') + tasmin = self.test_data_nc.get_rd('cancm4_tasmin_2001') + tas = self.test_data_nc.get_rd('cancm4_tas') rds = [tasmin,tas] calc = [{'func':'icclim_DTR','name':'DTR','kwds':{'tas':'tasmin','tasmax':'tasmax'}}] with self.assertRaises(DefinitionValidationError): @@ -118,9 +118,9 @@ def test_calculation_operations(self): ## note the kwds must contain a map of the required variables to their ## associated aliases. 
calc = [{'func':'icclim_DTR','name':'DTR','kwds':{'tasmin':'tasmin','tasmax':'tasmax'}}] - tasmin = self.test_data.get_rd('cancm4_tasmin_2001') + tasmin = self.test_data_nc.get_rd('cancm4_tasmin_2001') tasmin.time_region = {'year':[2002]} - tasmax = self.test_data.get_rd('cancm4_tasmax_2001') + tasmax = self.test_data_nc.get_rd('cancm4_tasmax_2001') tasmax.time_region = {'year':[2002]} rds = [tasmin,tasmax] ops = ocgis.OcgOperations(dataset=rds,calc=calc,calc_grouping=['month'], @@ -131,8 +131,8 @@ def test_calculation_operations(self): class TestETR(TestBase): def test_calculate(self): - tasmin = self.test_data.get_rd('cancm4_tasmin_2001') - tasmax = self.test_data.get_rd('cancm4_tasmax_2001') + tasmin = self.test_data_nc.get_rd('cancm4_tasmin_2001') + tasmax = self.test_data_nc.get_rd('cancm4_tasmax_2001') field = tasmin.get() field.variables.add_variable(tasmax.get().variables['tasmax'], assign_new_uid=True) field = field[:,0:600,:,25:50,25:50] @@ -142,7 +142,7 @@ def test_calculate(self): self.assertEqual(ret['icclim_ETR'].value.shape,(1, 12, 1, 25, 25)) def test_calculate_rotated_pole(self): - tasmin_fake = self.test_data.get_rd('rotated_pole_ichec') + tasmin_fake = self.test_data_nc.get_rd('rotated_pole_ichec') tasmin_fake.alias = 'tasmin' tasmax_fake = deepcopy(tasmin_fake) tasmax_fake.alias = 'tasmax' @@ -163,7 +163,7 @@ def test_calculate_rotated_pole(self): class TestTx(TestBase): def test_calculate_operations(self): - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') slc = [None,None,None,[0,10],[0,10]] calc_icclim = [{'func':'icclim_TG','name':'TG'}] calc_ocgis = [{'func':'mean','name':'mean'}] @@ -179,7 +179,7 @@ def test_calculate_operations(self): ret_icclim[1]['tas'].variables['TG'].value) def test_calculation_operations_to_nc(self): - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') slc = [None,None,None,[0,10],[0,10]] ops_ocgis = 
OcgOperations(calc=[{'func':'icclim_TG','name':'TG'}], calc_grouping=['month'], @@ -199,7 +199,7 @@ def test_calculation_operations_to_nc(self): self.assertEqual(dict(var.__dict__),{'_FillValue':np.float32(1e20),u'units': u'K', u'standard_name': AbstractIcclimFunction.standard_name, u'long_name': u'Mean of daily mean temperature'}) def test_calculate(self): - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') field = rd.get() field = field[:,:,:,0:10,0:10] klasses = [IcclimTG,IcclimTN,IcclimTX] @@ -217,7 +217,7 @@ def test_calculate(self): class TestSU(TestBase): def test_calculate(self): - rd = self.test_data.get_rd('cancm4_tasmax_2011') + rd = self.test_data_nc.get_rd('cancm4_tasmax_2011') field = rd.get() field = field[:,:,:,0:10,0:10] for calc_grouping in [['month'],['month','year']]: @@ -229,14 +229,14 @@ def test_calculate(self): self.assertNumpyAll(ret_icclim['icclim_SU'].value,ret_ocgis['threshold'].value) def test_calculation_operations_bad_units(self): - rd = self.test_data.get_rd('daymet_tmax') + rd = self.test_data_nc.get_rd('daymet_tmax') calc_icclim = [{'func':'icclim_SU','name':'SU'}] ops_icclim = OcgOperations(calc=calc_icclim,calc_grouping=['month','year'],dataset=rd) with self.assertRaises(UnitsValidationError): ops_icclim.execute() def test_calculation_operations_to_nc(self): - rd = self.test_data.get_rd('cancm4_tasmax_2011') + rd = self.test_data_nc.get_rd('cancm4_tasmax_2011') slc = [None,None,None,[0,10],[0,10]] ops_ocgis = OcgOperations(calc=[{'func':'icclim_SU','name':'SU'}], calc_grouping=['month'], diff --git a/src/ocgis/test/test_ocgis/test_conv/test_base.py b/src/ocgis/test/test_ocgis/test_conv/test_base.py index 87d798224..60c924e79 100644 --- a/src/ocgis/test/test_ocgis/test_conv/test_base.py +++ b/src/ocgis/test/test_ocgis/test_conv/test_base.py @@ -16,7 +16,7 @@ class AbstractTestConverter(TestBase): def get_spatial_collection(self, field=None): - rd = self.test_data.get_rd('cancm4_tas') + rd = 
self.test_data_nc.get_rd('cancm4_tas') field = field or rd.get()[:, 0, :, 0, 0] coll = SpatialCollection() coll.add_field(1, None, field) @@ -28,7 +28,7 @@ class TestAbstractConverter(AbstractTestConverter): def run_auxiliary_file_tst(self,Converter,file_list,auxiliary_file_list=None): auxiliary_file_list = auxiliary_file_list or self._auxiliary_file_list - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') ops = ocgis.OcgOperations(dataset=rd,output_format='numpy',slice=[None,0,None,[0,10],[0,10]]) coll = ops.execute() @@ -36,7 +36,7 @@ def run_auxiliary_file_tst(self,Converter,file_list,auxiliary_file_list=None): _add_auxiliary_files = [True,False] for ops_arg,add_auxiliary_files in itertools.product(_ops,_add_auxiliary_files): ## make a new output directory as to not deal with overwrites - outdir = tempfile.mkdtemp(dir=self._test_dir) + outdir = tempfile.mkdtemp(dir=self.current_dir_output) try: conv = Converter([coll],outdir,'ocgis_output',add_auxiliary_files=add_auxiliary_files,ops=ops_arg) ## CsvPlusConverter requires an operations argument @@ -56,12 +56,12 @@ def run_auxiliary_file_tst(self,Converter,file_list,auxiliary_file_list=None): self.assertEqual(set(files),set(to_test)) def run_overwrite_true_tst(self,Converter,include_ops=False): - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') _ops = ocgis.OcgOperations(dataset=rd,output_format='numpy',slice=[None,0,None,[0,10],[0,10]]) coll = _ops.execute() ops = _ops if include_ops else None - outdir = tempfile.mkdtemp(dir=self._test_dir) + outdir = tempfile.mkdtemp(dir=self.current_dir_output) conv = Converter([coll],outdir,'ocgis_output',ops=ops) conv.write() mtimes = [os.path.getmtime(os.path.join(outdir,f)) for f in os.listdir(outdir)] @@ -73,7 +73,7 @@ def run_overwrite_true_tst(self,Converter,include_ops=False): def test_multiple_variables(self): conv_klasses = [CsvConverter, NcConverter] - rd = self.test_data.get_rd('cancm4_tas') + 
rd = self.test_data_nc.get_rd('cancm4_tas') field = rd.get() var2 = deepcopy(field.variables['tas']) var2.alias = 'tas2' @@ -81,7 +81,7 @@ def test_multiple_variables(self): field = field[:, 0:2, :, 0:5, 0:5] coll = self.get_spatial_collection(field=field) for conv_klass in conv_klasses: - conv = conv_klass([coll], self._test_dir, 'ocgis_output_{0}'.format(conv_klass.__name__)) + conv = conv_klass([coll], self.current_dir_output, 'ocgis_output_{0}'.format(conv_klass.__name__)) ret = conv.write() if conv_klass == CsvConverter: with open(ret, 'r') as f: @@ -94,11 +94,11 @@ def test_multiple_variables(self): self.assertNumpyAll(ds.variables['tas'][:], ds.variables['tas2'][:]) def test_overwrite_false_csv(self): - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') ops = ocgis.OcgOperations(dataset=rd, output_format='numpy', slice=[None, 0, None, [0, 10], [0, 10]]) coll = ops.execute() - outdir = tempfile.mkdtemp(dir=self._test_dir) + outdir = tempfile.mkdtemp(dir=self.current_dir_output) conv = CsvConverter([coll], outdir, 'ocgis_output') conv.write() diff --git a/src/ocgis/test/test_ocgis/test_conv/test_csv_shp.py b/src/ocgis/test/test_ocgis/test_conv/test_csv_shp.py index a41ee3a8c..c9fd691ca 100644 --- a/src/ocgis/test/test_ocgis/test_conv/test_csv_shp.py +++ b/src/ocgis/test/test_ocgis/test_conv/test_csv_shp.py @@ -12,14 +12,14 @@ class Test(TestBase): def test_geometries_not_duplicated_with_equivalent_ugid(self): ## if geometries are equivalent, they should not have duplicates in the ## output shapefile. 
- rd = self.test_data.get_rd('cancm4_tas') - rd2 = self.test_data.get_rd('cancm4_tasmax_2011') + rd = self.test_data_nc.get_rd('cancm4_tas') + rd2 = self.test_data_nc.get_rd('cancm4_tasmax_2011') ops = OcgOperations(dataset=[rd,rd2],geom='state_boundaries',select_ugid=[16], output_format='csv+',snippet=True) ops.execute() - path_shp = os.path.join(self._test_dir,ops.prefix,'shp',ops.prefix+'_ugid.shp') - path_csv = os.path.join(self._test_dir,ops.prefix,'shp',ops.prefix+'_ugid.csv') + path_shp = os.path.join(self.current_dir_output,ops.prefix,'shp',ops.prefix+'_ugid.shp') + path_csv = os.path.join(self.current_dir_output,ops.prefix,'shp',ops.prefix+'_ugid.csv') with fiona.open(path_shp) as source: self.assertEqual(len(list(source)),1) with open(path_csv) as source: @@ -32,13 +32,13 @@ def test_geometries_different_ugid(self): row = list(ShpCabinetIterator(key='state_boundaries', select_ugid=[16])) row.append(deepcopy(row[0])) row[1]['properties']['UGID'] = 17 - rd = self.test_data.get_rd('cancm4_tas') - rd2 = self.test_data.get_rd('cancm4_tasmax_2011') + rd = self.test_data_nc.get_rd('cancm4_tas') + rd2 = self.test_data_nc.get_rd('cancm4_tasmax_2011') ops = OcgOperations(dataset=[rd, rd2], geom=row, output_format='csv+', snippet=True) ops.execute() - path_shp = os.path.join(self._test_dir,ops.prefix,'shp',ops.prefix+'_ugid.shp') - path_csv = os.path.join(self._test_dir,ops.prefix,'shp',ops.prefix+'_ugid.csv') + path_shp = os.path.join(self.current_dir_output,ops.prefix,'shp',ops.prefix+'_ugid.shp') + path_csv = os.path.join(self.current_dir_output,ops.prefix,'shp',ops.prefix+'_ugid.csv') with fiona.open(path_shp) as source: self.assertEqual(len(list(source)),2) with open(path_csv) as source: diff --git a/src/ocgis/test/test_ocgis/test_conv/test_fiona_.py b/src/ocgis/test/test_ocgis/test_conv/test_fiona_.py index 530e661d5..5885c345f 100644 --- a/src/ocgis/test/test_ocgis/test_conv/test_fiona_.py +++ b/src/ocgis/test/test_ocgis/test_conv/test_fiona_.py @@ -12,7 
+12,7 @@ class TestShpConverter(TestBase): def get_subset_operation(self): geom = TestGeom.get_geometry_dictionaries() - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') ops = ocgis.OcgOperations(dataset=rd, geom=geom, select_nearest=True, snippet=True) subset = SubsetOperation(ops) return subset @@ -21,10 +21,10 @@ def test_attributes_copied(self): """Test attributes in geometry dictionaries are properly accounted for in the converter.""" subset = self.get_subset_operation() - conv = ShpConverter(subset, self._test_dir, prefix='shpconv') + conv = ShpConverter(subset, self.current_dir_output, prefix='shpconv') ret = conv.write() - path_ugid = os.path.join(self._test_dir, conv.prefix+'_ugid.shp') + path_ugid = os.path.join(self.current_dir_output, conv.prefix+'_ugid.shp') with fiona.open(path_ugid) as source: self.assertEqual(source.schema['properties'], OrderedDict([(u'COUNTRY', 'str'), (u'UGID', 'int:10')])) @@ -32,11 +32,11 @@ def test_attributes_copied(self): def test_none_geom(self): """Test a NoneType geometry will pass through the Fiona converter.""" - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') slc = [None, 0, None, [10, 20], [10, 20]] ops = ocgis.OcgOperations(dataset=rd, slice=slc) subset = SubsetOperation(ops) - conv = ShpConverter(subset, self._test_dir, prefix='shpconv') + conv = ShpConverter(subset, self.current_dir_output, prefix='shpconv') ret = conv.write() - contents = os.listdir(self._test_dir) + contents = os.listdir(self.current_dir_output) self.assertEqual(len(contents), 5) diff --git a/src/ocgis/test/test_ocgis/test_conv/test_meta.py b/src/ocgis/test/test_ocgis/test_conv/test_meta.py index 01cff7f0d..4d7dc0290 100644 --- a/src/ocgis/test/test_ocgis/test_conv/test_meta.py +++ b/src/ocgis/test/test_ocgis/test_conv/test_meta.py @@ -6,7 +6,7 @@ class TestMetaConverter(TestBase): def test_init(self): - rd = self.test_data.get_rd('cancm4_tas') + rd = 
self.test_data_nc.get_rd('cancm4_tas') ops = OcgOperations(dataset=rd) conv = MetaConverter(ops) self.assertTrue(len(conv.write()) > 4000) diff --git a/src/ocgis/test/test_ocgis/test_conv/test_nc.py b/src/ocgis/test/test_ocgis/test_conv/test_nc.py index 73c78a129..7552f6e24 100644 --- a/src/ocgis/test/test_ocgis/test_conv/test_nc.py +++ b/src/ocgis/test/test_ocgis/test_conv/test_nc.py @@ -14,7 +14,7 @@ def test_fill_value_modified(self): ref._dtype = np.int32 ref._value = ref.value.astype(np.int32) ref._fill_value = None - ncconv = NcConverter([coll],self._test_dir,'ocgis_output') + ncconv = NcConverter([coll],self.current_dir_output,'ocgis_output') ret = ncconv.write() with nc_scope(ret) as ds: var = ds.variables['tas'] @@ -23,7 +23,7 @@ def test_fill_value_modified(self): self.assertEqual(var._FillValue,np.ma.array([],dtype=np.dtype('int32')).fill_value) def test_fill_value_copied(self): - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') with nc_scope(rd.uri) as ds: fill_value_test = ds.variables['tas']._FillValue ops = ocgis.OcgOperations(dataset=rd,snippet=True,output_format='nc') diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_crs.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_crs.py index cbefdaa8c..c3a63c628 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_crs.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_crs.py @@ -97,7 +97,7 @@ def test_place_prime_meridian_array(self): def test_wrap_unwrap_with_mask(self): """Test wrapped and unwrapped geometries with a mask ensuring that masked values are wrapped and unwrapped.""" - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') ops = ocgis.OcgOperations(dataset=rd, geom='state_boundaries', select_ugid=[23]) ret = ops.execute() sdim = ret[23]['tas'].spatial @@ -210,7 +210,7 @@ def _get_sdim_(value, bounds): self.assertEqual(orig_sdim.geom.polygon.value[0, 0].bounds, 
sdim.geom.polygon.value[0, 0].bounds) # for target in ['point', 'polygon']: - # path = get_temp_path(name=target, suffix='.shp', wd=self._test_dir) + # path = get_temp_path(name=target, suffix='.shp', wd=self.current_dir_output) # sdim.write_fiona(path, target) @@ -244,7 +244,7 @@ def test_bad_parms(self): class TestCFLambertConformalConic(TestBase): def test_load_from_metadata(self): - uri = self.test_data.get_uri('narccap_wrfg') + uri = self.test_data_nc.get_uri('narccap_wrfg') ds = nc.Dataset(uri,'r') meta = NcMetadata(ds) crs = CFLambertConformal.load_from_metadata('pr',meta) @@ -259,16 +259,16 @@ def test_load_from_metadata(self): class TestCFRotatedPole(TestBase): def test_load_from_metadata(self): - rd = self.test_data.get_rd('rotated_pole_ichec') + rd = self.test_data_nc.get_rd('rotated_pole_ichec') self.assertIsInstance(rd.get().spatial.crs, CFRotatedPole) def test_equal(self): - rd = self.test_data.get_rd('rotated_pole_ichec') + rd = self.test_data_nc.get_rd('rotated_pole_ichec') rd2 = deepcopy(rd) self.assertEqual(rd.get().spatial.crs, rd2.get().spatial.crs) def test_in_operations(self): - rd = self.test_data.get_rd('rotated_pole_ichec') + rd = self.test_data_nc.get_rd('rotated_pole_ichec') rd2 = deepcopy(rd) rd2.alias = 'tas2' # # these projections are equivalent so it is okay to write them to a @@ -279,7 +279,7 @@ def test_in_operations(self): def test_get_rotated_pole_transformation(self): """Test SpatialDimension objects are appropriately transformed.""" - rd = self.test_data.get_rd('rotated_pole_ichec') + rd = self.test_data_nc.get_rd('rotated_pole_ichec') field = rd.get() field = field[:, 10:20, :, 40:55, 55:65] spatial = field.spatial diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py index 7b86fa935..1e7a6a293 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py +++ 
b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py @@ -807,7 +807,7 @@ def test_update_crs_general_error(self): def test_update_crs_rotated_pole(self): """Test moving between rotated pole and WGS84.""" - rd = self.test_data.get_rd('rotated_pole_ichec') + rd = self.test_data_nc.get_rd('rotated_pole_ichec') field = rd.get() """:type: ocgis.interface.base.field.Field""" self.assertIsInstance(field.spatial.crs, CFRotatedPole) diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_temporal.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_temporal.py index a75fed33a..8a309e6e2 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_temporal.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_temporal.py @@ -114,7 +114,7 @@ def test_seasonal_get_grouping(self): self.assertEqual(tg.value[0],dt(2012,12,16)) ## grab real data - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') field = rd.get() td = TemporalDimension(value=field.temporal.value_datetime) tg = td.get_grouping([[3,4,5]]) diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py index 6916a1f18..84bd74898 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py @@ -116,7 +116,7 @@ def test_should_regrid(self): def test_loading_from_source_spatial_bounds(self): """Test row bounds may be set to None when loading from source.""" - field = self.test_data.get_rd('cancm4_tas').get() + field = self.test_data_nc.get_rd('cancm4_tas').get() field.spatial.grid.row.bounds field.spatial.grid.row.bounds = None self.assertIsNone(field.spatial.grid.row.bounds) diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_variable.py 
b/src/ocgis/test/test_ocgis/test_interface/test_base/test_variable.py index f85c8c9b9..b765ba4ea 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_variable.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_variable.py @@ -106,7 +106,7 @@ def test_get_empty_like(self): self.assertDictEqual(var.meta, {'foo': 5}) class TestVariableCollection(TestBase): - _create_dir = False + create_dir = False def get_variable(self, alias='tas_foo'): var = Variable(name='tas', alias=alias, value=np.array([4, 5, 6])) diff --git a/src/ocgis/test/test_ocgis/test_interface/test_metadata.py b/src/ocgis/test/test_ocgis/test_interface/test_metadata.py index 8f1a9d223..02a310847 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_metadata.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_metadata.py @@ -7,7 +7,7 @@ class TestNcMetadata(TestBase): def setUp(self): - uri = self.test_data.get_rd('cancm4_tasmax_2001').uri + uri = self.test_data_nc.get_rd('cancm4_tasmax_2001').uri self.rootgrp = nc.Dataset(uri) def tearDown(self): diff --git a/src/ocgis/test/test_ocgis/test_regrid/test_base.py b/src/ocgis/test/test_ocgis/test_regrid/test_base.py index f986a7ca4..80a41dd15 100644 --- a/src/ocgis/test/test_ocgis/test_regrid/test_base.py +++ b/src/ocgis/test/test_ocgis/test_regrid/test_base.py @@ -48,7 +48,7 @@ def get_coords(mpoly): all_coords = np.array(all_coords) return all_coords - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') field = rd.get() field.spatial.crs = Spherical() odd = field[:, :, :, 32, 64] @@ -73,7 +73,7 @@ def get_coords(mpoly): import ipdb;ipdb.set_trace() def atest_to_spherical(self): - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') coll = ocgis.OcgOperations(dataset=rd, vector_wrap=True).execute() #, geom='state_boundaries', agg_selection=True).execute() field = coll[1]['tas'] grid_original = deepcopy(field.spatial.grid) @@ -186,7 +186,7 @@ def 
test_check_fields_for_regridding(self): def test_iter_regridded_fields_different_grid_shapes(self): """Test regridding a downscaled dataset to GCM output. The input and output grids have different shapes.""" - downscaled = self.test_data.get_rd('maurer_2010_tas') + downscaled = self.test_data_nc.get_rd('maurer_2010_tas') downscaled.time_region = {'month': [2], 'year': [1990]} downscaled = downscaled.get() poly = make_poly([37, 43], [-104, -94]) @@ -194,7 +194,7 @@ def test_iter_regridded_fields_different_grid_shapes(self): downscaled.spatial.unwrap() downscaled.spatial.crs = Spherical() - gcm = self.test_data.get_rd('cancm4_tas') + gcm = self.test_data_nc.get_rd('cancm4_tas') gcm = gcm.get() poly = make_poly([37, 43], [-104+360, -94+360]) gcm = gcm.get_intersects(poly) @@ -226,7 +226,7 @@ def test_iter_regridded_fields_different_grid_shapes(self): def test_iter_regridded_fields_problem_bounds(self): """Test a dataset with crap bounds will work when with_corners is False.""" - dst = self.test_data.get_rd('cancm4_tas').get()[:, :, :, 20:25, 30:35] + dst = self.test_data_nc.get_rd('cancm4_tas').get()[:, :, :, 20:25, 30:35] dst.spatial.crs = Spherical() src = deepcopy(dst[0, 0, 0, :, :]) @@ -362,7 +362,7 @@ def test_iter_regridded_fields_value_mask(self): def test_iter_regridded_fields_nonoverlapping_extents(self): """Test regridding with fields that do not spatially overlap.""" - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') # nebraska and california coll = ocgis.OcgOperations(dataset=rd, geom='state_boundaries', select_ugid=[16, 25], snippet=True, vector_wrap=False).execute() @@ -377,7 +377,7 @@ def test_iter_regridded_fields_nonoverlapping_extents(self): def test_iter_regridded_fields_partial_extents(self): """Test regridding with fields that partially overlap.""" - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') # california and nevada coll = ocgis.OcgOperations(dataset=rd, 
geom='state_boundaries', select_ugid=[23, 25], snippet=True, vector_wrap=False).execute() @@ -457,7 +457,7 @@ def test_get_sdim_from_esmf_grid(self): def test_get_esmf_grid_from_sdim_with_mask(self): """Test with masked data.""" - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') ops = ocgis.OcgOperations(dataset=rd, geom='state_boundaries', select_ugid=[23], snippet=True, vector_wrap=False) ret = ops.execute() field = ret[23]['tas'] @@ -509,7 +509,7 @@ def test_get_esmf_grid_from_sdim(self): def test_get_esmf_grid_from_sdim_real_data(self): """Test creating ESMF field from real data using an OCGIS spatial dimension.""" - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') field = rd.get() egrid = get_esmf_grid_from_sdim(field.spatial) diff --git a/src/ocgis/test/test_ocgis/test_util/test_environment.py b/src/ocgis/test/test_ocgis/test_util/test_environment.py index 0862bd74e..b1183b4b3 100644 --- a/src/ocgis/test/test_ocgis/test_util/test_environment.py +++ b/src/ocgis/test/test_ocgis/test_util/test_environment.py @@ -8,7 +8,7 @@ class TestEnvImportParm(TestBase): - _reset_env = False + reset_env = False def test_constructor(self): pm = EnvParmImport('USE_NUMPY',None,'numpy') @@ -30,7 +30,7 @@ def test_environment_variable(self): class Test(TestBase): - _reset_env = False + reset_env = False def get_is_available(self,module_name): try: @@ -99,7 +99,7 @@ def test_env_overload(self): try: env.DIR_OUTPUT = out env.PREFIX = 'my_prefix' - rd = self.test_data.get_rd('daymet_tmax') + rd = self.test_data_nc.get_rd('daymet_tmax') ops = OcgOperations(dataset=rd,snippet=True) self.assertEqual(env.DIR_OUTPUT,ops.dir_output) self.assertEqual(env.PREFIX,ops.prefix) diff --git a/src/ocgis/test/test_ocgis/test_util/test_helpers.py b/src/ocgis/test/test_ocgis/test_util/test_helpers.py index 734863b73..42491b4d4 100644 --- a/src/ocgis/test/test_ocgis/test_util/test_helpers.py +++ 
b/src/ocgis/test/test_ocgis/test_util/test_helpers.py @@ -13,8 +13,8 @@ class Test(TestBase): def test_get_sorted_uris_by_time_dimension(self): - rd_2001 = self.test_data.get_rd('cancm4_tasmax_2001') - rd_2011 = self.test_data.get_rd('cancm4_tasmax_2011') + rd_2001 = self.test_data_nc.get_rd('cancm4_tasmax_2001') + rd_2011 = self.test_data_nc.get_rd('cancm4_tasmax_2011') not_sorted = [rd_2011.uri, rd_2001.uri] actual = ['tasmax_day_CanCM4_decadal2000_r2i1p1_20010101-20101231.nc', diff --git a/src/ocgis/test/test_ocgis/test_util/test_inspect.py b/src/ocgis/test/test_ocgis/test_util/test_inspect.py index 3858145b6..77cf3ec25 100644 --- a/src/ocgis/test/test_ocgis/test_util/test_inspect.py +++ b/src/ocgis/test/test_ocgis/test_util/test_inspect.py @@ -52,7 +52,7 @@ def test_calendar_attribute_none(self): """Test that the empty string is correctly interpreted as None.""" # path to the test data file - out_nc = os.path.join(self._test_dir, self.fn) + out_nc = os.path.join(self.current_dir_output, self.fn) ## case of calendar being set to an empty string with nc_scope(out_nc, 'a') as ds: @@ -66,7 +66,7 @@ def test_unknown_calendar_attribute(self): """Test a calendar attribute with an unknown calendar attribute.""" # path to the test data file - out_nc = os.path.join(self._test_dir, self.fn) + out_nc = os.path.join(self.current_dir_output, self.fn) ## case of a calendar being set a bad value but read anyway with nc_scope(out_nc, 'a') as ds: diff --git a/src/ocgis/test/test_ocgis/test_util/test_large_array.py b/src/ocgis/test/test_ocgis/test_util/test_large_array.py index 58d79872b..045975e3c 100644 --- a/src/ocgis/test/test_ocgis/test_util/test_large_array.py +++ b/src/ocgis/test/test_ocgis/test_util/test_large_array.py @@ -20,7 +20,7 @@ def test_with_callback(self): def callback(a, b): percentages.append(a) - rd = self.test_data.get_rd('cancm4_tas', kwds={'time_region': {'month': [3]}}) + rd = self.test_data_nc.get_rd('cancm4_tas', kwds={'time_region': {'month': [3]}}) 
ops = ocgis.OcgOperations(dataset=rd, calc=[{'func': 'mean', 'name': 'mean'}], calc_grouping=['month'], output_format='nc', geom='state_boundaries', @@ -40,7 +40,7 @@ def test_timing_use_optimizations(self): for use_optimizations in [True,False]: for ii in n: t1 = time.time() - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') ops = ocgis.OcgOperations(dataset=rd,calc=[{'func':'mean','name':'mean'}], calc_grouping=['month'],output_format='nc', geom='state_boundaries', @@ -54,7 +54,7 @@ def test_timing_use_optimizations(self): self.assertTrue(tmean[True]['mean'] < tmean[False]['mean']) def test_multivariate_computation(self): - rd = self.test_data.get_rd('cancm4_tas',kwds={'time_region':{'month':[3]}}) + rd = self.test_data_nc.get_rd('cancm4_tas',kwds={'time_region':{'month':[3]}}) rd2 = deepcopy(rd) rd2.alias = 'tas2' calc = [{'func':'divide','name':'ln','kwds':{'arr1':'tas','arr2':'tas2'}}] @@ -70,7 +70,7 @@ def test_multivariate_computation(self): self.assertNcEqual(ret,ret_ocgis,ignore_attributes={'global': ['history']}) def test_with_no_calc_grouping(self): - rd = self.test_data.get_rd('cancm4_tas',kwds={'time_region':{'month':[3]}}) + rd = self.test_data_nc.get_rd('cancm4_tas',kwds={'time_region':{'month':[3]}}) ops = ocgis.OcgOperations(dataset=rd,calc=[{'func':'ln','name':'ln'}], calc_grouping=None,output_format='nc', geom='state_boundaries', @@ -83,7 +83,7 @@ def test_with_no_calc_grouping(self): self.assertNcEqual(ret,ret_ocgis,ignore_attributes={'global': ['history']}) def test_compute_with_time_region(self): - rd = self.test_data.get_rd('cancm4_tas',kwds={'time_region':{'month':[3]}}) + rd = self.test_data_nc.get_rd('cancm4_tas',kwds={'time_region':{'month':[3]}}) ops = ocgis.OcgOperations(dataset=rd,calc=[{'func':'mean','name':'mean'}], calc_grouping=['month'],output_format='nc', geom='state_boundaries', @@ -96,7 +96,7 @@ def test_compute_with_time_region(self): 
self.assertNcEqual(ret,ret_ocgis,ignore_attributes={'global': ['history']}) def test_compute_with_geom(self): - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') ops = ocgis.OcgOperations(dataset=rd,calc=[{'func':'mean','name':'mean'}], calc_grouping=['month'],output_format='nc', geom='state_boundaries', @@ -109,7 +109,7 @@ def test_compute_with_geom(self): self.assertNcEqual(ret,ret_ocgis,ignore_attributes={'global': ['history']}) def test_compute_small(self): - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') ## use a smaller netCDF as target ops = ocgis.OcgOperations(dataset=rd, @@ -144,7 +144,7 @@ def test_compute_large(self): n_tile_dimensions = 1 tile_range = [100, 100] - rd = RequestDatasetCollection(self.test_data.get_rd('cancm4_tasmax_2011')) + rd = RequestDatasetCollection(self.test_data_nc.get_rd('cancm4_tasmax_2011')) calc = [{'func': 'mean', 'name': 'my_mean'}, {'func': 'freq_perc', 'name': 'perc_90', 'kwds': {'percentile': 90}}, diff --git a/src/ocgis/test/test_ocgis/test_util/test_logging_ocgis.py b/src/ocgis/test/test_ocgis/test_util/test_logging_ocgis.py index 6cbae912e..e9523c30f 100644 --- a/src/ocgis/test/test_ocgis/test_util/test_logging_ocgis.py +++ b/src/ocgis/test/test_ocgis/test_util/test_logging_ocgis.py @@ -72,7 +72,7 @@ def tearDown(self): TestBase.tearDown(self) def test_with_callback(self): - fp = get_temp_path(wd=self._test_dir) + fp = get_temp_path(wd=self.current_dir_output) def callback(message,path=fp): with open(fp,'a') as sink: @@ -145,7 +145,7 @@ def test_exc(self): ocgis_lh('something happened',exc=e) def test_writing(self): - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') ops = ocgis.OcgOperations(dataset=rd,snippet=True,output_format='csv') ret = ops.execute() folder = os.path.split(ret)[0] diff --git a/src/ocgis/test/test_ocgis/test_util/test_shp_cabinet.py 
b/src/ocgis/test/test_ocgis/test_util/test_shp_cabinet.py index 213f5ff36..4004cb056 100644 --- a/src/ocgis/test/test_ocgis/test_util/test_shp_cabinet.py +++ b/src/ocgis/test/test_ocgis/test_util/test_shp_cabinet.py @@ -49,7 +49,7 @@ def test_select_ugids_absent_raises_exception(self): with self.assertRaises(ValueError): list(sci) - ops = ocgis.OcgOperations(dataset=self.test_data.get_rd('cancm4_tas'), + ops = ocgis.OcgOperations(dataset=self.test_data_nc.get_rd('cancm4_tas'), geom='state_boundaries', select_ugid=[9999]) with self.assertRaises(ValueError): @@ -98,7 +98,7 @@ def test_number_in_shapefile_name(self): sc = ShpCabinet() path = sc.get_shp_path('state_boundaries') - out_path = os.path.join(self._test_dir, '51_states.shp') + out_path = os.path.join(self.current_dir_output, '51_states.shp') with fiona.open(path) as source: with fiona.open(out_path, mode='w', driver='ESRI Shapefile', schema=source.meta['schema'], crs=source.meta['crs']) as sink: for record in source: @@ -168,14 +168,14 @@ def test_shapefiles_not_in_folders(self): for dirpath,dirnames,filenames in os.walk(ocgis.env.DIR_SHPCABINET): for filename in filenames: if filename.startswith('state_boundaries') or filename.startswith('world_countries'): - dst = os.path.join(self._test_dir,filename) + dst = os.path.join(self.current_dir_output,filename) src = os.path.join(dirpath,filename) shutil.copy2(src,dst) - self.test_get_keys(dir_shpcabinet=self._test_dir) + self.test_get_keys(dir_shpcabinet=self.current_dir_output) - sc = ShpCabinet(path=self._test_dir) + sc = ShpCabinet(path=self.current_dir_output) path = sc.get_shp_path('world_countries') - self.assertEqual(path,os.path.join(self._test_dir,'world_countries.shp')) + self.assertEqual(path,os.path.join(self.current_dir_output,'world_countries.shp')) if __name__ == '__main__': diff --git a/src/ocgis/test/test_ocgis/test_util/test_shp_process.py b/src/ocgis/test/test_ocgis/test_util/test_shp_process.py index 6e2802ab0..636aadc65 100644 --- 
a/src/ocgis/test/test_ocgis/test_util/test_shp_process.py +++ b/src/ocgis/test/test_ocgis/test_util/test_shp_process.py @@ -8,13 +8,13 @@ class TestShpProcess(TestBase): - _test_path = '/home/ben.koziol/Dropbox/NESII/project/ocg/bin/test_data/test_shp_process' + _test_path = '/home/ben.koziol/Dropbox/NESII/project/ocg/bin/test_data_nc/test_shp_process' def test_shp_process(self): - copy_path = os.path.join(self._test_dir,'test_shp_process') + copy_path = os.path.join(self.current_dir_output,'test_shp_process') shutil.copytree(self._test_path,copy_path) shp_path = os.path.join(copy_path,'wc_4326.shp') - out_folder = tempfile.mkdtemp(dir=self._test_dir) + out_folder = tempfile.mkdtemp(dir=self.current_dir_output) sp = ShpProcess(shp_path,out_folder) sp.process(key='world_countries',ugid=None) diff --git a/src/ocgis/test/test_ocgis/test_util/test_shp_scanner/test_shp_scanner.py b/src/ocgis/test/test_ocgis/test_util/test_shp_scanner/test_shp_scanner.py index 84b2cdfb1..2e5a31afb 100644 --- a/src/ocgis/test/test_ocgis/test_util/test_shp_scanner/test_shp_scanner.py +++ b/src/ocgis/test/test_ocgis/test_util/test_shp_scanner/test_shp_scanner.py @@ -28,13 +28,13 @@ def get_geometry(self,select_ugid): return(geoms[0]['geom']) def get_subset_rd(self): - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') ret = OcgOperations(dataset=rd,geom=self.nevada,snippet=True,output_format='nc').execute() rd_sub = RequestDataset(uri=ret,variable='tas') return(rd_sub) def test_get_does_intersect_true(self): - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') for geom in [self.nevada,self.new_york]: self.assertTrue(get_does_intersect(rd,geom)) @@ -53,7 +53,7 @@ def test_build_database(self): from ocgis.util.shp_scanner.labels import StateBoundaries keys = {'state_boundaries':['US State Boundaries',StateBoundaries]} build_database(keys=keys,filter_request_dataset=self.get_subset_rd()) - handle,path = 
tempfile.mkstemp(suffix='.json',dir=self._test_dir) + handle,path = tempfile.mkstemp(suffix='.json',dir=self.current_dir_output) write_json(path) with open(path,'r') as fp: data = json.load(fp) diff --git a/src/ocgis/test/test_ocgis/test_util/test_spatial/test_spatial_subset.py b/src/ocgis/test/test_ocgis/test_util/test_spatial/test_spatial_subset.py index e46854e20..c030ee7b2 100644 --- a/src/ocgis/test/test_ocgis/test_util/test_spatial/test_spatial_subset.py +++ b/src/ocgis/test/test_ocgis/test_util/test_spatial/test_spatial_subset.py @@ -51,7 +51,7 @@ def nebraska(self): @property def rd_rotated_pole(self): - rd = self.test_data.get_rd('rotated_pole_cccma') + rd = self.test_data_nc.get_rd('rotated_pole_cccma') return rd @property @@ -86,7 +86,7 @@ def get_subset_sdim(self): def get_target(self): # 1: standard input file - geographic coordinate system, unwrapped - rd_standard = self.test_data.get_rd('cancm4_tas') + rd_standard = self.test_data_nc.get_rd('cancm4_tas') # 2: standard field - geographic coordinate system field_standard = rd_standard.get() @@ -95,7 +95,7 @@ def get_target(self): field_rotated_pole = self.rd_rotated_pole.get() # 4: field with lambert conformal coordinate system - rd = self.test_data.get_rd('narccap_lambert_conformal') + rd = self.test_data_nc.get_rd('narccap_lambert_conformal') field_lambert = rd.get() # 5: standard input field - geographic coordinate system, wrapped @@ -156,7 +156,7 @@ def test_get_buffered_subset_sdim(self): def test_get_should_wrap(self): # a 360 dataset - field_360 = self.test_data.get_rd('cancm4_tas').get() + field_360 = self.test_data_nc.get_rd('cancm4_tas').get() ss = SpatialSubsetOperation(field_360, wrap=True) self.assertTrue(ss._get_should_wrap_(ss.target)) ss = SpatialSubsetOperation(field_360, wrap=False) @@ -218,7 +218,7 @@ def test_get_spatial_subset_circular_geometries(self): """Test circular geometries. 
They were causing wrapping errors.""" geoms = TestGeom.get_geometry_dictionaries() - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') ss = SpatialSubsetOperation(rd, wrap=True) buffered = [element['geom'].buffer(rd.get().spatial.grid.resolution*2) for element in geoms] for buff in buffered: @@ -234,7 +234,7 @@ def test_get_spatial_subset_output_crs(self): proj4 = '+proj=aea +lat_1=20 +lat_2=60 +lat_0=40 +lon_0=-96 +x_0=0 +y_0=0 +ellps=GRS80 +datum=NAD83 +units=m +no_defs' output_crs = CoordinateReferenceSystem(proj4=proj4) subset_sdim = SpatialDimension.from_records([self.nebraska]) - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') ss = SpatialSubsetOperation(rd, output_crs=output_crs) ret = ss.get_spatial_subset('intersects', subset_sdim) self.assertEqual(ret.spatial.crs, output_crs) @@ -261,7 +261,7 @@ def test_get_spatial_subset_wrap(self): """Test subsetting with wrap set to a boolean value.""" subset_sdim = SpatialDimension.from_records([self.nebraska]) - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') self.assertTrue(rd.get().spatial.is_unwrapped) ss = SpatialSubsetOperation(rd, wrap=True) ret = ss.get_spatial_subset('intersects', subset_sdim) @@ -303,7 +303,7 @@ def test_prepare_subset_sdim(self): # test nebraska against an unwrapped dataset specifically nebraska = SpatialDimension.from_records([self.nebraska]) - field = self.test_data.get_rd('cancm4_tas').get() + field = self.test_data_nc.get_rd('cancm4_tas').get() ss = SpatialSubsetOperation(field) prepared = ss._prepare_subset_sdim_(nebraska) self.assertTrue(prepared.is_unwrapped) @@ -314,7 +314,7 @@ def test_sdim(self): def test_should_update_crs(self): # no output crs provided - target = self.test_data.get_rd('cancm4_tas') + target = self.test_data_nc.get_rd('cancm4_tas') ss = SpatialSubsetOperation(target) self.assertFalse(ss.should_update_crs) diff --git 
a/src/ocgis/test/test_ocgis/test_util/test_units.py b/src/ocgis/test/test_ocgis/test_util/test_units.py index 7abf9ab2d..eea1bfeae 100644 --- a/src/ocgis/test/test_ocgis/test_util/test_units.py +++ b/src/ocgis/test/test_ocgis/test_util/test_units.py @@ -11,12 +11,12 @@ class TestField(TestBase): def test_units_read_from_file(self): - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') field = rd.get() self.assertEqual(field.variables['tas'].cfunits,Units('K')) def test_units_conform_from_file(self): - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') field = rd.get() sub = field.get_time_region({'month':[5],'year':[2005]}) sub.variables['tas'].cfunits_conform(Units('celsius')) @@ -71,7 +71,7 @@ def test_get_are_units_equal_by_string_or_cfunits(self): class TestVariableUnits(TestBase): - _create_dir = False + create_dir = False @property def value(self): diff --git a/src/ocgis/test/test_ocgis/test_util/test_zipper.py b/src/ocgis/test/test_ocgis/test_util/test_zipper.py index 1f4fad7cb..1aa7f4785 100644 --- a/src/ocgis/test/test_ocgis/test_util/test_zipper.py +++ b/src/ocgis/test/test_ocgis/test_util/test_zipper.py @@ -10,7 +10,7 @@ def test(): - tdata = TestBase.get_tdata() + tdata = TestBase.get_tst_data_nc() rd = tdata.get_rd('cancm4_tas') output_formats = [ diff --git a/src/ocgis/test/test_real_data/test_cf.py b/src/ocgis/test/test_real_data/test_cf.py index d4f1234c2..e60b342d1 100644 --- a/src/ocgis/test/test_real_data/test_cf.py +++ b/src/ocgis/test/test_real_data/test_cf.py @@ -10,13 +10,13 @@ class Test(TestBase): def test_missing_bounds(self): - rd = self.test_data.get_rd('snippet_maurer_dtr') + rd = self.test_data_nc.get_rd('snippet_maurer_dtr') ip = rd.inspect_as_dct() def test_climatology(self): ## http://cf-pcmdi.llnl.gov/documents/cf-conventions/1.6/cf-conventions.html#idp5996336 - path = os.path.join(self._test_dir,'climatology.nc') + path = 
os.path.join(self.current_dir_output,'climatology.nc') ds = nc.Dataset(path,'w') try: dim_time = ds.createDimension('time',size=None) diff --git a/src/ocgis/test/test_real_data/test_cf_exceptions.py b/src/ocgis/test/test_real_data/test_cf_exceptions.py index 5fcda1eab..df900e7bb 100644 --- a/src/ocgis/test/test_real_data/test_cf_exceptions.py +++ b/src/ocgis/test/test_real_data/test_cf_exceptions.py @@ -8,7 +8,7 @@ class Test(TestBase): def test_months_in_units(self): - rd = self.test_data.get_rd('clt_month_units') + rd = self.test_data_nc.get_rd('clt_month_units') field = rd.get() self.assertEqual(field.temporal.units,'months since 1979-1-1 0') self.assertEqual(field.temporal.value_datetime[50],datetime.datetime(1983,3,16)) @@ -16,7 +16,7 @@ def test_months_in_units(self): self.assertEqual(field.temporal.shape,(120,)) def test_months_in_units_time_range_subsets(self): - rd = self.test_data.get_rd('clt_month_units') + rd = self.test_data_nc.get_rd('clt_month_units') field = rd.get() time_range = [field.temporal.value_datetime[0], field.temporal.value_datetime[0]] ops = ocgis.OcgOperations(dataset=rd, time_range=time_range) @@ -24,7 +24,7 @@ def test_months_in_units_time_range_subsets(self): self.assertEqual((1, 1, 1, 46, 72), ret[1]['clt'].shape) def test_months_in_units_convert_to_shapefile(self): - uri = self.test_data.get_uri('clt_month_units') + uri = self.test_data_nc.get_uri('clt_month_units') variable = 'clt' ## select the month of may for two years time_region = {'month':[5],'year':[1982,1983]} @@ -38,7 +38,7 @@ def test_months_in_units_convert_to_shapefile(self): self.assertEqual(len(source),6624) def test_months_in_units_convert_to_netcdf(self): - uri = self.test_data.get_uri('clt_month_units') + uri = self.test_data_nc.get_uri('clt_month_units') variable = 'clt' rd = ocgis.RequestDataset(uri=uri,variable=variable) ## subset the clt dataset by the state of nevada and write to netcdf @@ -53,7 +53,7 @@ def test_months_in_units_convert_to_netcdf(self): 
self.assertNumpyAll(field.temporal.value,field2.temporal.value) def test_months_in_units_calculation(self): - rd = self.test_data.get_rd('clt_month_units') + rd = self.test_data_nc.get_rd('clt_month_units') calc = [{'func': 'mean', 'name': 'mean'}] calc_grouping = ['month'] ops = ocgis.OcgOperations(dataset=rd, calc=calc, calc_grouping=calc_grouping) diff --git a/src/ocgis/test/test_real_data/test_multiple_datasets.py b/src/ocgis/test/test_real_data/test_multiple_datasets.py index 489f2df27..07217ee77 100644 --- a/src/ocgis/test/test_real_data/test_multiple_datasets.py +++ b/src/ocgis/test/test_real_data/test_multiple_datasets.py @@ -16,10 +16,10 @@ class Test(TestBase): def setUp(self): TestBase.setUp(self) - self.maurer = self.test_data.get_rd('maurer_bccr_1950') - self.cancm4 = self.test_data.get_rd('cancm4_tasmax_2001') - self.tasmin = self.test_data.get_rd('cancm4_tasmin_2001') -# self.albisccp = self.test_data.get_rd('ccsm4') + self.maurer = self.test_data_nc.get_rd('maurer_bccr_1950') + self.cancm4 = self.test_data_nc.get_rd('cancm4_tasmax_2001') + self.tasmin = self.test_data_nc.get_rd('cancm4_tasmin_2001') +# self.albisccp = self.test_data_nc.get_rd('ccsm4') @property def california(self): @@ -68,7 +68,7 @@ def test_vector_wrap(self): for key in keys: prev_value = None for vector_wrap in [True,False]: - rd = self.test_data.get_rd(key[0]) + rd = self.test_data_nc.get_rd(key[0]) prefix = 'vw_{0}_{1}'.format(vector_wrap,rd.variable) ops = ocgis.OcgOperations(dataset=rd,geom=geom,snippet=False, vector_wrap=vector_wrap,prefix=prefix) @@ -152,9 +152,9 @@ def assert_projection(path,check_ugid=True): for src in source: src.close() - rd1 = self.test_data.get_rd('narccap_rcm3') + rd1 = self.test_data_nc.get_rd('narccap_rcm3') rd1.alias = 'rcm3' - rd2 = self.test_data.get_rd('narccap_crcm') + rd2 = self.test_data_nc.get_rd('narccap_crcm') rd2.alias = 'crcm' rd = [ rd1, diff --git a/src/ocgis/test/test_real_data/test_narccap.py 
b/src/ocgis/test/test_real_data/test_narccap.py index 80dab12b6..257020a41 100644 --- a/src/ocgis/test/test_real_data/test_narccap.py +++ b/src/ocgis/test/test_real_data/test_narccap.py @@ -16,17 +16,17 @@ class TestRotatedPole(TestBase): def test_validation(self): ## CFRotatedPole is not an appropriate output crs. it may also not be ## transformed to anything but WGS84 - rd = self.test_data.get_rd('narccap_rotated_pole') + rd = self.test_data_nc.get_rd('narccap_rotated_pole') with self.assertRaises(DefinitionValidationError): OcgOperations(dataset=rd,output_crs=CFRotatedPole(grid_north_pole_latitude=5, grid_north_pole_longitude=5)) ## this is an okay output coordinate system for the two input coordinate ## systems - rd2 = self.test_data.get_rd('narccap_lambert_conformal') + rd2 = self.test_data_nc.get_rd('narccap_lambert_conformal') OcgOperations(dataset=[rd,rd2],output_crs=CFWGS84()) def test_calculation(self): - rd = self.test_data.get_rd('narccap_rotated_pole',kwds=dict(time_region={'month':[12],'year':[1982]})) + rd = self.test_data_nc.get_rd('narccap_rotated_pole',kwds=dict(time_region={'month':[12],'year':[1982]})) calc = [{'func':'mean','name':'mean'}] calc_grouping = ['month'] ops = OcgOperations(dataset=rd,calc=calc,calc_grouping=calc_grouping, @@ -37,7 +37,7 @@ def test_calculation(self): self.assertEqual(field.shape,(1,1,1,130,155)) def test_intersects(self): - rd = self.test_data.get_rd('narccap_rotated_pole',kwds=dict(time_region={'month':[12],'year':[1982]})) + rd = self.test_data_nc.get_rd('narccap_rotated_pole',kwds=dict(time_region={'month':[12],'year':[1982]})) ops = OcgOperations(dataset=rd,geom='state_boundaries',select_ugid=[16]) ret = ops.execute() ref = ret.gvu(16,'tas') @@ -53,7 +53,7 @@ def test_intersects(self): [True,False,False,False,False,False,False,False,False,False,True,True,True,True,True]],dtype=bool)) def test_clip_aggregate(self): - rd = 
self.test_data.get_rd('narccap_rotated_pole',kwds=dict(time_region={'month':[12],'year':[1982]})) + rd = self.test_data_nc.get_rd('narccap_rotated_pole',kwds=dict(time_region={'month':[12],'year':[1982]})) ops = OcgOperations(dataset=rd,geom='state_boundaries',select_ugid=[16], spatial_operation='clip',aggregate=True,output_format='numpy') ## the output CRS should be automatically updated for this operation @@ -64,12 +64,12 @@ def test_clip_aggregate(self): self.assertAlmostEqual(ret.mean(),269.83058215725805) def test_read(self): - rd = self.test_data.get_rd('narccap_rotated_pole') + rd = self.test_data_nc.get_rd('narccap_rotated_pole') field = rd.get() self.assertIsInstance(field.spatial.crs,CFRotatedPole) def test_to_netcdf(self): - rd = self.test_data.get_rd('narccap_rotated_pole',kwds=dict(time_region={'month':[12],'year':[1982]})) + rd = self.test_data_nc.get_rd('narccap_rotated_pole',kwds=dict(time_region={'month':[12],'year':[1982]})) ## it does not care about slices or no geometries ops = OcgOperations(dataset=rd,output_format='nc') ret = ops.execute() @@ -77,7 +77,7 @@ def test_to_netcdf(self): self.assertEqual(rd2.get().temporal.extent,(5444.0,5474.875)) def test_to_netcdf_with_geometry(self): - rd = self.test_data.get_rd('narccap_rotated_pole') + rd = self.test_data_nc.get_rd('narccap_rotated_pole') ## this bounding box covers the entire spatial domain. the software will ## move between rotated pole and CFWGS84 using this operation. it can then ## be compared against the "null" result which just does a snippet. 
@@ -96,7 +96,7 @@ def test_to_netcdf_with_geometry(self): self.assertTrue(diff.max() <= 1.02734374963e-06) def test_to_netcdf_with_slice(self): - rd = self.test_data.get_rd('narccap_rotated_pole') + rd = self.test_data_nc.get_rd('narccap_rotated_pole') ops = OcgOperations(dataset=rd, output_format='nc', slice=[None,[0,10],None,[0,10],[0,10]], @@ -109,7 +109,7 @@ def test_to_netcdf_with_slice(self): class Test(TestBase): def test_cf_lambert_conformal(self): - rd = self.test_data.get_rd('narccap_lambert_conformal') + rd = self.test_data_nc.get_rd('narccap_lambert_conformal') field = rd.get() crs = field.spatial.crs self.assertDictEqual(crs.value,{'lon_0': -97, 'ellps': 'WGS84', 'y_0': 2700000, 'no_defs': True, 'proj': 'lcc', 'x_0': 3325000, 'units': 'm', 'lat_2': 60, 'lat_1': 30, 'lat_0': 47.5}) diff --git a/src/ocgis/test/test_real_data/test_package.py b/src/ocgis/test/test_real_data/test_package.py index 6515f9d60..3055c7245 100644 --- a/src/ocgis/test/test_real_data/test_package.py +++ b/src/ocgis/test/test_real_data/test_package.py @@ -7,8 +7,8 @@ class Test(TestBase): def test_nc(self): - rd1 = self.test_data.get_rd('cancm4_tas') - rd2 = self.test_data.get_rd('cancm4_rhsmax') + rd1 = self.test_data_nc.get_rd('cancm4_tas') + rd2 = self.test_data_nc.get_rd('cancm4_rhsmax') rd = [rd1,rd2] for output_format in ['shp','csv','csv+','nc']: if output_format == 'nc': diff --git a/src/ocgis/test/test_real_data/test_random_datasets.py b/src/ocgis/test/test_real_data/test_random_datasets.py index dd7412d73..3de2db699 100644 --- a/src/ocgis/test/test_real_data/test_random_datasets.py +++ b/src/ocgis/test/test_real_data/test_random_datasets.py @@ -23,7 +23,7 @@ class TestCMIP3Masking(TestBase): @longrunning def test_many_request_datasets(self): - rd_base = self.test_data.get_rd('subset_test_Prcp') + rd_base = self.test_data_nc.get_rd('subset_test_Prcp') geom = [-74.0, 40.0, -72.0, 42.0] rds = [deepcopy(rd_base) for ii in range(500)] for rd in rds: @@ -33,7 +33,7 @@ def 
test_many_request_datasets(self): def test(self): for key in ['subset_test_Prcp','subset_test_Tavg_sresa2','subset_test_Tavg']: ## test method to return a RequestDataset - rd = self.test_data.get_rd(key) + rd = self.test_data_nc.get_rd(key) geoms = [[-74.0, 40.0, -72.0, 42.0], [-74.0, 38.0, -72.0, 40.0]] for geom in geoms: @@ -55,7 +55,7 @@ class TestCnrmCerfacs(TestBase): @property def rd(self): - return self.test_data.get_rd('rotated_pole_cnrm_cerfacs') + return self.test_data_nc.get_rd('rotated_pole_cnrm_cerfacs') def test_subset(self): """Test data may be subsetted and that coordinate transformations return the same value arrays.""" @@ -97,7 +97,7 @@ def test_cccma_rotated_pole(self): ## with rotated pole, the uid mask was not being updated correctly following ## a transformation back to rotated pole. this needed to be updated explicitly ## in subset.py - rd = self.test_data.get_rd('rotated_pole_cccma') + rd = self.test_data_nc.get_rd('rotated_pole_cccma') geom = (5.87161922454834, 47.26985931396479, 15.03811264038086, 55.05652618408209) ops = ocgis.OcgOperations(dataset=rd,output_format='shp',geom=geom, select_ugid=[1],snippet=True) @@ -112,7 +112,7 @@ def test_cccma_rotated_pole(self): def test_ichec_rotated_pole(self): ## this point is far outside the domain ocgis.env.OVERWRITE = True - rd = self.test_data.get_rd('rotated_pole_ichec') + rd = self.test_data_nc.get_rd('rotated_pole_ichec') for geom in [[-100.,45.],[-100,45,-99,46]]: ops = ocgis.OcgOperations(dataset=rd,output_format='nc', calc=[{'func':'mean','name':'mean'}], @@ -122,8 +122,8 @@ def test_ichec_rotated_pole(self): ops.execute() def test_narccap_cancm4_point_subset_no_abstraction(self): - rd = self.test_data.get_rd('cancm4_tas') - rd2 = self.test_data.get_rd('narccap_tas_rcm3_gfdl') + rd = self.test_data_nc.get_rd('cancm4_tas') + rd2 = self.test_data_nc.get_rd('narccap_tas_rcm3_gfdl') rd.alias = 'tas_narccap' rds = [rd,rd2] geom = [-105.2751,39.9782] @@ -134,8 +134,8 @@ def 
test_narccap_cancm4_point_subset_no_abstraction(self): ops.execute() def test_narccap_cancm4_point_subset_with_abstraction(self): - rd = self.test_data.get_rd('cancm4_tas') - rd2 = self.test_data.get_rd('narccap_tas_rcm3_gfdl') + rd = self.test_data_nc.get_rd('cancm4_tas') + rd2 = self.test_data_nc.get_rd('narccap_tas_rcm3_gfdl') rd2.alias = 'tas_narccap' rds = [ rd, @@ -157,8 +157,8 @@ def test_narccap_cancm4_point_subset_with_abstraction(self): self.assertTrue(ret.geoms[1].area > ret.geoms[2].area) def test_narccap_cancm4_point_subset_with_abstraction_to_csv_shp(self): - rd = self.test_data.get_rd('cancm4_tas') - rd2 = self.test_data.get_rd('narccap_tas_rcm3_gfdl') + rd = self.test_data_nc.get_rd('cancm4_tas') + rd2 = self.test_data_nc.get_rd('narccap_tas_rcm3_gfdl') rd.alias = 'tas_narccap' rds = [ rd, @@ -175,7 +175,7 @@ def test_narccap_cancm4_point_subset_with_abstraction_to_csv_shp(self): self.assertEqual(set([row['properties']['UGID'] for row in rows]),set([1,2])) def test_collection_field_geometries_equivalent(self): - rd = self.test_data.get_rd('cancm4_tas',kwds=dict(time_region={'month':[6,7,8]})) + rd = self.test_data_nc.get_rd('cancm4_tas',kwds=dict(time_region={'month':[6,7,8]})) geom = ['state_boundaries',[{'properties':{'UGID':16},'geom':Point([-99.80780059778753,41.52315831343389])}]] for vw,g in itertools.product([True,False],geom): ops = ocgis.OcgOperations(dataset=rd,select_ugid=[16,32],geom=g, @@ -189,7 +189,7 @@ def test_collection_field_geometries_equivalent(self): def test_empty_subset_multi_geometry_wrapping(self): ## adjacent state boundaries were causing an error with wrapping where ## a reference to the source field was being updated. 
- rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') ops = ocgis.OcgOperations(dataset=rd,geom='state_boundaries',select_ugid=[5,6,7]) ret = ops.execute() self.assertEqual(set(ret.keys()),set([5,6,7])) @@ -197,7 +197,7 @@ def test_empty_subset_multi_geometry_wrapping(self): def test_seasonal_calc(self): calc = [{'func':'mean','name':'my_mean'},{'func':'std','name':'my_std'}] calc_grouping = [[3,4,5]] - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') ops = ocgis.OcgOperations(dataset=rd,calc=calc,calc_grouping=calc_grouping, calc_sample_size=True,geom='state_boundaries', select_ugid=[23]) @@ -207,7 +207,7 @@ def test_seasonal_calc(self): calc = [{'func':'mean','name':'my_mean'},{'func':'std','name':'my_std'}] calc_grouping = [[12,1,2],[3,4,5],[6,7,8],[9,10,11]] - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') ops = ocgis.OcgOperations(dataset=rd,calc=calc,calc_grouping=calc_grouping, calc_sample_size=True,geom='state_boundaries', select_ugid=[23]) @@ -217,7 +217,7 @@ def test_seasonal_calc(self): calc = [{'func':'mean','name':'my_mean'},{'func':'std','name':'my_std'}] calc_grouping = [[12,1],[2,3]] - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') ops = ocgis.OcgOperations(dataset=rd,calc=calc,calc_grouping=calc_grouping, calc_sample_size=True,geom='state_boundaries', select_ugid=[23]) @@ -229,7 +229,7 @@ def test_seasonal_calc_dkp(self): key = 'dynamic_kernel_percentile_threshold' calc = [{'func':key,'name':'dkp','kwds':{'operation':'lt','percentile':90,'width':5}}] calc_grouping = [[3,4,5]] - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') ops = ocgis.OcgOperations(dataset=rd,calc=calc,calc_grouping=calc_grouping, calc_sample_size=False,geom='state_boundaries', select_ugid=[23]) @@ -241,7 +241,7 @@ def test_seasonal_calc_dkp(self): self.assertNumpyAll(to_test,reference) def 
test_selecting_single_value(self): - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') lat_index = 32 lon_index = 97 with nc_scope(rd.uri) as ds: @@ -360,7 +360,7 @@ def test_maurer_concatenated_shp(self): def test_point_shapefile_subset(self): _output_format = ['numpy','nc','csv','csv+'] for output_format in _output_format: - rd = self.test_data.get_rd('cancm4_tas') + rd = self.test_data_nc.get_rd('cancm4_tas') ops = OcgOperations(dataset=rd,geom='qed_city_centroids',output_format=output_format, prefix=output_format) ret = ops.execute() @@ -402,7 +402,7 @@ def test_time_region_subset(self): _year = [[2011],None,[2012],[2011,2013]] def run_test(month,year): - rd = self.test_data.get_rd('cancm4_rhs',kwds={'time_region':{'month':month,'year':year}}) + rd = self.test_data_nc.get_rd('cancm4_rhs',kwds={'time_region':{'month':month,'year':year}}) ops = ocgis.OcgOperations(dataset=rd,geom='state_boundaries', select_ugid=[25]) @@ -425,7 +425,7 @@ def test_time_range_time_region_subset(self): time_range = [dt(2013,1,1),dt(2015,12,31)] time_region = {'month':[6,7,8],'year':[2013,2014]} kwds = {'time_range':time_range,'time_region':time_region} - rd = self.test_data.get_rd('cancm4_rhs',kwds=kwds) + rd = self.test_data_nc.get_rd('cancm4_rhs',kwds=kwds) ops = ocgis.OcgOperations(dataset=rd,geom='state_boundaries',select_ugid=[25]) ret = ops.execute() ref = ret[25]['rhs'] @@ -437,7 +437,7 @@ def test_time_range_time_region_do_not_overlap(self): time_region = {'month':[6,7,8],'year':[2013,2014,2018]} kwds = {'time_range':time_range,'time_region':time_region} with self.assertRaises(RequestValidationError): - self.test_data.get_rd('cancm4_rhs',kwds=kwds) + self.test_data_nc.get_rd('cancm4_rhs',kwds=kwds) @longrunning def test_maurer_2010(self): @@ -446,7 +446,7 @@ def test_maurer_2010(self): calc = [{'func':'mean','name':'mean'},{'func':'median','name':'median'}] calc_grouping = ['month'] for key in keys: - rd = self.test_data.get_rd(key) + rd = 
self.test_data_nc.get_rd(key) dct = rd.inspect_as_dct() self.assertEqual(dct['derived']['Count'],'102564') @@ -471,14 +471,14 @@ def test_maurer_2010(self): def test_clip_aggregate(self): ## this geometry was hanging - rd = self.test_data.get_rd('cancm4_tas',kwds={'time_region':{'year':[2003]}}) + rd = self.test_data_nc.get_rd('cancm4_tas',kwds={'time_region':{'year':[2003]}}) ops = OcgOperations(dataset=rd,geom='state_boundaries',select_ugid=[14,16], aggregate=False,spatial_operation='clip',output_format='csv+') ret = ops.execute() @longrunning def test_narccap_point_subset_small(self): - rd = self.test_data.get_rd('narccap_pr_wrfg_ncep') + rd = self.test_data_nc.get_rd('narccap_pr_wrfg_ncep') geom = [-97.74278,30.26694] # ocgis.env.VERBOSE = True # ocgis.env.DEBUG = True @@ -565,7 +565,7 @@ def test_time_region_climatology(self): ref = ret[16][variable] def test_mfdataset_to_nc(self): - rd = self.test_data.get_rd('maurer_2010_pr') + rd = self.test_data_nc.get_rd('maurer_2010_pr') ops = OcgOperations(dataset=rd,output_format='nc',calc=[{'func':'mean','name':'my_mean'}], calc_grouping=['year'],geom='state_boundaries',select_ugid=[23]) ret = ops.execute() diff --git a/src/ocgis/test/test_simple/make_test_data_subset.py b/src/ocgis/test/test_simple/make_test_data_subset.py deleted file mode 100644 index c9f382b42..000000000 --- a/src/ocgis/test/test_simple/make_test_data_subset.py +++ /dev/null @@ -1,93 +0,0 @@ -import subprocess -import netCDF4 as nc -from ocgis.test.base import TestBase -import tempfile -import os -import numpy as np -from nose.plugins.skip import SkipTest -from argparse import ArgumentParser -from ocgis.test.test_base import dev - - -class SingleYearFile(Exception): - pass - - -def subset(in_nc,out_nc,n_years=None): - ds = nc.Dataset(in_nc,'r') - try: - tvar = ds.variables['time'] - if n_years is not None: - dts = nc.num2date(tvar[:],tvar.units,calendar=tvar.calendar) - years = np.array([dt.year for dt in dts.flat]) - years_to_use = 
np.unique(years) - years_to_use.sort() - years_to_use = years_to_use[0:n_years] - try: - idx_time = np.zeros(years.shape,dtype=bool) - for year in years_to_use.flat: - idx_time = np.logical_or(idx_time,years == year) - ## likely a single year in the file - except IndexError: - if years_to_use.shape[0] == 1: - raise(SingleYearFile) - else: - raise - else: - idx_time = 0 - dts_float = tvar[:][idx_time] - try: - start_float,end_float = dts_float[0],dts_float[-1] - except IndexError: - ## assume scalar variable - start_float,end_float = dts_float,dts_float - subprocess.check_call(['ncea','-O','-F','-d','time,{0},{1}'.format(start_float,end_float),in_nc,out_nc]) - finally: - ds.close() - -@dev -def test_subset_years_two_years(): - tdata = TestBase.get_tdata() - rd = tdata.get_rd('cancm4_tas') - f,out_nc = tempfile.mkstemp(suffix='_test_nc.nc') - try: - subset(rd.uri,out_nc,2) - ds = nc.Dataset(out_nc,'r') - try: - tvar = ds.variables['time'] - dts = nc.num2date(tvar[:],tvar.units,calendar=tvar.calendar) - uyears = np.unique([dt.year for dt in dts.flat]) - assert(uyears.shape[0] == 2) - finally: - ds.close() - finally: - os.remove(out_nc) - -@dev -def test_subset_years_one_year(): - tdata = TestBase.get_tdata() - rd = tdata.get_rd('cancm4_tas') - f,out_nc = tempfile.mkstemp(suffix='_test_nc.nc') - try: - subset(rd.uri,out_nc,1) - ds = nc.Dataset(out_nc,'r') - try: - tvar = ds.variables['time'] - dts = nc.num2date(tvar[:],tvar.units,calendar=tvar.calendar) - uyears = np.unique([dt.year for dt in dts.flat]) - assert(uyears.shape[0] == 1) - finally: - ds.close() - finally: - os.remove(out_nc) - - -if __name__ == '__main__': - parser = ArgumentParser() - parser.add_argument('in_nc') - parser.add_argument('out_nc') - parser.add_argument('-y','--years',type=int,default=None) - - pargs = parser.parse_args() - - subset(pargs.in_nc,pargs.out_nc,n_years=pargs.years) \ No newline at end of file diff --git a/src/ocgis/test/test_simple/test_cfunits.py 
b/src/ocgis/test/test_simple/test_cfunits.py new file mode 100644 index 000000000..d227dbe3c --- /dev/null +++ b/src/ocgis/test/test_simple/test_cfunits.py @@ -0,0 +1,22 @@ +from cfunits import Units +from ocgis import RequestDataset +from ocgis.test.test_simple.make_test_data import SimpleNcMultivariate +from ocgis.test.test_simple.test_simple import TestSimpleBase +import numpy as np + + +class TestSimpleMultivariate(TestSimpleBase): + base_value = np.array([[1.0, 1.0, 2.0, 2.0], + [1.0, 1.0, 2.0, 2.0], + [3.0, 3.0, 4.0, 4.0], + [3.0, 3.0, 4.0, 4.0]]) + nc_factory = SimpleNcMultivariate + fn = 'test_simple_multivariate_01.nc' + var = ['foo', 'foo2'] + + def test_variable_has_appropriate_units(self): + """Test multiple variables loaded from a netCDF file are assigned the appropriate units.""" + + field = RequestDataset(**self.get_dataset()).get() + self.assertDictEqual({v.name: v.cfunits for v in field.variables.itervalues()}, + {'foo': Units('K'), 'foo2': Units('mm/s')}) \ No newline at end of file diff --git a/src/ocgis/test/test_simple/test_simple.py b/src/ocgis/test/test_simple/test_simple.py index ff2f81e75..312f2590d 100644 --- a/src/ocgis/test/test_simple/test_simple.py +++ b/src/ocgis/test/test_simple/test_simple.py @@ -1,6 +1,5 @@ import re import unittest -from cfunits import Units from fiona.crs import from_string from osgeo.osr import SpatialReference from ocgis.api.operations import OcgOperations @@ -8,19 +7,16 @@ import itertools import numpy as np import datetime -from ocgis.api.parms.definition import SpatialOperation, OutputFormat -from ocgis.conv.fiona_ import ShpConverter -from ocgis.util.helpers import make_poly, FionaMaker, project_shapely_geometry, write_geom_dict +from ocgis.api.parms.definition import SpatialOperation +from ocgis.util.helpers import make_poly, FionaMaker, project_shapely_geometry from ocgis import exc, env, constants import os.path -from ocgis.util.inspect import Inspect from abc import ABCMeta, abstractproperty import 
netCDF4 as nc from ocgis.test.base import TestBase from shapely.geometry.point import Point import ocgis -from ocgis.exc import ExtentError, DefinitionValidationError,\ - ImproperPolygonBoundsError +from ocgis.exc import ExtentError, DefinitionValidationError, ImproperPolygonBoundsError from shapely.geometry.polygon import Polygon import csv import fiona @@ -32,36 +28,40 @@ from ocgis.api.request.base import RequestDataset, RequestDatasetCollection from copy import deepcopy from contextlib import contextmanager -from ocgis.test.test_simple.make_test_data import SimpleNcNoLevel, SimpleNc,\ - SimpleNcNoBounds, SimpleMaskNc, SimpleNc360, SimpleNcProjection,\ - SimpleNcNoSpatialBounds, SimpleNcMultivariate +from ocgis.test.test_simple.make_test_data import SimpleNcNoLevel, SimpleNc, SimpleNcNoBounds, SimpleMaskNc, \ + SimpleNc360, SimpleNcProjection, SimpleNcNoSpatialBounds, SimpleNcMultivariate from csv import DictReader from ocgis.test.test_base import longrunning -import webbrowser import tempfile from ocgis.api.parms.definition import OutputFormat from ocgis.interface.base.field import DerivedMultivariateField -from ocgis.api.collection import SpatialCollection from ocgis.util.itester import itr_products_keywords from ocgis.util.shp_cabinet import ShpCabinetIterator @contextmanager -def nc_scope(path,mode='r'): - """Provide a transactional scope around a series of operations.""" - ds = nc.Dataset(path,mode=mode) +def nc_scope(path, mode='r'): + """ + Provide a transactional scope around a :class:`netCDF4.Dataset` object. + + >>> with nc_scope('/my/file.nc') as ds: + >>> print ds.variables + + :param str path: The full path to the netCDF dataset. + :param str mode: The file mode to use when opening the dataset. + :returns: An open dataset object that will be closed after leaving the ``with statement``. 
+ :rtype: :class:`netCDF4.Dataset` + """ + + ds = nc.Dataset(path, mode=mode) try: - yield(ds) + yield ds except: raise finally: ds.close() -class ToTest(Exception): - pass - - class TestSimpleBase(TestBase): __metaclass__ = ABCMeta @@ -313,7 +313,7 @@ def test_units_calendar_on_time_bounds_calculation(self): def test_add_auxiliary_files_false_csv_nc(self): rd = self.get_dataset() for output_format in ['csv','nc']: - dir_output = tempfile.mkdtemp(dir=self._test_dir) + dir_output = tempfile.mkdtemp(dir=self.current_dir_output) ops = ocgis.OcgOperations(dataset=rd,output_format=output_format,add_auxiliary_files=False, dir_output=dir_output) ret = ops.execute() @@ -579,7 +579,7 @@ def test_snippet(self): ret = self.get_ret(kwds={'snippet':True}) ref = ret.gvu(1,self.var) self.assertEqual(ref.shape,(1,1,1,4,4)) - with nc_scope(os.path.join(self._test_dir,self.fn)) as ds: + with nc_scope(os.path.join(self.current_dir_output,self.fn)) as ds: to_test = ds.variables['foo'][0,0,:,:].reshape(1, 1, 1, 4, 4) self.assertNumpyAll(to_test,ref.data) @@ -665,34 +665,30 @@ def test_calc_sample_size(self): rd2['alias'] = 'var2' dataset = [ -# RequestDatasetCollection([rd1]), + # RequestDatasetCollection([rd1]), RequestDatasetCollection([rd1,rd2]) ] calc_sample_size = [ True, -# False + # False ] calc = [ [{'func':'mean','name':'mean'},{'func':'max','name':'max'}], -# [{'func':'ln','name':'ln'}], -# None, -# [{'func':'divide','name':'divide','kwds':{'arr1':'var1','arr2':'var2'}}] + # [{'func':'ln','name':'ln'}], + # None, + # [{'func':'divide','name':'divide','kwds':{'arr1':'var1','arr2':'var2'}}] ] calc_grouping = [ -# None, + # None, ['month'], -# ['month','year'] + # ['month','year'] ] -# output_format = constants.output_formats output_format = ['numpy'] for ii,tup in enumerate(itertools.product(dataset,calc_sample_size,calc,calc_grouping,output_format)): kwds = dict(zip(['dataset','calc_sample_size','calc','calc_grouping','output_format'],tup)) kwds['prefix'] = str(ii) -# print 
kwds -# print len(kwds['dataset']) - try: ops = OcgOperations(**kwds) except DefinitionValidationError: @@ -840,15 +836,16 @@ def test_limiting_headers(self): @longrunning def test_combinatorial_projection_with_geometries(self): -# self.get_ret(kwds={'output_format':'shp','prefix':'as_polygon'}) -# self.get_ret(kwds={'output_format':'shp','prefix':'as_point','abstraction':'point'}) - + # self.get_ret(kwds={'output_format':'shp','prefix':'as_polygon'}) + # self.get_ret(kwds={'output_format':'shp','prefix':'as_point','abstraction':'point'}) + features = [ {'NAME':'a','wkt':'POLYGON((-105.020430 40.073118,-105.810753 39.327957,-105.660215 38.831183,-104.907527 38.763441,-104.004301 38.816129,-103.643011 39.802151,-103.643011 39.802151,-103.643011 39.802151,-103.643011 39.802151,-103.959140 40.118280,-103.959140 40.118280,-103.959140 40.118280,-103.959140 40.118280,-104.327957 40.201075,-104.327957 40.201075,-105.020430 40.073118))'}, {'NAME':'b','wkt':'POLYGON((-102.212903 39.004301,-102.905376 38.906452,-103.311828 37.694624,-103.326882 37.295699,-103.898925 37.220430,-103.846237 36.746237,-102.619355 37.107527,-102.634409 37.724731,-101.874194 37.882796,-102.212903 39.004301))'}, {'NAME':'c','wkt':'POLYGON((-105.336559 37.175269,-104.945161 37.303226,-104.726882 37.175269,-104.696774 36.844086,-105.043011 36.693548,-105.283871 36.640860,-105.336559 37.175269))'}, {'NAME':'d','wkt':'POLYGON((-102.318280 39.741935,-103.650538 39.779570,-103.620430 39.448387,-103.349462 39.433333,-103.078495 39.606452,-102.325806 39.613978,-102.325806 39.613978,-102.333333 39.741935,-102.318280 39.741935))'}, ] + for filename in ['polygon','point']: if filename == 'point': geometry = 'Point' @@ -860,7 +857,7 @@ def test_combinatorial_projection_with_geometries(self): to_write = features geometry = 'Polygon' - path = os.path.join(self._test_dir,'ab_{0}.shp'.format(filename)) + path = os.path.join(self.current_dir_output,'ab_{0}.shp'.format(filename)) with 
FionaMaker(path,geometry=geometry) as fm: fm.write(to_write) @@ -872,12 +869,10 @@ def test_combinatorial_projection_with_geometries(self): no_level_nc.write() no_level_uri = os.path.join(env.DIR_OUTPUT,no_level_nc.filename) - ocgis.env.DIR_SHPCABINET = self._test_dir + ocgis.env.DIR_SHPCABINET = self.current_dir_output # ocgis.env.DEBUG = True # ocgis.env.VERBOSE = True -################################################################################ - aggregate = [ False, True @@ -915,14 +910,11 @@ def test_combinatorial_projection_with_geometries(self): [{'func':'mean','name':'my_mean'}] ] calc_grouping = ['month'] - -################################################################################ - + args = (aggregate,spatial_operation,epsg,output_format,abstraction,geom,calc,dataset) for ii,tup in enumerate(itertools.product(*args)): a,s,e,o,ab,g,c,d = tup -# print(tup[0:-1],tup[-1]['uri']) - + if os.path.split(d['uri'])[1] == 'test_simple_spatial_no_bounds_01.nc': unbounded = True else: @@ -960,9 +952,9 @@ def test_combinatorial_projection_with_geometries(self): raise if o == 'shp': - ugid_path = os.path.join(self._test_dir,ops.prefix,ops.prefix+'_ugid.shp') + ugid_path = os.path.join(self.current_dir_output,ops.prefix,ops.prefix+'_ugid.shp') else: - ugid_path = os.path.join(self._test_dir,ops.prefix,'shp',ops.prefix+'_ugid.shp') + ugid_path = os.path.join(self.current_dir_output,ops.prefix,'shp',ops.prefix+'_ugid.shp') if o != 'nc': with fiona.open(ugid_path,'r') as f: @@ -1029,7 +1021,7 @@ def _make_record_(wkt_str,ugid,state_name): kansas = 'POLYGON((-95.071931 37.001478,-95.406622 37.000615,-95.526019 37.001018,-95.785748 36.998114,-95.957961 37.000083,-96.006049 36.998333,-96.519187 37.000577,-96.748696 37.000166,-97.137693 36.999808,-97.465405 36.996467,-97.804250 36.998567,-98.104529 36.998671,-98.347143 36.999061,-98.540219 36.998376,-98.999516 36.998072,-99.437473 36.994558,-99.544639 36.995463,-99.999261 36.995417,-100.088574 36.997652,-100.634245 
36.997832,-100.950587 36.996661,-101.071604 36.997466,-101.553676 36.996693,-102.024519 36.988875,-102.037207 36.988994,-102.042010 37.386279,-102.044456 37.641474,-102.043976 37.734398,-102.046061 38.253822,-102.045549 38.263343,-102.047584 38.615499,-102.047568 38.692550,-102.048972 39.037003,-102.047874 39.126753,-102.048801 39.562803,-102.049442 39.568693,-102.051535 39.998918,-101.407393 40.001003,-101.322148 40.001821,-100.754856 40.000198,-100.735049 39.999172,-100.191111 40.000585,-100.180910 40.000478,-99.627859 40.002987,-99.178201 39.999577,-99.064747 39.998338,-98.720632 39.998461,-98.504479 39.997129,-98.264165 39.998434,-97.929588 39.998452,-97.816589 39.999729,-97.361912 39.997380,-96.908287 39.996154,-96.801420 39.994476,-96.454038 39.994172,-96.240598 39.994503,-96.001253 39.995159,-95.780700 39.993489,-95.329701 39.992595,-95.308697 39.999407,-95.240961 39.942105,-95.207597 39.938176,-95.193963 39.910180,-95.150551 39.908054,-95.100722 39.869865,-95.063246 39.866538,-95.033506 39.877844,-95.021772 39.896978,-94.965023 39.900823,-94.938243 39.896081,-94.936511 39.849386,-94.923876 39.833131,-94.898324 39.828332,-94.888505 39.817400,-94.899323 39.793775,-94.933267 39.782773,-94.935114 39.775426,-94.921800 39.757841,-94.877067 39.760679,-94.871185 39.754118,-94.877860 39.739305,-94.905678 39.726755,-94.930856 39.727026,-94.953142 39.736501,-94.961786 39.732038,-94.978570 39.684988,-95.028292 39.661913,-95.056017 39.625689,-95.053613 39.586776,-95.108988 39.560692,-95.102037 39.532848,-95.047599 39.485328,-95.040511 39.462940,-94.986204 39.439461,-94.958494 39.411447,-94.925748 39.381266,-94.898281 39.380640,-94.911343 39.340121,-94.907681 39.323028,-94.881107 39.286046,-94.833476 39.261766,-94.820819 39.211004,-94.790049 39.196883,-94.730531 39.171256,-94.675514 39.174922,-94.646407 39.158427,-94.612653 39.151649,-94.601224 39.141227,-94.608137 39.112801,-94.609281 39.044667,-94.612469 38.837109,-94.613148 38.737222,-94.618717 38.471473,-94.619053 
38.392032,-94.617330 38.055784,-94.616735 38.030387,-94.619293 37.679869,-94.618996 37.650374,-94.618764 37.360766,-94.618977 37.327732,-94.620664 37.060147,-94.620379 36.997046,-95.032745 37.000779,-95.071931 37.001478))' fiona_crs = crs.WGS84().value fiona_properties = {'UGID':'int','STATE_NAME':'str'} - fiona_path = os.path.join(self._test_dir,'states.shp') + fiona_path = os.path.join(self.current_dir_output,'states.shp') fiona_schema = {'geometry':'Polygon', 'properties':fiona_properties} with fiona.open(fiona_path,'w',driver='ESRI Shapefile',crs=fiona_crs,schema=fiona_schema) as f: @@ -1038,13 +1030,13 @@ def _make_record_(wkt_str,ugid,state_name): f.write(record_nebraska) f.write(record_kansas) - ocgis.env.DIR_SHPCABINET = self._test_dir + ocgis.env.DIR_SHPCABINET = self.current_dir_output ops = OcgOperations(dataset=self.get_dataset(), geom='states', output_format='shp') ret = ops.execute() - output_folder = os.path.join(self._test_dir,ops.prefix) + output_folder = os.path.join(self.current_dir_output,ops.prefix) ugid_csv_name = 'ocgis_output_ugid.csv' contents = os.listdir(output_folder) self.assertEqual(set(contents), @@ -1084,7 +1076,7 @@ def _make_record_(wkt_str,ugid,state_name): self.assertEqual(row['properties']['UGID'],row['properties']['GID']) self.assertEqual(set([row['properties']['GID'] for row in rows]),set([1,2])) self.assertEqual(len(rows),244) - self.assertEqual(set(os.listdir(os.path.join(self._test_dir,ops.prefix))),set(['aggregation_clip_ugid.shp', 'aggregation_clip.cpg', 'aggregation_clip_ugid.csv', 'aggregation_clip_metadata.txt', 'aggregation_clip_did.csv', 'aggregation_clip.log', 'aggregation_clip.dbf', 'aggregation_clip.shx', 'aggregation_clip_ugid.prj', 'aggregation_clip_ugid.cpg', 'aggregation_clip_ugid.shx', 'aggregation_clip.shp', 'aggregation_clip_ugid.dbf', 'aggregation_clip.prj', 'aggregation_clip_source_metadata.txt'])) + 
self.assertEqual(set(os.listdir(os.path.join(self.current_dir_output,ops.prefix))),set(['aggregation_clip_ugid.shp', 'aggregation_clip.cpg', 'aggregation_clip_ugid.csv', 'aggregation_clip_metadata.txt', 'aggregation_clip_did.csv', 'aggregation_clip.log', 'aggregation_clip.dbf', 'aggregation_clip.shx', 'aggregation_clip_ugid.prj', 'aggregation_clip_ugid.cpg', 'aggregation_clip_ugid.shx', 'aggregation_clip.shp', 'aggregation_clip_ugid.dbf', 'aggregation_clip.prj', 'aggregation_clip_source_metadata.txt'])) def test_csv_conversion(self): ocgis.env.OVERWRITE = True @@ -1096,7 +1088,7 @@ def test_csv_conversion(self): ops = OcgOperations(dataset=self.get_dataset(),output_format='csv',geom=geom) ret = ops.execute() - output_dir = os.path.join(self._test_dir,ops.prefix) + output_dir = os.path.join(self.current_dir_output,ops.prefix) contents = set(os.listdir(output_dir)) self.assertEqual(contents,set(['ocgis_output_source_metadata.txt', 'ocgis_output_metadata.txt', 'ocgis_output.log', 'ocgis_output_did.csv', 'ocgis_output.csv'])) with open(ret,'r') as f: @@ -1105,7 +1097,7 @@ def test_csv_conversion(self): self.assertDictEqual(row,{'LID': '1', 'UGID': '1', 'VID': '1', 'ALIAS': 'foo', 'DID': '1', 'YEAR': '2000', 'VALUE': '1.0', 'MONTH': '3', 'VARIABLE': 'foo', 'GID': '6', 'TIME': '2000-03-01 12:00:00', 'TID': '1', 'LEVEL': '50', 'DAY': '1'}) did_file = os.path.join(output_dir,ops.prefix+'_did.csv') - uri = os.path.join(self._test_dir,self.fn) + uri = os.path.join(self.current_dir_output,self.fn) with open(did_file,'r') as f: reader = csv.DictReader(f) row = reader.next() @@ -1141,7 +1133,7 @@ def test_csv_calc_conversion_two_calculations(self): ret = ops.execute() with open(ret,'r') as f: - with open(os.path.join(self._test_bin_dir,'test_csv_calc_conversion_two_calculations.csv')) as f2: + with open(os.path.join(self.path_bin,'test_csv_calc_conversion_two_calculations.csv')) as f2: reader = csv.DictReader(f) reader2 = csv.DictReader(f2) for row,row2 in zip(reader,reader2): 
@@ -1189,7 +1181,7 @@ def test_csv_plus_conversion(self): prefix='with_ugid') ret = ops.execute() - path = os.path.join(self._test_dir,'with_ugid') + path = os.path.join(self.current_dir_output,'with_ugid') contents = os.listdir(path) self.assertEqual(set(contents),set(['with_ugid_metadata.txt', 'with_ugid.log', 'with_ugid.csv', 'with_ugid_source_metadata.txt', 'shp', 'with_ugid_did.csv'])) @@ -1287,8 +1279,6 @@ def run_field_tst(self, field): self.assertEqual([v.name for v in field.variables.itervalues()], ['foo', 'foo2']) self.assertEqual(field.variables.values()[0].value.mean(), 2.5) self.assertEqual(field.variables.values()[1].value.mean(), 5.5) - self.assertDictEqual({v.name: v.cfunits for v in field.variables.itervalues()}, - {'foo': Units('K'), 'foo2': Units('mm/s')}) sub = field[:, 3, 1, 1:3, 1:3] self.assertNumpyAll(sub.variables.values()[0].value.data.flatten(), np.array([1.0, 2.0, 3.0, 4.0])) self.assertNumpyAll(sub.variables.values()[1].value.data.flatten(), np.array([1.0, 2.0, 3.0, 4.0])+3) @@ -1334,7 +1324,7 @@ def test_operations_convert_nc_one_request_dataset(self): reader = csv.DictReader(f) lines = list(reader) for row in lines: - self.assertEqual(row.pop('URI'), os.path.join(self._test_dir, self.fn)) + self.assertEqual(row.pop('URI'), os.path.join(self.current_dir_output, self.fn)) actual = [{'ALIAS': 'foo', 'DID': '1', 'UNITS': 'K', 'STANDARD_NAME': 'Maximum Temperature Foo', 'VARIABLE': 'foo', 'LONG_NAME': 'foo_foo'}, {'ALIAS': 'foo2', 'DID': '1', 'UNITS': 'mm/s', 'STANDARD_NAME': 'Precipitation Foo', 'VARIABLE': 'foo2', 'LONG_NAME': 'foo_foo_pr'}] for a, l in zip(actual, lines): self.assertDictEqual(a, l) @@ -1346,7 +1336,7 @@ def test_operations_convert_multiple_request_datasets(self): rds = self.get_multiple_request_datasets() ops = OcgOperations(dataset=rds, output_format=o, prefix=o, slice=[None, [0, 2], None, None, None]) ret = ops.execute() - path_source_metadata = os.path.join(self._test_dir, ops.prefix, 
'{0}_source_metadata.txt'.format(ops.prefix)) + path_source_metadata = os.path.join(self.current_dir_output, ops.prefix, '{0}_source_metadata.txt'.format(ops.prefix)) if o != 'numpy': self.assertTrue(os.path.exists(ret)) with open(path_source_metadata, 'r') as f: @@ -1485,7 +1475,7 @@ class TestSimpleProjected(TestSimpleBase): def test_differing_projection_no_output_crs(self): nc_normal = SimpleNc() nc_normal.write() - uri = os.path.join(self._test_dir,nc_normal.filename) + uri = os.path.join(self.current_dir_output,nc_normal.filename) rd_projected = self.get_dataset() rd_projected['alias'] = 'projected' @@ -1504,7 +1494,7 @@ def test_differing_projection_no_output_crs(self): def test_differing_projection_with_output_crs(self): nc_normal = SimpleNc() nc_normal.write() - uri = os.path.join(self._test_dir,nc_normal.filename) + uri = os.path.join(self.current_dir_output,nc_normal.filename) rd_projected = self.get_dataset() rd_projected['alias'] = 'projected' @@ -1579,10 +1569,10 @@ def test_with_geometry(self): geom = project_shapely_geometry(geom,from_crs.sr,to_sr) feature['wkt'] = geom.wkt - path = os.path.join(self._test_dir,'ab_{0}.shp'.format('polygon')) + path = os.path.join(self.current_dir_output,'ab_{0}.shp'.format('polygon')) with FionaMaker(path,geometry='Polygon') as fm: fm.write(features) - ocgis.env.DIR_SHPCABINET = self._test_dir + ocgis.env.DIR_SHPCABINET = self.current_dir_output ops = OcgOperations(dataset=self.get_dataset(),output_format='shp', geom='ab_polygon') From abb4b979443f0975fe2fbdf40868364d203db3da Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Wed, 15 Oct 2014 10:42:32 -0600 Subject: [PATCH 04/71] moved environment import fixing circular reference --- src/ocgis/__init__.py | 3 ++- src/ocgis/api/operations.py | 5 ++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/ocgis/__init__.py b/src/ocgis/__init__.py index e8d5ffaf3..67cfb5fc8 100644 --- a/src/ocgis/__init__.py +++ b/src/ocgis/__init__.py @@ -1,5 +1,7 @@ from 
osgeo import ogr, osr +from ocgis.util.environment import env + from ocgis.api.collection import SpatialCollection from ocgis.api.operations import OcgOperations from ocgis.api.request.base import RequestDataset, RequestDatasetCollection @@ -8,7 +10,6 @@ from ocgis.interface.base.crs import CoordinateReferenceSystem from ocgis.interface.base.dimension.spatial import SpatialDimension from ocgis.interface.base.field import Field -from ocgis.util.environment import env from ocgis.util.inspect import Inspect from ocgis.util.shp_cabinet import ShpCabinet, ShpCabinetIterator from ocgis.util.zipper import format_return diff --git a/src/ocgis/api/operations.py b/src/ocgis/api/operations.py index ac79234b6..c4b3dde7b 100644 --- a/src/ocgis/api/operations.py +++ b/src/ocgis/api/operations.py @@ -1,10 +1,9 @@ -from ocgis.api.parms.definition import * # @UnusedWildImport +from ocgis.api.parms.definition import * from ocgis.api.interpreter import OcgInterpreter from ocgis import env from ocgis.api.parms.base import OcgParameter from ocgis.conv.meta import MetaConverter -from ocgis.calc.base import AbstractMultivariateFunction,\ - AbstractKeyedOutputFunction +from ocgis.calc.base import AbstractMultivariateFunction, AbstractKeyedOutputFunction from ocgis.interface.base.crs import CFRotatedPole, WGS84 from ocgis.api.subset import SubsetOperation import numpy as np From 0609b8704c0ccf23a6224eab3a0e83718f94d542 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Thu, 16 Oct 2014 09:45:09 -0600 Subject: [PATCH 05/71] upgrade fiona to newest version #334 Supported version is now 1.4.4. 
--- doc/install.rst | 6 +- src/ocgis/conv/fiona_.py | 129 ++++++++++------- src/ocgis/test/base.py | 3 +- src/ocgis/test/test_misc/test_conversion.py | 3 - .../test_ocgis/test_calc/test_calc_general.py | 1 - .../test_contrib/test_library_icclim.py | 2 +- .../test/test_ocgis/test_conv/test_fiona_.py | 2 +- .../test_ocgis/test_util/test_shp_process.py | 2 +- .../test_spatial/test_fiona_maker.py | 40 ++++++ src/ocgis/test/test_simple/test_simple.py | 132 ++++++++++++------ src/ocgis/util/helpers.py | 54 ++----- src/ocgis/util/shp_cabinet.py | 1 - src/ocgis/util/spatial/fiona_maker.py | 51 +++++++ 13 files changed, 278 insertions(+), 148 deletions(-) create mode 100644 src/ocgis/test/test_ocgis/test_util/test_spatial/test_fiona_maker.py create mode 100644 src/ocgis/util/spatial/fiona_maker.py diff --git a/doc/install.rst b/doc/install.rst index d5ed9b35c..108179f54 100644 --- a/doc/install.rst +++ b/doc/install.rst @@ -11,9 +11,9 @@ Required Dependencies Package Name Version URL ============== ======= ======================================================================= Python 2.7.6 http://www.python.org/download/releases/2.7.6/ -``osgeo`` 1.9.1 http://pypi.python.org/pypi/GDAL/ -``shapely`` 1.2 http://pypi.python.org/pypi/Shapely -``fiona`` 1.0.2 https://pypi.python.org/pypi/Fiona +``osgeo`` 1.9.1 https://pypi.python.org/pypi/GDAL/ +``shapely`` 1.2 https://pypi.python.org/pypi/Shapely +``fiona`` 1.4.4 https://pypi.python.org/pypi/Fiona ``numpy`` 1.8.2 http://sourceforge.net/projects/numpy/files/NumPy/1.8.2/ ``netCDF4`` 1.1.1 http://unidata.github.io/netcdf4-python/ ============== ======= ======================================================================= diff --git a/src/ocgis/conv/fiona_.py b/src/ocgis/conv/fiona_.py index 70a3ff871..1a193becf 100644 --- a/src/ocgis/conv/fiona_.py +++ b/src/ocgis/conv/fiona_.py @@ -5,7 +5,7 @@ import fiona from collections import OrderedDict from shapely.geometry.geo import mapping -from fiona.rfc3339 import FionaTimeType, 
FionaDateType +from fiona.rfc3339 import FionaTimeType, FionaDateType, FionaDateTimeType import abc from ocgis.util.logging_ocgis import ocgis_lh @@ -15,35 +15,38 @@ class FionaConverter(AbstractConverter): _add_ugeom = True _add_ugeom_nest = False - _fiona_conversion = {np.int32:int, - np.int16:int, - np.int64:int, - np.float64:float, - np.float32:float, - np.float16:float, - datetime.datetime:FionaTimeType, - datetime.date:FionaDateType} - _fiona_type_mapping = {datetime.date:'date', - datetime.datetime:'datetime', - np.int64:'int', - NoneType:None, - np.int32:'int', - np.float64:'float', - np.float32:'float', - np.float16:'float', - np.int16:'int', - np.int32:'int', - str:'str'} + _fiona_conversion = {np.int32: int, + np.int16: int, + np.int64: int, + np.float64: float, + np.float32: float, + np.float16: float, + datetime.datetime: str, + datetime.date: str} + _fiona_type_mapping = {datetime.date: 'str', + datetime.datetime: 'str', + np.int64: 'int', + NoneType: None, + np.int32: 'int', + np.float64: 'float', + np.float32: 'float', + np.float16: 'float', + np.int16: 'int', + np.int32: 'int', + str: 'str'} @classmethod def get_field_type(cls, the_type, key=None, fiona_conversion=None): """ :param the_type: The target type object to map to a Fiona field type. - :type the_type: type object + :type the_type: type :param key: The key to update the Fiona conversion map. :type key: str :param fiona_conversion: A dictionary used to convert Python values to Fiona-expected values. :type fiona_conversion: dict + :returns: The appropriate ``fiona`` field type. + :rtype: str or NoneType + :raises: AttributeError """ ret = None @@ -63,67 +66,91 @@ def get_field_type(cls, the_type, key=None, fiona_conversion=None): return ret - def _finalize_(self,f): + def _finalize_(self, f): + """ + Perform any final operations on file objects. + + :param dict f: A dictionary containing file-level metadata and potentially the file object itself. 
+ """ + f['fiona_object'].close() - def _build_(self,coll): + def _build_(self, coll): + """ + :param coll: An archetypical spatial collection that will be written to file. + :type coll: :class:`~ocgis.SpatialCollection` + :returns: A dictionary with all the file object metadata and the file object itself. + :rtype: dict + """ + fiona_conversion = {} - - ## pull the fiona schema properties together by mapping fiona types to - ## the data types of the first row of the output data file + + # pull the fiona schema properties together by mapping fiona types to the data types of the first row of the + # output data file archetype_field = coll._archetype_field fiona_crs = archetype_field.spatial.crs.value - geom,arch_row = coll.get_iter_dict().next() + geom, arch_row = coll.get_iter_dict().next() fiona_properties = OrderedDict() for header in coll.headers: fiona_field_type = self.get_field_type(type(arch_row[header]), key=header, fiona_conversion=fiona_conversion) - fiona_properties.update({header.upper():fiona_field_type}) - - ## we always want to convert the value. if the data is masked, it comes - ## through as a float when unmasked data is in fact a numpy data type. - ## however, this should only occur if 'value' is in the output headers! + fiona_properties.update({header.upper(): fiona_field_type}) + + # we always want to convert the value. if the data is masked, it comes through as a float when unmasked data is + # in fact a numpy data type. however, this should only occur if 'value' is in the output headers! 
if 'value' in coll.headers and 'value' not in fiona_conversion: value_dtype = archetype_field.variables.values()[0].value.dtype try: to_update = self._fiona_conversion[value_dtype] - ## may have to do type comparisons + # may have to do type comparisons except KeyError as e: to_update = None - for k,v in self._fiona_conversion.iteritems(): + for k, v in self._fiona_conversion.iteritems(): if value_dtype == k: to_update = v break if to_update is None: - ocgis_lh(exc=e,logger='fiona_') - fiona_conversion.update({'value':to_update}) - - ## polygon geometry types are always converted to multipolygons to avoid - ## later collections having multipolygon geometries. + ocgis_lh(exc=e, logger='fiona_') + fiona_conversion.update({'value': to_update}) + + # polygon geometry types are always converted to multipolygons to avoid later collections having multipolygon + # geometries. geometry_type = archetype_field.spatial.abstraction_geometry._geom_type if geometry_type == 'Polygon': geometry_type = 'MultiPolygon' - - fiona_schema = {'geometry':geometry_type, - 'properties':fiona_properties} - - ## if there is no data for a header, it may be empty. in this case, the - ## value comes through as none and it should be replaced with bool. - for k,v in fiona_schema['properties'].iteritems(): + + fiona_schema = {'geometry': geometry_type, + 'properties': fiona_properties} + + # if there is no data for a header, it may be empty. in this case, the value comes through as none and it should + # be replaced with bool. 
+ for k, v in fiona_schema['properties'].iteritems(): if v is None: fiona_schema['properties'][k] = 'str:1' - fiona_object = fiona.open(self.path,'w',driver=self._driver,crs=fiona_crs,schema=fiona_schema) - - ret = {'fiona_object':fiona_object,'fiona_conversion':fiona_conversion} + fiona_object = fiona.open(self.path, 'w', driver=self._driver, crs=fiona_crs, schema=fiona_schema) + + ret = {'fiona_object': fiona_object, 'fiona_conversion': fiona_conversion} - return(ret) + return ret def _write_coll_(self, f, coll): + """ + Write a spatial collection using file information from ``f``. + + :param dict f: A dictionary containing all the necessary variables to write the spatial collection to a file + object. + :param coll: The spatial collection to write. + :type coll: :class:`~ocgis.SpatialCollection` + """ + fiona_object = f['fiona_object'] for geom, properties in coll.get_iter_dict(use_upper_keys=True, conversion_map=f['fiona_conversion']): to_write = {'geometry': mapping(geom), 'properties': properties} - fiona_object.write(to_write) + try: + fiona_object.write(to_write) + except Exception as e: + import ipdb;ipdb.set_trace() class ShpConverter(FionaConverter): diff --git a/src/ocgis/test/base.py b/src/ocgis/test/base.py index 3e583fd7d..03c56229e 100644 --- a/src/ocgis/test/base.py +++ b/src/ocgis/test/base.py @@ -63,7 +63,8 @@ def assertDictEqual(self, d1, d2, msg=None): unittest.TestCase.assertDictEqual(self, d1, d2, msg=msg) except AssertionError: for k, v in d1.iteritems(): - self.assertEqual(v, d2[k]) + msg = 'Issue with key "{0}". 
Values are {1}.'.format(k, (v, d2[k])) + self.assertEqual(v, d2[k], msg=msg) self.assertEqual(set(d1.keys()), set(d2.keys())) def assertNumpyAll(self, arr1, arr2, check_fill_value_dtype=True, check_arr_dtype=True): diff --git a/src/ocgis/test/test_misc/test_conversion.py b/src/ocgis/test/test_misc/test_conversion.py index 36630b243..00fddd9c2 100644 --- a/src/ocgis/test/test_misc/test_conversion.py +++ b/src/ocgis/test/test_misc/test_conversion.py @@ -3,10 +3,7 @@ import ocgis import netCDF4 as nc import os -from ocgis.api.operations import OcgOperations -from collections import OrderedDict import fiona -from ocgis.test.test_simple.test_simple import ToTest class Test(TestBase): diff --git a/src/ocgis/test/test_ocgis/test_calc/test_calc_general.py b/src/ocgis/test/test_ocgis/test_calc/test_calc_general.py index f63265a73..2befe955a 100644 --- a/src/ocgis/test/test_ocgis/test_calc/test_calc_general.py +++ b/src/ocgis/test/test_ocgis/test_calc/test_calc_general.py @@ -9,7 +9,6 @@ import itertools from ocgis.calc.engine import OcgCalculationEngine from ocgis.calc.library.thresholds import Threshold -from ocgis.test.test_simple.test_simple import ToTest class AbstractCalcBase(TestBase): diff --git a/src/ocgis/test/test_ocgis/test_contrib/test_library_icclim.py b/src/ocgis/test/test_ocgis/test_contrib/test_library_icclim.py index 0f283b084..6e67e9335 100644 --- a/src/ocgis/test/test_ocgis/test_contrib/test_library_icclim.py +++ b/src/ocgis/test/test_ocgis/test_contrib/test_library_icclim.py @@ -9,7 +9,7 @@ from ocgis.exc import DefinitionValidationError, UnitsValidationError from ocgis.api.operations import OcgOperations from ocgis.calc.library.thresholds import Threshold -from ocgis.test.test_simple.test_simple import nc_scope, ToTest +from ocgis.test.test_simple.test_simple import nc_scope import ocgis from ocgis.test.test_base import longrunning import numpy as np diff --git a/src/ocgis/test/test_ocgis/test_conv/test_fiona_.py 
b/src/ocgis/test/test_ocgis/test_conv/test_fiona_.py index 5885c345f..b3166538f 100644 --- a/src/ocgis/test/test_ocgis/test_conv/test_fiona_.py +++ b/src/ocgis/test/test_ocgis/test_conv/test_fiona_.py @@ -27,7 +27,7 @@ def test_attributes_copied(self): path_ugid = os.path.join(self.current_dir_output, conv.prefix+'_ugid.shp') with fiona.open(path_ugid) as source: - self.assertEqual(source.schema['properties'], OrderedDict([(u'COUNTRY', 'str'), (u'UGID', 'int:10')])) + self.assertEqual(source.schema['properties'], OrderedDict([(u'COUNTRY', 'str:80'), (u'UGID', 'int:10')])) def test_none_geom(self): """Test a NoneType geometry will pass through the Fiona converter.""" diff --git a/src/ocgis/test/test_ocgis/test_util/test_shp_process.py b/src/ocgis/test/test_ocgis/test_util/test_shp_process.py index 636aadc65..5cbc433d6 100644 --- a/src/ocgis/test/test_ocgis/test_util/test_shp_process.py +++ b/src/ocgis/test/test_ocgis/test_util/test_shp_process.py @@ -8,7 +8,7 @@ class TestShpProcess(TestBase): - _test_path = '/home/ben.koziol/Dropbox/NESII/project/ocg/bin/test_data_nc/test_shp_process' + _test_path = '/home/ben.koziol/Dropbox/NESII/project/ocg/bin/test_data/test_shp_process' def test_shp_process(self): copy_path = os.path.join(self.current_dir_output,'test_shp_process') diff --git a/src/ocgis/test/test_ocgis/test_util/test_spatial/test_fiona_maker.py b/src/ocgis/test/test_ocgis/test_util/test_spatial/test_fiona_maker.py new file mode 100644 index 000000000..9d9a25c74 --- /dev/null +++ b/src/ocgis/test/test_ocgis/test_util/test_spatial/test_fiona_maker.py @@ -0,0 +1,40 @@ +import os +import fiona +from shapely.geometry import Point +from ocgis import CoordinateReferenceSystem, ShpCabinetIterator +from ocgis.test.base import TestBase +from ocgis.util.spatial.fiona_maker import FionaMaker + + +class TestFionaMaker(TestBase): + + def get(self, **kwargs): + path = os.path.join(self.current_dir_output, 'test.shp') + fm = FionaMaker(path, **kwargs) + return fm + + def 
test_init(self): + fm = self.get() + self.assertEqual(CoordinateReferenceSystem(epsg=4326), CoordinateReferenceSystem(value=fm.crs)) + + def test_writing_point(self): + """Test writing a point shapefile.""" + + with self.get(geometry='Point') as source: + source.write({'geom': Point(-130, 50), 'UGID': 1, 'NAME': 'the point'}) + with fiona.open(source.path) as source: + crs = CoordinateReferenceSystem(value=source.crs) + self.assertEqual(crs, CoordinateReferenceSystem(epsg=4326)) + + def test_through_shpcabinet(self): + """Test reading the shapefile into a spatial dimension object.""" + + with self.get(geometry='Point') as source: + source.write({'geom': Point(-130, 50), 'UGID': 1, 'NAME': 'the point'}) + sci = ShpCabinetIterator(path=source.path, as_spatial_dimension=True) + sdims = list(sci) + self.assertEqual(len(sdims), 1) + sdim = sdims[0] + point = sdim.geom.point.value[0, 0] + self.assertTrue(point.almost_equals(Point(-130, 50))) + self.assertEqual(CoordinateReferenceSystem(value=source.crs), sdim.crs) diff --git a/src/ocgis/test/test_simple/test_simple.py b/src/ocgis/test/test_simple/test_simple.py index 312f2590d..a77a70c2b 100644 --- a/src/ocgis/test/test_simple/test_simple.py +++ b/src/ocgis/test/test_simple/test_simple.py @@ -8,7 +8,7 @@ import numpy as np import datetime from ocgis.api.parms.definition import SpatialOperation -from ocgis.util.helpers import make_poly, FionaMaker, project_shapely_geometry +from ocgis.util.helpers import make_poly, project_shapely_geometry from ocgis import exc, env, constants import os.path from abc import ABCMeta, abstractproperty @@ -37,6 +37,7 @@ from ocgis.interface.base.field import DerivedMultivariateField from ocgis.util.itester import itr_products_keywords from ocgis.util.shp_cabinet import ShpCabinetIterator +from ocgis.util.spatial.fiona_maker import FionaMaker @contextmanager @@ -988,10 +989,10 @@ def test_empty_dataset_for_operations(self): def test_shp_conversion(self): ocgis.env.OVERWRITE = True calc = [ - 
None, - [{'func':'mean','name':'my_mean'}], - ] - group = ['month','year'] + None, + [{'func': 'mean', 'name': 'my_mean'}], + ] + group = ['month', 'year'] for c in calc: ops = OcgOperations(dataset=self.get_dataset(), output_format='shp', @@ -1001,13 +1002,40 @@ def test_shp_conversion(self): if c is None: with fiona.open(ret) as f: - self.assertDictEqual(f.meta,{'crs': {u'no_defs': True, u'ellps': u'WGS84', u'proj': u'longlat'}, 'driver': u'ESRI Shapefile', 'schema': {'geometry': 'Polygon', 'properties': OrderedDict([(u'DID', 'int:10'), (u'VID', 'int:10'), (u'UGID', 'int:10'), (u'TID', 'int:10'), (u'LID', 'int:10'), (u'GID', 'int:10'), (u'VARIABLE', 'str'), (u'ALIAS', 'str'), (u'TIME', 'date'), (u'YEAR', 'int:10'), (u'MONTH', 'int:10'), (u'DAY', 'int:10'), (u'LEVEL', 'int:10'), (u'VALUE', 'float')])}}) - self.assertEqual(len(f),1952) - self.assertDictEqual(list(f)[340],{'geometry': {'type': 'Polygon', 'coordinates': [[(-105.5, 37.5), (-105.5, 38.5), (-104.5, 38.5), (-104.5, 37.5), (-105.5, 37.5)]]}, 'type': 'Feature', 'id': '340', 'properties': OrderedDict([(u'DID', 1), (u'VID', 1), (u'UGID', 1), (u'TID', 11), (u'LID', 2), (u'GID', 5.0), (u'VARIABLE', u'foo'), (u'ALIAS', u'foo'), (u'TIME', '2000-03-11'), (u'YEAR', 2000), (u'MONTH', 3), (u'DAY', 11), (u'LEVEL', 150), (u'VALUE', 1.0)])}) + schema_properties = OrderedDict( + [(u'DID', 'int:10'), (u'VID', 'int:10'), (u'UGID', 'int:10'), (u'TID', 'int:10'), + (u'LID', 'int:10'), (u'GID', 'int:10'), (u'VARIABLE', 'str:80'), (u'ALIAS', 'str:80'), + (u'TIME', 'str:80'), (u'YEAR', 'int:10'), (u'MONTH', 'int:10'), (u'DAY', 'int:10'), + (u'LEVEL', 'int:10'), (u'VALUE', 'float:24.15')]) + self.assertDictEqual(f.meta['schema']['properties'], schema_properties) + self.assertDictEqual(f.meta, {'crs': {'init': u'epsg:4326'}, + 'driver': u'ESRI Shapefile', + 'schema': {'geometry': 'Polygon', 'properties': schema_properties}}) + self.assertEqual(len(f), 1952) + record_properties = OrderedDict( + [(u'DID', 1), (u'VID', 1), 
(u'UGID', 1), (u'TID', 11), (u'LID', 2), (u'GID', 5.0), + (u'VARIABLE', u'foo'), (u'ALIAS', u'foo'), (u'TIME', '2000-03-11 12:00:00'), (u'YEAR', 2000), + (u'MONTH', 3), (u'DAY', 11), (u'LEVEL', 150), (u'VALUE', 1.0)]) + record = list(f)[340] + self.assertDictEqual(record['properties'], record_properties) + record_coordinates = [[(-105.5, 37.5), (-105.5, 38.5), (-104.5, 38.5), (-104.5, 37.5), (-105.5, 37.5)]] + self.assertDictEqual(record, {'geometry': {'type': 'Polygon', + 'coordinates': record_coordinates}, + 'type': 'Feature', + 'id': '340', + 'properties': record_properties}) else: with fiona.open(ret) as f: - self.assertDictEqual(f.meta,{'crs': {u'no_defs': True, u'ellps': u'WGS84', u'proj': u'longlat'}, 'driver': u'ESRI Shapefile', 'schema': {'geometry': 'Polygon', 'properties': OrderedDict([(u'DID', 'int:10'), (u'VID', 'int:10'), (u'CID', 'int:10'), (u'UGID', 'int:10'), (u'TID', 'int:10'), (u'LID', 'int:10'), (u'GID', 'int:10'), (u'VARIABLE', 'str'), (u'ALIAS', 'str'), (u'CALC_KEY', 'str'), (u'CALC_ALIAS', 'str'), (u'TIME', 'date'), (u'YEAR', 'int:10'), (u'MONTH', 'int:10'), (u'DAY', 'int:10'), (u'LEVEL', 'int:10'), (u'VALUE', 'float')])}}) - self.assertEqual(len(f),64) + self.assertDictEqual(f.meta, {'crs': {'init': u'epsg:4326'}, 'driver': u'ESRI Shapefile', + 'schema': {'geometry': 'Polygon', 'properties': OrderedDict( + [(u'DID', 'int:10'), (u'VID', 'int:10'), (u'CID', 'int:10'), + (u'UGID', 'int:10'), (u'TID', 'int:10'), (u'LID', 'int:10'), + (u'GID', 'int:10'), (u'VARIABLE', 'str:80'), + (u'ALIAS', 'str:80'), (u'CALC_KEY', 'str:80'), + (u'CALC_ALIAS', 'str:80'), (u'TIME', 'str:80'), + (u'YEAR', 'int:10'), (u'MONTH', 'int:10'), (u'DAY', 'int:10'), + (u'LEVEL', 'int:10'), (u'VALUE', 'float:24.15')])}}) + self.assertEqual(len(f), 64) def test_shp_conversion_with_external_geometries(self): @@ -1050,15 +1078,36 @@ def _make_record_(wkt_str,ugid,state_name): with fiona.open(ret) as f: rows = list(f) - self.assertDictEqual(f.meta,{'crs': {u'no_defs': True, 
u'ellps': u'WGS84', u'proj': u'longlat'}, 'driver': u'ESRI Shapefile', 'schema': {'geometry': 'Polygon', 'properties': OrderedDict([(u'DID', 'int:10'), (u'VID', 'int:10'), (u'UGID', 'int:10'), (u'TID', 'int:10'), (u'LID', 'int:10'), (u'GID', 'int:10'), (u'VARIABLE', 'str'), (u'ALIAS', 'str'), (u'TIME', 'date'), (u'YEAR', 'int:10'), (u'MONTH', 'int:10'), (u'DAY', 'int:10'), (u'LEVEL', 'int:10'), (u'VALUE', 'float')])}}) - self.assertEqual(len(rows),610) + fiona_meta = deepcopy(f.meta) + fiona_crs = fiona_meta.pop('crs') + self.assertEqual(CoordinateReferenceSystem(value=fiona_crs), WGS84()) + properties = OrderedDict( + [(u'DID', 'int:10'), (u'VID', 'int:10'), (u'UGID', 'int:10'), (u'TID', 'int:10'), (u'LID', 'int:10'), + (u'GID', 'int:10'), (u'VARIABLE', 'str:80'), (u'ALIAS', 'str:80'), (u'TIME', 'str:80'), + (u'YEAR', 'int:10'), (u'MONTH', 'int:10'), (u'DAY', 'int:10'), (u'LEVEL', 'int:10'), + (u'VALUE', 'float:24.15')]) + self.assertDictEqual(fiona_meta['schema']['properties'], properties) + self.assertDictEqual(fiona_meta, {'driver': u'ESRI Shapefile', + 'schema': {'geometry': 'Polygon', + 'properties': properties}}) + + self.assertEqual(len(rows), 610) ugids = set([r['properties']['UGID'] for r in rows]) - self.assertEqual(ugids,set([1,2])) - self.assertEqual(rows[325],{'geometry': {'type': 'Polygon', 'coordinates': [[(-102.5, 39.5), (-102.5, 40.5), (-101.5, 40.5), (-101.5, 39.5), (-102.5, 39.5)]]}, 'type': 'Feature', 'id': '325', 'properties': OrderedDict([(u'DID', 1), (u'VID', 1), (u'UGID', 2), (u'TID', 26), (u'LID', 1), (u'GID', 16.0), (u'VARIABLE', u'foo'), (u'ALIAS', u'foo'), (u'TIME', '2000-03-26'), (u'YEAR', 2000), (u'MONTH', 3), (u'DAY', 26), (u'LEVEL', 50), (u'VALUE', 4.0)])}) + self.assertEqual(ugids, set([1, 2])) + properties = OrderedDict( + [(u'DID', 1), (u'VID', 1), (u'UGID', 2), (u'TID', 26), (u'LID', 1), (u'GID', 16.0), (u'VARIABLE', u'foo'), + (u'ALIAS', u'foo'), (u'TIME', '2000-03-26 12:00:00'), (u'YEAR', 2000), (u'MONTH', 3), (u'DAY', 26), 
(u'LEVEL', 50), + (u'VALUE', 4.0)]) + self.assertDictEqual(properties, rows[325]['properties']) + self.assertEqual(rows[325], {'geometry': {'type': 'Polygon', 'coordinates': [[(-102.5, 39.5), (-102.5, 40.5), (-101.5, 40.5), (-101.5, 39.5), (-102.5, 39.5)]]}, 'type': 'Feature', 'id': '325', 'properties': properties}) with fiona.open(os.path.join(output_folder,ops.prefix+'_ugid.shp')) as f: - self.assertDictEqual(f.meta,{'crs': {u'no_defs': True, u'ellps': u'WGS84', u'proj': u'longlat'}, 'driver': u'ESRI Shapefile', 'schema': {'geometry': 'Polygon', 'properties': OrderedDict([(u'UGID', 'int:10'), (u'STATE_NAME', 'str')])}}) - self.assertEqual(list(f),[{'geometry': {'type': 'Polygon', 'coordinates': [[(-101.407393, 40.001003), (-102.051535, 39.998918), (-102.047545, 40.342644), (-102.04762, 40.431077), (-102.046031, 40.697319), (-102.046992, 40.74313), (-102.047739, 40.998071), (-102.621257, 41.000214), (-102.652271, 40.998124), (-103.382956, 41.000316), (-103.572316, 40.999648), (-104.051705, 41.003211), (-104.054012, 41.388085), (-104.0555, 41.564222), (-104.053615, 41.698218), (-104.053513, 41.999815), (-104.056219, 42.614669), (-104.056199, 43.003062), (-103.501464, 42.998618), (-103.005875, 42.999354), (-102.788384, 42.995303), (-102.086701, 42.989887), (-101.231737, 42.986843), (-100.198142, 42.991095), (-99.53279, 42.992335), (-99.253971, 42.992389), (-98.497651, 42.991778), (-98.457444, 42.93716), (-98.391204, 42.920135), (-98.310339, 42.881794), (-98.167826, 42.839571), (-98.144869, 42.835794), (-98.123117, 42.820223), (-98.12182, 42.80836), (-98.03314, 42.769192), (-97.995144, 42.766812), (-97.963558, 42.77369), (-97.929477, 42.792324), (-97.889941, 42.831271), (-97.888659, 42.855807), (-97.818643, 42.866587), (-97.797028, 42.849597), (-97.772186, 42.846164), (-97.72525, 42.858008), (-97.685752, 42.836837), (-97.63497, 42.861285), (-97.570654, 42.84799), (-97.506132, 42.860136), (-97.483159, 42.857157), (-97.457263, 42.850443), (-97.389306, 42.867433), 
(-97.311414, 42.861771), (-97.271457, 42.850014), (-97.243189, 42.851826), (-97.224443, 42.841202), (-97.211831, 42.812573), (-97.161422, 42.798619), (-97.130469, 42.773923), (-97.015139, 42.759542), (-96.979593, 42.758313), (-96.970003, 42.752065), (-96.977869, 42.727308), (-96.970773, 42.721147), (-96.908234, 42.731699), (-96.81014, 42.704084), (-96.810437, 42.681341), (-96.799344, 42.670019), (-96.722658, 42.668592), (-96.69906, 42.657715), (-96.694596, 42.641163), (-96.715273, 42.621907), (-96.714059, 42.612302), (-96.636672, 42.550731), (-96.629294, 42.522693), (-96.605467, 42.507236), (-96.584753, 42.518287), (-96.547215, 42.520499), (-96.494701, 42.488459), (-96.439394, 42.48924), (-96.396074, 42.467401), (-96.39789, 42.441793), (-96.417628, 42.414777), (-96.411761, 42.380918), (-96.424175, 42.349279), (-96.389781, 42.328789), (-96.3687, 42.298023), (-96.342881, 42.282081), (-96.332658, 42.260307), (-96.337708, 42.229522), (-96.363512, 42.214042), (-96.352165, 42.168185), (-96.285123, 42.123452), (-96.265483, 42.048897), (-96.238725, 42.028438), (-96.236093, 42.001258), (-96.202842, 41.996615), (-96.185217, 41.980685), (-96.147328, 41.966254), (-96.14587, 41.924907), (-96.15997, 41.904151), (-96.135623, 41.86262), (-96.076417, 41.791469), (-96.099321, 41.752975), (-96.099771, 41.731563), (-96.085557, 41.704987), (-96.122202, 41.694913), (-96.120264, 41.684094), (-96.099306, 41.65468), (-96.111307, 41.599006), (-96.080835, 41.576), (-96.091936, 41.563145), (-96.08584, 41.537522), (-96.050172, 41.524335), (-96.004592, 41.536663), (-95.993965, 41.528103), (-95.996688, 41.511517), (-96.013451, 41.492994), (-96.006897, 41.481954), (-95.953185, 41.472387), (-95.935065, 41.462381), (-95.940056, 41.394805), (-95.942895, 41.340077), (-95.889107, 41.301389), (-95.897591, 41.286863), (-95.911202, 41.308469), (-95.93023, 41.302056), (-95.910981, 41.225245), (-95.92225, 41.207854), (-95.9161, 41.194063), (-95.859198, 41.180537), (-95.859801, 41.166865), (-95.876685, 
41.164202), (-95.858274, 41.109187), (-95.878804, 41.065871), (-95.859539, 41.035002), (-95.860897, 41.00265), (-95.837603, 40.974258), (-95.836541, 40.901108), (-95.834396, 40.8703), (-95.846435, 40.848332), (-95.85179, 40.7926), (-95.876616, 40.730436), (-95.767999, 40.643117), (-95.757546, 40.620904), (-95.767479, 40.589048), (-95.763412, 40.549707), (-95.737036, 40.532373), (-95.692066, 40.524129), (-95.687413, 40.56117), (-95.675693, 40.565835), (-95.662944, 40.558729), (-95.65806, 40.530332), (-95.68497, 40.512205), (-95.695361, 40.485338), (-95.636817, 40.39639), (-95.634185, 40.3588), (-95.616201, 40.346497), (-95.617933, 40.331418), (-95.645553, 40.322346), (-95.646827, 40.309109), (-95.595532, 40.309776), (-95.547137, 40.266215), (-95.476822, 40.226855), (-95.466636, 40.213255), (-95.460952, 40.173995), (-95.422476, 40.131743), (-95.392813, 40.115416), (-95.384542, 40.095362), (-95.403784, 40.080379), (-95.413764, 40.048111), (-95.390532, 40.04375), (-95.371244, 40.028751), (-95.345067, 40.024974), (-95.308697, 39.999407), (-95.329701, 39.992595), (-95.7807, 39.993489), (-96.001253, 39.995159), (-96.240598, 39.994503), (-96.454038, 39.994172), (-96.80142, 39.994476), (-96.908287, 39.996154), (-97.361912, 39.99738), (-97.816589, 39.999729), (-97.929588, 39.998452), (-98.264165, 39.998434), (-98.504479, 39.997129), (-98.720632, 39.998461), (-99.064747, 39.998338), (-99.178201, 39.999577), (-99.627859, 40.002987), (-100.18091, 40.000478), (-100.191111, 40.000585), (-100.735049, 39.999172), (-100.754856, 40.000198), (-101.322148, 40.001821), (-101.407393, 40.001003)]]}, 'type': 'Feature', 'id': '0', 'properties': OrderedDict([(u'UGID', 1), (u'STATE_NAME', u'Nebraska')])}, {'geometry': {'type': 'Polygon', 'coordinates': [[(-95.071931, 37.001478), (-95.406622, 37.000615), (-95.526019, 37.001018), (-95.785748, 36.998114), (-95.957961, 37.000083), (-96.006049, 36.998333), (-96.519187, 37.000577), (-96.748696, 37.000166), (-97.137693, 36.999808), (-97.465405, 
36.996467), (-97.80425, 36.998567), (-98.104529, 36.998671), (-98.347143, 36.999061), (-98.540219, 36.998376), (-98.999516, 36.998072), (-99.437473, 36.994558), (-99.544639, 36.995463), (-99.999261, 36.995417), (-100.088574, 36.997652), (-100.634245, 36.997832), (-100.950587, 36.996661), (-101.071604, 36.997466), (-101.553676, 36.996693), (-102.024519, 36.988875), (-102.037207, 36.988994), (-102.04201, 37.386279), (-102.044456, 37.641474), (-102.043976, 37.734398), (-102.046061, 38.253822), (-102.045549, 38.263343), (-102.047584, 38.615499), (-102.047568, 38.69255), (-102.048972, 39.037003), (-102.047874, 39.126753), (-102.048801, 39.562803), (-102.049442, 39.568693), (-102.051535, 39.998918), (-101.407393, 40.001003), (-101.322148, 40.001821), (-100.754856, 40.000198), (-100.735049, 39.999172), (-100.191111, 40.000585), (-100.18091, 40.000478), (-99.627859, 40.002987), (-99.178201, 39.999577), (-99.064747, 39.998338), (-98.720632, 39.998461), (-98.504479, 39.997129), (-98.264165, 39.998434), (-97.929588, 39.998452), (-97.816589, 39.999729), (-97.361912, 39.99738), (-96.908287, 39.996154), (-96.80142, 39.994476), (-96.454038, 39.994172), (-96.240598, 39.994503), (-96.001253, 39.995159), (-95.7807, 39.993489), (-95.329701, 39.992595), (-95.308697, 39.999407), (-95.240961, 39.942105), (-95.207597, 39.938176), (-95.193963, 39.91018), (-95.150551, 39.908054), (-95.100722, 39.869865), (-95.063246, 39.866538), (-95.033506, 39.877844), (-95.021772, 39.896978), (-94.965023, 39.900823), (-94.938243, 39.896081), (-94.936511, 39.849386), (-94.923876, 39.833131), (-94.898324, 39.828332), (-94.888505, 39.8174), (-94.899323, 39.793775), (-94.933267, 39.782773), (-94.935114, 39.775426), (-94.9218, 39.757841), (-94.877067, 39.760679), (-94.871185, 39.754118), (-94.87786, 39.739305), (-94.905678, 39.726755), (-94.930856, 39.727026), (-94.953142, 39.736501), (-94.961786, 39.732038), (-94.97857, 39.684988), (-95.028292, 39.661913), (-95.056017, 39.625689), (-95.053613, 39.586776), 
(-95.108988, 39.560692), (-95.102037, 39.532848), (-95.047599, 39.485328), (-95.040511, 39.46294), (-94.986204, 39.439461), (-94.958494, 39.411447), (-94.925748, 39.381266), (-94.898281, 39.38064), (-94.911343, 39.340121), (-94.907681, 39.323028), (-94.881107, 39.286046), (-94.833476, 39.261766), (-94.820819, 39.211004), (-94.790049, 39.196883), (-94.730531, 39.171256), (-94.675514, 39.174922), (-94.646407, 39.158427), (-94.612653, 39.151649), (-94.601224, 39.141227), (-94.608137, 39.112801), (-94.609281, 39.044667), (-94.612469, 38.837109), (-94.613148, 38.737222), (-94.618717, 38.471473), (-94.619053, 38.392032), (-94.61733, 38.055784), (-94.616735, 38.030387), (-94.619293, 37.679869), (-94.618996, 37.650374), (-94.618764, 37.360766), (-94.618977, 37.327732), (-94.620664, 37.060147), (-94.620379, 36.997046), (-95.032745, 37.000779), (-95.071931, 37.001478)]]}, 'type': 'Feature', 'id': '1', 'properties': OrderedDict([(u'UGID', 2), (u'STATE_NAME', u'Kansas')])}]) + rows = list(f) + fiona_meta = deepcopy(f.meta) + fiona_crs = fiona_meta.pop('crs') + self.assertEqual(CoordinateReferenceSystem(value=fiona_crs), WGS84()) + self.assertDictEqual(fiona_meta,{'driver': u'ESRI Shapefile', 'schema': {'geometry': 'Polygon', 'properties': OrderedDict([(u'UGID', 'int:10'), (u'STATE_NAME', 'str:80')])}}) + self.assertEqual(rows,[{'geometry': {'type': 'Polygon', 'coordinates': [[(-101.407393, 40.001003), (-102.051535, 39.998918), (-102.047545, 40.342644), (-102.04762, 40.431077), (-102.046031, 40.697319), (-102.046992, 40.74313), (-102.047739, 40.998071), (-102.621257, 41.000214), (-102.652271, 40.998124), (-103.382956, 41.000316), (-103.572316, 40.999648), (-104.051705, 41.003211), (-104.054012, 41.388085), (-104.0555, 41.564222), (-104.053615, 41.698218), (-104.053513, 41.999815), (-104.056219, 42.614669), (-104.056199, 43.003062), (-103.501464, 42.998618), (-103.005875, 42.999354), (-102.788384, 42.995303), (-102.086701, 42.989887), (-101.231737, 42.986843), (-100.198142, 
42.991095), (-99.53279, 42.992335), (-99.253971, 42.992389), (-98.497651, 42.991778), (-98.457444, 42.93716), (-98.391204, 42.920135), (-98.310339, 42.881794), (-98.167826, 42.839571), (-98.144869, 42.835794), (-98.123117, 42.820223), (-98.12182, 42.80836), (-98.03314, 42.769192), (-97.995144, 42.766812), (-97.963558, 42.77369), (-97.929477, 42.792324), (-97.889941, 42.831271), (-97.888659, 42.855807), (-97.818643, 42.866587), (-97.797028, 42.849597), (-97.772186, 42.846164), (-97.72525, 42.858008), (-97.685752, 42.836837), (-97.63497, 42.861285), (-97.570654, 42.84799), (-97.506132, 42.860136), (-97.483159, 42.857157), (-97.457263, 42.850443), (-97.389306, 42.867433), (-97.311414, 42.861771), (-97.271457, 42.850014), (-97.243189, 42.851826), (-97.224443, 42.841202), (-97.211831, 42.812573), (-97.161422, 42.798619), (-97.130469, 42.773923), (-97.015139, 42.759542), (-96.979593, 42.758313), (-96.970003, 42.752065), (-96.977869, 42.727308), (-96.970773, 42.721147), (-96.908234, 42.731699), (-96.81014, 42.704084), (-96.810437, 42.681341), (-96.799344, 42.670019), (-96.722658, 42.668592), (-96.69906, 42.657715), (-96.694596, 42.641163), (-96.715273, 42.621907), (-96.714059, 42.612302), (-96.636672, 42.550731), (-96.629294, 42.522693), (-96.605467, 42.507236), (-96.584753, 42.518287), (-96.547215, 42.520499), (-96.494701, 42.488459), (-96.439394, 42.48924), (-96.396074, 42.467401), (-96.39789, 42.441793), (-96.417628, 42.414777), (-96.411761, 42.380918), (-96.424175, 42.349279), (-96.389781, 42.328789), (-96.3687, 42.298023), (-96.342881, 42.282081), (-96.332658, 42.260307), (-96.337708, 42.229522), (-96.363512, 42.214042), (-96.352165, 42.168185), (-96.285123, 42.123452), (-96.265483, 42.048897), (-96.238725, 42.028438), (-96.236093, 42.001258), (-96.202842, 41.996615), (-96.185217, 41.980685), (-96.147328, 41.966254), (-96.14587, 41.924907), (-96.15997, 41.904151), (-96.135623, 41.86262), (-96.076417, 41.791469), (-96.099321, 41.752975), (-96.099771, 41.731563), 
(-96.085557, 41.704987), (-96.122202, 41.694913), (-96.120264, 41.684094), (-96.099306, 41.65468), (-96.111307, 41.599006), (-96.080835, 41.576), (-96.091936, 41.563145), (-96.08584, 41.537522), (-96.050172, 41.524335), (-96.004592, 41.536663), (-95.993965, 41.528103), (-95.996688, 41.511517), (-96.013451, 41.492994), (-96.006897, 41.481954), (-95.953185, 41.472387), (-95.935065, 41.462381), (-95.940056, 41.394805), (-95.942895, 41.340077), (-95.889107, 41.301389), (-95.897591, 41.286863), (-95.911202, 41.308469), (-95.93023, 41.302056), (-95.910981, 41.225245), (-95.92225, 41.207854), (-95.9161, 41.194063), (-95.859198, 41.180537), (-95.859801, 41.166865), (-95.876685, 41.164202), (-95.858274, 41.109187), (-95.878804, 41.065871), (-95.859539, 41.035002), (-95.860897, 41.00265), (-95.837603, 40.974258), (-95.836541, 40.901108), (-95.834396, 40.8703), (-95.846435, 40.848332), (-95.85179, 40.7926), (-95.876616, 40.730436), (-95.767999, 40.643117), (-95.757546, 40.620904), (-95.767479, 40.589048), (-95.763412, 40.549707), (-95.737036, 40.532373), (-95.692066, 40.524129), (-95.687413, 40.56117), (-95.675693, 40.565835), (-95.662944, 40.558729), (-95.65806, 40.530332), (-95.68497, 40.512205), (-95.695361, 40.485338), (-95.636817, 40.39639), (-95.634185, 40.3588), (-95.616201, 40.346497), (-95.617933, 40.331418), (-95.645553, 40.322346), (-95.646827, 40.309109), (-95.595532, 40.309776), (-95.547137, 40.266215), (-95.476822, 40.226855), (-95.466636, 40.213255), (-95.460952, 40.173995), (-95.422476, 40.131743), (-95.392813, 40.115416), (-95.384542, 40.095362), (-95.403784, 40.080379), (-95.413764, 40.048111), (-95.390532, 40.04375), (-95.371244, 40.028751), (-95.345067, 40.024974), (-95.308697, 39.999407), (-95.329701, 39.992595), (-95.7807, 39.993489), (-96.001253, 39.995159), (-96.240598, 39.994503), (-96.454038, 39.994172), (-96.80142, 39.994476), (-96.908287, 39.996154), (-97.361912, 39.99738), (-97.816589, 39.999729), (-97.929588, 39.998452), (-98.264165, 39.998434), 
(-98.504479, 39.997129), (-98.720632, 39.998461), (-99.064747, 39.998338), (-99.178201, 39.999577), (-99.627859, 40.002987), (-100.18091, 40.000478), (-100.191111, 40.000585), (-100.735049, 39.999172), (-100.754856, 40.000198), (-101.322148, 40.001821), (-101.407393, 40.001003)]]}, 'type': 'Feature', 'id': '0', 'properties': OrderedDict([(u'UGID', 1), (u'STATE_NAME', u'Nebraska')])}, {'geometry': {'type': 'Polygon', 'coordinates': [[(-95.071931, 37.001478), (-95.406622, 37.000615), (-95.526019, 37.001018), (-95.785748, 36.998114), (-95.957961, 37.000083), (-96.006049, 36.998333), (-96.519187, 37.000577), (-96.748696, 37.000166), (-97.137693, 36.999808), (-97.465405, 36.996467), (-97.80425, 36.998567), (-98.104529, 36.998671), (-98.347143, 36.999061), (-98.540219, 36.998376), (-98.999516, 36.998072), (-99.437473, 36.994558), (-99.544639, 36.995463), (-99.999261, 36.995417), (-100.088574, 36.997652), (-100.634245, 36.997832), (-100.950587, 36.996661), (-101.071604, 36.997466), (-101.553676, 36.996693), (-102.024519, 36.988875), (-102.037207, 36.988994), (-102.04201, 37.386279), (-102.044456, 37.641474), (-102.043976, 37.734398), (-102.046061, 38.253822), (-102.045549, 38.263343), (-102.047584, 38.615499), (-102.047568, 38.69255), (-102.048972, 39.037003), (-102.047874, 39.126753), (-102.048801, 39.562803), (-102.049442, 39.568693), (-102.051535, 39.998918), (-101.407393, 40.001003), (-101.322148, 40.001821), (-100.754856, 40.000198), (-100.735049, 39.999172), (-100.191111, 40.000585), (-100.18091, 40.000478), (-99.627859, 40.002987), (-99.178201, 39.999577), (-99.064747, 39.998338), (-98.720632, 39.998461), (-98.504479, 39.997129), (-98.264165, 39.998434), (-97.929588, 39.998452), (-97.816589, 39.999729), (-97.361912, 39.99738), (-96.908287, 39.996154), (-96.80142, 39.994476), (-96.454038, 39.994172), (-96.240598, 39.994503), (-96.001253, 39.995159), (-95.7807, 39.993489), (-95.329701, 39.992595), (-95.308697, 39.999407), (-95.240961, 39.942105), (-95.207597, 
39.938176), (-95.193963, 39.91018), (-95.150551, 39.908054), (-95.100722, 39.869865), (-95.063246, 39.866538), (-95.033506, 39.877844), (-95.021772, 39.896978), (-94.965023, 39.900823), (-94.938243, 39.896081), (-94.936511, 39.849386), (-94.923876, 39.833131), (-94.898324, 39.828332), (-94.888505, 39.8174), (-94.899323, 39.793775), (-94.933267, 39.782773), (-94.935114, 39.775426), (-94.9218, 39.757841), (-94.877067, 39.760679), (-94.871185, 39.754118), (-94.87786, 39.739305), (-94.905678, 39.726755), (-94.930856, 39.727026), (-94.953142, 39.736501), (-94.961786, 39.732038), (-94.97857, 39.684988), (-95.028292, 39.661913), (-95.056017, 39.625689), (-95.053613, 39.586776), (-95.108988, 39.560692), (-95.102037, 39.532848), (-95.047599, 39.485328), (-95.040511, 39.46294), (-94.986204, 39.439461), (-94.958494, 39.411447), (-94.925748, 39.381266), (-94.898281, 39.38064), (-94.911343, 39.340121), (-94.907681, 39.323028), (-94.881107, 39.286046), (-94.833476, 39.261766), (-94.820819, 39.211004), (-94.790049, 39.196883), (-94.730531, 39.171256), (-94.675514, 39.174922), (-94.646407, 39.158427), (-94.612653, 39.151649), (-94.601224, 39.141227), (-94.608137, 39.112801), (-94.609281, 39.044667), (-94.612469, 38.837109), (-94.613148, 38.737222), (-94.618717, 38.471473), (-94.619053, 38.392032), (-94.61733, 38.055784), (-94.616735, 38.030387), (-94.619293, 37.679869), (-94.618996, 37.650374), (-94.618764, 37.360766), (-94.618977, 37.327732), (-94.620664, 37.060147), (-94.620379, 36.997046), (-95.032745, 37.000779), (-95.071931, 37.001478)]]}, 'type': 'Feature', 'id': '1', 'properties': OrderedDict([(u'UGID', 2), (u'STATE_NAME', u'Kansas')])}]) ## test aggregation ops = OcgOperations(dataset=self.get_dataset(), @@ -1197,7 +1246,10 @@ def test_csv_plus_conversion(self): ugid_path = os.path.join(shp_path,'with_ugid_ugid.shp') with fiona.open(ugid_path) as f: to_test = list(f) - self.assertEqual(f.meta,{'crs': {u'no_defs': True, u'ellps': u'WGS84', u'proj': u'longlat'}, 'driver': 
u'ESRI Shapefile', 'schema': {'geometry': 'Polygon', 'properties': OrderedDict([(u'UGID', 'int:10')])}}) + fiona_meta = deepcopy(f.meta) + fiona_crs = fiona_meta.pop('crs') + self.assertEqual(CoordinateReferenceSystem(value=fiona_crs), WGS84()) + self.assertEqual(fiona_meta,{'driver': u'ESRI Shapefile', 'schema': {'geometry': 'Polygon', 'properties': OrderedDict([(u'UGID', 'int:10')])}}) self.assertEqual(to_test,[{'geometry': {'type': 'Polygon', 'coordinates': [[(-104.0, 38.0), (-104.0, 39.0), (-103.0, 39.0), (-103.0, 38.0), (-104.0, 38.0)]]}, 'type': 'Feature', 'id': '0', 'properties': OrderedDict([(u'UGID', 1)])}]) @@ -1555,42 +1607,44 @@ def test_nc_projection_to_shp(self): self.assertEqual(f.meta['crs']['proj'],'lcc') def test_with_geometry(self): - self.get_ret(kwds={'output_format':'shp','prefix':'as_polygon'}) - + self.get_ret(kwds={'output_format': 'shp', 'prefix': 'as_polygon'}) + features = [ - {'NAME':'a','wkt':'POLYGON((-425985.928175 -542933.565515,-425982.789465 -542933.633257,-425982.872261 -542933.881644,-425985.837852 -542933.934332,-425985.837852 -542933.934332,-425985.928175 -542933.565515))'}, - {'NAME':'b','wkt':'POLYGON((-425982.548605 -542936.839709,-425982.315272 -542936.854762,-425982.322799 -542936.937558,-425982.526024 -542936.937558,-425982.548605 -542936.839709))'}, - ] - + {'NAME': 'a', + 'wkt': 'POLYGON((-425985.928175 -542933.565515,-425982.789465 -542933.633257,-425982.872261 -542933.881644,-425985.837852 -542933.934332,-425985.837852 -542933.934332,-425985.928175 -542933.565515))'}, + {'NAME': 'b', + 'wkt': 'POLYGON((-425982.548605 -542936.839709,-425982.315272 -542936.854762,-425982.322799 -542936.937558,-425982.526024 -542936.937558,-425982.548605 -542936.839709))'}, + ] + from_crs = RequestDataset(**self.get_dataset()).get().spatial.crs to_sr = CoordinateReferenceSystem(epsg=4326).sr for feature in features: geom = wkt.loads(feature['wkt']) - geom = project_shapely_geometry(geom,from_crs.sr,to_sr) + geom = 
project_shapely_geometry(geom, from_crs.sr, to_sr) feature['wkt'] = geom.wkt - - path = os.path.join(self.current_dir_output,'ab_{0}.shp'.format('polygon')) - with FionaMaker(path,geometry='Polygon') as fm: + + path = os.path.join(self.current_dir_output, 'ab_{0}.shp'.format('polygon')) + with FionaMaker(path, geometry='Polygon') as fm: fm.write(features) ocgis.env.DIR_SHPCABINET = self.current_dir_output - - ops = OcgOperations(dataset=self.get_dataset(),output_format='shp', + + ops = OcgOperations(dataset=self.get_dataset(), output_format='shp', geom='ab_polygon') ret = ops.execute() - ugid_shp = os.path.join(os.path.split(ret)[0],ops.prefix+'_ugid.shp') - + ugid_shp = os.path.join(os.path.split(ret)[0], ops.prefix + '_ugid.shp') + with fiona.open(ugid_shp) as f: - self.assertEqual(CoordinateReferenceSystem(value=f.meta['crs']),from_crs) - - ops = OcgOperations(dataset=self.get_dataset(),output_format='shp', - geom='ab_polygon',output_crs=CFWGS84(),prefix='xx') + self.assertEqual(CoordinateReferenceSystem(value=f.meta['crs']), from_crs) + + ops = OcgOperations(dataset=self.get_dataset(), output_format='shp', + geom='ab_polygon', output_crs=CFWGS84(), prefix='xx') ret = ops.execute() - ugid_shp = os.path.join(os.path.split(ret)[0],ops.prefix+'_ugid.shp') - + ugid_shp = os.path.join(os.path.split(ret)[0], ops.prefix + '_ugid.shp') + with fiona.open(ugid_shp) as f: - self.assertEqual(CoordinateReferenceSystem(value=f.meta['crs']),WGS84()) + self.assertEqual(CoordinateReferenceSystem(value=f.meta['crs']), WGS84()) with fiona.open(ret) as f: - self.assertEqual(CoordinateReferenceSystem(value=f.meta['crs']),WGS84()) + self.assertEqual(CoordinateReferenceSystem(value=f.meta['crs']), WGS84()) if __name__ == "__main__": diff --git a/src/ocgis/util/helpers.py b/src/ocgis/util/helpers.py index d64537310..1bb1eb40c 100644 --- a/src/ocgis/util/helpers.py +++ b/src/ocgis/util/helpers.py @@ -1,22 +1,23 @@ from collections import OrderedDict -import numpy as np import itertools 
-from shapely.geometry.polygon import Polygon import os import tempfile -from osgeo import ogr -from shapely import wkt import sys import datetime -from copy import deepcopy, copy -from ocgis.util.logging_ocgis import ocgis_lh +from copy import deepcopy +from tempfile import mkdtemp + +import numpy as np +from shapely.geometry.polygon import Polygon +from osgeo import ogr from osgeo.ogr import CreateGeometryFromWkb from shapely.wkb import loads as wkb_loads import fiona from shapely.geometry.geo import mapping -from tempfile import mkdtemp from fiona.crs import from_epsg +from ocgis.util.logging_ocgis import ocgis_lh + def get_sorted_uris_by_time_dimension(uris, variable=None): """ @@ -229,45 +230,6 @@ def get_is_date_between(lower,upper,month=None,year=None): return(ret) -class FionaMaker(object): - - def __init__(self,path,epsg=4326,driver='ESRI Shapefile',geometry='Polygon'): - assert(not os.path.exists(path)) - self.path = path - self.crs = fiona.crs.from_epsg(epsg) - self.properties = {'UGID':'int','NAME':'str'} - self.geometry = geometry - self.driver = driver - self.schema = {'geometry':self.geometry, - 'properties':self.properties} - - def __enter__(self): - self._ugid = 1 - self._collection = fiona.open(self.path,'w',driver=self.driver,schema=self.schema,crs=self.crs) - return(self) - - def __exit__(self,*args,**kwds): - self._collection.close() - - def make_record(self,dct): - properties = dct.copy() - geom = wkt.loads(properties.pop('wkt')) - properties.update({'UGID':self._ugid}) - self._ugid += 1 - record = {'geometry':mapping(geom), - 'properties':properties} - return(record) - - def write(self,sequence_or_dct): - if isinstance(sequence_or_dct,dict): - itr = [sequence_or_dct] - else: - itr = sequence_or_dct - for element in itr: - record = self.make_record(element) - self._collection.write(record) - - def project_shapely_geometry(geom,from_sr,to_sr): if from_sr.IsSame(to_sr) == 1: ret = geom diff --git a/src/ocgis/util/shp_cabinet.py 
b/src/ocgis/util/shp_cabinet.py index dde5f1021..798a0d9d5 100644 --- a/src/ocgis/util/shp_cabinet.py +++ b/src/ocgis/util/shp_cabinet.py @@ -189,7 +189,6 @@ def iter_geoms(self, key=None, select_ugid=None, path=None, load_geoms=True, as_ ## get the source CRS meta = self.get_meta(path=shp_path) - crs = CoordinateReferenceSystem(value=meta['crs']) ## open the target shapefile ds = ogr.Open(shp_path) diff --git a/src/ocgis/util/spatial/fiona_maker.py b/src/ocgis/util/spatial/fiona_maker.py new file mode 100644 index 000000000..93238e904 --- /dev/null +++ b/src/ocgis/util/spatial/fiona_maker.py @@ -0,0 +1,51 @@ +import os +import fiona +from shapely import wkt +from shapely.geometry import mapping +from ocgis import CoordinateReferenceSystem + + +class FionaMaker(object): + + def __init__(self, path, epsg=4326, driver='ESRI Shapefile', geometry='Polygon'): + assert (not os.path.exists(path)) + self.path = path + self.crs = CoordinateReferenceSystem(epsg=epsg).value + self.properties = {'UGID': 'int', 'NAME': 'str'} + self.geometry = geometry + self.driver = driver + self.schema = {'geometry': self.geometry, + 'properties': self.properties} + + def __enter__(self): + self._ugid = 1 + self._collection = fiona.open(self.path, 'w', driver=self.driver, schema=self.schema, crs=self.crs) + return self + + def __exit__(self, *args, **kwargs): + self._collection.close() + + def make_record(self, dct): + properties = dct.copy() + + if 'wkt' in properties: + geom = wkt.loads(properties.pop('wkt')) + elif 'geom' in properties: + geom = properties.pop('geom') + else: + raise NotImplementedError + + properties.update({'UGID': self._ugid}) + self._ugid += 1 + record = {'geometry': mapping(geom), + 'properties': properties} + return record + + def write(self, sequence_or_dct): + if isinstance(sequence_or_dct, dict): + itr = [sequence_or_dct] + else: + itr = sequence_or_dct + for element in itr: + record = self.make_record(element) + self._collection.write(record) From 
d05c7997371da8797122cd7d96125c2ff85443d8 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Thu, 16 Oct 2014 16:57:08 -0600 Subject: [PATCH 06/71] add test command to setup.py #149 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit A command to run a standalone set of tests is now available through the command “python setup.py test”. Test for optional dependencies were split into a different module. Install command also copies requirements for standalone tests. If there are import errors for the required dependencies, those errors are no longer masked with a custom message. Docs updated. --- .gitignore | 1 - doc/install.rst | 11 + setup.py | 192 ++++++++----- src/ocgis/test/base.py | 21 +- .../shp/state_boundaries/state_boundaries.cfg | 4 + .../shp/state_boundaries/state_boundaries.dbf | Bin 0 -> 2794 bytes .../shp/state_boundaries/state_boundaries.prj | 1 + .../shp/state_boundaries/state_boundaries.shp | Bin 0 -> 222392 bytes .../shp/state_boundaries/state_boundaries.shx | Bin 0 -> 508 bytes ...t_csv_calc_conversion_two_calculations.csv | 257 ++++++++++++++++++ .../test_ocgis/test_util/test_shp_process.py | 20 +- src/ocgis/test/test_simple/run_simple.py | 37 +++ src/ocgis/test/test_simple/test_360.py | 170 ++++++------ src/ocgis/test/test_simple/test_cfunits.py | 22 -- .../test/test_simple/test_dependencies.py | 9 + .../test_simple/test_optional_dependencies.py | 34 +++ 16 files changed, 587 insertions(+), 192 deletions(-) create mode 100644 src/ocgis/test/bin/shp/state_boundaries/state_boundaries.cfg create mode 100644 src/ocgis/test/bin/shp/state_boundaries/state_boundaries.dbf create mode 100644 src/ocgis/test/bin/shp/state_boundaries/state_boundaries.prj create mode 100644 src/ocgis/test/bin/shp/state_boundaries/state_boundaries.shp create mode 100644 src/ocgis/test/bin/shp/state_boundaries/state_boundaries.shx create mode 100644 src/ocgis/test/bin/test_csv_calc_conversion_two_calculations.csv create mode 100644 
src/ocgis/test/test_simple/run_simple.py delete mode 100644 src/ocgis/test/test_simple/test_cfunits.py create mode 100644 src/ocgis/test/test_simple/test_dependencies.py create mode 100644 src/ocgis/test/test_simple/test_optional_dependencies.py diff --git a/.gitignore b/.gitignore index 0c9f85089..a40a76453 100644 --- a/.gitignore +++ b/.gitignore @@ -8,7 +8,6 @@ # Misc # ######## -*.csv *.orig *~ *.log diff --git a/doc/install.rst b/doc/install.rst index 108179f54..47ae78193 100644 --- a/doc/install.rst +++ b/doc/install.rst @@ -99,6 +99,17 @@ or python -c 'import ocgis' +Testing the Installation +~~~~~~~~~~~~~~~~~~~~~~~~ + +.. note:: There are tests for the optional dependencies. These will fail if the optional dependencies are not installed! + +It is recommended that a simple suite of tests are run to verify the new installation: + +>>> python setup.py test + +Please report any errors to the support email address. + Configuring the :class:`~ocgis.ShpCabinet` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/setup.py b/setup.py index 3f79be007..b2a67792c 100644 --- a/setup.py +++ b/setup.py @@ -8,142 +8,196 @@ VERSION = '1.0.0-next' +######################################################################################################################## +# commands +######################################################################################################################## + + +class SimpleTestCommand(Command): + description = 'run a simple test suite to validate installation' + user_options = [] + + def initialize_options(self): + pass + + def finalize_options(self): + pass + + def run(self): + cwd = os.path.split(os.path.realpath(__file__))[0] + path = os.path.join(cwd, 'src', 'ocgis', 'test', 'test_simple', 'run_simple.py') + cmd = ['python', path] + check_call(cmd) + + class UninstallCommand(Command): description = "information on how to uninstall OCGIS" user_options = [] - def initialize_options(self): pass - - def finalize_options(self): 
pass - + def initialize_options(self): + pass + + def finalize_options(self): + pass + def run(self): try: import ocgis - print('To uninstall, manually remove the Python package folder located here: {0}'.format(os.path.split(ocgis.__file__)[0])) + + print('To uninstall, manually remove the Python package folder located here: {0}'.format( + os.path.split(ocgis.__file__)[0])) except ImportError: - raise(ImportError("Either OpenClimateGIS is not installed or not available on the Python path.")) + raise (ImportError("Either OpenClimateGIS is not installed or not available on the Python path.")) class InstallDependenciesUbuntu(Command): description = "install on Ubuntu systems" user_options = [] - + def run(self): cwd = os.getcwd() out = 'install_out.log' err = 'install_err.log' odir = tempfile.mkdtemp() - stdout = open(out,'w') - stderr = open(err,'w') - + stdout = open(out, 'w') + stderr = open(err, 'w') + def call(args): - check_call(args,stdout=stdout,stderr=stderr) - - def install_dependency(odir,url,tarball,edir,config_flags=None,custom_make=None): + check_call(args, stdout=stdout, stderr=stderr) + + def install_dependency(odir, url, tarball, edir, config_flags=None, custom_make=None): path = tempfile.mkdtemp(dir=odir) os.chdir(path) print('downloading {0}...'.format(edir)) - call(['wget',url]) + call(['wget', url]) print('extracting {0}...'.format(edir)) - call(['tar','-xzvf',tarball]) + call(['tar', '-xzvf', tarball]) os.chdir(edir) if custom_make is None: print('configuring {0}...'.format(edir)) - call(['./configure']+config_flags) + call(['./configure'] + config_flags) print('building {0}...'.format(edir)) call(['make']) print('installing {0}...'.format(edir)) - call(['make','install']) + call(['make', 'install']) else: print('installing {0}...'.format(edir)) custom_make() - + print('installing apt packages...') - call(['apt-get','update']) - call(['apt-get','-y','install','g++','libz-dev','curl','wget','python-dev','python-setuptools','python-gdal']) + 
call(['apt-get', 'update']) + call(['apt-get', '-y', 'install', 'g++', 'libz-dev', 'curl', 'wget', 'python-dev', 'python-setuptools', + 'python-gdal']) print('installing shapely...') - call(['easy_install','shapely']) - call(['easy_install','fiona']) - + call(['easy_install', 'shapely']) + call(['easy_install', 'fiona']) + prefix = '/usr/local' - + hdf5 = 'hdf5-1.8.10-patch1' hdf5_tarball = '{0}.tar.gz'.format(hdf5) hdf5_url = 'http://www.hdfgroup.org/ftp/HDF5/current/src/{0}'.format(hdf5_tarball) - hdf5_flags = ['--prefix={0}'.format(prefix),'--enable-shared','--enable-hl'] - install_dependency(odir,hdf5_url,hdf5_tarball,hdf5,hdf5_flags) - + hdf5_flags = ['--prefix={0}'.format(prefix), '--enable-shared', '--enable-hl'] + install_dependency(odir, hdf5_url, hdf5_tarball, hdf5, hdf5_flags) + nc4 = 'netcdf-4.2.1' nc4_tarball = '{0}.tar.gz'.format(nc4) nc4_url = 'ftp://ftp.unidata.ucar.edu/pub/netcdf/{0}'.format(nc4_tarball) - nc4_flags = ['--prefix={0}'.format(prefix),'--enable-shared','--enable-dap','--enable-netcdf-4'] - os.putenv('LDFLAGS','-L{0}/lib'.format(prefix)) - os.putenv('CPPFLAGS','-I{0}/include'.format(prefix)) - install_dependency(odir,nc4_url,nc4_tarball,nc4,nc4_flags) + nc4_flags = ['--prefix={0}'.format(prefix), '--enable-shared', '--enable-dap', '--enable-netcdf-4'] + os.putenv('LDFLAGS', '-L{0}/lib'.format(prefix)) + os.putenv('CPPFLAGS', '-I{0}/include'.format(prefix)) + install_dependency(odir, nc4_url, nc4_tarball, nc4, nc4_flags) os.unsetenv('LDFLAGS') os.unsetenv('CPPFLAGS') - + nc4p = 'netCDF4-1.0.4' nc4p_tarball = '{0}.tar.gz'.format(nc4p) nc4p_url = 'http://netcdf4-python.googlecode.com/files/{0}'.format(nc4p_tarball) call(['ldconfig']) + def nc4p_make(): - call(['python','setup.py','install']) - install_dependency(odir,nc4p_url,nc4p_tarball,nc4p,custom_make=nc4p_make) - - + call(['python', 'setup.py', 'install']) + + install_dependency(odir, nc4p_url, nc4p_tarball, nc4p, custom_make=nc4p_make) + stdout.close() stderr.close() - 
#shutil.rmtree(odir) + # shutil.rmtree(odir) os.chdir(cwd) print('dependencies installed.') -## check python version -python_version = float(sys.version_info[0]) + float(sys.version_info[1])/10 +######################################################################################################################## +# check python version +######################################################################################################################## + +python_version = float(sys.version_info[0]) + float(sys.version_info[1]) / 10 if python_version != 2.7: - raise(ImportError( + raise (ImportError( 'This software requires Python version 2.7.x. You have {0}.x'.format(python_version))) -## attempt package imports -pkgs = ['numpy','netCDF4','osgeo','shapely','fiona'] +######################################################################################################################## +# attempt package imports +######################################################################################################################## + +pkgs = ['numpy', 'netCDF4', 'osgeo', 'shapely', 'fiona'] for pkg in pkgs: - try: - __import__(pkg) - except ImportError: - msg = 'Unable to import required Python package: "{0}".'.format(pkg) - raise(ImportError(msg)) - -## get package structure -def _get_dot_(path,root='src'): + __import__(pkg) + + +######################################################################################################################## +# get package structure +######################################################################################################################## + + +def _get_dot_(path, root='src'): ret = [] path_parse = path while True: - path_parse,tail = os.path.split(path_parse) + path_parse, tail = os.path.split(path_parse) if tail == root: break else: ret.append(tail) ret.reverse() - return('.'.join(ret)) -package_dir = {'':'src'} -src_path = os.path.join(package_dir.keys()[0],package_dir.values()[0],'ocgis') + return 
'.'.join(ret) + + +package_dir = {'': 'src'} +src_path = os.path.join(package_dir.keys()[0], package_dir.values()[0], 'ocgis') packages = [] -for dirpath,dirnames,filenames in os.walk(src_path): +for dirpath, dirnames, filenames in os.walk(src_path): if '__init__.py' in filenames: package = _get_dot_(dirpath) packages.append(package) -## run the installation -setup(name='ocgis', - version=VERSION, - author='NESII/CIRES/NOAA-ESRL', - author_email='ocgis_support@list.woc.noaa.gov', - url='http://ncpp.github.io/ocgis/install.html#installing-openclimategis', - license='NCSA License', - platforms=['all'], - packages=packages, - package_dir=package_dir, - cmdclass={'uninstall':UninstallCommand, - 'install_dependencies_ubuntu':InstallDependenciesUbuntu}, requires=['numpy', 'netCDF4', 'fiona', - 'shapely'] - ) +######################################################################################################################## +# set up data files for installation +######################################################################################################################## + +shp_parts = ['state_boundaries.cfg', 'state_boundaries.dbf', 'state_boundaries.prj', 'state_boundaries.shp', + 'state_boundaries.shx'] +shp_parts = ['bin/shp/state_boundaries/{0}'.format(element) for element in shp_parts] +bin_files = ['bin/test_csv_calc_conversion_two_calculations.csv'] +bin_files += shp_parts +package_data = {'ocgis.test': bin_files} + +######################################################################################################################## +# setup command +######################################################################################################################## + +setup( + name='ocgis', + version=VERSION, + author='NESII/CIRES/NOAA-ESRL', + author_email='ocgis_support@list.woc.noaa.gov', + url='http://ncpp.github.io/ocgis/install.html#installing-openclimategis', + license='NCSA License', + platforms=['all'], + packages=packages, + 
package_dir=package_dir, + package_data=package_data, + cmdclass={'uninstall': UninstallCommand, + 'install_dependencies_ubuntu': InstallDependenciesUbuntu, + 'test': SimpleTestCommand}, + requires=['numpy', 'netCDF4', 'fiona', 'shapely'], +) diff --git a/src/ocgis/test/base.py b/src/ocgis/test/base.py index 03c56229e..ff034f8df 100644 --- a/src/ocgis/test/base.py +++ b/src/ocgis/test/base.py @@ -63,7 +63,11 @@ def assertDictEqual(self, d1, d2, msg=None): unittest.TestCase.assertDictEqual(self, d1, d2, msg=msg) except AssertionError: for k, v in d1.iteritems(): - msg = 'Issue with key "{0}". Values are {1}.'.format(k, (v, d2[k])) + try: + msg = 'Issue with key "{0}". Values are {1}.'.format(k, (v, d2[k])) + except KeyError: + msg = 'The key "{0}" was not found in the second dictionary.'.format(k) + raise KeyError(msg) self.assertEqual(v, d2[k], msg=msg) self.assertEqual(set(d1.keys()), set(d2.keys())) @@ -139,8 +143,21 @@ def assertNcEqual(self, uri_src, uri_dest, check_types=True, close=False, metada raise if check_types: self.assertEqual(var[:].dtype, dvar[:].dtype) + + # check values of attributes on all variables for k, v in var.__dict__.iteritems(): - to_test_attr = getattr(dvar, k) + try: + to_test_attr = getattr(dvar, k) + except AttributeError: + # if the variable and attribute are flagged to ignore, continue to the next attribute + if dvar._name in ignore_attributes: + if k in ignore_attributes[dvar._name]: + continue + + # notify if an attribute is missing + msg = 'The attribute "{0}" is not found on the variable "{1}" for URI "{2}".'\ + .format(k, dvar._name, uri_dest) + raise AttributeError(msg) try: self.assertNumpyAll(v, to_test_attr) except AttributeError: diff --git a/src/ocgis/test/bin/shp/state_boundaries/state_boundaries.cfg b/src/ocgis/test/bin/shp/state_boundaries/state_boundaries.cfg new file mode 100644 index 000000000..4acbab642 --- /dev/null +++ b/src/ocgis/test/bin/shp/state_boundaries/state_boundaries.cfg @@ -0,0 +1,4 @@ +[mapping] 
+ugid=id +attributes=state_name,state_fips,state_abbr + diff --git a/src/ocgis/test/bin/shp/state_boundaries/state_boundaries.dbf b/src/ocgis/test/bin/shp/state_boundaries/state_boundaries.dbf new file mode 100644 index 0000000000000000000000000000000000000000..641bb0337eb1580295124fb4c50042bc6cb47d79 GIT binary patch literal 2794 zcmZuz+iv4F5H;F|1zPk2+9!U3c+=%kE9athCGWeZYrg;JEmPWV52{xd#{#nl=A_ozSq{P|&g|G!ZC!I-x;!ng8GY7G;ub(^*(BAhydwX{23 zyi}%u$&7pZ%`+^$SSuhIV-j4Z5ynCng|dCR>IEW$RfJ(@FfY~1K>5Z$ ze}Gk#f|QtM>J8n>!)skOWk=HN79b2q&LA~!T?sF~6lx4}0wiNDq_uLhJ6cs$_OiVB z77oOxPA-0?o=a)>!$=v_7!D}tf`cZPCsXd-04F;G^>N5Z>&M}}Dhm6a|B!|5M!I(n z7I{wkQDSOcHkIBSdvYcqCGi+*qY&?5XSNEZHkZY=ooGD zWUjxPvQIHW36M;D>O}G9J3V^C&>YH=1Q_lM3DP{*h0&ezI-M`QYmZ4n3^hi=skT+A zHx)A9QDXAr;rXKqrU^hI3>TS1Q+-D6-4BvVWr z2o|z@9~4UyMnH$5!~^vysn2gpxgD~W%-dXQ@gaRmXR4@>(7H9V*p{^vhFa-TGeT;&HhWcl?*eGlHsNL zIm$6wer*p{KgiHv8EM*uP(SsiuYV>0LOsxh(KwgSawK~c!+l|lI0s{mF3sQTYfl;h z>NAchgyc8M)P?U-_2Ik$^_kC{T>SI9KSJ`tAvy3INUi>Uyf9;U_*h7w^|A2I9JL+m zmv_#=BJ4n->-vW2v37Y4jB$@Z>*Jo#o1K1wVADxwp?<`@6quuf6x$Ykl@Qmx)Q5i|POMS@neWjhTsw z6=aR5J936-i}kth%FU0+BZ@vHn4?k$?;ohBS$a!B|F6ea{I^w1Ow66jeLZh?S--GbgfG-uk{fw?S+G=b3vR9*) zca<`Fle|_Xc9k`Led(#;+=3Ds{gSw~ALPb<4=$V=RzmZAea~H4t#Rgh{iaPHl~90Q z>^E;VYdq6Gp%>qzg#Oj3df(8C`z=a{dc0)&5zvupZD*r4poBEDqPGk&!}IhR@{1WI zwDSGOc7#@y4l}TMs@%Q$NhRdA)3%ytoPiCmUl~0B@0)JdsXaT%z^fH6));v~N?FcEq8Mx0|b=Hbg8Lgv;ylb9e;8>MU zSVl@2SuD8ZFZZW*d2S?1Ss4vcU-1Qw{%1o0+~V;E^qFSseCRB%jCOtf+vD<`f%$|> z3#LSskzeq-cU~aZtp0QH%o=61QvVLOjiVt?8Tirc?rSGCl~Ab%v8e?xYv?$?I8h1Z z?OM1YP|U!YOY+3MWF<6m>e-DmWemK1oVsRKO$iOJKOFF^f`NOUu^9#EE1`-^RY`Is z1G^tJ;55-zLKpQz(sn#&U|qS~1=SNuNJ+B~%hfWl1yAB!j)@Yw)oxIB{RIPi>rb~` zJ+Fk+**<$V0zNIGyfk$~3Egns-5lP)z*@UCPLppdA+&Bt`7!htmZI-)@Rky~G&Qo% ztQzLyN@mTMY$deOeph>B6$3y2Ys|B*Knb0F$2pCjF)+W>1LmYuC3N`R#0U9u1};5! 
zbhzva=>Oxxs^L-w=G?|xdB|A_ZKC#CnwKze^MgP0HlEP_?$q5=ywz-hk;wLOQFP|mg9QPXq=7IPZ6kJ_h%IC@OOR8knYm+6a4z+NS!ARYRn zCYga5`z?6c<&}`=-;ke=?=mp*xpgi_NeS^Cdo-{Yu<)G>g+sfP&=nP#^GA~y_>~pw zaF47KvfJBvUJUSGi?K{k86~uGypFtA2G#B{@X_*`=T&2hNK$t0+BRsv%lymnCDtV{u<@%Z?$<9Bk>Q%cP0RKfO7AZ_Q=y1lW%`~?fIb@X zcY@8!6w!&(T+&ZL{}D|=-Zp5j{Q01cJK(93;2eQ2MbzjWZ}uGCfBQYxjKe2Iv{Ij* z^PiZAfegI&dSG3_Jw?JfTTosfg3DTOY`d$ z5q0f(5xZMpKOKer!>x+w?OQf41y~>I=}lGlzAK_+`RBKXL4JF++~>Wbq@uvW0;SpUtO)Q zFmN=p%Epfm6p>BvfrkA74D39A`^EBjSK5oMKi-Yv$@rfS|7+B`bJ&}KC0#x2V^%Am zJ;g6xUcAV_E!*9hZUYvIinV3)Vc?J1F}n8R3aF$h*=`BsifeWo@tP|jqR=n$DPLIc z9h^Mf7ZuP2=qGhYdwW-k0%~X95w@&fT2HsbrxyyS)}hwf5airi0khKW3W#6m z-M*uj7`T6XNBG#d0!nsL==8Y2z}mr{@lLB0k^1GHn?-IipGF6XBLqd1qSZyY=f%K0p9uR~6ctgT%HfOit_=Luai6xfgd%El z4*lj}%fN%=>n!vo1!VkyG_{cd_G^C=8QiaclBRy;Yg;j}(Y5zZzNHFiJcY~SfH?!- zak6enc&32ntADyK<6t2s_oWO4^zRucS!3LNFjxU){;6RWFk#?`HAcpdJrz*2E>BgC z83QXT4~E$|Dj<)qF~V0Z7`W~&S>vTX*e_}NP`@?o?^kMfj5QPx>v8ppYI_F0^`m@2 z?63kl-%)=%--Uq>9iT{EOSY$p}IOCO$abmp{Lj{{er0obGhDES`?TfA)Uq z*i1$xq)(2?NpyVS@2k~^wvf@SBjt+xfP*Y0UEFxcXr`o!Uo4i6)3#|6^4ZCVd$9+P zrO@#)jZIuqyksOl>_*K`rDLLEq}h*6WMp?pGd2d~SxQ^9E{T!R{D-8PPw>7L`#`%$ zQ8J3&WNoV)N5^LyS!4OPlab%!zUZxKbR4VY%*LifM#7(uXXa(mvEhJI^;<WHh2V(ryU)bn687C6dVKi$KyNpF}#o856g*R1<8JY$u?SOvmdJZ)8_c$cV2d zMZ5s|%f3Iy58~} z``^7TwD#D4bG~z1)^zVtGdw2$VlB~;jEY2kUS9%vAiufucUCe|zo9y519Ff_L0^EBjJ4)al3BCChcpw=tSMS;Pga{HEPkuC#1K3Y9 z%p_NXgf7Y)edYx?0!OTGP$r?b9rN)$fOWVwwpQ#TA!}KmI1vpqtjYXyd+II{s{aw9 z^Ahy4@hv4!$db^BNxg5I5XcqUQ;MWWs4@1w_|YR~c<9(&OnhPyd-zERAC~tD0eO5=#3grb61wzA&wG!W8MfSH5Xa3$LUZML zh9^NTSsCwonuGMeJtCX5;hW_W5g7*RM|IG_em~-(<^elsG#1{nHN)S!Q4Q9U{!-V64$d&(!hfsT7(|1y=nC!*4GR@N4q>DY_eMf+e65iMwF z?Vc2+85H25q(?l7WzjO_V2WaKpg)Kz1{Y~tW^hwz7jZP&Wnn8bknY$-6>3F>OZK-c7 z5#@BeV`A0<{rCkJ#XE_J;;}lp2=K(2jUN?2zgx+>Z1N~{TxsU3G+@nuGU2pV}=FY?O<1jCgeycEk9;-|ON=zI2>?ymG50*z-QS%U&iw zI`(gI|H$_W@T`L{x<<#FF6E@hG!fCDcuH$_1o+dqS0OU>L`2;h(PkSB`u=l9xyR_r zx80`WQ=K_l-VH=lpSr0KY%%xVRV2gELbStG9uDy%yT+A3vmHrf{LV z*p9q8{Q7U$O6d3!`F`aEW)j*jl9wD)O2>Pj 
z{5>ZP{^v#G;g{4(&|g27BDs!)Ow~@!9ez&7+&^hMCpMDMJGpl2+b`(2=*sD|DFG77 z@fv6*Hqo(QoKVeWK@z$;`7kQ31g@#~=D zb3~r%z-7#BB_;lmjxB`MW)-%P(4DF<{oElsb_%nZ6cHz(jlKh;XU6GRa(x{Rl7#+# zN?*vEqhk{u)!h26Bvc_S@-%9Rj+>6~ZYY)}p&O1{gm_rZu?3e!`|+({y~hm?N-S zfrN}bN+^m8bX@4GCKsbhLg`9VigLf{*ed2Sx8qR~T8z3SbMh}8=LzMit~y0RM=wa6 zIQa+clYggM%8-Ou&zo7dLw`2=qx*iCl90JhL8J~mf28nUk*PTe#fR~UJYQ{&XM6>v z?tz?g(ieAt{<6gb2|8vZ)Kg+W2x2qG3g7q)*4mIzfYMaRXAX1hJa@4~-kpRb$%FN^ zAh%eRuXx0Rgyto5ISK(Adc0fw;!Hwhv)7B=FkWAY2qjm05_<5v`3n;>;B93AH(f!V zdDh;^J@k`#!$L294~X zb+@3)u6*Wr>q|ASN$7xtn)Cct zb9}A(!gr4$64IQzH{Ky-j?c;jMZB3Hp?_n_JUi}p@*ByBa6?r}Vz)V#6HHn61meRE zWwOE`;0=$On)T!$eiN;d;gvMU@5+=l%XgE}@C0>(3yhcUO0zZRBO^l{rOmc)A-?>q zaYT6?89id}>iZ1qz4gbhd`{@U(y-9>PZ=Gv4R-Oa<0d0X!oCc-a_|qEV;Vv^$S9KY zMs4R)IB(qF+~I;^K*mRWs}0sC=dS;(sqE;1TSTDQsw;zwmyJ^Nu5gkX_+kXA}9rkmF_|6E}-@Y3Xx%uCa5Ze!H@>kg3*`+dj z{aQ%KOJo=(hq_ z4GB%!aHr&g{puaY93QokPzLX$q8#|UU%zXdZ+3$JFqC{i%%S6bXT z2XrhO$G)*;l!U~iUQZnXJjao_wriGzJaaWXxgeg;>$x9nldPlupO-_-ga!M(}T&#vN|lhx6C5Q!DQ!35~w+PA!N2 zR07MVzk$Aa8G=H`As#n7qT@A+NvMBAT+B0Qf5AsK>hBX0>b04jY=`#EERyefVY~&_ zr>E1Qf2V~w`N}L3dilEayDhXoSS5d0BMsuu8wsbSL9P9%v=Eeqcf|z z39y2zZw?FKVGq?1PgpP411RZ*k4WhGmGr@5u>ORiz3h3RKMA#f1H540prFK?5qC(a zL2W>+1nh6^K+9>qMM7zJ1fp9ZANcv&;^n%#B(zI}q2~+Fr!u}L?}7gQGJjYwg!QH^ zcJtXTh`-lFX>wBGJYmxCAZcZgkiwon+Kd;lo({`PBqxy2sr4_d~Veb(r64k&ubbj-4FKkKhn3KI~L|c{4v`cbZ2K)b)?D+laRcm+KUcG3&jHq^j%iNhlaOxDxgjy= zue??@`BXKmZiNu zs+AJpnVb;Dqa$RrQs3`7-oc|V|1%-9J@(tksNcuJIv)Jx8^`YWMtL&oWDh!&1N-Hb z6Mb8o0vYj`-5W@U`RC1MniAYWM&i3l&AR~adtUF;$Hs;< zLs(xPtJ(Xmg1;{~y?bU4_^bITmZ!Fha2`k6rT4@824YY1_Q3Oh4P%9ePHqyjx%6l(Z1Wi#Nj@g`C|+C-GOwPa}wF!|TMfw7Yi$68*f4Cyn)a}A;9 zVMNGFuG-Aph5j9~e_gc(tbPAyj1Azc#g`M`X+WGEBYN>Oyzgpb7&Qg3_TZW=7V!L~ z>+>er>O_>X%0EDBz!aB$IHFez&u7eVtc$0bbZyI3hFqOoDDN}rV zYmo0%L_`;|zHIECHN_uZiWVx6h^VKC^~BxNPwWYs&lAVSJ8l78{KyD{{N8trG4HpHS31Wl(j(xk>HL;e4=k+5T#sHUp@b9Ty zN5fmW>+O?3zXL5W-#j$jlas&eF<{4wYM*#F&~S}Sho2$XhefRN{s=z}3v^5SC`12S z?J8KB0dpCsB;HpeqFJ%cmjVT8SmNR8{3{2DsBs1*k8Psi8)x06t^(dvW@t&>Lc{A` 
zRy<#Qn21*2zAdp;jE3z>&t6tPL_{`UH=GENpkc)fwbZSukcT&4e%G>thMPCXj|T%* zu6}r}csC7SeY5&jBD`;3y^G}Z9vXfZTcWfGa?5AWN#FO;a9RBCOAn#{r-h+*)ABUj zdZcSh%0bAhM($nxrbxq{euaXANR!_=xkc%ME#0UzyP=I)H1g4ehzRhlr+j z5)(O;Y5&_Ratpqn%J$Q6IsX-R)4fC#Z55f;s6xX!LV*ipcwTN7OAGg58g5J7@oB3f z@G43Idh66^*lD{{W7U4hQ>9b8YY`1s@u#n2S0SR7A3p@n0M_;%(wqSM{EfA$(mYDT zY;v34X2Ey{Bgf5sLB76(smk;y5%r(t%yK*i`u7}QR)P83pFwN70r=u;>j#Q3U!5;= zFX|A$z5}sZF(-&9y|wH7d374T6W>(y?G*3`!_GE28Z@jl;Gg7W2>Wm7m8JoahVL)k zZQKa^``(jC?W=%;)7P{Fn-USP`Q0vQG7WFKb>7X2PDI`F9+%z%j*_?PIcEuZW#V9g zxP`^X?7k#mQJnhj`JExL?c z1lYw%zuDFh#_My@mJaRnsRRA9CN#XcO8#My0}=VG|4ay=(Qy8(rTw%M5lPnUh_5!M zVa>WBMKLcTs%suEk^@{Mx8b+CHxd2VuMwGP2J^l4rJf^T;;e8~2|RBW{B!cgdGIGn zv+V}}cVj|V?PbvCvg$iCXuo-4$uf;}ENJ+Aa$0Ch7!kFv89PH| z&~Uh>-|kw#QPlQZHP$qIN$Q3yZv+uN+Nt;E7GR-aT7$|>A|lcFN?C1bIOr#5jchc` z5BX1`=Q3W!RU;cqL}k(zRup?!PfErj-U-0FY@?3LI??dSvvJN*_lW4c)1J1CfaR^9 z-3-4^L~91T1ht%LI9KV=H|dAKEA6OFaRuC}Oyiu)BO(+u!k*wt!=>L_d zC&+FzESr+y&s7TRZ6tT?L69HX|G|IzGa`CNW^+CR*lEB>W(X4zzi;-ddBA>uey7RQ z0Pk}7OsuIV^moBK;9E5joy__C)Ww~KL#un!ieCbc(`p@m--Ct|JJvn;1UyMZ^R=Yw z=V}@o`y@dCw&eCoRB7&`V6q|{o;U|z~g*fJSZ)4k%m(zuWxw| zyoq3v4cjTepPhJ*4g*iMX;CY<&xuH^S?09@AMq4|JaMP6?=|TEr02AYG|2Y|-0funUTMQ- z+GLVH4UcZ+DQ5v5NAH7x;i+rjZ-0{A)&Y-1H;{^pxK6_tnv)AQH4@Psi_#13K{Sjn zOpUqK6Vc9_GJm*3XxNljS0}lSh%V|JdFB-g{)ODv6!jeDgVTNRQUndRAL6qKs3js> zcPiln;C0bq{uW^GFFK~z8zN~C7dPFPt0bb^bp@IOx1fKcvQMF9z_W>XJ3HK_;pg_Q z+jYu;XPbWet1cG&i$&P8UC_SVBD~^t0?1txuJlzAQBCVJdQ~C~&x~I2xdHE|9XigO zlm!0pOlPks=*#N!MXDj0hU1t?iF2TDzDs%&z6bl2K>y_S0?sEL?!ANeVgB{ri>gBV z=bX{&lT%^8rB+oL!u*DcdF5?Oqv5H6R?%;-V1B-YUEiDn{#*D+$`!D$l(CO}F5pK4 zYy3%If2Q?jYkJ^)?HXSsKli|S?xMC<75a-+mDnBvJmE}lziTew!)fGim->Lmvai_4 z3;o|>;U(PuL_`|5v>tB;{glO%kLL{$(GkTi?Mdh_=e;NQ2jGpXUI;Yl-lbuyQT_sV zz@3F1gJySVSkBz-iNqM3_nTIK=Qq!2=^R5JwXqb?J3Y!&uJ+}LIU!&L8)Mm69s%u zuE5j9Hxf1-gz=l-^BOrJ5A9c}m!Agv2+8_tNh(19`#*e)j-%m}rGZfHDX;|OLA*t=aF&N*az@Kw5G(7Q<{zUC0 zjK_GhKm+WDGHrc@(o?`&-5k2;2=c94rc0jbkU46Y`k>I*`|I z?Q&9o2I%Xw(YIZCNE=IOWbb&yrh%-?)8~8d^?aKF=a$TQ-aEKVvpecoKePW 
z=p>}!rX*aDO@sJyYwrl~d;z^Y)BU+LEcJR;Xr2o9O4X6ZB2U2IUgJCX*O-Lf3renP z$cOV6H&DwhAg|bU!TL4GeSgXoY_TFCS(|IYub$Fy({8J74F(B)zh7|XTrt?2Yj+H* zEeUPgzV~go6&1Zl|JhEC<9`GAQ$~_J@68iAfzK=^0e_ph>Yh6A zh@RJ{qa&U}eAoVZRV?s`xs!X66zf30@GvPGe-gTD+#!|SK*P0rq#Y%KNT|tT=bC^< z8a6ocQn?a%0)3y%Vgkj0w{Mzqi=k=>%S< z^kF~?eUOF)cON0pfcJT#x9ioXZxD}NIeIcZ57z%qr^pAxuwJQ&!-54QWbbj8%K8KL z$1!sD#v(_g0a)YJw%>8U(_KQ?(>C#9g`TB*;6R*=p`I!TPYt$jbw)K&dEs54;=_Jv%)Q z`fMuPe{<+H359*5nEwLYF+4lU4m_HCm_$JOPa0;3JhvzOW(r~e;*ayQt z;L%sb?G2lT^YQv@M@v5m*{191o6gYi$IHj}^8k-&GoJPaOaV_4JRYeL23V2(cuX$j@ek~4dQt$N2{X{>R3oE*bKxm{Kk|OFXOc;BMyS?rZ_Rg z&U`s{E_}CzXKkJ-E=k+4dl?giJovaR{=;nVY2l%!f0`1Y^q+1-nEcGMbjj%X=ERo< zfUiX!9_0pJq@&W-Rv2)SZG6^hCa7bu+R4uj*w<~eRsq&y`DmBN56Jz`ZWv-IhxP2= zeny!SgAGPR9_%>mL@_M^_*?$tziTe*cTj6?WptUg!TdrJ;j-T zm-d>y%sys3&8~6DgMJe5ArFm+CY;lCV2P+cU~G`_HXZx#73In z`QLZdt^%&<%{!y|+XTOG{v0(2*i!f)Ef(yp-P8Dj8T$Kf`&XVt#uQuem2Argc}4m! zu^sNF*o=#6`~z_G&TF{nvMJVBe=*1u+TU0wv>*|0ihC5F|7r)hs%pLY)DbCz1Iq3$t;G27n7GUkR1FY+S5BX{q^;{G1;DCH@4CJG$>W(}5;JEwM(IuR+QO{;xHNOU@n;nPz?0MGXzknqTuRa&kBBCoV@@04dTU2;3 zZ#o5a05T;Z27m)e5kW_Ei0IdN=q=sXz+bF?&8!Ig3~dApo&>xU@l#q2^26+g@`IzW zzD?vb0v(KC{x<7W$#t6I&es+7>L&2~!YX3CloQFFK)yX_lOEMI=ya*77? zw`euo(QS(T!alH?n*+bXXRzyjk10+Y`&{cpC!&w*99oI4Kr=Suzws3{is^KO~P>OAC*)v(>Csnwf{^&Jkvj4&0eP96jevp)Byr2 zyUV$8AK=HQ)1;4%5|9^BJ2vkM1#gnF%Q_7BwsVL)TQCKG?~J-41pVD9iKPfeQ1Ca; z!CejA1oT;lZRde-3LY}v$^4~}fQ~*r{Ls;R^%=Lefe8t^Qe@94MPjh&)p{=gD3CP z=K!aD^;gQuAi$l4Z`k@u3J&;A4*QxzK%|AKh=+g&lT3FOJs}{jCBdIT&tQH&m;~~b z5YVxqY*%rRU#jo^ytjgYvIq~_tU=B}j$;y#cn+|0y#5Vg87S z9{$LFC|E+l>k2xp-`|1tmBjz(C)mqrui2^u$jy&+n3S#td)ceL61IH|Tu$+GKOT?| zzBATkenLcI!@sYtDxl!fheq5^MUWeAq~@e%QgAd+(!x%VvxxYv-k1aPbw=DL6LRg< zk#rmCeF{EJcg%?FBqH6sK_hxRtdF{1pc?p|cFCjn6DU}7*Om7&Ux}!-h%$vDDfr4& z6un^z^Zh(>6ZVJ9?&mF#i#)z? 
zA>%5@N6mg53xXOY?JvFRk74}{?q2U63%SPO6IWU!qbT^Sw6!4QALyd@S&x8#6JouP6I!vO1Q(UC$xps zt)t-igmn3hfO``1#*!y>u&*9{y7fBXYTB_MKXvern%{;8eiKpb>?S7K4;^ePdFF~P zK_W79spIm?Mln$O@A-F$*`QQ&XaCZdU zAFb-x47spt(NRY^Xn)6;KaK(Ox!HA=@M}y5hhL(;Rs`-MTNruLhjegJ32~QK3lRkw zu={W6)xmlu+-u(g7r|@QCwdQRD%rN~_x8Xw{nGV9^7dZZ^P8NpxlaXqGu?vqH@nq2tx*SS=P@jkpuH>a>CQd1U_bOq z?&Sp{GU3ihWq{nFJ8-Z6{04-)f4wCyaDPTDBi_DP2WMQ_A2h-sqR#_&YTKK2 z@PJ*iNUk1m-I5P}KI_oIQkgNgGXPI$N|=1;(ZQ`amw zTQkk7d^1Ga`-TEgzQTvWFij1`_;HujkZy+G_j1EPXV~@ z>IOV2PQe1Fb4-P|0~ge?_@D*kccYH|RTqa?#NhZ5%e@rbTviaKzYDlCo|DzAim;!H z8nwRfAfiPb?-zW4H~EaD{#697()hQU8?+Bg+Eg|U1pZM<@`DQBet7?fQKBN;?P&P-=N2kZ@CQqI=V6Fl zCI`)LOahh@AL!Cn18%YN^gf0v1uM7h5ax&R@@o7p;8LUDV+Q_c%VxkDnu; zauHwcU4|6ga<7iOW;GG*9?v_eW=z3fEjdo_XNJ2RIfavBbPC2Yk>24y326U4-)qG- z@cwcYcS$&Z&lJzu20Bo1A5A)se~f^}{=U93VNbzo+xUCD{}NEN)M4^LJ81vt&uJ|d zh%LuVzid5A!Iia#p00=YYdqllGUN>H*E|wj$ zo6#Frs{mJ*YIZAx3)W+KU}c*u%y*{G3BQdLJW%XeB?{{+SNmE}ARo*}POR;#&EW4U z5B_QZfBHx4`rU^xUn}1aAsB>htU}AN+rN2?!0|tOx6eX79DhQ$?l$WGaR-?K?Up~A zHMuTTEfQFV*PTK z9rb`4g=rk4V#YZ2lU+I;@OEbh@7;dJxTIEqB@HlJ$7J2j3S*q&m+2D(SkP5x@1rVX zsP!84ItTdw%5hfYKr3^kl{wVP9BX9`wlYUsnZvEj@&2{lzxTDg+V+9$p#Q%(vRCni z(-mX<;li}v|DQvKip#QF7~>yb2eX!OONmb&8@Dkg?)$cJ8CQC#Ol|*6#XE&miOX1{ z!#|M?>>DI@PH7n@t_xscyF~q8&b#W+4gOqhDqjC%}^QoqisLZo7z-M;gNhGZ1)E9vkBaCYJz{!|?@>SgBVmVy30 z3vStz1bBy>bLdBSUgXgYLryON=zm}9<=aYt)$8d( zvY>C$I{vD>ePonI*{JvcLIBY1Tuo7xN)<3q@H-Yv}tZme*fO99N6_i0pOA+S^e_}cS|w`T;%=-0Kek4=C}3NCw{=O?2# zjY3Ihmi0a3an=%Q8IPqLNUZ?*{BT|I;s!E0|6#4zGmtlZR6R=s{8l@ut{HHoAosqn z>&PglSJiq#T@w+P=n(a(@sNtCt3x^?}+J$s@<6b&gUo*Dlq}4L_i^Q&1zC732b|?x$Ey zkWk7|q3PfwRP5#6_F(}yK%0}HB6ENvYkU8s0!PTT*-lVPm5O7;dsXtj0*C&nS0LjM z6;BLiMwfjeq0LI&hqeH2X{>Rzdk?jZQJmCK6)KipTf0~N4b=K2=9UiZr(#`gi98IP z=2)e9S+Ft{vwc&%J_)s087J17{sKH6)R(=!0XY7e4q;=!oj2kg3hRJFXA<}0hv$E< zJ=9tb9BPxS-3LFA*Cb?}av`f;TQNU+)dv^;4Lv0}U!K8G+ADwAmjRGJq(+l_YTzPMRw?<=$066Q--!gHy1L&Z?yYNDuib+CdN?QPj<*yahRRDjHZ@kg? 
z0trR?Fee3r+*EWapMD;mH`qfz16Y%V_?qHHLcbc#HouVrecz|pcLUZl=RE8RIH#^9 zYs)#{fafBsoA!afzS*b6>P$jBV`|Jh0lTTacaU^~TF0PT5-Yq0v(?68Rj{9F{w$_{U$qDBEX1Y17)w*}4>wQ8pq`<%EX+P%P3AJRuLsQ9sHR`)1 z!?u!8Xzb+WpzTz=SfZO1B|<{y>9-b>0Y5UEe-R0Jx~Blw-!MrkmOg8g@mPTLzu3&H z1AYs`4f`R2uc#m78f_=ZrK_Pf@qYBpUl6E%fP1zK0juGjd5rr zSI;ouR@=P4g8jx=x}B}89k5OQNNpVSZM1{Q*xC>JUY&iY5wL}oHBtW=0jZ5jeJliA z5EP$&9s|Gk>G0K7z$caWa_)kBAmz2f{dT}Xm)_ZBZGe37A6>`bW|k;OL^{3!q~-R* zH~TFDfU8}}eg9jB_b)fTk~?3?t*?wrSH`U?%`@@()xR_4ku3)x#xa^-_+z7P2|J6eJe7f=m~fEFD3_{Y>=hmHlCVWw?7h* z{PU(DP4GLCi^S679l*bI?A}oVelLNqv1AtbkbR#mB&R_hVj^|^Inhna_|+cV05+n#y)nFjmp@JXHKh(h3|wOtEJ;r&hP{7zK^-?o(_ z;W)np#1XG;3|W9L5eq-u7rB*+EfVzNxU+~z;k>Zv@hz~f*%Qi81`#=%#d-+<_BhU$ zZ~*ue7YDkd9N1N*t-1>MU|OP(Z$9kf;x{cECkqIu z>Xh*L1aT_9l)iX!Pd)){DvA^K0$kHMwjuv90XeZ$-;M)&Z1d}~w0uB7+NF;!@j!pW z{ySvE(+TKdacxl`_<>_PUvpR9BcQjL+EcHfe~$jm6Ame`zh+;@90&RHQY-Cj;EP4s z7;+fo1M}azdhZaBzV$n{y`nH*-_<<^5(y~xq2v`3*qgnARrz@W0WsY?bM7nb{m%kc zeX@}RG;_nF%Lm4v5YS_*2KNC*`1@28;1thyIoUS|=)`7e{d{fgKESVIk^Ww>X(5nv-4D~y@Fk!pf#LME&+4p!C^_7+b&tuf{rB+z5y)bYM0P#uH{RZf5QT`~XkCa1M;O zM~dldlLG-MCI6mJ1ba;!X|Epx>@}P8%md~t$Gv#W$QJBztg7m&3>9xUZ(-!jAfS)! z*DP0o-yi3ZKiqCkK-EKM4hw^P`~A(Pk4)hG#PKc;=s)$wt)80(1QZrm>YfVvknLuk zH0l#jCB56F9rWLN=zDvj4gswl)CpII@kUsnJz6KgzwJEivL58Ig_avVj{`q(!cdC> za&CGbpFEj>wn!LiSAhL=r6%b?$g481Jp}FjXVKdS`lnSHXDGhu4UXAp+Vn02PNYp3eN5q#=;|ZQQC_1^721 z%*>ZaK!0;fwim#;Xd`fA)La|x6VwwqUjTj@vgKjIDd6)%|EPPw{%v|HmBy<}KwsCI z+C2kY$#2af2jgc-3+6z3sCcfDIx+@Gam>_iATRuy1I1!Bw$?RIKABtCwj;K;xk#De)taC(K2KYtvvq1h2P`0Ze|@ z=pqDi1~n^)53tp_@W<*F1mx_-m6;E?TKkqgk0k-sn$~g6A;@!!cIT=9mgIbwXiSDU zgddNrhxsn7l&sW;ylFW^k^CM|}&(b|j$E z&}RF{Q!w6-7t%S-5RiwbRk%3?=I^pDp#|2TTZEkQxE>Wx)$R?{K1)EnO~cm1R4V3| zzqF3SnSe4E54EV9QSq5S9QyB}{neA>e4}(K{3c1tpcwi~Qhn@gZ2@sVQ`@8vJpY>z z5r54Z?0@^kWH7XM_i-?}e;WK_v2OekkbmllF#F;Naj#42r7fVJm-`++hqF{nYkIf8 z1Nv*J;_!2H1^?r^XUi#g-b;4k>S}k0)7l7$ReJsPklJ! 
zVLt^7E!KMyP_O#wT4ECDoB8YGFE0Xm(B2sEHW}vs!DQ?t_!pUPFL{o(0k0v;NMeI{ z1ix}QEc_Yz=eFx%0W1))M9Tr*#zFVGaxb*^-kILescnpJcgud(0o?W`%0Kj)F}}M& z@LnO{mGKXRMDV{tS{1h(NBw(meMh*gPvf!%avd+;b_!yXfA77Q<0v`!E`RpDeslIL z$gF6)RLtLMAa$8dlrX?+^kx37%rgRi#=#lw2#`$E6;SR4GF6zg**zzbD-rhdbGu3Yc11vuc( zh2?$Ie%Z`8OT`Ft#(IX_IRa-=`}w8uX9hSle?!kOUXx>^|82qm zm$cL#5x59P?@ohKY|Ng@O8*ypvcF~1;6|0K^dgBIo{?m<{2dVJ;l9YK% zy&(fEcDm!_J-D;u5E%Gi@x=i5+2n=KIT4ZHlxn;-;22HEPO3c-{XG2BzvjIGKG-Vm zw%!fy93)5F1G)`xTSB1ACp*ZIKF-ODbsFG>n2-9UHt>6iYcf{-AkRwbIhGFN*LV?n zT>GN|CMY$SP(lCP^fL!+KZ8AKNgT6qhqYIIuvO%f0WM7U+j?1mh{V&oXsX{0ux;U; z!fV__6rvm5=neYwd^PZNggY4BFo&SuqXu|Sb5@${8v@EJco(TXX@Ey-y*G^2!5KMz zDwi>3fb|iJba5F0y=Q;y$~tENzM$1)@G;2ez1K_urd2ju9f>C(^>Aj3HDEuqvzS~2 zF=v_#$5Dab26*u`Pqd~#0jc;nCrZ+n&DD$B5KA0OP|b$*cswHc z=!T7kI9ny`{BOW(FZAdf*bVcaus!TCU@|LH@r;}y4$L?i@&@o`jhQr&!-n|EyBLWs zz_F#HEX+oR7&RnTm#Y!bi^Naq_l*rPk=IZr2k>VxExMDZAzrvvm*EP&Zrdb#&6bOX zc*s@ZT_9kQ7a!+7Cm7;Q(+yiT93`MhpK`C>Ohep%Kk2nR;B~X1x7Pp;IqWz7KpkS9 zD+{yV0E=raq>Gc_43GS%crEMyBkRw@sd~Tv51foisR)&!L8TIsbjy1c)?|0dS z0_J`*V9ewoe1D(LteE{ozBltmRS^hj5Oks zTnru8;r$<(jCDrjd|J`33Q9to)kY1}^!r{v6N5iqO4v8fS5 zz>abYdtu!!V8Z7gU-&448)Dw&{O-4a9lLTf?$;<|?y{$8zEqchg*)7GY1BYX!|Ok5 z20?$%Z`hrzY0TYD?D16W7O)gUp@_$1V{U=!rDJpc3RsQPg}vvskz@XSXicSrkl9u) zk=E5iPJ7D+$7utFEYm-Dsp>4;Z~QC`f5-{hFC|lp)dq-NWt{)kJygijWPiQvMGk-I zcH!+{1tAMbpH-%f`)kg;&Z^R3Lgw>U*Ycq;Y7`cycbym|WMkj?mnGo-{Cko7xa%#B$#?^|o2sMSw`%UpiL)?Gkwt2Ws(GjxP_Kk8v)W`YET5&ymx{x^rS7-;L zzK*x!_tlK4LiWVt=jJBdkL8RGUi%C!eq&U+^G5h#+ZdGgb#*B2_XjWbX5jTBPAzNN9*+6MxWB|@=sR~>QY26l(k`MpsRXaz zt|tEF!%1VV;HcUYd+3YrRL1MXA?7h_noa?>5B(xqt8@<6hiF!T(G(#&I5xa2>jL~# zrN2MDoGfHH&4R_gS5U7RHC$uwL?P2$=9rw24o=Tz4`UPPWjd*U7G%Qz7I1%GBlMY? 
z#Opl7c%qjW{CI=cmoD3ObJ9Iy?$6D23bERFzvr3#Wgi)HF9hX&k8wO+lCuhC7r+lS zZ6>!#PsrxRo<6DyeP8mIh#KbqYYW#{Dj}xyZ#^J#>(QsfW$^25w+@$?DP;Cx+Bbhz z<9hn^*W3}W->y|sdiV|I@2^klYH&Vt;>V1=3BStWc}baH@p)GZobG7BkFtIFwfX_nuE-1yJb!6#LPZ07`(cPF89+0?1iN8t(^iz?16z(r+4X0y_;fDyfU+Z{Gz%08z z2Bl%VHtFDGeQ0fs(H~|kg5P13|3uzV0du-6p7hQTxuI$MQovX~SlHEVpb-#eU_g=I% z8@Yg+OFPz&gO3(xz)pQRzvRn!6E1D*yV-8W0=BD@c_a@-{da-# z6uAWgreIf{ScTaB(Q}6G`f~(qiNk>Wb@+Z0FWs5%KV86HbUNB>#PxFaN=$9Bu7J(S zuG@NE+=ScvJ9%#IGyx0fo^(?S`r3>gs=m;9zkAyC?ptVn*c{gpMfR)>xahT8xzg2RY+o}ly*5#xm zSO9G!Rw6xrJk}Fo_r1GCQQt2wIA5lL`}O>ksBzFT2l-2s#|qfdw0T}W*xzIS@{-ANKgjC8}dRW4q((1#Ew3>ZH#qv$d4ESi`|fSov~93}&0Fs2QvmH@`DbJ9K)l~U@mIF^yU{C(zu>QOs0Hj z#QpnsSlML>T+f&Do2@?K{_^|fprv90W@sK_n^*_G;-M@{XFQLn%oZx-VZ2&lpR^R7 zH|n1@b&mLe&wu05fzMd)ekscSqWBK;k(}?M&sg8qUJIGwQ-k&PH??yXSg-o|3*HWT zjpJX}rC|*H<=Cv=yejPfv59)_53JuW%|07Zf%$J$gJ>6Y*&g^G%fTCHlT0wf`gqas zuHeb9aQ>IO*ZM+db;L>KBj$f?OX;0dY@fAeze7L??uYLMuVVXNzuJ6P)l<~_=iP1L zWBt6>d(8ZN+`ruqRXvx&@o6qnjeQIqb<)&pKaQs)`>p7WeAH~Mez|xrwlD46@BRKh zYGCEDp2quKj-C1^<1YM{UON*8;qz{Lo743h^UKs3-kC1#cFgXU=lQN1;1|4BA9@(i z1LOA8S-r&k^zO{FY2UH^rLzX5_RtBHJqjnF6{{8wIeiuN_;YKIYe7ry*N$2No&GWL zXa$aM)l}ESrI%21*|1UZFW%p1O0e%6)CiB2JGmnn&j;OkALg(@h=VLjuO-=K=;*$=CS&hU?c zzv5VaeLBWl@4xsRdjdHx)dV-WuXui%>wH)=5`M=ft-!-y?AV-X%Xa-bj2h+^&ok3M z*|F_4zqC{j!B4GcRDB%JkGy8K-Dp4N^Y`~6Z$OJ48oT?+%XO zCX+eyD(%=^4WY)IbyzPwPj_prvSUpnZt9=iYRsu`_5A(ml^s+4+Y{Wn+n953n-`~1 zhR?fZ(VTlY9_IGe>TwBqaA@? z@Ej3UeW-FTbj~fC#lG*1IQ^-MMlT0%)2tzF@Va`$lJ}{f$-{VQ?B%E4UyQh->`iKy zz}MUxBoh(v6|rTbI@TZKvSVK9YB%9I?1o*9B6yi21P^ETLWc%Bbj5(bS-g96W$||- zPRF|EO`mpG9eywGC%hZeT6)%jr~!s3+-1bg3mo(JKKPuu z5+hbV7sdCcaaSt@um3iv&ShJF{7(Oj9CQ)e3##Kkg%3cj=rQran7PhOFVA6NBXq!~ ziqM?d&a7?suGLvXQH$M_z0nIyTgj!Bjn@?s7qQ9dEXV$|g5~S^!%^#Pda>3Ax;8N3 zQJD&S+S#*0Lh*SX%xC$D>WIta6q)_N=lu{hXyF&=BTwx-Z5KJSYSXx+BvtsJ9uME5 zXXMOY3S*Y(WBkUGfG96x_}~Jw6LrRbGjgiXLEprgm9O>f3WZ)?+P-u0VrS;C>UyRE zo{J*4Z5rKV=FAKhdr=!&x?)~!bTR#e@9Xg4-J|T>P%D0C*sPiOepU`Dy*&cYN&Uu|?AVC! 
zue@s6_W1SiR_u$PvTG)ItvgkZXybSJW%uAQJM?fp)Owmd#&eSI@M5R=`rxN-*c>*? z&zMuytBTgcc)QkME^sHF>(+J5GsSpF?s|!p(2nyDy?z9(VIkPr zBR%R2j=y@)YRSnsA4V!!&8et?y($y0r{T=xI}Q}r;5q5Zpp9KS$2zmTXybC7`?zm) zoI8G6&6$}_$&cBJdGpX9oDa}h$+T>_8Q-{Z4{2zB5~j#%+onOvll%Z?aV^U zu8sSH=OFP6YxnoqAFGg!O?!*qN9u-)?Ont@FcT9cB)VEB1&D8fyebLl6 zO?}nWcTIiS)VEE2-PHF@ec{wMPJQLncTRoj)VEH3?bP>9eeu*cPkr^&cTau!)VEK4 z{nYnQxd4W~M7ctgJ4Crelv_l(MwEL*xk!|oM7c_oyF|H6l-oqPPL%sZxloiF zMY&RxJ4Lxvlv_o)R+M{1xmc8&MY&p(yG6NNl-otQUX=SqxnPtVM!8~?J4U%=lv_r* zW|VtIxoDJ|M!9N~yGFTeS%lk0xo(vEM!9g58%McvlsiYcbd+01xptI$N4a>En@72N zl)Fc{e3aWqxqg)UN4bEM8%Vi=lsibdgp^xIxrUT`NV$lVn@G8el)Ff|jFj6*xsH_k zNV$-d8%ep6lsox9TuQ>Nq+CnNy`)@B%FU!)P0HP*Tu#dEq+CzR{iIw_$_=GlQOX^q zTvEy{rCd|WJ*8Y!%1xzQ)mMbOO1Z3*+e*2vl>17#u#_81xw4cyOS!a^TT8jNlzU6L zxRjeqxw`+u-6dRJ%I&3GU&{TZTwux#rd(ml9j07j$}Of`8OoiZTpG%)psZ3ZL#D$IL?ht`{E@;jJS@}&=(se!Hr9JyWAXliEeh;BE|I;+yjZipxpo(ku)eOEldVWL0 zGCm7A_$R9ob>bRZ8^rD3fqUY$Ch#U=_I)AqVEp&}r5j9_3$fxO&Nj7T7wVIK6j_(d zvF0llIV=_s2^Kn(mf;4wTN2S!gc{?c*JI0|W!s!i+M&~T z9Z4`2@|k$QIlRmgcs9cH-;~?)S;pK`*QyGPxT7PSa`PSe?9epDtKm;@Jd>Lr55c)H zo0y{hxfJzD1rHYI;PuLL+#@%<#`jjmPkSNYvw)zehH~T$a-Ls&_6Is!#0r z;f442c|Wkq&JsM}e&SZwFz%~1%}Li9ap=PiFWy0yInB#Rwnct!uIbOO*q^=LK}$s; z_`Ai^U%bco=8oP+H=K}9uKPati!GnknzH$p?tOin9v@G@xP3}PZKVfz#^UK2JMeys zB~lc^5J%svlm2W!j@N#?ZIKA7XTw*Mrb*}&R=PJgnhNtRf{sF^crvLsHX!=`9l395;;e#~` z|7s>Bg8fgoE1sbE-kPNqeU>TmL7aS8)nX6)_HK>Xyis!t_;i!^Z!4*@W_wHH_*-!N z`GHr4*?+QTSF26YZWj61k@-7UHG+}C3v+I9>C6AQOs}g4lS=#gv^)XwsnKpju3&rL zjYBJpZIG`FlsMvu{iSu?tT>7D+k0HFcKb&#QFb}XI9nKV+p4n*9dUevW>~Esf$vA~ zMQUIpUhf*-^Rka`-*RHBRyFk5vlXxYAP=G%tX5cx{mm;?Ke=QP@+!f5Cm+Q4@d2Yn zmMp;WRw{Z~HdwRqVU`!}A*34>X)SW@l4|z^To?T#|6g6Z?AWvj6 zbcCc*w>8U49P4spqA_PW+OsIR*P3OQz8TR3oz?p7uu(rgOZPkQ#%4S|zo+>3XJUNj z^pvIe;P+wdZ)jG5@y63B8X{`&>}9_)DZ+R`iblA#686t&KCHpGm00&sNd;q0(S7=u zxzJOk+Typ$q5jWqh~Z>xpZf}VKUp0Ao2&8i7{9c;xyu5&v7@eg7W78BPyH^*pdMi9 zqxs*&`ApJPseZUP>H%zycW;s4Ge>JFFAY)D_o=>N(o%eu^K#XYMBG1yrHD3x1`eC6>Yd*YbfM= 
zkm0j)8+pz_EqMJazhy56@LBiCxliJ8KeHJU7e93%pRM}g9QpP=>O-GIsY(vwGyS!q z*A#I-JTm!JPV!*vZ^&W3S}i;yLi5$zVFto--NFN1wu5vlisS6C<}|w7)W+?UpIBlg`2Tfy<+vFrKc} zBNCK>`p^$f0n115*@C<8l8vqzafOD@vo~WLjE_f6my9?A|1$;)G5+T04y)A{F<;qd z&3%CJL;I&Lwa5H3dD(%AcO&_1m)kbKtau}Cw0MX#px2+z@^Kd%x3#_-u1vlAt5YrupHGg2wX{-y{wZIOh>XheF@ z@Ey?ITX&YNhKK0*=fyKGNC>&*Y?OEz}i81>6giW~j4HTlF$~p^$B{&H6YIdb*W&huXok*vRQdEx|E?u*hWsb+a+WI$GS)Tp)Q-LxUD3gIQ8z|F(G9M@tf-)m0 zQ-U%lD3gLRD=5=~GA}3-gEBKHQ-d-$D3gOSJ1EnGGCwF2gfc@YQ-m@{D3gRTODNNX zGEXQIg)&npQ-v~DD3gUUTPV|oGG8bYhB9L)Q-(5UD3gXVYbeu(GH)mohca_0Q-?Bl zD3gaWdnnU~GJhx&h%$pHQ;0H$D3gdXizw5GGLI+|i87NYQ;9N{D3ggYn<&$XGM^|D ziZY`pQ;IUDD3gjZt0>coGOs8Ti!!q)Q;RaUD3gmayC~C(GQTJjj55P0Q;aglD3gpb z%P7-~GS4UzjWW|HQ;jm$D3gsc+bGkGGT$f@jxysYQ;ss{D3gvd>nPKXGVdr8k23Qp zQ;#zDD3gye`zX_oGXE$O@ZVbXA2YD+#M*k95nvF7j9K5u5d75ofMq-vGV8ZzZujXO z56?v@Kf!v>?AZH0?GblmT4tf}|JJsfr{5UO-4wDxhk9y+sis`&_@daDG>mI}E;IHv z<(7XO8MWz>kXdGi)rW!^cJtxUAN@30Kayzj&J9ElUMHn%roU0?t{dFXxq zW+Z$v=S&x$fc`qN_-%f;kk#A^;BU1t(8eJg$! z#VtZV!Iyd|J)7WD%2F!wUx@y%(pxVyU_IQnN=4KS489qEJk<)Eh3sDB-zx$z{BA4@ zQ?h`*q+S*Xn-qZ1{>gwN83A8JKc^tmXDrs0*2?>}fMwFaWE2D{^P3V*R~n z=3+neQ(W18;Io;UkiCjmK9Gxkx%HE0bYF#UXh=_7pdlD`mm;=F-5DZe8obzp#_Fcr z`f+nF&XgCjQNl?>bimxZdH=weOBkPTIkH$8uU~a$M4p^9&c{RVw?Dx2yArr!6W0&E zr?ll3yD{E)IsDo+`0xs5J{dT06qt|UN9Sl4BmENW9@D*;XZhh@C5Z`}QyKfY9tzhE)V-h$Y^}7^p;DgK- znbN%m{daA@YdpR8K)|w^*S#0P{`S8WtFwWxC`d6VG7Rs(>eO%ZWAL4xzVcXwKfsi$ z&}!4MPKFP$)u!^89Im&&o2Diu3D|;%=s=3`xWd~bE#T7$^V)3ah2v?6R@!!Rw}83X zshktXxL(&t;p9L8yD&`alb9I3|CVK)iM|4s$=mHI(+eL}?uZ+0Uhs+Ni&wZ|{NJ;k z`S=UQ`XZ*>v)_S{G9Ci3o_U-sw&#rg3bLTz=&~inohDq8q35km>_0MgvCTm+_3pSd zzKlODV4B$(hHtCUSLVp;Dp!17)21K4#K5dGcTn;F`vyMY{;%Kn95Lat(=RG4`YC`f zV0>QccCfWKhrYfDA7q4HVx7}!6E56(=bC%)adj((X70B$;r5>$Sm2KD+je$yePh&DMtx`0mqvYS)YnFRZ`2n@eRI@TM}2qHmq&ej)YnISf7BO9eS_3jNPUOY zmq>k!)YnLTkJJ}QeUsEzNqv{pmq~q_)YnOUpVSvheWTP@N`0r)mr8xB)YnRVuhbVy zeY4b8OMSQ0mrH%S)YnUWztk5@eZ$mOOnt}HmrQ-j)YnXX&(s%9ebdxeO?}tYmrZ@! 
z)YnaY-_#dQedE+uPJQRpmri}_)YndZ@0V9jd}J7h01=oWlAsWlT1MoPJ|)CrIM z0^vh>|Jrl8znCjK$Ktve@=a~SeO1cEU74|AvHfD?U%ESv4c;l@%9_2!`X`3KC-D01 zz}Y=6%>I7z$3Xa#ZanE9^x&@x8#Lnde5D-lPUK2`TDx4BW*3u@cmPKENA$J+gTC0i z?_9ZjudhE&+oXOSE=+2!Q*)o6kNrIVKpgw4esU(P&ky))bHb%YyxzyvBe=Z^tjfy< z%Db`usmk%9i{MM}emOa2I=1(o5ofyVJ94$tX68KVMc?eF`#(qghR?(E^xd{je1G%5 zJv#U-~XzJ3qgfoZ*G33BzCt2$3JVB`P&ijTfYqd{i(@?1^@Xl`2H{xu3}`N)u%7$ zW3IiS-41L{*T#|kl-}d>|95R3JMdvIufm0;E!}-?3-;d}-ajDdg$vfLgCFdJR-f8x z^X-`n3k%$6=mj0zdNDG(7=7SRKh4X<{;%w)dum+d!jz(wa|4x3xaz58t4=&dpXuEm z$v^Ra%NAJvp8L>+$^LMf{9PXY+QPX{>~qlf-S(=r20q`+k=GKB-*I6&={?&g$e3`y zhuM}(qA&bT)1(|f_tnXgKZ^mU8L~_TzTE?H!op4V%VQ8>M!6Ux}0((0)4a7$IerJREqKL(zTZ_;`qar z6E4BW@_O#NzdFe-Y{Snn{l-2;&x5l=1)b;)BY%}S2iRc{{PXZOD-L8VRF&W4r)F!=8`%*hjV*f*od81QjQlvOL^;v1ffrT z@s?VJXYi+jxDkW`NT zsZP&^H*UfC3RnMo71wLcfT+#k-Y(3^#qhLgr7<_@Vrz@u1|09NNYJnQ^0RT0H7i`0 zcu91@h#KVoM87@a<9_u>tm0Z5##e;S+TU*(`qVqOU0V&kKS0XpfY60CExW=$g8dhj zj12a(MIZ7}xyL3$M`MbohR*yr>otgVr9V z^mp`2@F#3F6U;2o$NqNJon7$fjw?UeJHgxqKArWMZ*YF^*?hMbGj(BsmWt{bIR6TZ zd@?h5F6`C#Z=>??`NPh4MQRwju;SUf*F@m^Yn0o3sB4}J8~khJx+sh*_kXee=3Lxg z!$%yQhjEp;iYX%vT$tMBLwq65XP5ZaU885Zpnq1z3k@7!;wJI%I4vAs!|~rg;7fXY z>5R$PNiM8Lyl37cTrUzHkCMJlaA9Ys*C%SjrzH1tx4}*g7si{Jx3;_%_b1+xDXr=* zEc4P%PZ^w#JKI;TTCU>4c8GzEi}NQ~b5mi$P~6{7_K4hp-#PEW)dJ&zF6`ceZ*`+_ z{yq2=?w$QzSlq~2Vw-Tij=A*q?kaKoe!o?uy}|t^Z%N9Jv%Su2uwu;S7hs+m_BYMB z*x}5A+GCWqeL?@)L*d=_znq!)I_1}Qz9L_1Tbr`(t26Vs71eK46MC2o{mM!koZ0xd zlJC{=d(x@%>Am#{J~zq4kcKw&Z1K4|v*4XG8?|&@uQJA+n(r-9tZ`<+KiYF7f1xMJ zwP|^#mCkI=lIsh%LK{ju`YwOr%%Y}C_^;>y?`guZJ>EsmY{}wbr{vo4`+2dx=1+k$ zn_r)FDgZiM|JP@^TxT{d<@38MUB+DIl?z6%bKpBW-GAKsUic{Wxubgno!RCl=}bNN z>*vi~rc<}enSIsqRp`dJC$BIg%H5eYR&Yhq7#HlbjeOza%(k^eM|NVI`nss^i~7Q- zZ;blNsPByW(x`8Z`r4@Pjr!uKZ;txvsPB&Y@~Cf*`ueEvkNN_sZ;<*5sqc{b5~**I z`WmV4k@_O3Z<6{dsqd2dGO2Hq`Z}rallnrbZ)mHJ|-ZRKZQSyqbf~tn8*v=~1lTxpfhK^|*gdb*bu(!?VP> z!Y!kM%i)XdG`t@Ped(k_c@*yV(`OGfO@NM-2(~ZAeD&|ygFav2kz=5Uh*(SCbIQM3 zh31H&SLlqvMTE{D+kFN7kpAmO*Y}*_kN>_u7mMMOz6?Q~h$7#37HSY_9RjUWpmhwi 
z&VklJ&^n2K$M&y|g4S8kIt*H;LF+hZod>N0p>-m(j)c~k&^i=ar$XykXq^kKgQ0aY z|5Hap>TGBo4z1Inbv(4rht>hnIw4v|MC*)b9TKfmqIFEP&WYAR(K;zwM@8$bXdM=< z)1q};w9bpxfzdiKT1Q6f%xE1Nty80QY_!ge*1^#_Ia)_Y>+EP99<9@(b$qnWkJbUw zIzd`TNb3w~9U`q$q;-t6&XLwZ(mF|6M@j1}X&ol5)1-Brw9a$ch_bQMP{%n~^3~2% zaAV^0c2*pGZOxk3jEegcj&(k}_bs;yy){2|Z2J|1-gol?H$~N;ZqamW>&yi71k0*h zmsN@0k7|!~he9u$J+MLIg*BTPGjsM)#2G}@8ugzRqPJ`FgU8!2PJ4aQUZAvBDD5Rm zdyUdwq_kHl?PW@Pozh;Yv{x$arAm9P(q62zS1ax1N_)N1Ua+!*rrb_MuT#%C7p~Yx z7;%mL*8KW&+nUYIU0n6|5ICO71BI=3&`Y(;O8)`s_#{j>^6z2$_U@elVf)eR_5D(r z+8gLy`{&`Qod^HlTsHS-b@*P4$GFdnSIj_fJbl03F4W~cnctk-mD5gw12KgUFC<(Gj#K;nT*Q& z??2taRc&tRy#3gkrN2zvlnvJRjLn_9PZU|RQ%X%*0@VFoygNj-3HnLks2fL6m#05t zjM%DY)~q}xs7%Y%i2HWo=x-0wU`G4GhpGK}FPKU?o?&)`IEOJZ#YVZ2y z`QP~O`}2IVbiuX90=D7q>kf0|hU|wJ?KTF#)aCo6E^8r=Te9%TLl5vsZ`a;%6te{j z`+@GEhsEHtohj>D%ja=B555>u0KTV%-?iR1);#X#o_PVXl>+wAW>)3=C181XKIZYO z1uVf&#m9azkE?!PAmIfaR=cfdoH37c*LhvL9ozpstvT-eA|7{H=bBnCc%VB??oaUL zcwFc_)3=u}{-Hzh2REC?72Bt0y}@|nxxEo5rt>(<(u#ZO7&irU)j*fWd6fv3mtz0^ zG4CVyP2+JtPK6j})_~Xd-?<1o9$B=%7QDVGTd&Jv`+Wl^Sgv?0KwjAWhXnNH{$ARW z?*y#vwV!zn_TMu9{Pq0z*#CC%jX^Vb+}Pvbl|o-T+TM6=CVU}FRJZN?0RGz~m9aZ# z@woQ7)5AtVKV78IU~a(UzFmxuSNJ4gPwqDcI?ds6Jf*yyyP?08OWYNo&*KKk97^&- zoO#wjb}bQ~*CxsHYz1_6?#3zlJRW!G^~CXN@YNRNeYd79pXCRg zFugK=o3W5po$IPA!uy9zu-TU`?BkUNA6NpN5LKNixk|_$Y)n?NhrX}WuZX`{$kJnu zN6SDz**WUNgxx|msUtSR7uUl_v0KjqwT=CkVjQ=)j0mu8$FZ!TZHV*CYvH(OCGl@%eQ_!#>YJLO|!=Jd?R$4|A^m0 zmK7~%+2zdR4tKlX-P0~)iZkugMN}$7gj?V%y8X;74%%tt-oT8VJZ^nqQ;Ra{OiMn!s}z7ezu9f@^?uOC zKh%Huh48rZDYmxr@%}w`2m5Xe<#FYmypATk-+_>R)qf82IE&1fQzJ2+yVvBn_;DUL z=Kk+4NxVMDU!!svwDN|M5k1(x^T*VxktccF38v(Brc=nmi-Lw!M1z(8M{c}imyoqS zEBYN9%i~TvYzbL{ajoC-6)#WmIDU3qT`0x}o!VS*@f@xf^HpQ5Fou(lW4t|UDyl0==wBw-v>PIR$Jt#c<8(H)a#-j;(E8;G|wE{|22(U zc|DKEW!!o$p^rLV`Krowsu;gDYN5w23G~#-UKd)7aZ&TTrXQeNmfRUP47~}am_J;j z1uc8pWJOjXk6W%2eL-Ccz4iQ}=UsZrudS6l?$1kU$=+D1L&c4!x^WhU4Q$4_*1@BWCzP|_CkI{GO zlC@_ILz0yGo;T+n5u2bQhaNT;e$1JOd{ei~dSesl@I{ujNn%D^MN!=4p27Ca*|36V 
zC~3rT^Rl-s7>eG41;LAp<P=}?Mh_85)%;^4SiQa!=*L7}c8FBM{`&l@mE{Hd?ruo5C%=5WD>$0Hx zt7orQ(lg?G6P`=&7>*tw93DcmM0zLwowroL$oIm?O8;78quCdv?mhnnM8Xk z(Vk1RCll@2M0+~Xo=>zV6zv&BdrHxsQ?w@)?O8>8TG5_Yv?mtrnMHeQQ6B;I8BiYr z^(jyv1NAvj9|ZMDP#*>LSx_Ga^=VKa2laVS9|-k{P#+1+tJAzY&C63>J>}U`-aX~v zQ(ivh=~Lc5G#Iegd@L0PROW`xVfB2DIM+?T0}7CD48fwBG{l z$KYL=dpq-@fZ4v^D&#LP;#?i&PXs|nd|E2@VJ@D_yf6HId``fAi+s=jX@GtS1%8$C zsCzqkGP(UP?#I1FsSaL=0w(j<<;%lq@DYWm7#N)ru=y_f`byfk-@;^K*z5U`cLwcvz zdmDW1`bDJZuZ%UD6^yvd8Oc?=L+E$$W#XR<%wM8qhRP-f1#H!Y zRN)5bYTtF+FM*#OIOgF3(}CzepgwU{9`w_qXA;k)(7$3!%#LI{OXcTHnQkf$zt(@x zR%W}0I{(D{8n@c|<|m9lJvAj%ql3q(#QnN{0Pi>OllZQw4d_iYsd%^p_}%>CT=5}w zJWj`1)NL<#-zB}T8(w1mH(y+s`xeLF*b#VXPYu?ehwsm;I4(esLitUKSZ|n}&+NPd zt?w-nHL)1$$&GDUOHK%wV}?q*S`m-C*VCJ#fb&>_>3BYm z`+cjka8x|{ag+pP?#Fsa=Ujmx5&ZI9#|9|aLf40^8M77N-=W_(wB}%aHSpc-Pm@#e zJXYLku8sAY=do#*?JlF=MaHuWr|$5$ynWmBzk!#oHcs&1m?`wQ)&z2;>aUBuYLYeZF&~Z12?ul-3=Y>AQhYx zkLQov`x^0h{p=6+6ChABU09?ROg<_Y323*C)E3*abazMS~#C7ry_r^}?1M0n7QcFp#$$ z>r2hlSWUcN!pE4vliob;#mb+(>FCF?>+H#onVwh=XUO*cfFAMq&2|43IDh1M8;{^} zh|lNsJ-1s`-|qe93UBwDX)S>XJnoA4`p@_L{$Gqj4BKqOf8U=2{yp$7>HO@i*Gp@* zP+(wD@d4}Qz4{C9zp`d7%Nt}paD7!4yj1cgr2Z`f^Y9 zp92oiYQ+k}mh>~?mfdyeryvQx?C1PdOR(O2kbU{n9clPq5099z0=jF*^1(adm-L(7 zGrk47E;{yixCZ>?+I8bzL#Io1bj%yaXKq`vr1G%8-RHi?)sIKsVv|7qEA+WtoBjGt z;Im2Xaq*4V{>FE^$mLVv-?uSSvBU2-W12{t`ZPYuik24w8v}P8GNR~H1dA13}@jO}I3SG{rL_-uLSwzfEEv6m;3z8b&#+hgw zipHsE9E--eXdH~j$!Hvn#@T2bj>hR|9PhWv(@Das)~rB%gI%+&5%=_V|IYRlYv!Hg zIwp&cUTN;j{105f^(VDnGRn+|TX{&`Tn2H#QN4bre;6SSccJ;&RK)!ZtUfMroDE)v z>C;z!+14z0__kF0nMT}3j>r;WT<(v4||8@9)_#mXCT zlCzJ#iarm2`>Vlcn(%yPG4gwT)LCn$aaqB0BIb+a_qGq$LeCaIJ8LZFi^?yZB_raj z*&5MMah|ttAG#ty$*BxEIp6A1LXKvzH38W{M4=A5Y=;e{j%{LmT(u{5f7}=)3;EOD}h@ zjj(1BkDvBj$9!ki@3eJFs5Q%NE!$;+>$feW^sB)h`0=@GInMY!T{F??$Oy2;^Y2{e zJ9r-bchBNtChsuB{YdZD5~E=l*D7#7^79qumt}qjH^*2rpQ(ehJxXxD75&?qfzQ`+ z>)E3w%vY0wAAEd-?@w~$Y)fU#SA*6D?mwK0^DXPEt@9Y?W2naDRGh!KVsi9n0l$rB>F;fr#4* zFE*yCAf7OFyJlj4#O*u#t?7M>`PDSu%=pezYbMKLhF6v2{@*k3(D@R?{Wm0L9mn(g 
z37sCV!Oy{y*f9NM2fhz_|7EwNoG+iVX1ezd>Gj@1Jjlmtgk3Up{O$#l5P$htHu_0F#JMkx-q?_EiN|^0OMa)2 zZp{LR_AXYxj9!ScBVP-!y;Je0w&;tve>awOt6Z~&Z$qT%%Qd{;glnTRalVDS4cz8r zAwGI4QSDLR=jm?Ddw}>1{e0_ACMQV+TQlzbPv<7YhcZ)7pYg=?6+O9fMkMxc%lVAU z!1d=-5qWbWj-Nm4KtbGI#IbXajp|L|an76HNa*49-FBvt4{^L5W6Z`)!1Z-)BKKr6 z;zgNVZxhOJ{k&f|ROCLkm#awX>4c7vE(n=(n#b+E_slp3*YCeMbM=~UI_Gfx$Gojm zJc0GWgqq8P7ebF%bLM<9UayfSeqa%{x7(J!J}imH(c{OIIvBI+tiJj9U*A9Rr=HFO zdlzxXB@ELayb%6hzwf>{dLI7!{-U)l?+$fzV7qKyy&EOM<7Sp!cC^^zz;=8&9$tgj z2DGf{l7asA*D(L}WUPaZb?V7N=f+DPoHCQg^^_V+X$^K@^N*C|E_C2=B{$o?iefxd z*7lQ{D~}7P9zJmpblRd##VinUu*x5obI&`l<~s(GQbC9-6)TqJTy$WsKJ`D4i*bh; zD$9Z^9a!(6T4x!IXX}K#JYDC&Iy;U1+A#jFf9JoRo3tkP#|KD4e_THtvd1>!n+S`}*{-wQvY42d#TbTA9roD-2 z?_%29nD#!Vy^(3}WZGMq_FksFnQ8B4+S{4-ex|*lY42#-TblNsroE|Y?`qoHn)be? zy|HQUY}#9!_THwwx$F7*{ngPs{a^2H8b_sZRvL$;aatP3rEy*w2c~gi8b_vaW*Ud4 zacUaJrg3f>2d8mz8b_ybb{dDLae5lZr*VFo2cUTZnn$2{2AYSUc?z1xpm`3O2cdZq znn$5|7Mh2lc^aC>p?Myf2cmf*nn$8}CYpz$c`BO6qIoWw2cvm1nn$B~HkyZ{c{-ZM zqj^4>2c&sInn$F0Mw*ADc}kkcqyZnn$I1*8ZK-@4rT#RZ2YenG^EXcVgzn zPW}$Q&jz1o8a(9Hv)oF}z6sgVnT2VJi?BWsH;?-J6?t6cnHyi4phx&i*(39zFY$+( zZ!qO?+n3Z{-ta}p4((3d6oT<|gO3+fp`|SN&F}I0E!lC0%dx$?mSpRZg?Nq_koYtR z`~T8vc2pgCcG={`@edk>EOmj~*L%8HH*3BRI05Zad1`Xg6dtEPC8WZu0eRWLqmqtV zh;xV!oOK9#$ASA_)=ffP#O$mbtIx>$)|#gnPDEbR(kNjyw0M!(r-%uN+pXR_U9}#c z&utew2fggk2n+F#@HeXn?d8Xy2e?DR!jM|zfm5=g`>WzPM$Oyh%^T#+MKAs|ABo;b zC&cClL2ETO`WY$mxVlk)s`|ecvhi7)I%){9^{6YA9+4F^`D&a35 z6!>a}ERPHRE)!dU@umB_Y8T63UG1Q&>R%22w91M-nvys^qf!2UvHiKv7f!`U@VHq& z-FDQ}^&ht^d-KCE2GeCsDT=|cZo>)u=!AN39E*R@vbUJQSQ&->q6I3vaO zZ#(klirTL(Ov1R^@{6Gg$h(exSF&OS^kjd>_SMKEeu{bj!5q3JE~b5usy*ZH;bay< ze>Al6F`I7B>JPYy^o^f++q-s=XS-9Kzn_5?&-hrb zcEz48{@4@Mx4lN`v*#Ll_Dt1%+TlXzQ!kdETT^7uG?OonZ-sVn&8pOHv}cANUw&MH z_57@jOgd@+*4rMXm*k)$3M(hgk#%50C$83(hgOT8B0E6Gf$<-jy|2UTrM^9gu$=0^ z>bpgPBr$Gl`9?m$7VGz)x30{Arq2s_gcyS7j-0;dj%}CjS5~&zASV0nb{>bCk~wdx zFL&S>{XgcRIJS9-|GvLd_-FWkivNGF%Wc|OFcYzfIMLo0o;5aXzLwhA%ZR;iyuN4s zt#TW-IO%uxM8xhly@_-fQE0=ya9gDgFE--*bou2^b8Xm(!)Xmcs1I&v95uG*mJQRg 
zAAQ9K?6SM1_qMj&wPDL;2gKh1E6ul1cr`H71|34D#nl8OmZv`U_gswEeDu@bABx`6 z4_}ynerUs->^`?eqeg4#*s)7Puz$t*&!6v$1iQ{^T~-&ybL%=j+nz-)Vuc~^@5S4& z`$-4<;;+J6*t$s}Akc=TCDqFmT|unvT8HixFB_(;`?97!6|B%J@>?rc*|2>ZwI#nM zpm(w5n4jb9ZJ5<_r(M#hV}fZW?h{tUQJeSG8fUz8fVeF|gep4a#3Q$c9z( zmj^aKGU6hGpY=W)V8cw?cAt`cZp78dR-K(V(1z_-$iK6)!U(PE0hEwqcct$71;%sBt@QDs~Fmeq(pwb+7}EJ`5XD zkL@p{_wHLQ4X@hp%VR|3ZJ62pIWLw=fYmv~ar0~?8#Zq2{C6k7CiK6PJ<7P9&m=ZU zskMQU zI=4dQDq^p)(?2K$q9<5?gZkyW5o=vOS@Gqr|7CAdHYa6wQnn{$e^NFmWrtF>C}odQ zHYsJ7Qno2&pHem|Wv5cMDrK)yHY;VfQno8)zfv|VWyeysEM?D9HZ5h>QnoE+-%>U% zW#>}1E@kghHZNuOQnoK;|57$EWd~EXFl7%@HZf%vQ?@Z>A5%6mWhYaXB8}bC*glQ@ zuisSBqP&97Yz#Z~^bq@8{AG3P4`{8*U)G;<^*b0eR%dU>s_JjCPVFIyy(ujR95uQ8kB zcf&h!s@O@XfNw@t{cyi*ko+{EJ^%(D@6Y(}ml;&yWwa#KEmof2>T z_{?C1x_PcUk9!}vYw7ABK704%?R^zT!4n>{mU0h|Q`u?R0}q zeYF1BGaO%b!Gd*X599uF)hw{=2xvnHzjTkDot@5S1CK8Zm4X(YiLSA@4HoIb>yC4g z%Y3BovC!fUpWSkN+|!rKbaTJ+cQ>@U;Uhi%opi}3kI%yT$sKYn z#v1h1S(zkg5tlcfzKD%y^w0ln`Vi++V^zp8#Im<^JPFmoxZ(x5+(5+ktCHf~Q!t)< zDE_hd8?2d?w~TY&m_BlYzn)yU81@*f?%n%c#(d>*`6l*|R>Nkk9mv z?c03-xr5+DQ-1psu)M=Lr(xgWP1@$(xwsT;=yOBo&xD>cV^-IMGCm9S)E;8~2j_RR zcTo0AK23)IW`EaB$y@L|2Ue$;1*1y$nuA2CDY33QlTNArzZT;J-8 zIzpj`I2OjjR&it>3S{A(pFc)8`rmit@lU^FCtne4st%uF=3^KfNiXndAonzZG+|3Xx7xC12 zg!%r1!uUt8us=KLyL~Qz8$tNjjt<~Sd8Cm{lsODeUUrL$@O(w$Y(n%-zEO? zNA4(Z=#_PvVBg;6<;Dgem$NExo|yulZ5lmfnd&jr3K;l1{Dp^YyoqJZeB}BgKQ;$n zQsgtu^_x{jhU53s8|$7jqVN8-&dhEPj|*@9qq1l$eoxx=Pt&mfR}U1-Z}#PqkA8gr z4##tULr=ypRvCb z$7K&3NAsCXNr6Yu{olC8tGF1#XAfL9)oIwER_6Ahvcf%>-wOUd z6JLU7@L0=PZ};PR*LyT9%ZSHawcmes)ge9;h8-B`h!_EVKRiMWMGuC?zGtv&(+lsU zj<#i*zbdv3ty%PcbI-qLtG?V%EdKlc{-r{0|8W76_6>NKLm!7!BK0N1mi++6# zy?%YD(c0rSY~#Z4saK(kLqyF^N82#*R_nqf=sUUv;{_RbE-11ca0L3))*Zk0<2kB+ z&)-Aapcf7sFnDE^&Ho)sxb~en1+_NpNsjr?X&CqXB5dlweTAnjt+NVxwUlXNUZ)M? 
zd`?HZLcdPjIc)n#TlTxevuG@IQ09%(hw<4Aeg<}poFLy5DbW0q& zqM>cW`v3UakH`Hx7i_mgP_Y8;BX@7h2V*>Q%;)Gn4~Csc@sK0Xf|1KgOy}`9&BtfF zJfQQpE_ibL5jm5C8mG{jp8e3bm!|D;z2E0a`6aRi`tOhnTJ@Nh?n*6KX$`GcyWy4f zU*7-RSLuD1-k0fpo8H&yeV@(?bl#xz3Y~Z8yhP_MIfjYv{U%u8ZipiLR^Yx{I#M=(>%r>*%_Vt_$h9k*+K0 zx|6O;>AID!Yw5aIV7%s3YC4~diGQ@Y`8?Ko)jY`uRr^iT2c{{J7gz67eqsBJqM6%u7Gl1K@alE!_` zDM_K>oP#?MC>Ldg@`B(l0+&gi3b1G|G(aEt>^n&>sYR} z_p_hcS-mW&QU>! zcg|_t;R{LDnFBxTkWx7SI`;myO?EZ(2>l1IC`AC*+$nfJ9(?c4$%X-ckDHLzgrl*n z0?0j={-|CCU5l2q4)OvYTOi^*cRl2mZ3T(9J-}zisO?db0G-@@ZVdNpsSUlZ@*#Q# z=z3SC>fT$xxw@H0S)j{m;ed(Y}4V-sycUBGc5WS+JGQ}Rs!}cX4%$mTT4t@}it{7m zy2&n-KbSIcGH)|%4hm!Dtik*Bel48P3c44wI@+}beDCBa$)5R}KxeDklAX&T2Q&(w zd$=Ap9ET37iU610NV88Az=mOsThQzZ^#5vY?TaSMn5m&&&(?_dyp`@1Z{^ zzm+t0vjymwh#p7uCZ5;w)QVHb`AK`GE58C>n=<`N+A7$TBpe9X2)ruhPg@T7w*G8o zv3TIjpq&dxo1=XlL%(FSPo*zwnSyUja1=4@!}uzd$XQuHmma84yJQ-^H!QKp;2G#5 zIi+PY0psUw``6$$8$L2~Z7UB0w;mKZwM7qnT$hJc3dXOsyl1Oz_xa*|Su5cqCmJs-!Fh;spR(Q}lrNFn+w>gk>Am6ggSzug$i`On zS*ehlG(=Y`+*8H;{xX``gY`GVyxd4r1#-px)Eq$$_!ifIk?ZEbN9_dvb>|)M4H8Gq zD`uFG^8T<-%hMniwz*|Jnh3sWe5uLKRM7qLp$Df=!2G+3nHQqGqA%?CS831>d6&$w zcwW_eu2cP3)NkH(sRHZYeWWoR90k5Q1#%JAFUcqr-XU&6Sl1SA??V4Gu1)b5h~WEk zyVrcf`hDz@KJSkZayi+ZsIkNN$VN1+R2<@v!I(RZt{Bg<5jN$`0~})c_onDnjQ7Br zON&GXIb_8laK_bZ5yFlQTbI`*q{prsA8}x4a82ER>IZ@>w|k z25je+O-nj}dyt%cX~(R9*%pF(BgcadjxY%uh{gL=7mfG``Oj?fhhO7C2PN#rw=S6s zeUd?+eSHe_2fB}LXaTpe?$=&GeXeuc#)puvLNXrzrPpxIR?g`Z*TXrZad%hfb?7r_ z*OSy`HuU*}>EADh|8M;oQ~&mFZT-{FG4*#${T@^Q$6Rk%g0fh*&co}L`R)Jg82{N_ z58H;%!*BRc0Y4@DnB&i!ALjfq=a*^A&9wDq+JZA}#hJF`Oj~oNEjrUyooUO?w6$c~ zVlr(t*MGS-LH#9cDW?$6{x{I)+^;kBtA_3C-00Bedhi)`rrQ?1v!=_dhfePJ0Gm0@ z%b%iA{^o1=KG7Q3#y&pyqpAkBgJ&Aj550nXDV4ckU9B}$6Rdt?0l8%CsFhqh*t*}E zwYz3v2JQp}AI~lAu%>(6y~;oSf7UB=Ju}xkb3ZWm3v)i1wyI29)_-f{pRKF!tQ*Bo zVcS~udydGe`~Tb4wRB##j2>)TS9?Fn33v{^Lg~bIDcHVxd&TAoDxq&Xpu2q;Vi?@& zT2xm8ThQnm!gqIKe0zhJ8-C6+ArFc66-W3QobOGpk%9h{*&ddp9OP?>;q}~|RhRAE 
z4Bet9H*~)j&h-E2ivQ_$htFYe{D%JyjacAl)d0Pyz9@SAd=&W0wjVr%bw06m))7v@-NLm%z)^NI$&YBmd9Ucwo{3(-vW%9R7{+G!gGx=vGf6e5-nfy7E ze`oUdO#YuKA28(yrhLJaKbZ0fQ+{E}H%$45DIYQAC#HPGl)sqr8B=~^%6Clpk0~EA zrDDH6@Y|^WsN43ja->MBP^22D_aEz0(XI*T^ z5XfU!?bes*;vP}E_E$ODm;6XJJz0vFA~V_wW4vu>z|<=C_IV~m+@+?_*BA2Dv=p^b zW1vs$J>C8;z=oPWh-s>YymtI?P>=wa{Avj_mxBB$a;{MW^*b6yw5J2l<@**cgx+`l zwR{;f$Zvwt>J^oEUwh_+vr%FuWZw3r;@$qZ=Q@+7<0=Y0`~v!P5%kDsP7Eq}K%O$p z8kgJ$JhVsF?K|kpj+xfubvpK?gUzWLqq9HP=2b+|ea@_Ufa@zKD83iSrRfqyT2 zIeXew4*8{cBY$ls?%4vLYdQIG$iTfLX{L~upOpPvnCZ_D zC#=@_w&5^`wEg-v{x{0?qksMm3xT|NFVQ187xxwxYrU!?I3#1u{?Xqc&wC#6>Dv^= zA!qOI+MH5iL;s5H{}mg_A*<-S^%p@uCAv$c|E6(}&SZh7(?`sYP0su$4>_bgt#+5p z2hig;UseZ{a>zegFHun0Erao7(sPjbi9$jW|5rY9zAannEykywy`QY@Ke9>6U+I9& znmoEaUQ%xmJlZf{ATj(8{;uYQHNKDF(V?^98`CFZ9G<=OvyS9Zu}-^D^EwN%tw4+zRuJIi5 zHp(hboFTKBZ<1$`5Z;>AlRO z3i^#NFOB1n9ug%w93kChvirf$sSo~VuSjY!sQ3-q###! zn{2cO{({|gaQeeHw7N?v?a!B98G24CvfAu_;w0Ve4NYA7%vcM-oUv9qC$0p$eMyK`z zmq{ocDe7gDr}vM~zjmERJ$hy@>G{bf)1N5x=*RQu(y8@Yhf#iX;iBvn2|SvXF_`fQ z<(~ozBiAPK=;7CcvtG8dNzb}VgH|^%eluP3%)hZo+xJ?Zm}DM(ASFBG0emD*jPngR z#Ypt%ybd;ztBF6t2A-0n7EzRhko0Ki}%p~65IUof7s;q$iPoA_p#PYo3msvKIhA-%`G4C zsC8o9E`?ug^2B;aa5d%8y}bJ=ju;P{`1R?$0v>Iv9v!e0{afB1X?3=UM;8a5HnRH6 zCWf;1Sd9Xs)wz0{a$aYh|XFR%AdD53OjNhA^6}R?R;`!I6BM)@4NtvbS;>K4zdcJdn z!)AQH*qqcW6>oWTOTXuY6!dqE+lduF-t*|m8QLukz(4FDtoHqpOVL=cr-+vtILGU+mz~TkMYFV<^wi66-Vk&ZCoF7uEj+Uixf& z?A;z7^=I|obo`++@GKPjrlqIdht5+|M}H1JfVJkzm8<& zBYf{^59P>bz#ETr|9XJ;P55w&{}Y&9m1!-G_KxLGTfq9mqZ76krmjJMKmKWyX$DrD z|5803^I1D~&be6wJla3*XU3WqHYrJa`pbTh_usQhZGH8JY$3!-Q>eZhhH{<+-DEbB zPfvtxnl=m1Pk&)A$zKe1D@}(sd<==axMu{&Kkj|(QkPUT_`X1(OA|E?9zCDnmfkFB|*Nz3H=e8qxnZ4 zr^|W?F(&t-f|GF!0sDp& zIJsay9c)7yTeRQua?*?!SYNNoos}AZGq}?nq(^Xw&VZ)yaCu=Kk7FsqA+2M(MzCpUZJDm))|H{fqQ z&gTig?PA_we#h>0X*!PcUVhy8DUAAh9ow4BHD zk>vw^8B?G~9ZFq3?JtkUasoa}E5HWWqhj_wlt0ivO6&^!qQY-$7IH5a4g8{RZ@bkz zs&mLlv#Pi1KX4u@cQ$QZ#34Tn=iO=P}t^=a2Kp?Qw?SBNABB{@CxX?h{`6 z6WE9^v-tW7=R=mx0hw{Yb`@DqZUd8~C7Qok&>K&A&@O}e66cbFCM@HSE$0Mfr6^xM z=j^ys+9LZ#9S9 
z9rzw747~X0^9ANM&_iG0j7Y}$b!w#gTq!<>oK%eUeBQ*PbCz44e!hl7`T16vX5)`=$n>F+8$Y1`?>UYqjE`cyH0sM8TJp1o)4p~%W5TJ(fQGFDA?{yGt?rlc|Ibl3JxI;D0r#K|! zc-)O0z*{e-*sc!dke%Puzi#}B{?>$e9tU687E|7G4iDP&F+aXXo!Y~|H-2_np419@u*j)GYAX1~PSKYw;=ncOyweWABT_0p-$egr z+$>yq`yy=k-*-2=0!t1p-Td|nd}J->lvbm@{S~*zqp!gZ!B;=o75#VHHmy)Bjzfl~ zpDK|BzPr{$cv=G1PtABs5%fQ5+(bjWM68cuULe8qhP&k=SKb6aw7|*u9L96aNut+q z3*(zO&-fs4MPt!?g*@=B^3Rr<0eiIXkrH}_{qOg2*9zd{_b2hgyRbec%(vhG&wmHG zLJs$iFWy!y2lhQ{rTHFq4r?L;*K7c`E8HJuWsG}QS;dS=z&>+k)VSdu&~WYF*<*nH ztBSmq9Wx;fyY=@-052^dg+}M0k9^UoD+_G3ZU-wY0rr(Q7M>V>e)Nl!l9|x2ZQ3L{ z@CEDR#`kYeu0X%Oe7VftlsX4fOP}*yCv6Tb*OPL~VIgM7=;F57@L@+*M!+e)Qdn z*m&R>tQXdGi+MCr=BZ);aO;_f9UkEQlccKTw*se_hXzFe7ic0D!8^!>ZNAUrfqALN z=6^0k?7;V}ds2ZfoRe}Xz6*QPmID42V6h6}dE7Yo^vpf9?gsEW|3z_Iu9}d-Gk5<+ z0*9puVJCb7)XI=%C%>499>>y&Rua))B2iA9JwydRJC8DYMz;@H9kDKj)7Ut-yUFwp3_BZkq9C%A7C27kpRp)ABjwq1AicKHxZ6 zm7qUZ@1YqNe-7gVTBnnrU_aQ3{#wxq+~B^|UlIFRwYD~;ALF@ZPr_BxVJsn2^+*@z zy`i*=?FkMUHTQeQPhclS)A&Py9CGX1rbTkV{N~kPwf*7mF1zoExDNK;CNa53KG-iS zdQWWC=Fv-bW)V+u9t9cXNvWcImam0C13Fxx;Mv>9;`y~L8}sgaaL8meUsVxca$LvQ zWj}}bxRy5XYibfu0z@GI6CKtvwi@|Hyt> z)Ay@&?1WsKwtY(>zQ>9+I^($;hkRJG@zWMy+81yz1m|z>(PeJCjfUlmO=G{|d~S~a zUVf2<{Sq2)S+#*fW)(<=Y%sxjSyuUZj1%a`_Cb#%Gt5`8dYg+q&i^6V`<5%f-%TD7 z)V~%!NoqY#iYs|kSYhg^aqBo_<-N&)j@CFYQvC0kt>=)cwufcAd02nyUQ79)7b7wq z&lcM8=>AI;?*729Q=W&N7hpUl-3l!cz>dqJLUxn`kIr57ZDJeR|LqXGd!GGpdGP4> zD{zn5cr+~=&wsphZ+pEN9SPC!;D>N?<(?u68|l2;(7EkZUi(A?#jH*L6pDbavdl0lyI(!cTanw!;Vf(d)Zk zmI`qQ*J+H5m@oJZZ~MQK|APOY|0!*~ANc!K;P3m{q<5NNa!w$=@59Q2`|$iIiwGsH z(>z-Eq3g3Lv|8yb2YEVw!+b;N2blT>s30Z@360DdzR!5tCSFpAybRj?gWKO9Ws%#9(ylxE z3jUkdVOV9s#c%j;)uhgc`A?8zr$oE;&SDP9TFSY;waS5R9l6W=AP2UFjpwA_HapPZ z_@lr2ug!B+?di8#pI^-r|dmjV-?OJ?(FJunZTcVJF**bj=am&Nq7kS{t)y}IUI7P zZ`S;L;GLxYrvdI){(ajP(OB!z!3Ve5| zeOz}D^h5@M%U%Ny#oF3^0iS49u&ba6*v)2E;uP?q_vc*<6!R0%-O+a=V`N~@;6K4? 
z7|)u%;rqpMHksL0*76+nHLGrgh8%_5bHu~A9N5!)MNa-U=o$XAHcB5^BqmH_lW$Gy zMtnp$H*M$gs>zW1r8ky+3KY<_Tf;scm4@7A-}9AqLO{=akKMLLludk=TiwwQ7Epyt z?%TCTvdIC17usDR0=iB3V(MDp$P09lIihRQY$vcunB$o!*UJK` zd{!T0qS+bAoz-y%*+w>A5DLT|P!aHK>2|)q@^xf`H!oA^$rI?Q!me zdz2;$Xo2k99<}jovZp-aLv)IOjup|4@E8L*@?kD|E}&B%oqi#N{#_Zrd}AW;B>As; z0qB2p+z3Gj>I?J_9nBJ9lSO>(sS0TVx?n*Ud)p9;)R=GXqPGOp!|~zWv>p~&w4yv| zO|pQ#A8cHg`5pM7dFf{0gW_YZ9qMF}k?xD0%t3wE(?x>^zp==X+(kkeHwCn~D!2T@ z2NqE)qSxe5E>gZSt+TvYS>a0-nR?)9Xm^r;x~Dz5L7uV5tG|`qB61uW8*Z!v!r$|Eg*`DNT=5fjmhNB83e^!CH>H4P~&;^~+$bOkue zWL3h*L>3VoKdkd2RzOqlfBLoHGK06P39SM;@1}O7{nQZuen+ z7kDgG5rUrK#&jo>6%KSn-S>f4UWh!C*M-mpDQv!!E*U_}u-s7tgC7IMwBiy)$oJb0OQF7R59;Sq%GNpw2w}hX4N6{kLzK z`d<b$^UK?U%Tloz}qgZ4{Rg>`{vJlSoyP#WiQ$0CdEXg}MRb2Dd_fcA#Jb**&r2WFo8OuE2ae^`=JaGUXBcpbkF-ZL;0c~jG~N)HX28rekuEEYZ? z_-`KA;W`tC7c>5P@f(5vi{mGaA1h;VUznRCwflN{_D$%e4xVvXI)1Ms)$Um{`84>f ze%9J=k3Er7@W!~wJm|^%I;Pc*Jn9HJ<-Kw(=$HQqsR?xV+7M2y`YC%ykp;PpD5%!t!vIqKzUBwuh9pQBU7f|&M2&0cW@ zIXqjh3H*T-=igiZ`kEu1&Jxlx$2$0=W6|vxgZKGZn|udfHg)#dRM)GH^bJR#wP_^o zDxvqky8C3H0PUso=FT;NUQ)lNR)HIhoS!;>3rnD9-57d5ZUowY zTsE>)4|>)k@||i4$&SbwA=)ViJ^th6?=(!2^V9ol``$y)o6gfne-V>4oMUT`OF#Hf znP2;__v9i+rsSprZNeOKV0ZgR-8@G+=be&LCGHizB`kcFMjdHE*5O+#{z8vyb9t2f z1B_?&XssB0kAXu3FX<6-j*feCN(y}U=H4@2Gms-yHXw3)f-=6xxNJiSa;P4z^IYPy z1blM$&Vw?q9BEvYxTy3p=!GusvpbF)u1Z2%`u{+VA+l$#X?5WJIqt2F;15f`mqq2Z zInwuJgv2i13@O$f(3 z{^%6s9F7}uid{Vueq4KuN`2+l)7@Gjyms90{#)BgQE|0*5bI1fVXE2)VzMG|HlZiI>iX&_}w`^BS|d@a{dLOz&pqR>|$i17@rHh$G(qw zdZ>R@VbU$xI{4i^ar+)3jU2*{#y;8p5q@5l5t(MliL4TSe=y?P|IU%j%%RN8vCPcD z%*@fu%(2GI!N%l|nfx=8zh?5^O#Yn7zccxJCjZZr518@;Q@&uzA5417q{mEp&7|i{ zde7tsnEZmfu=tV#;3uvqE|7$UKUuQLE#e{giOOs-jnacG5;-7uXC2BpO&+=#?ksY{ zs9#nY_+Ejsm&|S!IX*diw>S8komL08c)78NUHbDUx33H6o-dEBmTh1mc8g8&b?~P} z;;(GquV<0{HsNCLuLvmpaY~yD96V~PPzCr|-IZ$<>+HZET8VmzT@uifk(remt5{@r zchjY(!1Ae8RmqktayhqT$!R1+34wbalG1-`X$ z+$N@G~0 zWI=_662?dG=>Df-l6c?Ga@YO97nfc8UMb8Xz5Xh;su=&CE*;I|{e*~A-TWB~{Iu96 zwe>e4FBfQ7H(@@0HEoI9+DnM@G1I+kP#(VVr{n$(Lf%V$9!swZ=#L9)1B%<=uAnK9 
zu|v6qg30FlUkFLP)S9{i<;OFPPL?zgA`v*@bP{mxXU|274TN-LPnmZQ?F*Xg-`=c2 zeY17nxL6+&BP|*>RTILV{QFBZ+M8;y$avvvLRRGas^kGLUO7tj`g1}aMs0UJhx(^? z-5R&5j1cel>7q?2pK_x1(CrdJwr&n{4Ff-&Wa)ZLJD(79k;hVI;FrBlzuUI-At4u+ ztZN_Uk8de*$MxUG^Jz1!?6H0wXUa}8&LKq6NKeBY{C-Z#n~U*x2)X2MV80pi&*U#J zBcrnjS*1GXjX30=`b8d6(^CkM8j)%!1Nq0OC48S^A|d%+_ifiherTMhCRlZy5IH$h z4`IkJ0{d{0t1*OhX14qcK>3Rc+kzflCS;~zrp*@M(YMpxT_OnaVHdyMiuT_&w+`Jn zLr7bh^V>k+N4Yvjv`=HcY%ZyLL*BXfZmUbbKOtYbj8?2be@iFUtxO9bByjhIwlI9p z<w{GWN| z+Bdfn65!B~WrX=rR9XL^-kFehk?&;NQC>9n$fdlE=z4LEkiz5}ke)9Akl z+;<_v)!7m2_uR%R!Z~IR^rA^qo9aqJHe4t)?*ksYSaj)UKFXczvU5=XXkzk~ z2)u8s_@}E)tC2(Wb{}`l8bW+0jxT20I8fb^&rfRsv7H0Wb*zi+jmG?amrKz@4rZkb6Q=rP5W>}dwZAIRfffY| z7Tw1F%PK86Q+mdMs*Z6Vkp4)>#N(&;2*Qv<`fbbgDcFxLBANZ)q8unU^n6f2Js}Tn z77pyYg#IS1YG$FnSeap6^#uoNl^bpJ=QGX&$5vq{$cKG8V2SVVNLg(j9(r1w%^T1tw637XDwbf#)0gEWKsczGhM~?Ek}QMqp@_t{oc;lG1L8rUXvF~~`tv3G~q?R_jVBH??D0ov1@9<<@; zVU(ZMlJi84^74N9h;xwNbsWFktwnqLibVH)gSZ0tyNu8v{v z$z_qjzZQBM%)!4tKYu;=1&i!8nAB^59Pq*I61g4Fmq^CdCP?!jk60U9oM>SYcbkky zegf1_dE6NTeGTp$>y@`VQuVYst_vrziC_fLKY*O};@T(M%;&Pn*3?S^r6Z2?&&K7} z7gn%I^T?VzTrWqu<>AD@-`3CzmwcMV;trC*9+ zlYY1=Ag`3Aho`L1W0UpU_){Dp&m=$0KHgCPJ+{d7_$J6N z$HuSgy!MVwTn{dLIE*jP2;IK~c0?w=<+VwWr`~2Q9Chq7o2cXnou7Xm{pI>7RK14| z#Nk8Vo=8X9u6n=u3!c|}{-Q<%*l}rr*O+H)()3tMFFU}IN~TU~{B{p^B!&yxw|PUJ z+3(-f2mOm;lH}wbl-nh}>rH&bMjQ@tx6;#&bj>0O*=hA`k{7o*eCQm;Z~eu@xQ}e| zXuWJtD)6o?W-FB;FFm}Lc=<8jx1_Ul$7aY=gWcO6S>pRyEg`j0umjQca(VLw@?yrT zqc57Fr>_yS(~l-M~yIR|I z7V>56zT^c5P`evX#zo$=uOm&NW^lcWo4l9Gj+X z_~@$x-BdJw!+o4DTeGgLjEBBHJ*%f$Nt{LYROUH*eQ=-;Q+oIBz-!z(n`^Ii&*6Q{{uvEQyWf>`qzlvwx>taoa><+FWFqZI zo%2VgJC9(I6QWn32Yd;+jbEIk_&C5>y zAcWbT?5S-Z#w9q=SMPH7pUMOO6C`(7BNF<&3^mcq;8$Yact_OWe4iquzW8}E_#3AT zuKPI$T99%5w>|ikcjqsSR*rU{4!`$Ah1~!@vZZ(Rvls_je%iD-6a3HlfY_MJu@1Cs z&mx8BY>2mv_Z6B~P4PGn`u6d@bFB*@RYGS)KY|{7WZUI006!pgW4_I!s}6KcX+y}u zy@VVNeRISS{q6O$d4JoNkfMVt_0PrOek1D2or~Z{!sl({-2nYiKWck 
zir9Q~SO@d(&h@$Ds~(OP?u`ButKeVk0jOhyUgP(F|JI#+xxbQ2Pp{Wd9$1O{Igwyy2iSAM@tSlueVM+<`tLfbX}au1wPh$PTQx7OGCCTua8Cj;|sn|nem!SQ*`5o znyqk8G}OMKxEgT|0%!ERvBf>mnc&e9bzG_u&>7Op#eHW0PisUYm+B5Y+s0oDecZB% zQ!+nr>E6|L>Qix#v{3P&@43%hDz(wMyae|eEe$SW$6LAdZO7pk@;t<$IK5U_^$V9K zeVF!rlQoA#+4K!Z=A-^ko~^EhJ&o)pBgq|YTF1rCNY(UJ&U8Zv3gIwyM`RT{WO}L+Y+TyiGm`8Ueh07M8 z{Mj`Jq1&Rv_6p*o2Y_Y26}PUI;?Zr(Q~lbVIYi-p-PqW%u=jh|WFY3kAsec*rKP1| zA3_y&cEKJ;Iq_K4gK@Bz`Q80#mpk4+GA}Z8JnV18CtPni$RQHNzV>5edDKtdW>Lj4 z4q5NC*;sKR?hF4;9#!p+d%EUWw$v0Ja#QX%JqP^M+e)l%D(s01&(Dz##Qo{Kahr{% z;XZ!A)A(%=?6FjRf;^|g-oUQS-YNv|pD0>!WCrXb*36fC7>s)*39XDjGhu%g+aEdh zH0*~@##Z$x<9@$6a{tf?e1FcIgYV{|{V^lIpTM+VVA?k@?H`!- z5ls6DrhNs|{(@pZ(wuIB#v1$sp$c zvU$41HQ--E3!X&&;?lK-sW;0G72*7NHSX00=)oSZGyS2K%cUE)7gzSd-em&+%HF0FE_GH}S(*(0yU0ez zIWuFq)W@M<%_R6Ix`jz-zl-3~acR$Y=_B4*z-V^LUmq@=-kNm#XFlv*OtU|qIl`sM zb=%$>;e9Fcqpomvap{5DJ<9d)cdWGzsk`Kem^e3kW#6O!X9b7(3)f&jXBb-F!S}tq z|Hfp%m`i_%KGJ^;y&E$w3o~vDGp@_Ov-Drw7iL@-X51KNTp4EEnfD?G3<_r>uFU8o z#%~cf;p^g%D@t>@v?u+Tfg|DvWhi>gDO2In^D!HKpGSGrK2@m)s$44iM8(SwaTRh~ zQf`$2k8`MU%0t|Qokz!+2ciB(!!ZdWh?}rb-Exiy@GQgU=3&HyRJ+J;@}I+{m%j+C zd<{*AqmF;_0+hGtHOT(f#Q2mKo-_r1Di@veNCW<*fhMhksQ)bG?%|S!$fcaF?Rp!y zU{;5))?CDycZZ{R)J=nuMWxYzOeEnq>WZ6R_SyI#wGmXP%iZnxSd08jQIAvb}^UkS6i>^)rNb8beqN7fybS(wh#WyAzJN?L-&?o zeZ6t>F#7FeQFLb!HAwJgt#%jcs*SFq88~bHNX5mQKEl5lp*k}oS>H0U(Q8ln1 z{@J0<#`8V{pQs%4+;48GPpAb}U1j>|Nfn1AS@5Sj>w+E&wSW5X9QXb|#6E1*6b7`~B;QoYS#BK0< zcGwwMTkZ7P1x29e2TmrKFXPfRv?=@4BiN^^Xu5ctaq0J-B>Tqu95Q-9+%3ccahfz@ zt4HN#%w zB_(`y8!r61xaUs+r!{fC(|BBJqujN`Bn9J}KSNl0Etf`U^n@GV0{xDvuzYOKr6tDI zVSSm1d0bU$byL8lqjv2&bs6n{+c7qyz=2CECUZL{03RQ((;u~wOKUqK!DvYzSz4jJQ?=_Z8IAaoH2j>f#IXBbI27Zep-$jmp)cJ7kLFZ!Mj3JaXXi09sfCb z^)>J(g@^Yh19yIkxH|7L?m-N#-YwgS^T;C0$?_5herC=Cp}P^MO4CyG=y}{j&HuXO z2F@enn{sK>VUMcKmR_fa@|a+u@)@9)4hs4YZTE614a*Dt2>K|I(vrV#KbNjJ>X}sw zthgwxnF7DDspk5Co;q8PE0P9YEd6**Jnl&nyKd#i?cvgeVVxdxePMr>>ryX{_L7&W z755!N+^p)OUQh78u_jdu#vQ;tO@6arCdz{buB}*tdlwa>)kQgIZ~ls}PFp>}zs&s{ 
zQ0Wf-%TTvqH28_DCGQeF58(NYiQgRez0z$=hTg^q02tH<|^&Ke5)dV))x zWL6)Zu@Clo1%cam1mSZI+3hOEFtNdFX4k_6BS3ejb*s^Ge4Euxh@lN&(2<1$}3mBm1KV1K4!}yapKRW+T?I)KV81fV-4&#OA{QLP(I7+#1;tw&ZAIn z;~$h?va{z*06(6jt(DN5i}TCg=U5Q1(fGrvh7Y;4^~{82@~Ch3-fZ{1e9)hsnX}#P zIYda^$>UQY=-tJn55;KT>dv&w3MJ@I)Yq#-3=HikzA9>CY?fK>svqXIO_kpQZ{M z++LZdQxdh>}fW6xo3H(zXEcs=AU?PcoO<7i=%v(8GQPF z^dX@KC)gw*xZ~b5MLci)UC$EuecHs2PnGzzZ25|Jwy1wWuSLyjE}vSt{22EO`Y6qk z8Iz^w^Xb}#%65Jzn@Hc5^;xwL-zVZFAsYsLR9<0R3Gmrw5izfG@cTJ*T#>Z|IY$TH z7=Damlf4^nE=beo)1gQ~}diELYD&}cSDzRFQoSu5`wGLyhWo)d|Hd+49-1nXLzO#2F+S9x= zDjUx)96Y{m$!a_wW~8z%fK9ecyY2hgicjZuoIb9C_g`%6oU3Uw>~|G?A`MtHynlmT2>M11Ipf?{5u3`@&n!*A@dCIe#@(IL0Qg z8#AU(MSG_kER#Q@zlRPEIJM&WP1E{XjZmH(Ep^gwEuZdMCc#-1$R=u*7UOM!&$fKo zavSpo96e+qp9-flKKUNVtQ zRyIBqmI78d-B>(7nN5DRS~`qyL{8PPfUWy(!d~&W#pP!7$7^3OJqdlKox4W8Bk)d+ z-N$I`hY{?U#8G&^0q;q)F!Y78LisYc@P6&ZdEP%BqW z{O&@=I^;s%+0rBH#yi5Nfj<=|*!8o?R5^$43?Jke_H`7~_*qv`MkXjMUr?o~)MK?kJDS5AS zw+V6xkKOP=I9?ib`{NqxQ(=61u0?vJK#oJ4ywBbVL5^VWjjqS%O@_aCLg-q%NIuOS zb9BXDdH4+teB#9ePe_cPw|yS!H$2biMGoP!cPl;=Er6cLajNt~Jb!8ASJ6?Lu&aOn za=Gm}jPLlN4R=5n4L7@ai-hv2_3Ccy%0H;o7|+hNmWb5cgWxl! 
z=~pQl1CM+5(sq>_;;|Z!D>{by3!BzwY3u-BX8*t@2K}qgUb1AIJBN67t$kaI@8ML= zygO_^v+8BD=RxFPexYI4=L|gJTz%XgZ-V*pdOmx%O z-*b(Rc!3Sgir~{GxgT5V8^@=zcV}3f1Yh?jv5`F|3GKCd1U=dfzpF8VrWeV4>K0%o zngc$r=g3M^P2^M-h&c`n>y-v=_NiC^XWuLro7J z!w&GPCs(8b`{!Eoi09?tGxBXlt*Jsz$mv%~RqPZ%z5xP|hjUlY3s;%B5RyshvT<%i84 z&Gm%dVavkWdJmKjT~$#|j9X#Zrr`=IhN6O#R>#KZq zH0l@H>b^Bv3BALqS2Cq9_|($t_TM^d*oo$Ao-V|Exm|nN?~3>!D_Lg>T`-^7^_h0U zT;xKMF+H}d6gj&)Rr{?G&ytzbnwj&OnG>6tGn<)Ho0)T)nUkBDvzwXIo0;>QnG>9u zGn|=IoSAc+nUkEEvz(dJoSE~SnG>CvGo6`Jotbl;nUkHFvz?jKotg8UnG>FwGoG1K zo|$u=nUkKGv!0pLo|*Ih(q`*BpHA{=`DOJJvwxWoO}~-nlz>+p8}HCPXhM|sMIDF( zE-q5nT8yCwEMSoZad}TdKp%c)ZQUztN&*_S|7PPnZ{fUH$DV?`M~d!)L6Ll# zB_5H{I@OdYc5YfKgY&&;iQI8%h*R4&i}~BxZWY;U6S0L)K`+g zr+q@5#ocyWbXM=Kym*>|bMpyi7li+&}Oh^x~f^Gp(e) zuL=E``mtyS2}KYhkO!HoUjMD6S5QvQA5=_b8#(+t30)I`jBZG`@c3T5{$0>6@Aaq1257_YZhOVvS7 zE-DA70Gpq=!WusTPL%XK6a>H&}oAou|mA0(GAQm&*4$Qa@%(xKDxDm{_63n<0%(xWHxE0K}7RNr}9MeCt|EH~#G}AK%z@zJ{ zWi?^XApJ#hUjuOV)Q5KaiM=#iAk_M1e~lq@8#B=xDTqa-*hY6=!S6r7h)yVd;4OU*DPi#V5$a6NwCe z_rI9m{`&o)rW&DQ!R#@uaChds-KYjh&_0VHSmaklW82Dx8 zI0F^vsqen6Nn786c<@ol_m<(iE}x0lR{d^6gXK8wija3Jo6PL*0Z&XA5Xpr+`~JE8 zrLVwAPbdEU0C{%R_00x55s&h)0-Pox1NC?$P`{uN{fly5oP}lnyE7kfxuP z8y_LQyZ@6DJDu+19xtcb`gbGpHS8W4)Sn9fB2kI;58l~OW8tFS!K=8}-MISDPGF5Z z{!an!&D0m{ax1E`p|2e{;Ze9JaueN>l==#Je(s;Lo#csoAFWr`n#ezJCwB3Iz^%9! 
zD4Y7nqS%Jg17F)VK_ByPE&N*>){&wOYm1N%;cuDDWvrFCSJjm}QC~%Lm-H=QTi41L zn-IUg=}x84ay{JB-k7a^5Ao+^w9L+!>%)(bv3{_m6tJs&hS%ZNniG=SZs5-JN2ZS+ zbnUql8efL&wEb@`n&EXAJp2#-_{$cAy?Taxr}D9%eGK78A90Siv(lC(nkFUwUV{CT zxGLMc%9i%dKcQBM`))ft0UyPOOu%*+lJ{=tZJ>0B^dAC}CXU8>79i@)=8cJ$=KOyhW z*S?bTtAS^AurKI#+tPyDbso*YYy(?ogKCKKagMb)Pe%<*|<;h z9e1Y;*x+PE(xzF^18G@AY5cUMbvF*o9W@j8zd}tLuMg)DVjrCKV-+ltwf5Uuy>Hjr=bYyZ`|Pv#eP8!=T`GIYWll?6I(AL~`ebXFLD!>aA-~*Qzkea( zvT9M4=QeSh(^6sW9QugEW=tw%a`Kqdr`8@068#FEYRm=aDYT=$#;F%^U%o{ZIi`Hb z&tG2Bbr1S%)B6^?iUG%eI+=ojmO6J~eY4SgbK2>I=btZK$P>hcg|Yze-JH2~xE;LO zg2OV-0?2>O^?c-rICWuAGyP=&_$Bf}Pe)%v?@WTHBM|LRS@;_K5Z7KwANFMcOFk~~ z{L_fIyN`|MRvd5T2HQ()O{lY9`e2X^zu)CsKlZtSLG9fE-==xUU;m5~*#{kgABPNA z=n0zBPj)M>4|xnd!bQK*-vJ*=8us5@gZ(RRY%0a^SMGX~mhupKi_Ri`6a41%ua8%j z-9Y<1Ifr$JaeiF;_wPT9yw{Dj1J^e#GKYSdO>ZaS^!{ZT6&HZF`R0H8R>q(@CqmiY z3WGl&5%4jg6u4CF_1FYFdKrx(QEVv{I>e%;)PJF*hy&=Ead zb{_4!py#cga8c5n{!$=n`vS*j5xEvnwiNl=8_yTc0}e=;WwZ%= z7cNbe?ZOWjl;_-Bv89UUbe?95W6@m(wSaSsy+FyFW~qIgtecPe-|VhO4_ASos8PNv z1?`eLqB3uR)#;J4pgg?msp138%E(t zVbw=I|DI5lavF{=GjKuI7<`lm;?|W(4C)P^N$D69=aV?37K`JZoIDaRZi#%V9r8Xn zpRV0!bU1C0Uw$|F`4x_Db>4r^OW;(q-?J5QeC_&^+YUM4`5f}{ow^ZLQQ$S5w0iI^=;)=@G00i z+rl$%AulV6u^;@1vk466SX_T}oIRf@_z~iNBpybgKio*xG8_WGqU*wkv{lICo^7ls z&^%>M>xT-7K0tpj5O2G8(%+muzWzyT19R56~z*tI%x=dEGrlH?9ou_l<)6?Wb+AtUJT2dWOUC!5nv zo80d5j^p|47_Yx_9q&)t(k(Y8fk)o2EKS1r_+~tF1-=hEll$b40vo4qKR@uBL4`^x znMb97f3p0L1}hu#R(oDcOn`rJ&st7PfDih>wa)Ukm>WDVnfs$;0epD=cdiiV)ykKL zyxFm9^rsnq#@hF*nxEl&$7Ynj`Bet^JqnK78jwdTSY5Q$9qml{euqoYH~N(%H@g7G zPnCMoy!&TJS&ST~aH1i0`NP{om&#(EjvJ?JxIb;HSMFKA(*CFYFz5&a%*( zI=d@eAIC3g5p7+u>c4yul21bNO-Mcp$yXuyEF|BB93QpOEAml6*vxuSoJ4 zNxmb=ha~xuB%hMxTatWClCMefIZ3`J$p{zZf!zVU8d5F8Jok=UR3Uw_5 z?)ZDeGNp}4Us6zZ^GQV4dDBjQNemSpfIV)K=HT%R)OUxIdg4*B5AyS7$|1K<9&?&j|T z#940VwC{h+r1N#twmKGq&wT##+FOqhZ>X)X(0+h=DPPTKW#GIB_3u8V;LnCs-oFd{ z!aFxq4?GLQ^(o3hHSiO+u6U{o%)IDfA_08P@4~rN6^Q@+$vi9x|M6%Ww_8Ky(A|kB zNnTvRr1!V(ijx9w!}0kcsb%HxH!mwT`c?vd^qNVx(qbmfbx~Vn8+aRcp3KflD`dhS 
zYWSrn;zqBUID?Yz!S7swEjt{zIb*@TRQME&%>TV&@+o*MNt2{#=&?UIZ7y)o2g;DB$oumq~wb%XX%K zkA}TGK?4g;an(dLKu=jWylb$;y zxN?Sn@8+!Vt_S<~NYXA1Pjui%&9bB|2fwGG&YxTdesJu{qKqu~1`dyjl+-{+NPK@( z=qoD9S_jH~o6p`9aD?AGEN@M%_3IiFt* zzH;Ne&li)KwDc~XL+h|T{}%_H`XnZ8RaI&nj`n>Wv)ETA!uMIO(%lsMe|7YfzL0?X ztI=q<>Jj+k%Y*5!@$jer_=JZEEPd|P-Uw{}F6LRCHLho|`URO*Y#%p0R@3ne_xms{ z@h%SiiBcF)2kuR6m%VX~N&m>4v_FCSb;Re$heY^FM`~HoH}O0^4h&W*0IS~x?*{kR zX;}31>lh~e=%?_R@4yG%jP29~mWlH0_}d5`&G>o|-%EHNW#x0^aJ;+6%wqf_aevcu zYx(i~#9iD9vm+4C%3ko5$es&FT&X1t58))CC zGpqVOwy!*yuyO(LZo9K}QaJvTbI*HU_%i9)aS8R?aQxnetk0~#Av$`GKhE%y(*MY| z`!H#3W1+f1>@T%0yNC8-(rFD*PXy54438+X3>?Gp?KbxL0kf`2-%SIH4mn3`M}Om2 z^6ien_Pgh@HtfRt@~mx7dIpXcGP?e|7Vxnb_D(;5A01q9*$w@1-md2`wM|o@;=y+7|S;Lvojw%mp@Z zXT4GZ%!r*O*67Kk_v{n*4Z;378l2a4u>aZb>P6Z#>H*g3bT*(p&{}uLx=O59@-7QI z9zlN&4c4p%rhK3Ll0OQc@ztB@P>dg8wj=AcJeage-@}fF7%yIndA;>U`^Ldl;#)C3 zCERb{@fGc9l}X9P7{BICJ1q%7yYbgcL1q|lLW3NRTLV9T7bs&{jQR|=3tk@XIKS=B z>4QZ&l-^wF<^yg_+Tm1AZyI=JN6I~#m4M$4#|0N*{8X6UGQJnv2gtnZKavN1aTA_n zg=mkLIh8A!i+qK5s^@03$E)v|XO#_J*NuZ-tibDOnN_bbKL3mT7BM8kb270WeExPk z9_@V%aVl;%P)B|_r&u1>KPk0iJOX)y=>s!Vs z9%Mtiy0E?U7kppjesmss8IAGP$-b`u*Bh{NBElQvhlYpT!ii+;uPwSQ6yrk#!!FJi z*V8^r>XHD)hrU%oXJqlclNH-EEDhYl+SP4{>+_kHvyH~~&g@!qTB5Oi@qUj8wBJ%T zR_3|_o@LLbyV)3jB7g6R-gpu5>Z8l7_^vbQ)^EF2%p<@PHQ-n$it(@asW^M1?mnZQ38_}rWiSbx;cv5}eN$?iapMBql@tSQ_cxkpT>XUWG zTF+-QY2mxC=%2`6J-y$f0n7v%66a8CtS5)ExvbG{&FeYM;_`^`RWJ9 z@V)G~G`gYK5&1b4H8<5te4mw;=j=X&{ugZdTCSQ&Gxnb4-|GzC=7&FRr|~_O@7m$3 zwioqWkN?Ic0Vl|P9}mI)HXC@gqH7T!-vfUETg)F$|18e}R(r6!b(cBv0SdNfE9;ox zJ*_pLsqc2*S6Cy9`9tLJR~v0$GJk=Th6MBk{W_2EWG;@(wUD_OGFL<9a*m$Ac%&a*#*6%;l9;P-kC^dAX}}h> zdqVl7k#kUf@Z|YRk2b<1pUYb{@vz6L&W`b4ZO8^+kw ziiiAtXy5WCD+1i3?e{(QwNpo}q& zQ|M3aZ{*6P^qs_IFekllzBFeV-s7m`-?nG{Xu|>~-PzjX{~b9Y-5uT@S-`!4yI2f2VT}1}|9A^8`mm_@nn*nzs_LhD zBhP%yL6iq0RvF=)H3`k*WWyY9r$f>@eY{f@iE5AVdr9nE_?yL51G6wjeB^FPMIViPv1N`p50fqu`V>$HJPX-oJw7J= zB}Z*41AY3KXzpSGe!OFu4+Adl2KT!3qh`!P^x0EyvwvFRn=>j`d0-Lxd|5*9NgI5F 
z!}@v-VEfPC8w_gjj&0oXX}b@uCw@=q{RtdTYO?q1Rc_p$1)oNL?7{tbV|&mD=kr!J z&A}EqY{R_>qf4cb7E2K}o&51v_zn zs|8SwnS*zDYej_JxEXzGl52Y?#{AVoF>=@c!t4DvWqualj1lgA7e>%ujOIc^6?6QH z`)cOfqrGY2$FGa#Xx$$?KkkSyu?fs6Ds>LNXW_*CVJN+J!yMxC*5@vrb8!D_ zt+l=IO+CdI!1TlUbFgh0nel4d#qBz4ix87;F|P}=#qaG9Sd(#91Y@$cYffAuzNv5C z#!gA$`P?+kC~GE3rP{^Qzoo zJLBNlGQG^{4Yr?jcspC^st(nVrIN}Iyrdsr{wT~Lmp3n7fbG))FFsp}?L`{)1ut2Q zZ=U6n))lzln*k9M-{st4@Se#$G;e^y1^uNvF z0j^<^m=hf^Uf#w)4c*+v@@!!yJ-@d0f`}^K?>ytQAzWXe{_jJ^D^bI?@cgGyQQVJ^ z!d2XO-yHY?vXt<9X17^Kf63#UckTB*9UOo47=7f-YH-c_E*;#rf=PdS7=I~S1Mf#x z^P(yAXBqKz5A+wKX8QX${}SN1&m%P*oESgqU(#Q2eHpU{L(VM0^D6$8o~n#FzBEIU z72^qBkbC-TnRLp^hSJ42zco5ZB2w#debJxt_AUq4{ptxRZB5LXt%8l5a6Z8`*S0JL zN2EC8fyzyF%(>~ba&coO9kqkSj1&8dZ>iq3!vu56qmT1`;Cy|0&h}K9;{8@k+i0%| zPYKxrZAM#}^jB|zYd_79ORrM=`Fji2Krc6aovE?(63$-pT@TNtj@p~ce6t&OF=@r(^ON5($2t7*eh`l%=2Xw$UXe%touAsiDG}R8De$hi z6$7oHtQ!eoIG)>njiBjksOj~7yto`V{Fs-d1>TQfF4d{Ho#5=8!Wlsh)vEWk7_s{$dI|lqf_3?5`tZ@ULne$u(u0P9akQ|Ho z(TM4jG~h#r&g&hB#{5bfDtiTBsafj^mSMh5jRF@G&R+?Tf(xOTE&cK>X|7`AY+djON%PLlgcazjb(D9J4) zxu@iM*i&_haXHps`ipl@sbPM)+Bt3H74W9aO{49Yub6z=G?|0>;GRjrXz$~g5ASFx z6cq&i99|UV1Mg#&clpV{=2f~=)!08`eAbU%Y@g55r*7zlT17{TTtQ%efmdCUh(YYx zS~bj$`RtSGX)de7(9luw@O_E(lRT$L%q<-M@0|5-yxOV zIz^L{@YK}UrBDmZ-8_`I0mpw})bjNN)>|Q^W`u5b5<-t`7EI*NDEXa@(@hY~zV&h=)r;{kjrFPGEF7QwJw!4j5zJ_tiGlp{m#|yO;c(R-{;i{V@s5Euv{qCWb3DGv!$(< z_hilmzGC5EaqF-xZKA+dwGEhG@5NWYQ?_)&^W&E6z-gDoW!ELy(x;amO6h++lU^F6*9PguL3(xkyVw7`JV>t((hG$23L(8jNUssni-hzl znVz~E`0SVM|2^B%fJ;}7y|Sgp!)ON%JZlaYV=E`zuUS8i9Ic19Ne5@B)BI>#dVj{0 zmOR=cD!gxVhTGEbEkvHP0dMoX_OjK~mfn1jvFkjxFP0uJd$GZmP7o=1uM2$SYq9ip zc3V16KDDNxMf4X{xm0~ z8tt5I-*c*O+JMWqo@*A`Premd!FR@n&el0nPow=s;g3O9;QPGhpTu|SQh#2qR~$iq z)V%njTH6Mm2v>fs;YE9O;_OgGcq*Lywr`3X_{(2L?K=fsYHd@Ftr4*IKy*a=G}cW@ zEc4}oH^dfvfBG1?sO+N`C4dins{b|zcx>8y=X_uTldYZ_h-=J;0Qgeio(Dze%Fcq* z&C<1l2iReD(8+Xbs6O!=%k7SOld&-N}7^(H6!wsuTgGN^-U 
zCYw^=Tb!~{ATb7d#TGVl<(j}HH{Vr$)P-K~dF9kH;D6T}me|l$ApyM&ZU&cP|FmwpGHO%J~4R)a6IWq)xOa9HZB1s_+V-nC@Q^lxC9sMw!28cX91%8we-n34eZPezZ5CC;9{|jIRMS<{g~C{cYs-?7D&NCpGeeOm{$U#sYdY zz$Ih#Y~uFlFCJQs*4W=kp`>%4BY4KrabkD%QLlSaO;jE@JWBZO?5&pc+21Sgo9v$9 zRja?@1CH709`Cjneq|v>+UM|lz1(erKb@gp9Pn-FVjQn#Zx+9T3!eX|3+pYk2keQM zJ#mmhbrk4sGQ$3LychKQU7;6Jod4neX6OOLRY{5-hrWzsc1k;N-|YT&@st16vmy0v zNIe`LrnS zN~GQrsmDa>HIaHwq}~&$2Sw^dk@a6>eHd9kM%I^+^=D*#8d<+a`pT2O^Q135>03|w z+LONbVP4CwzLzA}<~1pY=Z7NiyZfX}h^c(%(o@Q&5ECO4tpTj+I7=NQ@xUR%D7|7t$*LUY33))yKeYpaDFQWH$cmRCAZGRp+(}eaRzJvRlfLTL(wD<-Y)MjntJ%t#L z9p?LHunxdqu;u%MZumU6Zhfc6_66_bfn?FY@R8mz^4qZxbssHZRL4BDA6IKh;lS~1 zAMN3ghEH@1eUnzh`5Wte)yYA7pYfq?W8}t0Z1?9IcUjPzH8{7Z;d-rH&Kr!tNBM%p zn%?&lsP|i*d%eC5zUEWD0y)4>jW}0#y~6Q^S?%}Y_$}G>w>=vz==9fmY~J8CFTXmZ zcLqMo*2{$Y@8JBlbPi5wJ+q+adud;?#`UyDmj4!eYC&IqIh+4&KZ7!4dtB)L82fYA z3NHG@pf(BaC}?;D-{*DOHoL*2-a2&EBog1R)!|{9Cg4$ zdN{n5#`Cc#&2hl>#XN1*H>$LtO>cI%iQ|0p*v@@QDus`;UQYo(c+`hC41S1zU_txU zF{>7UN6mHJ{;gIqe1R9v-Ov1sx#GORwcQ24W%VwDz^;qUnU42x|J%aN^^sF65ZQHG zGtYuv7jeP;53YaG_)cGXt_A%kdvG)mxb~q)y6A2Azz!WqD8~7n=m^kymSsWv_?RSp z1YYP$5_xP|d^5id?&X`_>kM{7w@^WC(2bc88C4F>BpIy?2m-Oi+ zeSAruU(yGd^a&c}Kc~WPd)S)MJ z>Pa1YQsLV(&vEm zK_Gn+NFN2#XMyx#AblE09|zLsf%JhOeIiI73DReR^r0YqDo7nxQm2*FaV2$LNgY^H zCzjNaC3R*=9a>VSmejE&b#6%=Tv8|Z1)-x$>g~N zk#%Kc-5FVzM%Jy7b!}wb8(9}e*3FT1b!6QgS(iuF?U8kTWZfTG7f99(l68e--62_* zNY*Wqb&X`*BUu+o)=iRim1Nx|S(i!HZIX4JWZfrO7fRNRl69qI-6{LXaYcXBX?kdR z<#XO-P?AOSY|~>JqmuKV#Dri}e^|*KN*RbJ~9-h<9NG;$e2tJPN=|b$Im#L$RLtT*Tv)V@@jz zkL(Sr-ozW&MOo$lu`|NkN7wjH?CGp^m{EQ9j$lsY%mC%N*75DjVZ^TO{LR7lqMy2d^cXq>u4DF#>eY{-V=q*uQ|bgiag9c< zap+LbT^2$u-^HLZH`h6Lpgq6(gHDM9;!{&*1+(j+tG&Ql^Ecv0W-phAUTVhuYv1sv z(H6MM^zGd?_}Ew zV=dbGy5mi&{jt7nJNRmjDT7+GMAV=x4C@8A+m=>l=uhwWUXQ_gCEL%1Hbp#-jMsAQ z%u=|v(^!u|v6(G}A^8@x`DchRKmTE$O{Vc@kI9zQgS6uV&~Q>o4{Tkk|}8>~nD% zD}hb-CGOp#gSvEWza_7MBVA{GxrTV?p%on>$ABBXkGnSEe8}@d*m)@n3;QG|{_!u) z?vYgMeZ?PI$Q=id9nY|$86TTJI!+(&HV9Rm=hFVV?~?Kb3USs_dsUjaS^V! 
zR&+r2EO|j-(SfH%Ss$$Ef~YhHDPYP`@t*#O6`eOvLcRJFeB1QjZruLI3c7;^rEF+7 zRR~?;#9~czPiZ(kLHm-L?e^=Xtmy~?oxufY&)L$PHn7SXT-MN|oWNDT0wx-^TGLzY z58eNX?FEOm)t=Z}(>qTt)C>dGO;&C3IciNO$gbdP1l}o}a4qkgHND;QidQr6GmR6m zYh$eGF9J(7XL3x|&4Z(tQ>|(Ky5VO}fuml@3rtp8(|(oDU(>(}TP7|}zq6)u)i&hK zv4X>{6}rZRb24ggPYKBbn;X+ygwMlZMrT#^;xdsWVc zo^|{DKk_%_~q#p^3l3B^!UwfzitDczm)6h=43-3e%6Df^{$Vu&gd3zqP&CZfGr9SKb>&pqRc4h2)W=+f8$XPyu z{bgP@G|JRi(_+pl;ZJ}$3O=@o=UUVHAOA$v1GBT9a?($>rcYYit63~t&x`Ru)jYFg;45^ij)hLhR9n&Y zzKqrO-%vl$9aX%i%8EXp+P&)K2)KlNT;Hq9t>|9eg-XVwh$Bs1ROBtg`POfHmXG!$ zGPCN3OReZ@52?HNzkutgH2p*p`21{F-WvnZgRKEy?*-CGcI6fAw9j0`&RUt zvW-`g``|AlOPz^8JKO1rrv#K{Na^a)X{kKeVE23SY!5dWrf+ z3m0csU@rI1ev6;s`SwiT`ibYE-hZ^F2K87e-h1kTf%#U<-L<*~^;grK9X!~+c8T*Q zi3%M`>-&wet9U-YWi?Z@OTjhHxH2hLiShY@7({5nmB~FYX@&D!bSc9)s{mZj52eSn za6LKa7FgEj=}^impSYjI^>$R(H{A!G-SgOkiTkHq`AqmiHgc*=&7(rV8O5f5t#6?} z^Eb@fgZp)6XQO=L4c!0L1GlXTalJM!7f+|4KYh8j?RL7(pDr1U23MO~H+e(06)j20ymN|yepE$fnZqsIuOoA9wnWY7 zQArf7xe2VpuXW%G{Ph0!TQ+d(Z<)Yla8pFm?+@bJ_W1Cj&F&F6e&vwYUtrIJthb86 zJqfwvkgtmU*Ztk*y!;^g>+Wq$n}9XbUAEafAP#!Xaef>yLuUOZn~kV1YtOor1RNO9 zD5j)}IA;C_%RpdhIoa>hh?{0^*SqQi-1O;o=|^xqHuo9@qykTLTpr_WMcn<$)zn1b zjR{fSTCW(Cz3-XVGsi!E@9I^>T-b&_4dt8?#`B&3r}}~jzL(_p8lP}!RLwk}`R@Ol!yQzfKfuJC(5}>c>9TRZXH@^-OyE+0rH_%uCj+4@kMu z#iT!N4wn-LJ`f<6+&cs=`GoMZWx!L{+qRu!g}>YOvi>^k|Ep!O%6xWndYrkPb1vE? z|5D*?i=mG-uxm*;aMIu3oWip3U(tNpWwV7rCEHeWrK~Zh4}YA`CXIR0-w#{!UTDnd z&v;7P1G{OT_g)13Ac6OEt`+jOzt%pC>V&>e&+Ij)PGa8K6sp!-aR7QKde=8S#ys+? 
zq*XRA^m$xUq@HfYymrdO%AywfIxJ;u_d|fK`P~Y9p_gNRWKv`+=B3%nQj0sGU-Kep z;YUT_b1s^79nhcYUM3&O4Xhc_`d}LCnB8@$%V_-Gi@3vYTlf|JI#2PSHjP1dA7sMJh_y2;N>aI~y3v3C64ucK_&3o&p1uFPAX0sQZo{+pMR^LBDxPtNa$QHR`^a@6xo#xamE^jUT$hsTR&rfSu6xOK zF{#%?>N$~mPoy3csTW1+Ns)R}q#hNiS4HYsk$P989u}#WMe1phdRwF(7pd1p>UoiR zU!)!wsTW4-iII9^q#hZmS4QfYk$PvO9vZ2aM(U}NdTXQ}8>!bu>ba46Z=@a^sTW7; z$&q?iLm+f21BDsTWA<36gq)q#hxu zS4ip^l6r@v9wMoiNa`t)dW)nUBdOO&>N%2nkE9+XsTWD=Ns@Y#q#h-yS4rwwl6se< z9ww=mN$P2mdYhykC#ly->UolSpQIirsTWG>iIRGw|Is5Q^h!xRQ&R7g)I%lpQb|2k zQg4;iV@o2=(1>%GZ(aI#*UtS2Yy&B=OnvR<96XD933 z$$EIQUY@L{C+qFWdVI28pRDI6>-|YR08%f2)Ds}}21q>uQm=s2Ga&U2NIe8nFM-rk zAoUhVJqA**fz)##^&Ut)2vRSC)RQ3fCP+OBQm=y4vmo^@NIeX)wvennBx@7N+C{Rq zk*s|rYa_|pNwT(*ti2>_Gs)UbvbK|~{UmEc$=Xq}wv?wDfozz<=_1Hs%n&M?&Od_8SYV9Z3Vz2|iL0VZmt{T3msgj$j>aXmLIQ0pUk;mZ)T35?DUu?3nz z8z9hkiMx^xHTP40_Z4&05`{)Q;z8~8Q7e|R-Kgb#QR#ZxRUYw^prjBV)D~}3;YygN zj9L^?lhfZ&8#L$UX^kFf#GmFqndsku+8}wAzON$C%gOyCE`nNe6Q5@V`@nPBy1!$g zIBLTSoVOJQfLAsDZ`u93z@9#d6a(>sA^tTY`KTq5*E_oG5DT;ycFMmBLv8oQvZ;=A zR@CxWOBJ6%EqwgM?`x;QgX@)U(zyif1T&e)!f)Rhl#z4}rzr5-`&BJ1U!a#;($Y|g z+8xiI$69rIk$2&(%YKR4n@9ZnHq38FUhHb{omZ&M@!E6UFcrQ6cNYqT-Ng0*kNDW@ zT9AjU-7vt5_TEBzKiz;Dk;O^XuBcs~x-2y|yBYBbM(3#HGA4cca^Kyn-SA;NIZ<*z zj!9>(&@meQgm^`4*W+C(xWA4S+_vDcH7YSx?N_6gDz@iE5pdrh?Tj}{&@12eNKOOi zm&#OQBq*TvUE@~4s*msmUB=vY826*Emb#+U1RkSI(hV`3-}}8AGn#r?@sSmt3j>)fJfl9@hV(@ZAD!kaXixYYlz1hbK|SKE={Sk@p#{Nr zi<05%S@zMl9<}}P&TH@0;dtZTTeSrh(Lc{wc17d&BxHt5-pQlCeE<6{2FG8*Onx~J z{f~Wl>C`qHPbKS=l^pIz?0h}<@OTCl5~m_?0O$9(VyX4HE2zOqYaH2y-;Zc+xgsBf zcut?_69)R*0)u;RsxO1L&*ioJ60U#a)B8iW(QZ`gHpM_K+2^?9m(wpoyJX&2$hT6uGDe zmv|OZIKL%kgWi`3pKaG}vmE4UH*Dm3`~%O|gHzw#JQRB2Ca;YaqkmA3 z&kYFS_f1$I@Y(@u|DE-1O9M-YzwzGtcMtU3=*aydUhHB~#n* zd>VZ65A*!&aapuKayvol;BxcZxBp*V1j`i8o@h~Ei~ zO~^DuJ4H?*UCq%B_fPhmV-JpZW4v##9=30PFSoy#6?#t23c*`|ZP=DK4zoj>h3&Eb z7Q{2hwa_1-)lwJofw3L!hsuszl|rrQ@9QE&p5@M;hGoFz`FYhLQP8sZb}z3@5B>womWNs=GUtHZ$72_K%OqH@wdqm?wGQ>k)OZW$f4}gSz^AUCL^-x9bfnTLHVK&DNAh 
zyKr}b(sg{#$lq@nv0;GdKd7=*e87yR-Y+cioBXdf6{%fCYFjCkx$B*Sk4w{^lpZb2ALiCHYL~*dCBR?t ze0~eQH_6TqD{wv+X1zG&i}QB~yQ&!r?JGCF>4RCg{(<|$oFO&;oG9mU|Y)40~-s!}D!liz`#*?ySOTp2V!Um0p#%hi{i1g_?dw7pz} z`OGS4H{pKl8r;5H8{fwS*-OFQXqVm^W)*_(<8rIW@IAPnf=i?~^5FX@n>4T@2KWEV z-&6$##?NE>CWghKU6jfOzX5zNj(^pimc#q!?ZT55auf5FSNt}QvHd#f_si#2U_82I zw6W%o8NFdy<)fYD81H{A+$0DrDR#J|0rQJ)UV*4=HcfLlrKC(Zsz=jC^@Ufiu zN8&FxM{(qK{cUvT<^0wDznZKvZC01L#{a|1FhNdc<_}q~Mb>kX^7+Ehy){~L- zW@J4YS+7Rcvyt^~WIY^NFGtqXk@a?DJsw%FN7nO^^?qbMAXzU+))SKTh6g{Y-Y>MX zq}iQ$3J$G@Z-eRyJ8AH3daPdv3u$27)>5203I5KWKF4h_8~)43A^AEapNHi8kbEGL zFGTW*NWKxtMt2iFh#ZxL+h(U>X#C7sE2KZ9B&t>@4QKiRWAd1PSqyBOc?pb>ZOVAPWyYg#lL&m)lg%7Kk% z;jenCYU)KfRY@oiMDNV}W+7vr-WmE8rtCc6ZJ?w0p-F4;)m& z`ENWKY6)ySxh3SMGTx7LYbEI*OL}Nv%gy(y;HyL zD@j-#*Q2w2O(1aG#VczTrZ;F;miY4K#69?}YcjQxfy3(T%P&6$&W+Uf2A+0% z+G+dNlHR7#u)iHx->vG=ANXP0My|N?2-v7cW+(p_`0)xx;ifHxCh{iql?v^F^>RNabJ zy#7A80C-x{)ovEkil&5O(_a9~$w(!$TUbHkZC5u9teTOivC`R!K5`)CM-gztjjDtX zURJb&##7U~z}$L+W2=I|ici zKiqx-tV}m`mpkHl5j+Euhd}ZaNFD>pb0B#TBu|3mQII?fl7~U^G)Nu?$@3t2AS6$O zED33(0dKc`zhThUC$ZJR6dSL-KS;9uLX$A$dR~Pl)6Zkvt=kheYy} zNFEc(b0T?ABu|RuQIR|=l7~g|v`8Ko$@3z4U?fkB6x|F?*s@MtefHjvmo{{6yr2>b!<6GZ=*A+a~sn4xT$(L(dC186m zzEiJP^XgJbJ1yQyquqVwjqzGOU8+a&=bJh2tmx$>M<&|m>r$pM?3w``R`klgzTy`JywUA?NqhUjGkj+7DH7OPqol$Fczvf~I%9<{b%JB=y0Fhyw0B75z(ILk zN?q}6s~hl(QJ- z_GysO)urbAQmcIT4aet*Z?M~}OZ{6rSQ<1B?={q=E|#u%asv0CVgJ?pnE`mQdzsb) z;C=cVjCz~^4tO28YY@D>--WRT-TJ!J%I?h4(M&5kvf;^}yTI#roSnsY-HKN8y?^l( z@JIHdM(vBZ|6e%{F9d$-C)q7~8oa%U_()&iN+GsqW*$~_eq3}cH;$ha_4Yp39xJ-a z^@fl&@L5^rxqK@;&-}oUZ8Pm^ZMDZXThVVarg~=1&vh#B&NoHy^0IfWJC5^zq#w9! 
z=L##@sA*_UAuv|Tv!#W=%ajuytT4j!U70mu$BXanlX>|Kz`@Tl%`g47q>l@T+mI`VZ~o1^F&xjQ;HQBczTXj-8(Q|@_ru=l&oczx`$En> z5%-gs=91d_*^+)+?W(Ab`_0E4mUbL?-CR2FI;BhL3f1_X{)G8O3uh?@p8uCyiHU;1 zPb>RQ*=^9J#(R<%N_Jb)&vgVp8?M)-s<(+9^8=2RUn1?dPM4C7{NUSQUWO-FUVngUDgP(Sb?sqwr}6 z$vyQR+pE^_Ntu3!M(vsMKUKiaX$D=s;1NG|%JXnEmDwDtW}JWJuEhEo9_@gyIY&MAPl0d5a~;YeCD@4d zsU`hJ;BR-?BONMxT#=RK5$2N|s?LQE!8={iDJE5e`SR<}TsdX%Sr~Rnu&BoT+VT%u z4|uV29?f>WU1mw!^t>J1au4y+ zg?^%W2kWWIn@ta{gSYJOd#fuK>yy^!^ByIDXS|E2e=-O2XP2N+0q|V;LcY)W0IVpa zE#wWJs)nuoS(V$kUqiQkO^51Gf6R``8KS+oDERNuVAR*2+@_}rY{%vByA|3-5p(Uj zUuId-E zeUFrf4t4YXy*hy;OIqCCa=Z?jY^+y0Zybro^;oy6uYx9=aZ``457r+qRct5sctKP8 zyLH>UOO~`wqhyf-@V6ahW=U9YbQZ5&J0F_Vd0O*pPGdg4s(WXF3N+sqZ)ld%!hC)H zizcSA3p9Rru`PWL+#8)+;^Pd>;;gG@GO_)A#<3k?dw`E_`s{`ENMx;?|M+e^FIw<$ zOqeC@CAi?iZ9DLu<*pi!0{@+qS$5L~J~HFEkyW91U;M%h%#rugasGbyA#nGxc{~!v zI@Fb4XHFc(`8lyaF67yY=jCYE@c`>3?W7gQ)O4ZQyw}B_1J`4^eN@bu0S(*srmf5? z=)Vj8Iu@$KC%{2;UPZJeG{pxv=YZ$C>wQE1cnr?3sQyC@@`M_{KUt(-wWI}Zrmr}( z0zBo6OV+l)C38<1Op9TA|H#4!?C-z(K=2A-9V+Idm1!u}Z&!NfvDrbdfcwE?cU_zh zpW_GibmRrMId0@J2)Cr)X=-pf@j%nJFODq*_vbXjHL)N146Z^uCDL*KB-Nj9l;(h+ z^ofdsyg;mHJb8trrxBN4Xg``Cfc0CTgPq`S#Ib(}x>#Vnd2UQfz!q^%^J@&#On*yS zCW6ntT-Yq{vZ7je$O4NcBBeDVHRhg(VjJBRE^JK=3fH>f!m%8$TT z@oxH3G2nz16-9lBdqzKq)jsQm^_=j}#<##yYr~?GT`g(;+k)F~4k9il`)zo`A*|p3 zrmp7vjC!t5pT9D&UbdZj-(U-UkoprhzGb>t!Y5Pf{-O6+pDdu?uia}&ZxK3YvzfRQMI$A&;R^cB_ECBXV7mYaX$WO8D}qDLfqEK zZ~@aA{js;-IWG*)Cz0pE8DO`~vvrKmp=Qa*ladYl87^&w!pd+S|7uxHbSR;aj>d;tYebQ(3s@752~MQ8`;53_Yl*DQ6d8 zVaJ_khC`r-(={xsisPNQlz9G5DEiCdyui=cU-oNQupIEJKfyL@x8wc3tSM&`gm_Af zf8mUN)-Tm-bBb|%elycMPDYlr-<>fP6aW9}OOg6hWV{n0&}A$v99^9FhvtL(WmgOA z=&`jTOTSoXQCHUXRWd;P`S!0_x{GIu8{IBAeX^I`azF2-B^bAF<<_{2m zoMuP&wLIQhc$T74PDpdixoJlWRH(l6MEg1S*ul%0cF4!>A2vjL|94FTgIjj=<3%0Y z?gUX3huJkn|13ND!JB>ir_et5bWa;(=;+E4^^Tx#^yn zbG9ALn|Jrv{%DFCzSlbz1pG4gU`S;GMJ-&Y=f!^8j<(wo#gUvuQB}uFnLE+mwL-zp zIF+J!SaUb=-?5`Ld!vp@-J~dE`QfAbMK>yIt$IySxm#?$m}C3dY=-ZhS}E#lvB-)C*Kxf) 
zoo(4~DQfmPfw0VEJG$hvud>v8Y=5mU*8n*9p5}$RZi)(3wd1$N@eIXv_m=ikR6F-) zJ{s*KvDQJggA}#rw4R3*+M{-lIe31hs3s*9({3DpqTBiEzHvOS=IpUFv_Gpg%kBF~ zQR8RjnDW3szN@PSPf?WF*z*IP*X-!{%_-j+*|n)UrFuhY;Pt0GpK);`j;A9pPsQ8O z^It30Nzc`$1~%@m$c(b1znqnuy=Z|pb@ulGU!BW#^hy!BZ(yM|mEnAHY|%wLfB&6J zTZFW!*Os*_VlUXy)=A2aE0=0hW{G39?C0%}4=dLG1e}|a8xjp1V&5&UBd1N#qb?DR z*nVLnTj6{KZR&=-%<7w=cC^dlKZ2D?+SE`W%gKO9^w(3LxIe3DQ-#;xNlD}PZ|r5} zRjk&g;3K|l_Z2(3ApP|7M{Bhyp39H=^`q_Rb>Z6#G}dcVjVl-B3&+~g8b3#CTY&Y9 zRMaj3-`+icDgP#Iifh}kA7a1}gL#W{b+xI4lDf4TxcytPsHzSvzF?x)uuenqPW`eJm`_oHRe0Du`V;q zO2z%Jne~d<4s3h3;Yl9)$C3U=CYv3#sbJw^cmEr9w5y4a^Kln#>Xzn|;4<{z2i41t z^Sf&!AGfg323V)tYhC^^ZECHPOLPVLb7L`=%3~jG>KOa1wccpIE~UhM@02zbk)|6x z)8E%j{W;VQ++ECgcMo^uG- zsdL2#*fY)+t$qB8LzE6Z38+B-_4i`WC&5|{S-~gsJ{ov`I>FB<04pDn(T;Yo6_f435CGZ>Yy6SW; z@Q2Z6>zqF9dmFp}TOz(_ zV99joY`rcVut0rgewX=m7yIHts`;G*Xm9<8_4(e-0G~!o=~2IGevsIEVCYdp~di@rjC5ebK@9|41u8VHNh} z3(vK??!@=zF7Vl20qHw`AuaeO@N(@ntA>#Ny{sC|{zyB<8@b>Kz#qbB^F7^Dc8qNN zZXZox3)_J%PM94N)tBPU9gp+MDb7znhoZl5Kht&y>FI8@=5GtOV=Qj^8r4eTyi@IF zvI}rU@%prD*tdUua1Xm0cz@8kyhq+Rm-_m&{RQx{+RW3D829x{qRcO@v}3wFUv4=D zd_Q>AjXZffW^nvL!-v3gEB*9Try%}bwe>#0S9PpitLkl;T@^~>R$wfBDZP7{{Y_h@ zIsDTJQ{d(5H%Aa0 zvdOmaVc{X9_sdXWho2Rjctk5qjW4zR-?@ZL<#pDzH@3`(b4C3=Up7%nd8;_p65nIU zUgy~Hg#0)65I0=Q8l}gKDAbus7ML z|A|BYGJN5VrwefGsX>W<{mqC&f67}&LWY0)TDPHpk((R3uo3v(_V&R(;7^`+?YB4) z^67&%ge7(ZTp4;1K(dclBYPr|4MN zW3cta-zo5RWVKmWJW`F+cmLTFZx!j6VITK}mMZ+atnz~za{on-8XjW;?n3VdCDbLdy#E4HBzb7~2x zv^G}gNX34sWYOh=uL)U@Xm|X^RW8G=yt=m&IQ}6weGl-{!_o&!5q`>rigHziUwd({ zZ40nnsk~7;-nV_G_+P<#Lin9i(k3DN^o+Yjb(oihZWDRB8#pnf*VQkt-+ztE z)T$n{)q;J7R_*g0*dJWJQ1Rv|%-e>?E${mT?3i^YyAt-danZes*TEw;^cDSu_wBf_ z(Cj^MUzO65HL%Zb`=2kYqJzLMMY^Yw3@$VN<-;xp?|WdURx=2E?VC;Js9uXw)y*VB7jnh9C8n3`8>deoCsAx{rSqwbu!q2Yv?Syo@w$ z)hGnd_~`7e?Z6Q_@{AJjjgih9DVPV}j+QFfS%m#qFR4wZyOG}o=e`JlcW(6I#jF$+%^3UvZhQOVrPn^#pywSk7nX*r~jCPdGgfZYy>t(aAmZQC98!xCF zK>P1Jp*yz%y!1r@p&qc$Xi{@n4t%S^r)3Ae--?j7;vqAW-!(3;0fd>DKKm@7di`3YR*oFft5}2qQjuWkeJmX(}3rF 
zKHICz2fpy<%Y%a`&sQ(+4Rw9sGVVKVch;gje;BR2_zUM7P7O|C0eF7=!=@lEP|v9+!h{XyzG&#ee=mSsLptPTC2OPRM7>~FW8 zZF$gw`qu53{0&%8p+9Lgu)C+ihN&n&yNTl;|HkvLX112)BfnEeKYC331RV$6*7RDG zN9SshSI=;M5+wEMP#4N?$KXO zTVqoRyvfjLqzdO1&Z=JDBk=woe0x)BzHynjh^5c2;(hEJYYcPxaQYL`&KmH2x^=B>%3ris=hMZDd%mOmmdx(Ch4QjnrT?ON0QGxiUf5T( z@5Q54V#@~6K9YvKuLHmCjJj6vhs#LpR-VI$_}A^a;^8>LW%f;d@V*D-^{4Yq$d*y` zuQ}ga3lU%M1(~O(|02ETgD%&i{kzMBZM-_hgkyG3OTsI>*BPbync=S>t%*~-=Nry%d-{+$2({ozT zo_+k2lP2?+$ywXaG@*ZMA8=k@Gwok_2<7mjzZ$4B@{ODhU6SEr+>;2u(%9Q$^$Z^K z&Man&C;HdE8`X;q#8EzyA$$$!pH=TPD6g8uW5Uuu$eE#kUVLm$ZLtK8albe%sRI3F z-VE;#AEbGV=Qh6;Uoo$XSyX*gb1sj0I!|3T5$|)Fns{xh9FNg5&Oguw`}W_H(r*LD z#M|e2qdd-PNBI7o&to3i?m_*3 z+vSn#vyjJ}l^s>eLwm82SD2r!1Ra)zP8+`Cd(leO9p|!y$DI1)sa%ToyZ+bl7MGSt{C{dN7&En1Lz>*0Q2R%pK2ROq~@x;%aJuRp!i zT{Uy=avtN7EEaeX_|yiucN4XE%B;VZx!OENGM?|&VqShEI$?DHG}M+#5K@a1D=f zh}@(*fbdC@O?g7Ve;e*TBnY2mvchu3D%98YWAnUFKZ`e4&AtM=lHTtxPH4aDv-@=H zfH#Tld?SqU@kCbU{ujXGZwbD=j`8?n`MtSbhKTR5>g!&N=So5!+MXEmm@UQ=b{fI1 zXFhF`+y)+_TiB%PfcEO78UEFJBag{4Tz0hw?PKWy`L08-$G?A&AdK|PAJG+jv5vKN2--m!bPI)Hu6#tar<^@x0;99tmaO!K?kYKCoA6#0`J4Ja)kID+<*W0P2( z4E$V9?(8w7|NGml$`}V86BxZ`ss`G7kW~CqRVTF1Rxe>Y^ykeBPMICu3LT`9WnQ@m zpH>ra=YtE6vAmb!a{^ecBU!xHmB(DUVIF=M;hD~>mcDLiudk;SN{tZWm$9~Z$cx9c zZV;4={EP9)$mNQ}HXakwkov{}cCC3zMp?e7pX{bzdT4*Q&TAQO^F#QJEA`FLK0l`W zK0CjY$Lva8#*8BUCyo|q#qNO)*Y9Ib@BeFW2?bI&_wtzD{o}6h1rFF##nK4iG2S&A zYuiwMS~l;3=N{xS&YCT9Qg}XcQS^e~AoM>uoFBd)(M&J-EZ8&E$MHV=#(1@-Fu4wRv0|#a8S;}a zKh3oh@hM!Hwn3SXL-sy;E*yG<$9VmS&E_HhY41;MJ9`x6<8!S0F0jw!@|Y7L=wHg5 zjBg{n+oJA!(#MgW#FHc6CvwQi=zZoQ&@rphx~;~V#v$$xHfcG+e$Hsq#F;ZVM7cxz zLU|a*6Wa)-0@y`3*XPa*NB`U>?pH3xA!836vWrjinB$M1uP>UyA#(Sxtq6MWL2qeqxjF*8$RS?S59yT-&_8)Mfrq<;q~8q-AwMzxF{78zAHO`s zI|@77XI|6Y6!hN#%2`3!H~kntTfQTe$Ji^iS1RNEO--5^t=G`rTi)`yAb)yuldEQ? 
z^O#95K3-ZUj(xt$ugZ?sc}%cUh%f7*5=9N{;X(9Z03k&hZSv;JNuUYkQ zDu=Xekdu3R8@h1mjdQ;Mzm)J^*qej==d_OA!t*^-er%uifX6Vro_iM%|A}$)6aN%q zyi>?`>ypGi#N0bh<;Bpc6S;c%wKRv6#>Q2wEkXa)b9nxKtRI74aGnX5@tEoI)ehOP z`v`hX*Q&tyS7Cgy8ulFv_y^Zl;rq@WcUo^Y{5u)de64C8vyWU=zBY$L-Xv6x$UK8C znQ&2AfgFcexh2gz2m8fKvcppsaY*yCpyZ{nAKxIj^N14mAsxyUjccHTmn5)efhvc1 zjknw|_a(~fmw(*oG7g!e>0g&y%VV65MSg%R&66t_2R_AE28T62IcM3WwYn z*uL;y6T;hzyp&SskoE1w6Bak~7^{K56AQ5K+&BK-$$f1+M*itb*+rTh(yts4&i|Rm z>|Y~PV2$+f`yN^!0Q~vX&f{zFyl830(qrvBMtRD7PgT6{XTaLn_g{I;x3W#=%+!!x z9p|E|evBvO^-^zEaL7z)r?UA!dCUZnt%q5JLoV8hR!I$^eqz$jx)^W>U-Ic*r(yI5 zd-b0$UCSZ9nVT+L|AYAn+t5aS9rlrrzIc0J1ofx6ZMQmbNW+kf-d`T0QaGT|W5~hz z|2pzy4C!I_X^sF}ecjgbPv?)f@$!wU*q2U~^5`xDjuF0f@Yhl7i?YPj8-Qz7_zs$% z=a8e4B_A_@pQjxcvdQHT`i`@k6vAH_+BII3jq}09Tb1?#$2?XISpgicDt}sSzBR*pc)%b{ANTXv zx{I&Tvu4IGZrh#-yu8wtRl3HSIUTRGeIM|}oo>7G-L08Bg4(UWf!&2Y-)R54%jkg1 zIBCSErLW);chZ_!(cU^%4EwuA9iKYf4ff>MkJ0h4|F;+O-(6v}4k4{mNb4BVI)}6l zBCV51>nPGXi?j|St=^#r;^sOq;)Q79ZXs$lh)Cs zbv9{TFIxAD_SdETd1-%N+8o!=gRaxy1R+>#M!7%S<*+*8=XFJVQa+5c*I2_ejq|JnBw!2gr* zn}(k>e)9i*CHLzL#w@TGIPPdd5cd9lM?R05V97Ln5qf_Z_`mog;x-+P1HN(m+Kfxn zaW_e%SLV++4vD$N3g0D$yCXE`y$QO)A#GJA28To}8GW|B-TNdC`J8xt@Q@Jh&bmC@ z>30cZX-buMfdI5;S4;0}Pv#Jr)8dN5{FaPj@^y(_S2?6gdb3a?Fw1VR@oEN#1#FRsWv$@Ck?f zIkGlR3|hSl_M{A&S8&MnL7kh1Uo4m!W4SW*N{rQYCluP-ESQe-2@_|ObBNJmCmH)@ z3+BY}h~3G+nqp)3%%Cy0BI`%~o@$IGj<${`^4H!~SupPJu9=(QjOiIm^_ps>1+%G0>s>P1)~~~oB(mIs32tz` zUk-fv$?=eJVvNpKwNynfqN4nre1de|%=Pamb~6O{e0EEEwAt9k(4mW31o4 zbJ>J^3nqcRTze}{zQ45`aJRGZ41U#%-((1Zw}coMcU^u@KbAnubzK7WarDs zQ}wqjnE3DG!Uu4svE0YhSR(`4$!6-jWFc^%_iVYYnr^}Do2Vi+3_K(-Wp(B?3;4q2 z$Q;9&50}sBuz#SvS8*EZKG+B`4L0-Bi0 zXVNBt`*Li?*RJyx%rxBt&H6)!&_yXYeEeF#ax1&B@ z&hxVb_r(4Caba)Zdm=jHmB2MQKOy^TmX8G^kg-tLodr(jJL{ey;E$7eSqBaNuZAG4 zF-U6=(i(-dh9RwSNNXU{8i}-qq9&!WNNX_C99WtoOLJ&xjxEi>r8&AZhnME~(i~u# zBTRFMX^t_?L8dv%G>4hyIMWx6+5errEmq=_@unURU&e9#c^9g$20p3FZQ{T`fM3C*#N zYP6>+`$dsk!0Ggn6%||tZc_21z%&KmEvZ)~_b(^jNKhHV=BCbUWQ{VJ-0TBz56#fya{!mY~~t}>@7+Y4ZtByKW*__ 
z2j7F|0cbGb{rov!XWhZ~@GAYa_9>(<=y6|_KTY%0*wN}$;}Q3ADCYAeq^#aL*zd@jv&4_Z}OYfz-b@;xILB!ZkWqe z;nFFS%o(xH{@dO7zVm{Jt|0E?4N-fg(F-ow@0(8J=YT`+b0|z?2hN-yDD&$lK;yx1 z%E$K$k>7?5*82*e5fCXL*rF>8j=zs-Rx3FES#CpV|IY9eO3w!PLsP){v;2}i^k=&j zo?ZS64UyH`hu4|%L*t`aP~;di86u26IQmY+U4{0mCmeyM$RzfXBW3Ujka&4;ssc0` ztPY4aJD~qmvVK;$9~udYak;$`NT0j>sTQ$+_E($V-$(vzhHIAHRENgHxQd!qWpJWD z#(a?lr+qj3Q3gSO@akwrhYvI!TJL2C+M#^*mw`bAO^pj)S%ov@5r5jIk__1IT)81L zD1-j4dfA~2HFHKZdXmaWT2bIX|v;3}NkH{-NR` z?9TRur6-0tP`ROXZtrIk7^M};xso(6OVc@Z-!&c%R zhXlyaNx6vjS>n3rOW_R;@iba^xg8n>gC8nIH^Bb$`Knd(<)Pug*?Z>d0}e@xFkWE_ zO_a!tx&G%Eyl;`2bR;xb);owV3&Z?JG=BMYHMFle6BBR!#C#`7cg48Fh|ksp^8xtC zbS!}e0@_nXSaani%%7GS)=ypmtmyh;_w+o>H~7Uh=A!=Zek?64#{6uG_PyGqwfG(; zWz_6I{52=e&HSQo&ipkhiT!jRdlelv;r^&!w{hufn=?_L8+DFtT7>qrdRx!{=7&ET zwx-meea9WRn>Un({01Sv$PZ6THs{kdtd(`c__sq7Bm2e{(MHTKa|&%k`jH;F7b@qZ z&td+z`qA24Xt+p=iB&B0;hf|EUne`25UFHRT-K8sN=9iR!aqHN@}G}fk#G}aEadmwsgFD(;1fYJkBAvUvu1nGto;ZtY+FA?j6hSRTXyBsP!6%kIo<39%?MfZ z-otHsQ6KfWr?%qzT^gn%JaHF?xcqt({Q;UJW(!mlR1R=RuSu?KE9%Frs#oy|aF^ng z@ZG4t8#z@Pi-NIt0}YrT-p78;-;ep-xW(dY@qIj3)@2v%!`{rdn^UGh)8a|S zl%xH4e#wkwMzYZSSornrqJ0QoRa+@C8U2re#m{BIE*v5y`ptC=<>ez&tg2v#`hGaN zM+^BqS8>lb(3(S@n;us@YHZFFuaf!t&I$9;G20XaBWPq4r$*f3;`^$tbmd!T&e&vB zO^>y}_Y*n3&)*rEBc088$8^x1-)04dxtcQ%m%0_VYr;qBLsJo-yE$_}D2V@_HV%9q zE@efyK?6phA^6|k%#laiipD`BxrN=X`n*ZhiiZ-@j*|Kb%wc*dKeFiKnBlFs0_q+LDy( z{7BFI*|IuuAIuq%?1=2~ ze^qT#ynY<(vFE>T*z00_SO+}AJ$*x#-@(v(O<0C)qZ!c1@ocW*=!f^ zr95Tx#X(pPw3Z&0xxgWXCwiXyVg4-38ZTc6JdkvM_p{TOZy$;)`~|F#`sl;47|c%{ zQ^|QefA@3b#~tS^m}Lw87-zw*o~?0d66Wi}a*Aobu)k7n=Pv=y-;`(Q{x5uDJ=1*w zce%vXdf$k{US#)sL%{?Kregg5f%~!O4<=+rd`N`m^sL_!E78Ar7o7`3`9D_opK=L!?UG-Xx6yxSO+II5i}lfOb?3J4C$avOs?B?rhw>>}!$_h2 zLVl#q+|5`pO=0^*=AeICU)gL}^~8e7(G zJ@$Odf=u9)s;rYjBe<*P$sP4HV0u4**3h9fc0%i}P2*zzb%?tqd(Rqh`A7ZLl`xPA6gTr-~6lHZj6uD z^>SkDF`sz7bqW}J%{S{m8 z)+eGb&|mJ*Wu>>6F>Y;3mn};~|13B5y$A2xquD!XiuN{E<)OerdNkK3H%cX;KNJ>q zvPSs61zSxgrJ}!&aW8OdGGm5~*+zyH=wGS#pZ~`1Hx=`X!EEu`4t#$}+pWU9QGSYM 
zM+!bgVz2qV^r_335A)SKDsM;obf0`!;vF<#cAO~MpN#kSYrkmi#Qa%5ZLXOh>~omf z!Pl5iXfD~j@DIM%{^c9G6fqzEFMj%ce_~3`yuf%n7^3F3YCXQsySh0?=A%C*bAnVi zVZPBNd(#DWsd0{wrAN>f%i|!*Ei6&WbW#n4-f-BJ~7o$9(O)nn9thzZ=dgj`F-31xd&ErQSu$#0+)g|JK2*<@ zGk#{ttP$rn2p)ki^%JQw4(?Faku7}Chkh%mIK#8O-I6h>HjK4~{j$omj@e_DOn2-V zr^%S-s7Fp+6(DNGw3M5_Du7)kzrxuIcRtMU{LC!*t?A@x#l#ggO^}3L;8xtDFWan`=g;e_ zzQF!pTXo^;*Ga)%IVt;mHaKlDUFN-7m0+dhr}``iW0T8R8RlG8H&`8C;2z{HrTNt(eVQ*Ctn7!5QKao6aJ< z-)f-c=nLSZ+e$NMw&TeQz+GOBFt(->9H2BKv9cg`m=Ld88cCSlEeT;}+ zF#~?F)j*Anb--b-Wd2g%gyZMdzt2YbZ8#wli}#OY$xLpzg?>beJEtD$TN^j4L*y>T zMgQ|fyI_~zEie{w4?e2pn-8mj^Y=7~du2axXXfGdKyU&#&&ZniZ{PTYK1b*`aGt1# zm+)=0x4R|6#Ymq;h3>>-~tHcD(W#?j^-D;ltPc!AI)K%NGJPxsZCm|4q z^a+KXkH1{VAr4;*3s>X)nmGfpD~fOi7s@NDLi(JiMC{!M`_J!P9}0oP15A#_A--?J zD{LD7X~0~Ra_&KT-uhM!kAag|bYI8S4e4#%#hCs+guYFEwZnu5XfKsn=d)pF_$xQW zJwkm6e!6%GSj$E&D+uedH9tItWr5H7@-4}Gf_YZO(^pn_zVK-Kl`rKSl79A#$@zoe zU~lt%v7nSgboLP8Yv3?GowieP*JF%xQJobR1JV9>uRF;Ahu3fH7!3etu->)j3*z7W z$nu3Lo@c%|@J+|M^nuv-8yw))?8kF7pJ82fVPxy#eb9L7D9fAi67#6%nTOYb^C&lK zifmF1=JCZ%mu$d!teC9Gx`;ClzdNrEH32uDuKf9>4(np^%SW~#e2nY%-RkwgjcY!u zfKyreeBFUHQ%{Vh2X*x0q@pp&v)|G$6KKs0^xNp0lQN$F6PHW+i!N9a%%lxgF z)7~0u=d_{zA;x-cC)&qS>`Q#%5b?{KYp?i$qkOHMN$Eg;rj*bpy&at2&Oae0u)BYE z%YN@;#q`U~4eag2ykK`v=4asj%FPZ>zhd3x9+Ptj&qvfKOP>0M`cV=b64?RHysy!N zrTys7dQ|FaVfVRoaQQRr8%$f}v6+SV4^LK%o%a*<*BhAq5dF#2oAEO;2e1ymAJ}oy z%L+cm>rEZ;{D)5)CPkzFnc~6O8-VAvEX>19fM2ycKl_F7rgv|8A4T|(^!eSQ!>G@5 zIi^<;pS^r?#~tj8(3(rMCKIjML~Ah-*9XMcp~%ld8>vM^VzENelze#|M_Jv zzAdy)SNCjWc$Q z&3adWudrq$o?8bEF%N2pi^T$4Wcq|Zg^b5r_Ul|FZ+&t>Uz zTl!p=KKG^1h3Ru+`dpblcc#y!>2quPT$?`krq9Lcb94G!oj!M`&*kZJd-`0TKKG~n z0%*Sh+OL52JD~j%Xuk#8uYvY^p#36fzX{r}g7&+h{W9qFIF{&(;gu5iZ(ZLtr!6Ti z!-fefG~c*LZJ)Y-3+|!(AAhcYJ}oBW@DKhZ@c$hA7T~9hpALQ^0ToQoAsePN zVb|KXE^P90oWtI0r)(If=i6+21K7m6A?iZJ1si5uz37yW1#D9KX2|V$vJKO`_T{Ga zm25KjKH!^enhmqrFyp-aOE&S}>>lk4{M>*07t+8c@=xq9tU&rS*s01(KeI_m-g4`x zbQ^}>x+`3ui%q^x?~XeFtoy##w{4hB{>#tO+gln3f3wM|1JeUK5#Rmo&obuo!GHca 
z?~pdqBXHFCpd|2^Q^hRJ!!}Hhj0sl@xa3KN=mcFG#yY0?Via)XR)sHnwQQK{+-GB6 zz+W13m00{X%(a^z&VL7P`Ea-O`Cn`3BJZD52E6LJjP}i5YbLxUd2ThZ?8MpwMs?PV zWWnWi5x{*Cvqg#?Tf?99@oEF$wc|D>L;=gjJd*eoqFV0)W-jlZoTUDqQ`82DQs z8yEfH^CN49<6x2QwunO{-OgzH15Xeg6LojQ9?b27>o&xo=M<@Twh2 zS8ko-kO~>$HQRt=VlLl%jXk(Eg0%rVflIPjv4WV}3mCrTKM9;KS0Sj3Gt=Xn_@@*D z+iB_S?f(Dq*Q5RUXn#N2ACUGJr2PqLe?!_Ik@i=l{TXS0N7^5f_LrpnDQSO8+8>kl z*QEV9X@5`JAC&eNrTs~1e^c5YmG)Pq{aI;$SK1$z_LrsoX=#63+8>wp*QNb=X@6hZ zADH$Rru~U&e`DGonf6zv{h4WhXWAc{_LrvpscC;}+8>+t*QWisX@76pADs3Vr~S!k ze{1ls^+8>|x*QfpYX@7s34?yz;Xg&eWH=y|lG+%+{ zGthhonh!zqC1^ec&9|WW7&Kpl=5x?|51J1`^F?Sr3C%a5`6x7Bh32!+d>5JzL-S>5 zJ`K&cq4_v8Ux()N(0m`74@C2YXg(3mH=_ASG+&A4Gtqn}nh!jpnn_d^eg8NAu-qJ{`@sqxpC=UytVV(R@Fe4@mO` zX+9y%H>CN9G+&YCGtzuVnh#0yC22k-&9|iam^5FL=5x|~Pnr+n&w;6d~BMpP4l^FzBkPWr}^SEpPc5K(|mNAuTJyXX}&wnho|}SG@qX4+tYk} zny*ju`DwmCtp`Bs1<-l|wB7)%M?mWp(0T^6-T|$LKow4N4z%6_ ztp`EtMbLT@wB7`*M?vdV(0UfM-UY3PLF;AEdK$Fe2Cc_I>vhn29{)%0gVF<`^+ISp z5n6AA)+3?yN@zV3TJMC`L!tFjXgw8LZ-v%lq4ip5Jr`Q_7q4j!bJs(={ht>n4^@3|l(p3~%A3H=tM zJ~8!+&_B@*lTQFP5AeRZ_5$=wyd+0{RuNJl!CWsm&t(Q?cQ~E|?oJ$8`UW_AO?%^J zgg<)w(nb?JAN)q5t)LwG495kki(tQy!QLwSgb?ZFK_Qc4xy;=VDYq>MFZOt5u>Dys zOab5)@%@vYlmIxQf9tM zFqbKuwnly_-dD85O~D%Y%**B4Ny*SXd!O?5AaLl~KZnPHp`S28H6at&SF+IXryn8O z?6a3*fiEk+vm4++Kj5|K`qjW;yff@K>d;lQn{|%`tSy^X9U(;sU+0!f9N?Whw{1%K z3mv#W&U3FG<}&lS6_o~~Y_i6+W%hdDcvBSMk`VS^+i!i=k_Yy!F3&1&Vv}(OSMPZ4<1(*9=6T6|WRn#&S9U1@ zC+d_+XTD{V!cGYbyS>o!k^I}f3j4dI+847uc5|74Z&3<%&)B#Z&bQ{$F0^MIr8m}3 z*yPMb-xtea?>yZ{S{XL6%)Z?7)*t#;n|o8oVgGo(i;D6Rgn#sXRj+$4o1}I+b(g{3 z?`6zYxyL55XDd$J-^pb-%0j%wup0?AZ5)REg@0~EK_;90jx;-U3D`BpHOC{3O|I_L zi(G{GZ1lp3n?dx6i%{w-ab#3q?~naT%|9%a$OV_}JGV&@l@y#Y8qYvucS z=h@^z*UuXqgtu49maaU@CLt}g;jM_@edDSvN>OZ*6eX9|w}Z=62IYolhp|bkq@qBh zAMURhII7)woJ}tJpLqOkJK8g`Kbv!cP4c4zkJS63e8>Ust08PM#^DvHY~wQHgxcoQ=@f*g(vjf>L!)gtYec+pOrnF5npXtRGQLS zHqrhWCn&FBlM_eg9hCvz{%(lbqp=d_uqiwT;!vDtP}bi-X@>aG!_ylw6smXwQ!I zdF-uRX3|d=z8*C;i40E9{osWDB(!01yegZRov4;51fE9fuO}*Nf*C&BZd 
z)qu<9iVnSkeY=!SXUSsZ|A*w;de~>3-MzC)5$V6+VC4zy8E7~5V;XcAEHv;N*W(u;2=+YJ~Tj159dFi&W zOAGIf2?x&D^7EDTm_D)1e!aUB&zm)Gp4L68Pws44|MM#B{IkBE`0-1hoSAgWLJoH3 z)A!ll!}_FfW!Pyqr1#9wtVQE~>XY=>jcU_@qn8M;83Q&b4~)L{Fa1v^rX+pSCq~lS zruG7N%;JAz)1yx+p4ru^BEAcj15;mh>w|ME`f~)&MVc#fi_QctTsS+{A@y)Ss)G;HY}@=>1ztxFl)3M~7m+Rx~NJ~_JDa{nrX zZ%p<~@@UW}a_6V-IRm@DOZbCZZ}f>-%P}nu@L{Nt@dT9fJ0!%o>bze5Owv zu3hEr0k#fO{*zy(Pd=F&UOJEVs+63E?!TBeEEBx>Hf4r$rP8c7SA^4uQZT*$KF;kz^RO~Ns z!}pfb^;)JTMW66G6-R9FJxd&!G1?A1Q9DlZHSDG>Gk#yaq)&XjJchZjFSowKcPCMw zgcR?Z^%wE&EUV0wyr@qW=o!A<3j5=}IY(`P(GnJHiC?y@w;KA#$M*TspMCX-&ibm3Fr;VS>J5Uq9{NQ2%=IVc z=ud+Tg-xG2=@ao?!>d2S9`C0kJjX$wcq#am#>0MQ`R=>9Hu_}MZtH~?frEATHEibU zlleARZSNy~fx+vC=4{d@0a|?*O7Q+NMFDZu4S4^D-Wf(nUmyDwzoni&>CTeA^DjS* znaiU6SLl<6a@zf~fZv672HB|S6NSWCxBvCWi$q>O5>nPDFW0FIR05Z)S541QK>g3$ zzo`KAWxuCGl@Hi|YVwXw;Ni^sFSHiulVW!d^#=(5H?lxj6Bue+GZKL%Hyv=&m(?d1 z_>aEg;eGF8`<=JR=#zpgX6CPfo%LD*!zJ~}M*gzFc;Fx5(9GN8Gr5-b9d^m^u;B=K!T%z>Ilovri=3_qVdUnP~ zyHk2(^iE^s?Hd?R>kAjP`s$IczdmNuZ)3iq!_Tj8uLq8fbl&PaT;}^?cV?-x9{I5) zCo2u%hYRLQ-`cN7z8MKGx}M5qQY9xDgoNplX;zwZB`>4CK3|OpKc+{-f)w?{lW;#C zXO+^0NIjC>(`|4UcH2ug-zJ9Zk<6Ei43;EgK6F#5ry1$t3z_mo9`UjAcj_C(0CyDV z`s4jQ3&p3Sr#(D+gm>1*g?-Xzu(JFa`yV=NZ zLi+KHGL+BvA8yUh^hjcFuEE6;%%@z2`&Q)Yk)0_q3IFso1)n!pOhI_rv5~?q2L0Dq z>Udq0Z{gY_S?191?Bm}wvf!{D`8eE{*ZC0X&)8uwuv?GV9TlkYEQG$~>#-ogy?SKL zN_(|mz(+mvUT65|5zjNzmGw{PX_)FZc+@zO*daG8Q{ zA}+>mdgL>UdXtJ9x~S>6{&7{2Bo-PYWI%p<;(QQSGO#8qUlV$cE7-mY`!BJGZ;N*0k8bFdOkZ(9E{{coCg00sO@?09l2l=- zgDkRJm0v0l`i^r#-v~Xrz#=Zh<<76aSWmIiOxC2a$YV?Cj1^OOjL!PJ=@w^L1a^`CA!Dt zF^jBy*JyA7@BhNL3UZ&7XWVA$&@5^A3l4GGArqg)LG=W6>)Av}!@z}HH zZQ?xUV8iYsu81$LZr)xq3Fz&Gzv)_E^t-altaK`+XBp?BI17STR-{)_@};1P#g zy$@KV)pdP<^=#-xR^8O#<+6z1P)&T*9O&5zY`ARuh(&7OZ_~|&T|cJja`YV*F^$&c z*5Uaz8ml}F@(^F}mi;fKc}%k=pWEpa7P-GC@0_qCkNKJ~`5r-jBebWD-!&KIW2^V~ zY9x!SH=P&w7x9I*zH@57z#=CCEi#VOgG3*3BBh>b$742bUlCtr~g{Y4oyD#P5XAut}jhilu zd5qdZ#i}-xN89$immDSNnU&3W(wW5~6~hDD8x(jMeXYujw8dN&X|@`7WQsEMvtAuk^mb*DrT3(SwyE-%V(l}ow(cx4H)z`g 
zmu1kCYMHrUa{!B^3m?sXxEy+-QwM)5`?E-v*#ggfuz#%FG-LBlPVi*3a!5EnHY+wwFhLiWcOxEKPWd z>+1>ET}II3`)@oLBT$Dw;S7&u>HiiDz>tA)bLM$Tu zvFfBe?DBOhU+ffS5%$_EZJ!LGr)O1veP}X^c%J?^?jF*&KUnT!`s{!8yWaUEaLwd` zc+PmFzxB@+8-zDJaO{9rmo6zR$!!kZz+=X>+Ggx- zmKNoa0K4P1UCZt`pg!)~g>8SQOG1o?rapJ%F>jv5doOF&CH)~y{q9cCvz_#ktJJ7V zZtMzvJpy}4)T@y1h;Pu3+OhbA)EqXV!@}QsC^3<>!>3L)H;kv6ek7@t) zt--TQmv|4zDA-y+Kk*!M%_CQrEX~TOa7KFSWp@cIzpqQ&pYIDf1ba(NdVb3-U6RzZ zx%0}u^v?3_o13ajcGngvoko21Q}2CWnygEF>Tb7lU>{Jt6_S5Nmwat<4}ETq{!o4M z+jaMJiGZURKR@cvbN{D1-iXhQ%o?w2$zxuS%iktO>yl}^L>k>}peMJ__0}pgU9#k$fcX3__`b<(&>^D=rquh;@@ZVvSk9Mky z*mY+e><1+eCZIiKKCvJIb~@M(eR}6#{iwY@DPOIkL$0kqzgipR&2DZEj+v%Ij>y(| zQ6f;HgyH+Q|xQ8(IpK?|fI^@CIg6y|XFh3}G5qZE{hXlUdcls#svp z6U0BS<|*`%Ih?Gg;W}ibEWF_g@D+}-*@+mm&-OsY^{)`W)9>fe?{$c>h~Um~Z=t{G zrPSi}U5Bi%Yxi3Oe5*>ZtnaT5F=>e!TF?l6)6f$&o)dJ*NQupbvG*wN!l~z`YoPyG zqNQ66J8^i{tF%U!@cHDrylCYyr_-kYy@v03#mWT-JU>FOb!O7Zdk4|qd|u;N&;tAY zsyR!K=n}8?B)KJR=-R?zu8R8n$i6&)X<{GA~7OCRkLPTGahpq8tV;N zWY(t#zNOQxnTYQW$>L@#6035cA!HWx9w+WT{2SwUPv`t1c}Z*L>EDW}aRDs&n-*~w z!S3KHW4QJ(=EtVC&L5?%8SyVei=Dz*#6P^(@wJpSll;&*bJBUt$D_VPwav9=+MdPe z8)mRby~1(+vW3=+`{wBWKc6r@C7hYGbFnou@xn<>)o#oe@9Rx{tz^yYXXjZ*iR+PO z{{D|z>ekHh4}xtuN?8A`{MbH0!qPDmWFX3EdaKbe5_$c?7T=a#Fj znXA9_JdCkkjH_92?9paxX5}S!le$nna*}=D%M|#X`J3nCuILf(k>ScJb8BYmo!8N& zSpW4$up%!4zZu<}Gb2rpn1|P>$8oKh?KWje##sN|urkR{v$1BZN2lAgKGY*tq2J%$ zwYFv?UPTBhl=Tinl~|t+#-;=~TQf!y zCMTUABR%i8pQs0Z{8hzx#|u3&`=L;{o-4}B(Q;KU><@U_HG>}3jLQne-E~-RelfH= z67PrjBh)&g5nrO%#_>`9){IZT^umI2Jwkr{Oy9Z7n$Z!9i`rYGM-110mrxG0W&(Tu zT67@%|3lW7fK%1IZ%Z;oB_%^CG$=w+8FJ1#W=e%~hze;&p-7=JB$7-iO;Us=G@wy- z<`6;>kz`795~6%}-~ZS5yT0}By1egoU(ecWt-bcyYp-XxpJz(O%|liP5icxbBXs@}a+tlJy@i+cTp( z3qG4-eyMYxB)_;~&$y;X>W4klC&$n0hOfD9&)5tfT^xt?e4ZtoA9lx{8PpqhvM3Dt z`cq$}v~0vn$A5U{zXSScX0~g@J$uF}wdih$gFe}CKYP*q2l#!yIa@Peq)+Up)gIY< z7r!Ut`_Wmt`h;DxO4kVaN0&vLmcQ4)dUob#zItlU{K&nPbo$qFaw*}Y(zNIHjB@vo z^3O5)#GPAyX%JXk+wf}4$a0ca6LB;U@!-ENawEQq>64b}S(OtS?HOKKH)AZUPc-Te 
z-E(ZUXLgNh79F0fPfQdXH|M;yXRJbFniA&d6DMXLTj8@kvxk+usR`px9ILu{;}?6T zE$rB`aeA<)T-_2b_1&I1k~+}ShWU=Q*Sx;qn>`b>Q(~={nLd#@?^(G8a)Wc#Q}^Ni zX67bt>f?7{f)gR2WEZq!-tokpZ_sZmF0 zz(fbMH1omUF;*mWlewY;qo#aX^mm3n@lkw#@y~1r=9hii%(g0hvh~M}QVkUcrq0*o z#`7M1viL=s$vIUAW~-NdQ_chy*-|!1X&K~wI{W)(%8!jg{^VWknQ5?x8J71q z&D3&WI`rSoEU;&h(!r<8Qx{`C1=}uccZNN2V_9Gl;`43JOHTIL%pz)QzVr&~I56JP zv)yFkSmdJH>!C$#2j;QgCaqq3oGYKJ1AFMoY>yp1hL~@t!)}F}uwRCn z@iUeVjC?^>>!)g9>(5)F);KVxS>58xD_JD*!k?9%)(*_z^04z`U$97COxnsBTn9#U zQNh`X&tc!YVzq3ny#q6w^LCGW6N|_-uJB*#;K1zbpI#&h`?8MhH;<4_4osMu``Xj6 zf3swVVp6s`Favz7o61t~cbw=bUE$@xunKmhF)D0A^D9RBi`w_Wo}x2}|Me(GpJXqR z`Y{vsF7|~#77y%TPnx76@gDZ0OU@l>udMWmh)U_9f)em3Ikt_ZMMjvOG;1{39=e`x(KGsXT-k7xZm z?V&34r~0}TQ7^_rf17`DTGO(%%yhvmPjjIU%?dek=RD@q@i1!b9NFb$aIKrv)uqUn zyxFJgo*4A~1>tgw3{ekZja<%poc9NpN1Ti2U_KjDqLiVZ^zU_%Ek*xM#cQo*;CmPU zA>)rZ3;JJMpY;lS@96Ote^r&@!}&4Q&Y?BeOpjC?T_?E`=eJhTg+cvG^hmvOrnVHY znY8JFKFBR|_OcG(yxzLp!|OB7-?PSxC4F8E`+>yfmRIq5L^D_DVjRw!WP-}UPicC@ zZ{FLM@i<>DxsusYgzvZd-ACtopwHd0eG{kwec<+jnqM!B)-o#3r(Y|q)+2LTcdjR< zYZ;jtg>x&P>yZf+LnB_e-&Rq%x(w(qidN6=^Hw3hx>R42;0ZlK&%e*K`;$4&r)O)n zPwB&XTH3W<`ZLbMdqoF+k7VnTPYx6K-ElvTi<6aPYju%7Ttv{y9_xRqM196bUD9nH zR&@h-ck;fvsOfs7u3dF;7uu_~%3sOGd1lMfGEEk6lvA;wyOtiwZBfrC1^(7{#p9Q> z9(f^61poFgbwc=^aGx&WQx=@@cl_J->vzRI!+FqZ(!xG#)Gt^k{=FIVi;-v>^9J`* zRVSpbi1XSKqo7v@pB-<&A_YPkKY(e^JlwZ`1yIfIA(r=F|s z|7A`gcQ94o?ccTk>c7$T;pqBtbbUFx{v2JOj;>#)lCFQYVcI(8h11@Q&zPHY@bvQK ztqx4>@{Qdufn!~lzO`rqFKUtA`%GZ&>05Qf_Z*nUyO}@V0vGgb$dSM6z!1R6@?N9I=c~^IW*QLV$#1gnx=DK+b_SLm> zJFbWWcfS{T=V0!@>?+rGQvkM|7jQ3O)E;^HQ;RKt@6>b^3>V=${hzg@^L+nX>wkH` zY2FddOQLy8G_Q%~J<+@14JA^*E$OVS-^~I%>O@!0lwm9SZKlvG8P{tRTdk=^ zyho~6O5;Ui9wX8P0{KJo{A96e}-<@CV%{>v{5T@?1M~cH^JZ2m{6*L9$4#Qh{vV_N| z`PC~~^C5m@N5pnfCmwVE?qCuJ*!O1F_SwKMYf^Up#V2(vokfCq%$h5ScP##}$Suno zs}hg#n9_t|sZ!wKcxA=7(>!Lg@IaKteO{A3 zzK6#cJ<-1r(8eOn*xq?=-8`m7WpUs9SMawt1Rl(6=P|2v?r@c!vPh58^42?ValaMw z6cY1Un5`l}gL-VZ(D|G#go+=TI4;@WRZt!EL3z+ZRI;{KL2nx#LmVv)>n 
z1&3e2mrbK}O-xxtG;-i(*LNNx+w=BgJKi1XZzsRc=lk!wq;$46s`nd@nWS;@Vudk_ zYzi0p;`f!uG}<5imIi&8+9BNEl%%* zWd=O#4MHBX$iAInd|AS_4BI7|e~m4R?8q+H37mr57!v(Ohqht9$8(bcWmesv9k&qAPdoNddo?hB&g8yt@@z7v>QDcx z>9&kPNx+4}9&B=TkMq2u8OS{&BI;0bhfV(N6=7PJw;q4TCT;s~-587Z)}MDs&6|Px zPvZ>K2>QFRVHv4~`M5@yH?6clyrRpeRVGrlOw6wJ{*HSI(OzC$bAKvw z-^|rtv-Kb$;g_#k2TLNCPSueEzb>Qh`X!s%I0@uR;%@R9C_??aS$lT1Pqbz3q?=rk zKs_o;PR`6hG33UIUtt|9gj_n`_A3wQ}M{sf45{FZEqXvNPygvhiw#*S@ z`x%S&ASdD@(G+D|pO=)qe~Rz_&J9NA4x@96(YeRy++=j_GCH>zo%@W=jYj8AqjRg# zx!2ydUa4ENg~wzWu;ZO~A;)TA-xh5b9_k3cxSg>Y{O61+qx6kDX8J7i!3M0)&fRsS zz?sL8@1Ba^4LD?Gcr4ckx!sBqy2i|&gX^=(MXoyVm=#lNwPyWY|E0+#HugNmB~<@r zGwL&W)eBudv6ja?GGFA~fqGBEa|_q{@OaGEhc;pLb*R62<#)wl8y<4@7FJX|CM4%f zm~fRf_S@^8Q$uo5M>xaLc3>5cF}QQ|RYVNxkLumO`wjTjp5gn>(WuXu7vCMTg2&7| z862PBfqIXHHjWWLOhYslu^60{QIYQqo>)2uZnf`Hl;)>~uA@x+NhE<#jP( z_#RBtj}6|6_e0s$y67I8$DC*4SndXdyx6FsXb&uPrAHFxWAf#+^_wz%eE;rd>kcj@ zWb>0Btl7xLXJ>DfU#^LGY4=rc#-P2%6AzO}#Mj!E%zvm3`OP#D_v7lgA2Y&M#P?+I zkolyYs)RI~s&B9ZcG@B}Rbv78`5V5}f7j(PZeKPY37v!bgn80x9339Bt0(ru7De*^ z&Ia+fTW+c<;C)}==J^eB{XmIb-)17-czkB`3_bildhNWg0UtcNr6vjerSop`IM^Q= zvK%~IA^&XlC$~!u_4nq>pWco6xeXfqSOENG-?P5GxWDSfPvl~yQIBzRsZJo~*AeR$ z&?!ZbBRj({8sA5Lg^FOasfY)EIyGPizUQ?|=B6*1gm~_V{SSZOdwMh>)F*j7AyWo+ z>tBTY-m#@}y~3z3=lOkT0JuEbEN`nI@*AvO^y{zu>a0DRbodFO&o?;ssPgfNoBcf} z4H>CS(|m8s?39c(>t2LdtN-j3|MLF*Jtw98{SW@p`zXE7()%#IPt*JO$scMxk=RH1 zrz95GBJOIRzsaOr?91(id-C?%;JcB@7W_5amND#CE8mB>ts-@qvkz2k8OKmr&t$}9 z*{GP$__)lLX>?zoFn1S=3?9=i4c9}?!3oX>@3K~roy%=$5jtZd< zHve~RH?7rNA-c_$i5APa@=PD!)u)%9KHG{Mcl(<%yx1&qBx3W*r;r;wIOJM}{@5#) zcuw4o9AqyKKj2!hNMOn<%^u*Gm!tb0u3!;wohLtvfmPLQ2kKWN?(+SLzEik9{*YV$ zTWj1unZ>;YyvN#qq!;V8Q)e$FM{?n5jxr`+@<7dhUxpHHb94Pg;Y!?1%s$N{(GM*lJS;}|bX)*ul% z=h8xWdey+H)8%>(AxE82?OC-NoHIV27wtKY9Cdp7vBd^yu=~yZveFasd3j?5?g2Lh zkNG6!W6QL({&IVabC-HTPnFzW@W&1%s zb4~eyyASZLz37O$jrpXXG~RRdA?%cneUc5p4zJ%XmdCm1*3gmf;=tT>Gk%8Uhu1+F|RS6xm3&nJ!`4l+=naW!y4qIcIOob$r22)yh7&KK`QIoMg~(?=<@36B*EMPO)W# 
zuJzP;059?RBqEz*%T#PjHa~k{?uikPg~0ji{9jB&+_dnikz-dnY?*bhk6d0S%qEfR9<1cOvt=^UriQ;m zT=vNBsZq+1KTS@o6+|4j)Yt01tZrMzthw^U%~@;`?|toCQ>QIcA|*EN7UIsee!tV) z+hxl%4UUs6TFS;jz%#xG;~lhb9eZD&P5$le79F}{w?nTxfBn$FDag&57yptngY^J#;;V)1a$N;U~zb9%KSu)F7ckt%C8 zVJ7a8%xkx0r1&Su)I&eNosbgy64=PNAaWP;kW0gZ*M%TtDZw^jQs@+smFh z)T;x}b6nL}xE*oq4>~68$9!i|bAxq1D03-Zz%ix;N!$Ti~}o~|}$QVg3+ znf}x0%`4=f%=tj%PO*uQ+>i8=O~?^@OW~m5S=bfcpR;+6`JVfBOZP7D+em@kvFQIx zjbqy~*a3c<27W&T{G?0#X8mRG7}nj~(~S0#>Z6jE($Ig*`s>CRKjiA4mcWZ_(&B#D zR0;h@U3mXQ5qOh@#+IH&Tc)?gcUgQon@rmxG^ZZ;>-X$~`|iSy@$RtmhL^UCaOljL zyBY8%EG`)20&jd?Q}7PBUdn6clNZRTIcu<9y?{+T7RC8W)!H)pa~50}%x9B;;FXIz zYw)}m_q{p<`5~20FRFnPL_B3@mEe85XP_U7>!0!KJ_#sflS^VxXB|TSdu5bk?257e z%Z|wFG(eAg>~Q^I1?&cVGBbCfJ?Gvj&Cf5{#PNZqoO_)u!@6uGH1L{D1Rq{~_yFVk zTwPZf0en#?O#1F~TW0*sw>q!tz+-w}_kBFZ|H@ZlasfOg>zdcKyQ}eiTcP;9`30M> z$9CTR3VHEUQ;CEYHhD2p6Q2q>yY18UuWf7+TX74E&sVkGTOox0g<3lop#O_4WA29lXHN*7{Sdgc`>MFO7|ww%Puh|T zY?<57+vYtJC*%gl^1=fK`=cGJd!YpCeBbInK!8uzvTU<(&YUqn=Sl1noTqx8E-3IURLuB|^fy(BFVd&gvsLN9&erDJNsRvd53g3UTgM zd0jep4DP?f^OkEha8Afk#T%IKjJ+Mf-*HYKWzF`Fe2kpK@^5w~qQ7IiC0geKhn>hx z^uf9Rutj9zAh4h2s5cH)4mSi-=ELy_j`!% z=fnNFQoyd=xu??~pucyFw+!q8o+hV!gzjPeC7l%$H3+#h>zLFPVD_Cpz8AnloS}wj zw3qlOId=v06xoIES_SfLnH1BkIVlTKr(UdS`WN7iZ_o?W2+ z+86VNreZ$F`#+6OUxMxa`a-?RJnYAnQ!4GW35jl=B&UVzRlg-}8r8w`H^%eF^S#|Z zP?ie$n4Py>7(Bnd0cxFkkjH*V?m3F}Hh8fr2d@WN%sFFn4tT6j>LM9K)J6B0u;nJ! 
zo1e4Jm}P>x@@>Wjo3Oq$+S0Wb(SF`3Zz@e+4q^P@gqVRHkh8?Z zM{WZ5oQj`{6y(IPK+t(So|jU(ov^Dh)>}MX;Wn_ag7UU*Q`DgzMZFHp&t33BVud-@ z>rtblB*t4=sekCqUptFl-J=ia?_naDR=Enl&y8ap3V;n(DDIgJJ#=5))RthZm;V~x zF;nQN!@gmU3o*aieq&Vy;OmapXH7u=2kPxkrQ`bH_)ann<5xsYNl1p?dh()!(RAGZ zmM=+qfh+MmOdIDr;{9Z0-+Ld0>zU?MldpI`GnFdMySdQoZmxY(hW?gBs-3vFo)GrH zjuZp@UWr%yv9C_hGcT~y^3Xn9Lt14%^l;hDqGn_9d;e7E^DJc}Awu(Rl*pq0IJ4z) zx7<*7S~mW%G}b#QyjN_6C-TG{J*(e{`K=k6;yev`<4!4d&H4)Y?-P9ccfqbC-{Dg< zAM-U2zL~HESZ2AgsUOChXfC>}F#z{dz)rOWE|~84hkqaHqOZ|1dkUOU%A3`?m z%AePY`KV1O=xW|Wh-R4Tm)qF?DiV{o3PHa8)WWRI&ukfmkqD(KU(~(UbAGx3?IkvQ z<%kD@C%lp~-vzi#u*EVfh>#wic@`GHUQhfzDuM}7kAK@M44jh2_dz2RdcX4b+21f< z#phGRC60i{d|WT}7_eY!eaiN5)QR^KE8B|o_#+W~@WL_dcMU#~i!q;?@KTxJDAY~& zkNtEQc=G(ccmBlUd0x;EbH@4Wqj*6)If3^hQ6jY%{XI20&{!S=J6e*e;cPsQGfDMQ zr{W0VHAwS^WBpzqZ??Y^4?BTlsA4CursDg^?(>8!Z@KMs2+#9;LGIQ4NrX%vBQxVP zet&{*#&pTRZZ}Kiao!%tbsN1OC!~TWj7_-%&qtOO+TsB`@qj>a0>)FbvXMkk2(IrH;iAl*SFj3E+Mz~*S7Wm@6I&b)cFAGH7aQE0Q0%h zant4IL+tl)yF(A*dfmCKR0H6B>ukF1(f@M8jmk4~u>Viid|HF?*Q;v8mfwQiD&RnD zBi3_Y&$^W=7+++UNBv|x|Lsv{!^WfiSCggQ53&A%_1i4__oL~3x{)|y8y8x_~^XIq1Bk-WbLWK$1yJx>UJv*O}LnfY8G5CF~ zGP!>*^a&xyBi-ZXVf-PV^^I{1A-$3tca1{6{$ban$C$s|W0!9!kekG|JTXQ8<=q#X zda5wr*s6dMjL!s0`yK-xTjeHOhxJg`cdK84`7g398##yZOXtpyKaT4s4H&IEj`29( zz2kC#du2Z~@)cq|7oMz_K>Inh#=F14E?cZM>-2Tt<$8e`qQzJ*>!`IS(ErNGrL3_(-njaY|&~E#QG5D(uz3n%s(nf-!8!XoxD|Q{_@Vp&rO}2hUXu% zP=9QD753-;iSh4%r~L?cd;B@-(nrgkn}hK#zI3oFdV%%YA@#TfSa)XAqMeP{{~ieZ zJ`Nnx((t{Y33&&WCvbhx|GYoe-2P_#p1QA2c!KqQW%IrLQ3D~jwriO>;`vX#w85yZ z9z6FwMwfQt_pHpdH}-u=NT!`Y`vG9txmt6q(BH_HqO!C2J;ixm4x08F-^+{PVt4R+ z(XcA|a}DzK(o?+*@cbNwFTc{lct0oJsc8Ui&fTPa3hk$GMf>LBeySI5kF07ZWKz?O z#U+q?9~JUyc}GaoEz55V`m?=O@?{IKuilmj4t}qTSl4`ScEAp*{HVJO^WoP%v;Huy zpSS8np*r^O#(@(5XKx94IX-wa2m5{0NrR$d$a{w}$q2rOv*Z_~9L4pkmI{n+g8X}b z=o{lMe6P=*Kiq`tClqS$KiUI3ynX2V5ZvE-n_~+{J|NGg@o$SSz)^R!g!sPT_cFf1 zTm$X5IGx`7dI0COn)@0}n4ey}@u{K_ zKGXoI@yz&x>-B5ywW|HW{*Yavd;#C1U+Y%OJp>ohd8`{C(OTg@Iu_zas44n6GWJ 
z^U={?u=|}l(#GI>E_fir^cCb21xMNr1DCqIHdy@|_xJW(sypttcY#-tHO6=HYjd## zetcs_tJ@e3kx!py(C`Q~3|!Y23J4(Q{^kBN4{`ped$sha&bYrdBqHzcxogYF?642{ zB8FOh+iIWi<2+I@-eSW7apb_i$PHhO>y_+G926&^cF)C?JH}!BnF`aZen_Gg)5|?` zeQ^HCo)c7AGL1vr2K>)xr8-2e)w^e;~hOJ8-i}^icRs)P!JWe!YkGzij({zs|yW z$?AkhrZQ?2)YOkFg1jfpdfsU@4)Q$o7Ow>ko%J9zO&v7?tgrcvxnav#T7`sd)Zmcq z%XIdixsLtp%Zb%tXkR~FdhaaAcct5Y{*C^UcT4s8L$0nku`U<*%{vpG+%?n)*-)JmPa+U($cWM)Hhf?Tg)M=G7m~lPR04>z^cz4%TSkIwDMNb zWxW4_(=6ms8_w)(x@hCk~YuuN(`f>@{iyeJ`PSuD* zil=GXxFln}<`~Y3HbzYiJx%L@OR%?UTE12^MXg373x^@JcMVR8bF)MZ7q3kYDd^9$ zQZvSB4TroPZXcQgoMrEKJHfl$4N)6@cc&?i^pBGWkiOpL?-S5&a$huv%gDI@DJCvGeCd zj3<5RPoL@*4srg}YsKLHR&Pd)oNXMUbKz{1Fz^GR01X9Ty$RBpQs~dy;mE$}XrI+S z=F7(<*i*+{KP2pe8bOn0-=2o{iFv!*o;jfgj@zz@7HF^TH$2;81BZCa4T;O4eY@l6 zTruE-r_;JS&*Quvkk{Gni1l2V_WmL81XL7?g@Lo9?tYH|&T_B4)O-f#tp(NA z*W5VdaMt92=98GOR_%!e@SEKnt-XIO9^YGw_UuM?)ILio(_VD~`tQ`q+M<|`xsr!9Vf_DRp0a|hz^Je8*}9L4+$@(rvdV{tytnmDNle#cMKG_4||@%_;%xZdJ} z8bdZN&jWzjCqB<#w2yJrT+PJCoi9oa9}-b$zyd> z3{i*iU!6XFwAt_hmP@~ePpx`jrOt8J~-M(NBi(-A0O=lqm-g|}K498MO#6^&A2aQP zrhU}351aOJ(>`$8M^5|DX&*c7gQtD;v=5*5@zXH?bc_HULqNwE&@l*fi~=3QK*u=H zF%Wc&1RX>1?|uAN-;S<-N7u)r>*vw+_2~M0bbUU$eji=mkFNix{41&|T8PKAb_+HK z9{k^RIqABcbX`xn?k8Orv|D4OWGCu`sx4#-58+$nSmQQ+ODUJhOAypt?8hN#3tov7 z1NY2)aQqMO3#*@>=ag}o7fUvORR-=5m?f?cdE=CWJrm>(0Kf z{PX-7@BtsO=Q5BJVe7sRPf-VUcfe0BaPZV(AH{MmlcW%F#s}Zd^^tpCUa8c|eNACom&j{1yo0RfGVxlGk) zbjxqx=oLTQKi=mum#$YQsp;da+Uhhe>MobL*4bVfql>eU>zQ-oa&dnLYLC@wp{}rA zf_8WgmkCbt*?kbY#rz)=>hte#nWeAaGzF=k?)CN$$7FA#?roLtIrTX>o0)nn=(&mK z5oQ(q6S~W_-vUQNfG-*(++-g}4(Kv4_It@5@qRhL8(8ccE zEi^E^_P1{DxTIF-etVytNI8{;=kclG=Pu}u6Ti(?m7A6? zJYgJk;R=g1g#r|+fY~7`B#MlKXmgk^Gw_QPjQ)! 
z7(u1az0mc4C9Z#eg3Bz?bHCOF-QKpY-|slE+Gn%ch0y(zMI7&m#B!O$)DJ)WVJlFW zQWPf~jXK4dMxq;FJF#D`8raQ#+FiV0%x}w=YGV zX0cn}cGoalE>(%yi znX6}>PIwPo@X;toejk^)^VMp%Njf2FVpWn`eDVCn<7WW;3wyN7jP8lZ5nF z7hAY^aGCZ`u3n*Wgv6ydzMs64%c$I#>h~!6Z;V>t7};%jeuJM6v00zB5du)j>?{C0WJ$IxzXT<_4+hT=Mvh#P1>in z0=V#|NNzgtuB%rgS^Eh&&*#@4=7{~l)8xXt-SDj(I5N3#1L|-)nHTl|>$_FG_z3x+ z{Dul8PxxwLWOLR+UiD(EScC^`Pw%|?lz{EF|B;k)$NN)Lw&waeE|ac5wDgf1Vp#Kh z)D(gHj9UGDU14i`*`*!gbWG>MM^>LeLFw6&k?_Oc8ORoaE(kcD(MigKaovjd-j**1M~>*U|>>)7A3NPLN*^xHcfU2ELBMUd!aQ zT&7T@yW4mrVhZ1xee|?Lo$vm&QL9%FaxcD0Y#;Di$J;9pS`ZST`MUC;EthGENSkmA zc%cNJ`cod4(K{evt7wjx$uAEh6m78ooHTlK-VDCKz8NY}R``7g%vTmL#rym6_Vu~I z?j!Kc8R30AcQ{&pHR_0~7ad43AVhlUC+`C*xy+jD4|DhG;rhEz&Q$=9PMH6xatUe@ zU0D{eeFc|MpWK)fvWO6|a_=X5fu-dpho4eMj@buam8Mv5nZUZqjq6mfegj39rkHaX zRf|dSXH^L?R_T1_X~t#pKaABsv49Z%()ruJnqq&Eo;-Dj5+O@gyt!KNgBZsN)GLc$E;5;?c%O4P9eXt?lx)1X+ z3vb5IvpX*f{e3WS*OFSrW$qW>Y`olo`PO-8AH{sUP6#(YLwgU$uQlRW zA7wu?vA`xa@zPS9f5?E#NW@1yF0Dl@ihbjZ9z(oe6|bgDc-F%iz8J6N*WzndLCEnbZmnmJ`&anz?Bo$&9hA>OAC;yMfSR-;B&%l*%Wc;BRQez>1A#PgUu z$*LXi*Qt!Jza$VVw6Nl2JHh*5uJTd(ya{q@@(O1*1NT_FnJKWqF-cX}?t=E+-(AfV z=CX;w%JDOvV?T_KDYzJ`fLM`P60Z`0Z*cbs|K-$f&Kvj14f~a*XYUy8@odr*ptRq} z=C~biu(+$fX|-`FwoBB4W*kLG_SZ4f4z0y2m2p4txl#qGy_ub+x6N%vW#cSdYnHt;uQMuYQR77=P&->9)2`?aS>3=gq=m5zVb z%J1MZI|DaMrXuz)P*^we%}$&r+p~_;Al6Ukndr?q9{Ap_=~Is?M9#u#x;6*@evb|Z zMhh0R$b7TJQ#ts4S*5cU4HU3Qr_O^j+xFo5Hkti1ADojZ@gI%e;QP8}8(g2@IB%TY z5OxgTYsXS80Wrk-{n;K`I>is)_vZy4*oX~`9;>%%8_olhf&<3z*CBTC+<{$xfGyWa zmJ8J)XP~G3gop!N=HNI7rP2!IyfjXZt2xMJwmH=a8> zvdH0K&vs5U_Afo=kr+5QtXVxlb0E*06w<2_i}?x|-*Y;P?_2N^*YXp{dHQ=6|K&t3 zqZF#HpmGwilHbRvu1@7LwXT~MSDwQCDtF7o-rzFA+ocqOPGdf*#a;W$uwS21(w&4@ z*5uaae6u>7$Ay;smQQApk=(&IZNTFm@5tT=Y}Bxz^j8{%E|^{=#KM zw#@lC^(xkX>z|+F`FKqKCMAKdH&`S%ZSL3x0Ui@F{DyTGv8;ASwiXLc<}pw5uUY5c zM9$JDo&7(j@fiEd8%1v3!gvzXcIz$UF|M`^Dqe_XT|aLcj|W_~Sn1Y#;N8sz4_|46 zlVrL*+ylSo7aBi@;}Ik8lgG~|hUZ_+L=$Vo$nSI-xt~$OBKMUiD`o+AoF8b~_l!k8 z=btS2D{qup)m`1lBI3u(jtDFVr)gc=N4ZXLfHWtq6hKV9bMS%RJj9ZwEs_7U3RpX3 
zLe6cx-v^fDbNLZdKlPr|jqG2D9j7>EGzX35sL>oYn&U=u;AoB<&7q??b~Fc%=IGHJ zKAPi4a{y_MAk879IfgU`k>)7U_S5JJqdDWTAAZ=ss%tZ1eNBT`x};(MKT%(2xPK6_ z!yj;7!Tx_C;ifC^D~o7vKIy#x=fwo`z_M-F|4Pi1{IalL+c@iOy8H+^Pb(%RPX?Yd zEZnd28k{^Ct9eVEq5s`Ivi<&B>_^mie@(!Ad_qlsZNdL+0eRAK-S>qF*_p7QaB8Xz zbk};f>t|8F5N!tW_xE2P{^+wkefFny16p^WbqiYepmh^kccFC~TKA!KBU*Q&bt_u; zqIEM`ccXPXTKA)MLt1yFbxT_Jq;*qTccpb(TKA=OV_J8nb!%Gprgd{#cc*oGTKA`I z1GMdcwk^=M2ii74+b(F^25tMGZ6mbpgto2FwintqL)&g>+YW8}p>0F7?TEH5(Y7br zHbvX6XxkQT`=V`QwC#+xtwhhy^W7@V%+n#CLG;O=4ZQHc%o3@S9wsYFH zPTSsT+k9x><*7albkXqrMIJ%U3D z7wwstgtL~3$`J3?F%A)OD_9{2Jjp2URCpxfTv80mJFjz@t(%9xMMrZ;#JOiBZCANW zu7=0bRlur>yrzAyeV$4?#G8A9L&gS$NW@-(?d?gPZzZt5(2dB~IO}nxeofOs{avq* z&87n9p$m-L|KsjS4#`ezU_PGyYkU2l?^o@@t}z7(uvMI+UdMCDr1|?gQlT6A$vJQS1#J4r*lRX)UF%IlqdRcF+g>wjvw+0}A8qZ8 zLEO(6^P5#1*kT5TvxASr7oga4Tg?da?Lul{;T+PrNLnkx0J?$VsfVS9Vf(Hcy1qe= z%Y57wao`u$Uy0$r!q&mrL%mh`8lK-i@9>T%OSnu*_2UtfK-j8V#O@hsa+%F`h0I^y ziI5m{t;JlX+(}*AAHIhYt&-l4N?c}@X|25@aPrdF)@M_=%yRqAg{y&8ghKmY|FU7G zFB2W!1>c0)-eSdfA8Z)4yclL0aHs8UCcD>$@qR987KFHrt}nY<7PQzfep+s7>QIUQ z-mi!wRbwA-Ot)cJU6aDAVGB1_Z0$UR^UR8aBOwH~@SK59PPw@@%}y+=khbZtpM-wpW`&>=_>`ZJ2qVF3E%>z&3s7`YrN<9@99RR{0qXc9Xr758&%cJHEjcz-*>J_Sw1{8Dzyyg0NB zw%z#=>(yZ!4NUy)eMXDRyh)tyXR(-&v*Ct!e=LG+t~WlYU?$?~E4zZ`U_bbG{-PC>ZO^oAnzmijwr$$> zP20w4+c|Apr)}@FZJxH>)3$xu_D}l;Xx{o-*48@PG?&zoI%UvKa5d$JX_fBJf;;OcyQq8I<_ z?#qtdo*k5K$0R)|Oj#L3$WpFN`05?tY#4IArm+7fznay5;<)lE&e%sL+A)(Zz{f4a zA}%d zX7$Q#vc`8fg!AOi)nW9P>(Qq+u@>L)Gpk}`lK$2KI`PWtEn+yoK68Bo^6y@@uhLaWWf>4r;}8Anp{OG< z+}{2Qxcc&w^KVfn#Zjl+>K3qOTB43B>il@^xb!0zSb4U>#j!lp-RXC3XMjgpyVeWY z*fC_<>C`IVW5pLUjx9x!m|$Dz$-guJ4t**yyujtn}6qL?(wAL_iONBW9Da- z0FQcmT-ep#n_`)lCH(hYfWh}WN#8Ds7XtTFTBD}TgKPh#1yF9LsAA}p(ic;BC6 zr`P`NFYfrl;tf&%eU_IPnOBKN5r2HPCc7WBBL9BermrE5Uc4OcG-4g()2 z`ot1BVDUgnpU=Ps7Ht=#mtwsORI>enWm~@)HY_$E?x}0lltRIm>NQP&qiH}6E4m-WXBhc4BlbnUpTA~l{Vjih$$Id(|~-z-upwo za}7wa=!qT6ANWMJ=e63cv^d@5ddt3PQ5Fg8$nxWP>!GLJ#R2^1BeC=y->x8`V1|;{WXOzL| zzx-{Zi5Y)5q-jkPGY9P-q$@P5A>X$=zb#qaaTT8q** 
zh&j5wz)a5%{H~3u!k5~h7ig8Mya}*nmZ>`mjka(|gOGI87vQ32(yNy=BSxmo)FRm5 zmRZbqcTZdchfHoU?o`_kd6lKlP#y4;i$TulKYojc&mzb@Z`U4bMZSl&*r)y7FF0gV zeru8gaGALB@-x_fva4|+tZ6mp2}ULu-? z`Ik%VR`v$Iaw2YZ0G$ybha|60I0imhy#Gu# z`n!-_AexW&lUt@Ita*z=j$@i*&l}^*`tXlN0{@V)o8Ex=nVgmC8IShURvw*k67rFZ8RCsMIV8|Ec}x-J zcRuXYisnoXxiKPk$r`wNlYs{N#$SF-P+DdZ%ScI){Yp zEV|kXOzibX7GCC%aX}96^RQpq{&*WweThSIBE}d!#r118nIxA1$CkJk6k~j`jaK|8 zF5>x5-{a_p`OOaP>Ciui=jHVAz-{1Kub9XrRdy(TFCv)RJ{rweqxoz! z-;L(O(R?|YPe=3ZXg(gz*Q5D-G~bWr1JZm!nomgc4QW0i%~z!Pj5Obo=0nnaNt#bd z^DSvUCe7ER`J6Q0ljeicd{LTDO7l%=J}S*urTMHh-<9UW(tKH(PfPP{X+AE^*QNQq zG~buz1JitAnomsgjcGnI%~z)R%rxJb=0nqbX_`+>^Q~z!aK03`;r}^wO-<{^e(|mcFPfzph%PBrS&DW>-{50R6&Idr}3!w7}(D?@Fd<1m9 z0y>`oo$r9ohd}2`pz|rv`4;GW40OH*ODdlOo$rCp2SMkHpz}%4`6lRm6m-4{I-dod z?}E;ULFdb$^J&oeHt2jDbiNKcp9h`qgU$y+=L@0piO~5*=zJuj_tW=S*1*1DzxQSy zY)_VN<&QWvgHLOCr|BW^E6Gq%sTT0FU(R}14s7>(oclz9|Jte+`0_9AUTDW~x0jU9 z3r0+^%e8`Z7V7SL?X0l{zA;7T_uEydOZv^M!w-0HaGTUsN7Qj;^?N=6{;||ryU-bR zn=>ts-3ONVGOqUp>dMcN%c$r9Zf#C^bs*S|Ibt##I0~GnvURRKaNKjPWfzi>&n4-` zgVn%~B#aeROaJEoNVxDV1a;q6^3VF4Ph#D+yYHf4f8xsM4i}WeZ=rI?hQy;+R@mJ>pd;r z+6DtZQ)WqfVmuo~?EuMW)U}^0xUK^@a@U@H=23Rc<2?E2BWT}#)=5qi@@uL6K8+Rd zYdBnWJ{fDr{9dp4trzW$4t0*%2^{!i$(lyAKQX~+W-IWztYXzYXn!ZAR6*$k>Y`>A zg)fJELgCcl4Z!Z3)1oYKy{y*3Q1O#?Om?WuR%hJL^)UX*yad?WuCh1DmmmiIR+8oF zGpN)2a@xCxX#X*BMD-=)nZ{i$Yta7TH0Ihv;2&~5P7xT-#?LLw1a*f!U8P6*fosja zPFRHN*DVWIJdgG|!55;0&*J_nr?V&rHSk+~QLy|C`{a?|r4D+7kW=|wjpX=O)bM;8 zud{`NT!#Po?FgWa%HKdJ{Nu;}Me!$tKe{$GUAvmDZB5s{rfXxDuaa?RB~~J6*e-uI*0O zey3~0)3xL2+VXVmdAc?|UAvyHZ69L9+i$oTwab%tT-c5{%!3vwKR2P)z5J6(Pcy{r zS*#Mt=?1n6SKUkAAP)NAc5&@q(gAUu2BrQnX}}%j!YLQNp!R%S zi^gPJAEP2_Jlc;qTAlFNt-x^uQuD?F$B7*0&O(1o$9Fk5#3kN#UeTY5@yAGgINFCC zqU$p2eBMKD-L^*J<8Ky`74?lj2KiOz{#yG{#CfZ}UAhj~=*Yb*>&7DIW{k{>1;E$+ zUhKXhz$Oy~#?=bq`rro%uGb+S$nBc6AA9=a_iJ7GMcL%5sYc8d%-5}dQCTu_cv=K) zvXc2b|ATX8EI?fPwa$3&7npBPis2hYcT4qUi|j5 z-{HT0v=4FfrG3(`8i1uv`0TDwW|QKXVgt2k?;)f6z5ulu>`q47ZG?QEKtiVma)??V 
zt(^7mzylDkf6%D{>}35&XrVEiEbAuSdx6!?i|MC>H?XR6;dYnpc1(S7bov6|C5ziv za(AHp3y}f`3-B}~CV%8}wPWsEm^yfvmYA(nj?e zTsCpd5puhX{^qVLl+1TR4$sZqU#)O`u3EY7Mqm>Og+;-@8&!EF0$bT6vEx|-zdOG1 zQ?`r~*v=+tCzj=S1HXRTkss>HCekAO_A$VLF&}>)+sP)KZ--z0!g%b=RjNxo!CO%} zY4-(kIb};5^WAKcF}e6v4zTe%_sd5i`jWXPC#4uwRN6xU1|WXw=hN<=6XiT=B_e&6p}|NDJwUDjUb+%xR6&p!K{ zv+rm7L97DTwCo6XXn(8Q=R3ae9f+Kt9j^z(lCMM8sTb4`g+ohp8_>LVV;}QE)R1WA z&oxHKul_~#!rM?a^jF=qLB`Dlqi6h3trxC_9>p_U9)k2Bbit(<=ydO>*3t8@hNCk{ zHBoBl{jP(X-#~eR%VEc7BGph%yhzESrwP^_S{GA!O$~*=8;E%iHp)3&4I7DgHB>+Q z{YA2xk!$qA%Cg8iBs*mz}0!=XYXPp(DZ(+XX zK1xaeT7{&`%=l3aeT(Gc5ezoLw(XhTc^$qZ`WZh!Z3zN-tO?VY|Ez{=*EV*4g7i0y z`upAh?Yg+roHrC=(}y}-v7Q5r;oL9vu|PkF3%YUwzMIl=c*lS0k_ncnFQe!1TMfCb z*$Ec|z1O6x`@}k67SGt^s$Vg|yu94Vg&PoC#*(L21?b~KCC{$!hVN4Qx1Z7k`kHB? z+Aw@ac3dqfHwEZC=~2sNQ~0j#|Lh9)U5`mWJpR`v#oMfSn-*{L;%#EQ&5XCH@isT! zCdb?Cc$*$?^W!lJ@fe19j6*yIA|4|VkD-XiSj1y6;xQWW7>;<1M?3~39wQQuA&JMB z#A8t6F)HyGmUxUyJO<|fU}WMjH1QalcnnTFMkgM_6OZwU#{k7+gyJzo@ff3c3{pHs zDIUWVk8z5}K*eLE;xSb57^`>;Ry;;49>W!n@ruWQ#bd?zJVq@Z!xoQm zi^sslW8~s7bnzIwcnn@VMlT-27mx9a#{kA-1miJ;@fgE+3}QS+F&@Jhk8zC0K*nPv z<1v)+7|VDJW;{kS9>W=r@r=iS#$!a|F{JSr(|8POJVrGh!y1oqjmN;oV`SqowDB0* zcnoelMmHYA8;|jg#{kD;gyS*9@fhQH401e1IUd6tk8zI2K*wXG<1y6n80&Zpc05M= zIu64fkMWMjfX8FR<1ys%81r}xdOSuw9>X4wagWEq$7AHj}d^6A%KrDfR90dk5Pb+VStZufRBNIkCA|np@5IEfRDj|kI{gS;ee0vfR6!z zj}d{7A%TxEfsa9fk5Pe-VS$fvfscWKkCB0op@EOFfses~kI{jT;en6wfsX-#j}d~8 zA%c%Ff{#Ihk5Ph;VS8Vo zX(%yeETf8yws10}fsb5b?Y@PB5UVl5Z{wpK_<)V8GYfEQsiKU}zc&WK2dMOI;u&A~ z&b|FWGq)-DC~BL_Y>lv0ML+s1_&Nb&v-IxbM3*j{#|`bcED0E)%~~;~DTm;E%+hU@ z0vMsP=joFZc>zbE;Ye8-V0tR*Uw`zT4Pr6{Z%*0^K2^7$e_6e?4ZicuBoB;!UjKi&8TorvvWUPn>pT#yIC#ngg&(!Rwn*<-l zv+<#ny{wS_n#-9AK6i{et`Z+_h8R)j6+5r}^9j^!Z;kv;Kr-1y{VkCH(@O9wi4_9U z7n*pO478R$*}Ux|0TrK6j62{6n50cr;Zl^uYRW; z5vNiie*o0nkDE@lI)<|S}mlHUN<>-4q}!F^Csq26Oj3z#)?)O7@x#6 ziG8;T=&M>7^BpKZw^T)BynujSzV#f`JPSUHuW~RFq+jdx7uItz#w7DXi9c!xNGaja ziA&(qDaoO6=PYO_sWdh8VevGdg5VU zYoHewii_LN642IZDt#94yKb^UXTpJiD!+WNOr%17HLq?>69S4!q38NW0gT=D*E`$I 
z2q++M^x<=$({@cL6HXJ*{{DnJHmAYoF8hsy1*C`Bc~4Qa0h8EkEzSQV0d;p;wcjHd zW6Q%SJ5=QfXzU32kH`t|8Gg;5b3~DV8Xr3BJ4pl|zc+ra!{UVh-qY;5=XAYnKLH&I zd8t&b0X~Q0*3SpH3FyX$Zq40Le%fozg2Funr2RfqLlnxl?cvEokgigH>)M!(F=pyB z+f&R6{I9BRCjjljWx=GyKmg3;nWN|R0MlIY{4#?u0WlAkzY2o>b-(qA$Pkbj%ZOYf@T=Kx&oPV-%4cmJ zo&bLS<}r4t-VOA{N&#^w?@@|Zu_wHL^0Z;(6v+D$#yezloPaWoJ*Dk{=6TPy=&ug) z*NM0Rc7jY1FHD0DqK|EMHj@P{GaGkTjUjEqS3cms2BFQ#~h` zt~-{14lqic>AnV-;sfW#ORf3v7E*rCWM}tpn(M|K;g>D42{U&45>}807 zGvXlh4El4RTR15h()ml9&iw*;lNvgEPXOMhGnPNGQ;&cW0=RbXhxYy6)}Cew{78Ku z^J5M4x!b!_re8t+Sij}yw?O_j)vKc?K>iAio$G!>`qpQmhLX}SpPf}plj6W<`{j>^ zCt&=1+Bqwx5+G)a8^4U!Ap%mnZu~h0)`Rmd4Ugnuyfixcq+Wx5FX|HO^dJ%tDXXbc z27EeqhD;lese-(mIW(k}3O>r;1Cs?Q1oSIbK+rlJ#{0fQ+(Rt_T6kI!!khsZ;W5^d zK86I;`AOx(U>3w+xfxcbXih-vb!p+DfMHEbZrS}(mw z=k!{QLh$}6*-86{fGJ<~HEgT`-fxTX7Y}&|KE0gHKGN{Mu`2(L&yOHx8)H<~4M-<; zB-jexH^wGn%_fo*2uQL1frvfOG{&T7PoRAjPb(zd>tX#Ex_2vFlYo9~7G_^;08D?+ zacmLvQB+^G217ge6pz=4OIQ++`!7D7=nj~FnsfXvz+cv(#-WPm5M!h5dD`qb&_Bb~ zA-%m28+iHoO=<`MEfwCmTQ>$V9~dH;{Xl+u`l1!tKfwIa@4G1g^4sIL=Eny#{osdG zyCMR*B)m9zXa?+qey)*GpdEglX>yzfd!kSueQG11D;P0N?H9y!2rJ6$tq1+}Bu&v_ z#Ta|`(Bl?2q?2b>EzDR~`oZNsIXLc4HX7K)w8A+Z__+~{#A?^)F1LzYLqhv#% zJ;fUPwse608qyu>+z$6OZcJkqtps$O{}_8ey$M#s`~9Q?tPd8(`(&6n;XIrx!=)%1 z*2ga=StrHdzR&j3@v2l9&%g@8G8yi}Y?u7kpnac)=lPDo{Rs67gEne`@m=4-s1NrC z$8L}4^+A7I?lk>PgY$gTod*`pUlPz`<+q1Q;66gjtFESD1oWFw^jI0(pR|TG2ybF%mrHXFSh4q;NJ(jX$=Ozd-FWuKTv*G?{ z7x{v%s*oybI_G$#9_Wp@hHc08f&IW<;5h^LM=RwIUdjXA!5v6$0Q!vI=R=#sRMD%G zGRxhNzw=1X&N86IvU%?PyMH>s(%;M|t%@2&jxX^*eWMXGq2^#8&Q&NXSR zC5XBC?D(#q{7 z=)+_Bi9seHFFZpfq4zEj{--{Tp?;~?gCGw-Ze5Cm`7p~PrLEi#G2X}yX7u{7USFq< zTzmoF(9~Gi7utfn_i*qCK7*JkiEHYIVZDyvuH!Iy3O+n9D8>_@A2|i9jDG;#sv1O} zs}1X6$3`j>jwL; zuPjnolYpK`#rXz8d50A2hr2=l9lU&_H>V3?7)`(Oj|TnJajfx>JkXjca`^&E1T@$f zUsV1IVr)E4+QjrvpW^x_t5nS13-dpNbD7Qv-amNX!*ow0#C%y|{kckj{Ev-fnV!Hm zJ{J^tra_-NC+7Wn0qL`EncEtneaD*&QW}94*dp>P^BYMRuYT#qk*`qyNM>)muq&9quOv=>0xVc*e#oD+!^&AEqJ)`vH7+hv3`oQtQ4WmtQNRcb2Cls^Hu1Xaz4xq5I0{&vOBt 
z3HUZVPS@arTAVUEVbr2*%m!Eyq0i@=G?bBoNV1Q_cEC1B3EdoZNEu=*?i!}D!nfp4 zOHx^nE2AiL8@nj@);zDe+^mbBjC?Z|_Rn$|VIJB-!f|TKsOs*tYAU3kI9L%EBCm`h z>&}>n!~3RJe+AATRYry(4^9Oz18(RP&&n5mW%T>(*08QEfP<|1aN2xT35~hycTl$i zZtGgsIbERt!}B(NX%X&cN@(SAU9k(~7n?Kkk2O?6shcm}kY)m$vn}C$YQGf`XB5j5 z3uwRdx%r?QEsE&6zbe})PQZ3}y8c2aTM=FItW6T)2Asto_QjqdipV~YC&3WXM~{7H zuRW`X_MSZ;Q~}?{-zj;RzN)5(I=19q{t9%)hZ{P_jwqr;?a@Vj_!fTj)?lc)ydt`v zE>Lhz3UCCq_<7gG6wx)UO@o6le(d^MQQ?Oa5vhNv-A5I$Dmcue97Pq;?-|#h*??nN zd+D2m`5r~|CABZ(f{qb(h9_;mxP&6Qa-Ltf6>t?D`M(nfH5Jjg{|TPI+JIwqKDk2B zKoNaBbL!j^WQ3{ZMh#Cv{g#0t`EOJJ$1ddkg^{C*NQEwXG)c(_TaCU<6NmQOPYmXo zL;d?U|7^Jq@1OLsJ|+eA2^>2Vle4JL6;VoT(ZW9*(BfMJ!rNL! zRFZe7#sSg?=yL^cK37C9V$WCp0$f7XJq_Cr%mTlYOs}p0jv%A?K>ztUMI_07SwI44 zum0pyESr?j{ENZjaW5n6hj;fLUjZdl#I;$S#|PSDdu+5%TM12-d$e2l8DVXikKS*i zDxsc*b)*N_AR|nA1rZ)f$iaH^){_@t{zxTBRQoF-g{G+6KSBVTfGtSU`>GO({N%mG zGuQ}wkTDmqd`$_B&Rkwg3jo}@j(e{%ijB~&$6aexwLgw3!s+@5@+g!Z+uM{7g?E-}|H;oo1B5Yxz~X77uD zJ9*V>Jo={+^7Hi(x)TaG-(f;>0!+$?H-UXJ8q&p!H!tx6eSCl5`I~mGT4(WetJ?J)4Apc!) 
zZ=-1?WXM?-W&qeS3=`q>S0Fw9czKLx9$=rm7WBWPqKs;V8Cf>~t0sVE6_tNN85Jp@ zQ}d9%uh2qk@|ZFzQP?RH1lTrlq4g@qiOPufu+({10qCQM#=nb3%IL}`J}Fs9C$A-6 zU$;|6{W3yFKmEhX!PwsD1}dYrn!)%FrGP_eLaBRyNf|Zx+kMNr4f^Xq;S-BcWn|`R z#+eWKQSP-(>JgydqHNmLasdaiR5jNwNE!W&<|z6M?fLEatAQg-8L%QuCyX-z$MC1F zz-YKKs_36OI0W_Q`B1A4KswJ+(S&zUpQe3+sBM%oGT2?=|Ky!}X!OR0`@C_Yd|^sE`xPXETY+1)xH)~}0=ENU;R z0jsF>q!JDKv)5i!DFe!1)=J%Ig8Zp28#i6=8ezOQu5hH>RYnYHyW5%S0LMC{KED;p z=d)V6_~stWAAgm?7p=yK5yc#Z!x{6g3?pz9%n-O zCkw0n-(IMonj~t&V@TJHAB^$oh4r|l)v6KbyB80tU3jm8y0VmynpDC3*DOppzL|hR z|McZ1h}3!G?rfhQd7(c0s?zl;E`r@twi%N+vZ{oxR!^Z{^){~T6qeoR0~?nc6I zXN<5!Mz>bE892XiYWQ(@1J=9d^>2fms>nZ{r>p~TI@83&j;HR1^Vf4LzJAP9Y<1hx z_b*(4gWB3zILk`KLKPAYRq(4KS8dt4TRc?oFBbs#SR-)wHEy#Aca_QiF@2sEN*f4nEy8dG88uJISS={ zUFPlC1nrAzZ`^n!NX4+?MbQXV|C~b{&I4_)1&Xh4Xp<>04ki}>voR3ZM?m{Z3r{OLQn9P`k{tKpyj$~tYnvSn`mZ^Li39(O7^WK}tf*L1q{`6EEvm@( z!*EhOwC_kwWiKZa;J{{II9_f+#T-xFkE?_8;Ih}E?4eM;X5U7#JfkW)T-xLj0`$Vi zE$k|(dyW#wNi|4M1PqtLp&%4XkiK?P& zZSBKuZh+I8c$Bvd_ItsmQ`LFSz)yv{b?yjoblsbEoX%1)=H6(ZQ$X97xoLYr`?GS; zTDG1l`ka^?^by`C&e251Gyumnm})KMLB-I-LOlIxRj?l=blm*l{W|~rb`qTb-#peK z=u5>OyEi;chx>&t(b1;MkpJ_yO7ScuRpelys1)K(#rEoN?OhX5MPuC!ijo(p*t=A7 z4t1g`3MkqTeRdP-+tgG5~6w+%sh!PvU%;yG26X?E?t zWjqyPjK?ropN0E?8)y4`f!>+3d8Nip7458&)HIHS`B3k()M^L)vwL*lEzqk{+KEz- zzAb5jjWrH%Wpk&uOkIHcg<~1?{ZRgNYFs+SPZhQGJleLBMa2X++4JfHj&JK>Ujfz< zD#pT2cJ=|>Uq7a%?J3n%%$@C0@n2`ag$`h|v3W+tH0qtY7tCS&hdVVY8>raI1fN1V zlxHfoZ{Z6@#q52_EBC@w(Q=66Q^6)GR%xH@DV7HLpBFqw%~Whw@7obtD&RaPt}nZ{ zQL!bu@I$8Fs%TS!tNBVN@MpqhH8ugpqx4V9@26CZ{c2QWXPhcJ8kH-k1~khAtJ`*e zRm2$PbuIE06|>lgj1IAY{xW2W?i-_GxsGZg;|8jz_Nc1*kx?opqY`&;75KNhBE~B_ zM8#D6g)02q0moS4T)i^T;*3l?n0-}I^!BIkpF@82@!2{`Fv!Ok)16D>faANBZVxly z8jm~-5Tl3kcZsT)`9XUQUsnm29D)9*vb8nlKzY@{=fZ#%wfwS!{tn3V%#!#^C{H0_ zoxu`tgUc0Mz52Mo`IKy-!qZn>VXn zIqV4?_j-qU0KfjTNBn6b%=+#g73;~^@Q(t2?`KNA`@fV^vFPFLYgd3?OFp-f*a}sF(s!xvltf|;=Z=sty)c3?G z+Ho7u^$%8Z|Iv8dVLUD|9=8~eYmCP|#^WO6ag*`5%6QymJT5aHw;7M?jK_V(<3i(c zqw%=Xc-(0`E;Syv8jov@$GyhmV&ieMALDSf@wnS~Ty8vWHy+m;kNb_s1;^ut<8j6D 
zxZ`+Say)K19@iX?dydCN$K$5sanv&vtJZ?K4*By`hj>m<^?>(cDMV_yZET0tk;5P+pWO9eA>wC0{82C6{t2=mVljk-X-4y z^z{d|OTN_*=j2EoKfAOV;Gi2Q(`*c3e|0P6*%Pp@Xzb25>PB$>pc}VK2kvvj7EHx6 z^o$_JGW*yJ*o!jMmyAEjfX#JAP(};v$>-@BBqlh|NG?c~*yj%C>B~ZU<%xi^pW7PF z2KO!a^Dk$+c0;RLDpn@w$V!FvA*P)3Whtz8bhr0LIm7zPXS0;v0PEq;{6c0CSpUcb znwMe$6$`NQ4B89pb!NREhi^U=i&z>=y9e((wD$M&K>F=6=iRk5Rn)1({z>aD6))N%)`*U1Mq#cgk;Vuj4cJU6UV5o>f%vmdPAuiso?D}?dE^TYDc`6*sF z?>mQt?=QjmU(tT~bEAjhd}aIwop+xKvUCluJS_+Naamhkg^w!e+I4f!t%^q2tBXgQ z+~|SdfmYr{aQ?IM_P}?$op9f%8q^;R>5kj9k2SFokjvu9!?J*VxQo(yaBLmQ!_}wq z@%SEJ8NfQNY*K#KrGg^Lw&w5NZ-j|B@3i1bQ$ba#i+jpMVgIJOC7{?3_KRKH#j_xN zt=K}R-BAUdjiEm6Isn+VmjqdgoK#Rnlia)IeXyUsI1uA=T?K_cIlaeJ49fIPNKUE>vI?&6{RY@@Ccbfpd?4MD2K|?X zxG{VCq*<#}ATC;__%`4d2j|+v)O{6n;LMGZ6Tq+UCcnLl;CwY@kYvLwZiM-&w2zQV zRS>>^7FJO%JE6XdZAw?ZCaa+2jQQ4K$UkSQ@x4D?1;zPyY*~l)-W<+S9ZXa~2C+&~ z&d}bPeFLq|uwNW&BrV(ndR8qhP)is3n?zUH4Eedb9_{4cuYxAhDiZetKRt(9%F66i z5Wc=>R3#S?0ddh9j3k&-;e3W<`&0X>5f;UtWn*(u2{lQm)T=-`SMzT9ll5Cd_g6lGn1R4SpCy^TZ4kS^O?(wuTn3H?v`CZpSavB7?P zbEecAb4B!>RZF3j2jZ?N^RHb#sEF8V9uDNdc>d?i+hw3C^(Lg}m@ln~!FeN}r$pLb zI6rL+*zA@8=aanGN(1-t!hZQ{iS6_$MdXGnACAan;cHOGf9L7<2RW97;cm7?wl9=4 zqKV$u{$%SEI-&t$9<)AS( z>d8Hz3Rh`-s}|p@72)1nQrczaHwneqMSmNB?^C(1`V!yIk&rQ){k!klfL)YQObwnP zA-1mYC#I(%*0*lRNdZV7xN?Mh7U-U`ErdNwBvi3^Fmhc7?uB0$6ez5a;GX~guirm5 zM4UwgVy$=Cy3@0{=Bb#GXZARumIU7w>RBCK06S3IK9^igLK3mN`l^0YvFm!54)sHM zQtNN}6j`A^U%#R1bP^gAInLPK1&2Y6^M?L4g2JmRrmAX5s_AAbL2bF>lfrr ze!ZC>qP>exvmJqcuG24kcbteUSS=MMil~_RyR#dgMv17_uFj5J3h4>Qldirbq9(nX znc+L2w-=YY`$EO$ zRL;cGB?AAF+CNo)rDETj`d2biiHLl@eOeI4|GmRbx!`mnN{zQZcnH!B?XPj|$|Rz| zlqeQ9pub<0iy-F^(S`|^ehh-hb2O1AC{6$=)( z>`f>qqNiB2uq@D0?^N{kD~V`jy4PmU92FA^IwQzk4g6+H^DkP2b$6`#iDU&4g-^Tl zY%EhTE$J(PZ-B1;kb7hJ2k_&tbuJmoJE-zDMd}w7V~mtZw!KS4Ij`5bQi0|fQRm15 zdNTjzkp-a33zT|Ip}x{F;hnK7RBV6P6lDPD!*U)Et$tE5YMpb*S`86>IDa9B74rK| zG_I{YBqANoT=kt$e?`XO)%y)Z^w?oQCmiNa@9#*DcTF(9Y1-!_zk~cdnV_p_C!(|0 zM_z4(_8rs^%;@YQqM5daMFYqm?y*O;9q9Y-j0ZE8s92xFsnC&UL{#P7TYnqqN9Q8V 
zLS7J&i_O`$fl%M0UzuCmL4I~DGv=HGT4VRx70E&17j2qu?ce)hTn<-xW z(KFEB*`6n}Jd;E;$`!%1b%Tn1dN%)??>&@%{gK|8zf?>>(S)n*BM~L646u^v;okVv zRioS~A`)AP>Z{lSSdr-!FzTJHO zX#5a^F?LVdCaiyfh}!;!ZW3h#zl^5$Sz_Oy{rl64=zzX-$G3h6=688>6@w_e-}SX9 zCE+I#ZHb=f%wq;WODe11>t94vrk$dDjl~$_ChX|n4*J1SZ`Hj8^qoQs!=Mq&|G@A? zPhWP>k2cN$U28;Sshh61pVJr{_8@GC06nD}7N5WcevrY!%1?lH5y{}F;067zlvqgq zOGJH1{CfSn|KSU?c1^B>eqocm!6|5r-F)UXyK55()gPAV{R#R`AyfLu6y$GR4`9oM z^?_*Qf47v5ggS$VUi=09{K~)XsqJPG+7@G=yK66CHQphd4*)uv)3~H>ANZlD7ANnA z{D$-|m{dVOPDV9(`~jM)=#D+@0Qmh`@*1l_`7(L=lSjmju_2>;vjaf8tEXw0O9J-Y zTSeYvC{Os-`L|o8jj?FzV=h;qKP#Gsgvfy34QIGPH$4efd4IvcBj6{s@^|?@q=&B* zsk8tc@<*ap3Ig^ORHnauE^CY_Kbi6=0Xnz5UN!irG1mXv+|V7~zj5otS$XAuSRyR# zF3cp<(6vjNO%2uq>S*rBb`rYdGpT4#1nkb=m}AjSSP$Mmb3Yagem%1#y0c;=R7?Kl z`p=Jxu`c>>k|YTcwnwsC1O3iJ!wF*Ip=q~Ia-31%M;7RG%0-rhlACx{{$Y=P(YyL3 zSAm3f?2jmpiiPoc?x6Dm0euAM1Y^vRF?F+uItk^71dD6l06)MAhQi4cB=oj0Mp8Zr zuoCx)zL(P=p__50PUb-C{rPa|ju!NX{IQ2K*%*tvGo6=CB%!76ieBY6L7u65LpdS+ zKej@F_hrQ{Z4%-sjo5h%{MIZv4SnAjlTc7gu*Jgw@KY^37wBq7LJHp7n+m~iZ}zs# zu>>a)Qu)rf(PV0jZ8L1l>vtug$Yk|*4Z7f0wQ*SEh#v{*J-Ez45B5Of1LIQGV0$`1{6)@Yyk`Q}sRD%xKAB(d7 zF4I{gl>Jt1OFYQS)D?D%t^^XASDSifkM zif!I=mCYMy8Sc3K!LwA1PoDpqpbH7T?yfl9ISl8{JA=lzz<8GEM_F;h`gfuzl5`lx zJDzFxM@KkkyiFZHQLITq&V7=P+8)5Ux=&);B*;gk8dqW_tfvd)mkT{v|V$H}zz&2Tr~+d22<YnXI@DZrk zzKWr1l^_qz`Hw!uGiN={}Oh| z*os z&np7tzfqKYYabQ%!OmahVUe=bo{a>{cpIJdQ~Po1NxhB!hBQ>%HI%q-iSco`rfDeaF+-8?@tZ* z2>NaT4L{`*p<>q50s|#T_nOI|1s#C(RwAuo3iOq0_WQ+MQgF}kOJwyX&DPkV%8IhB20rFsv8QNNJg7qf!`E0`y@PC{oW``uSuWn6y?=e%bkCr9PUqJhN z2G-7H!Fc})igIV!K|fi3{0sSqU%BrXJ z-3YU-nmu)O1J;{8DTkdF!B0SKqVFQqXQYc^8tA*Ufw;q< zUqkpGxlewDeG!`&d)qdU*Hb-M8r+lRKjT;+Lwk_oit(e55c6R9^9>^wm~V}bXQbXk zyiwmC(<@Ma{ps5c+HYYWY1IDx9Mmrm{uyI>2k`-)ue=C>^_1b(o-%%T-!FxF*8|qu zZCz%(x}PDw>({+rY%m_R51ORHp#5kM@5gCaZ$B2tcqT*t1kSg)l*4%Z{z37qUV(jD z&yNq9zz=-?uT6CfyVb({R@t%r&pS;d!rGD_Q9{K|d*^8$Sk^>8OC8_O<$`@TxXIxV zyB4}#JR!B52KL6io!jPxwNUuMSwTV~oI5Ke)U(QKpcYLwq48QM>-#UBVwm4UCmc}UV=eSGSfH{9&Sm!Hj2nCy&_d5O@)t|t-h;8^ 
zL-nWkTF7fRea~b%+>_+#8jAwW`}!fJD-+Jq?Z5OhPHCa~-t+f7f$ppGdcOJ@%4e4* zI6?Ur9~>#lUDZOlnv)sZpuCQr`;yE+pM93mc{m^J*Bx*Ejx!QbmO%Bv1~^y$Gn7y8 z<|QKP{+a4?(B6h(z8ycLi0FyqM+TL6IA@QKA00VCL|m_3%wiMZ9;dx6;hq^0nb9*l zE`dIh-@WJ9W-B6^2=l#04u|%qMjlSGC!$RHA1qps&i1hW&7>>LkFQT8^NETNjB>{o;vGgT>)y2`qLYSxi|;L|n5#qo zAu)HbmnexEQ z*yEPokJ=NtKp$_{%asLtyxLj&(}5kJPu&JDH$(YO!c{j*n5dYYW z(6+I+NHZ`$x!zibK8F2{kLSq;4HMvZAyC{d1N)bi>Ay)-pcgZ4>W1N{K_;dJMuT4lVQvMF(GVo&zCgdF-A@9)60-r4K_yiS-I252UkmNylUa6^HPJ#JPhN?= zd>{OXHtK3^;r#^t-5<=LKgsVIt$oN^$ai=~efS}q3ugB(`KW23Q_Y;g8xLT;GfMuR zucU>}yxA`p+yLuUxFLUlj260RJYWC5!w5_Nn{4I3UkidjdAT?~Gr|g#*e82gwNP|& z1B(RE`z2G)n=t^tg_fMAV12u4M%(#nToX001msP@`g$wXh^1>x6aCtd=}rTAFeOyp zDF#~H^uFdg*b`Yg_n7iWHPMC~gu~bc_JD!HbQsVTIya(IWU2o>8~hxf;^qf*Da(N1 zIat4h)qC~d$WgHpjvmo)ptZhlb+SB7#jcH7SLx5(k)C+fPTBJGFi@< zig9)@c4Py6)>_)?|MLvstzP0^D-cucv|BOV#)NH^9?{UlD z5q%^Ac@8nbANX;@3qtT5fky)#Uw8uHiG(K$o(g#C;OU2F6rNdlm>D;1QiDemo)CB@ z;b8*jV@`Nv;W-PB2Rs4rT!d!;o{#W+g@+aD<%UNDo?5?5t4awP)|aEoJ$6PHJyUnE zBuLS);jm5G9Ok-cWz=|7Mv{ire*e)aOw~no@7)DL6lfU5YlJ(8sEcm8^H08zr(rjh ztV_?4b!QiZ!^h&ZXqdUa#xk2B&@%a6 zH&tlZ>Y$2<575PZ{iWNF(J-6a-@~lV>Y}dw3=bZ```*XY38FMz)HEDub6Syxy)>cZ zeGk?}5(Yi7?#j?!*)vvNUY@Qihj!tS&m^^oVK+>FkkPg~H-=5v5A8 zu?6TF&tnx2BXyCkK}|uQ8V&Pc3*|LW)I|i!<-AgL8YcG1K(wVm7cHDfvSvXv>n2apSmfVl%(6F{3iRvIBGO~&KQFa&dFZNhVYH^d%&A@00?Nc$_ z^qi}Fc#;NtiqeW&J~I0HI9vXZ9t}$qs65XgMn)}`F-RTS<41@*^AG{eb-P^(=uoG{ zy%s0QC`a6Z`8bRR+2GgJZZk51Y2DLwf`+ZHYst$Qk&$-Cs^K9Rzjr3v8+TZd(F=ka z3oEo&qh{t9pCK9L8#g|^1?_V*O|v&9lM%D*A#{KY{ODV136duxTj!lB(Ynz8PS0Sm z!(`-sA@;Y2Aq`7qPA{7mC!>^s%-TtP8fJ=#ob4AOBe59k)c|9V7qQ+q2BKs%^7Z<8 zA1F^({rs&@ykrz3e|YK)g@%Q{8hyI(R~J3nEbby=O2drYS{v0jLHo-)%l4Qu{Vboa=tE|R`$dW9pBhRw-sqx@RYMH3Eb zVa1THlDgSZ0m>8K=Dg!}EDh6{-Q9DO1^Tb+rmL3nZ~X5LNkjXjN-9PAl4+QbYt-l{ z^rt_fpljkL4GVIoSjz4vBa?xJQ&$RT*b9F-ekNHmYLGDOyjukF#lnz1uRumZdwmV2 z@@QDxSfYKrG8wJdAIP|#OT*3-wTQ%OkztSaS#xVC4KuqJHyEu(M!^(PYh?uuBPAh* zRU#Qh&bFUB4)4zwRy;3+@uPbuc$h1nhS|=(-teN4(dLG`>ob`&?4!vQj@K?^Bq09b 
zDr*i6t5V|KlH*54e8y)k&u7pu{r1{dZ`{d9Hll^41o|VaeCmyt5A<)-0|tjw8g{8} zD9gr`jQAttn%SWL&06KTV~%7ro_syI`r5zw(UPtL^=U?l#IgZDXPU*IaGH{l-5=k_ zFMxlyi$|W+LwiM!e~qDpLVxEjmA@8yZ5TmmxjH~W6pdV z`Y(SdM?JJ}G~=m_lN%Xz&Py`Q-k@PDamxo@KzUZC!`pp) zR1fW&Z>@Y@m`_GK#!d=^0-ZM)*lJ!zM#{C;f~P@VHS@BA=YU?@wRDpa+M^J6_Tk$K zG77C6%bbGppP!}sp;1dlf4?f;76$%ZDSLhYP!ky?yMDf~4)WIYt!MknOPDX-Qy1jn z{qc4SWtj({zvQ*7iDfjb(Oprk;5He(JctQOmw>(`DMzvu0{`ivjRawS6qZh^CqRFj zUR5Ujy-mZCKS&#FNhTxtkQ7Gxa^T;|0~u4-$moo;|3&p0n2#>w$O`xww3ry(bDxHN zGky5ZC@Ozioj2a%hHkLjUN;(=Bg2f%#S%cYNqR8F8!b zbXa~!!vX>?-|%?^@)(rA;9U>vVSVH&mliVWivRI7`7!X1_3h?Q9Uu?mGH=A&XxJWM zqm4x9|Ajj?=g)P}u>OL1nO~F9?BN_n4h;0ip4rsj<79NpI+Rzr724NDGPd{({NKL9y;u$U&sZ^Q zXpW4SWTIwB@cu+aU);(D$bX-YQc4{SQ+n5C?ZK*tk|U#kmw|lDypgD`(a=NPHO1;f zFkaWLlyy64>LHu^qN{p9=O2?`QBl!DuNWR5k4Xa4=ga*$vEzD(+}dzwFUaq`9e>g; zAJRiAyBi<4C(^K|OH>c4ydHX+KBL?Q`ebmid%uga9xC&dxM~jb%aLB}poO{~dUv`; zsS)&3!}Y&xu>?ItH`jfYmQKSqv03nlYw01BO6)if^*@LR>p|Lj=$7Y-q*FW%Q|x;Z ze8yT2ElBP3juZPygx({80_I)3>J@bvGhrAaL z)k;8mlh*~K5obM=Hvf#T8RW6fB%<-vWj$14#PM1*8rpN_7VNS0(2eo$G9REnVX?LL zn{MbK0dAg$>ep%52LOB!&4TuDwy)G)h5D$89>??bkUix|wvImyBObH5X_u*omc{v# zPWsTW_h((q)e`m4QY_ue6QGYO1-)QfN6C?Ts9{N+ zcZ5pAWNOLfPT6|sh2e+b9$1e(@BIiYD9}TGG8so+!FoPTXg(I(sE5{H+)rl%`-1p1 zFHfRf5B<6*Tow)Xk;d`@m4Jw_DrII$*B(b>n)S|bsr5w@B2e9H|wD` zX+}5I-84+uZDyh2tsc6%cW?W%T{LW$as{v1ydFAYVOo7i2-+uiGUFrz1#S9CF@6p7 zf=kmkNj3@!+|Hc5`ydTV&?vsfyPJXzF4hbX#c0?ux;y>X`6!5zST#pJ0{U^~)pX@9 z3JS3(SDylVb~((~LV6Dcy?y4+cR+-O$rs93*@#k*9p|Il5BGq5nK5=uYA*#%4u?$= z_-L5_;+7V(0~GX_@Fiduw6{{tp5uiy@nV<-iN_Z{~5t2Hjg!Em@#F^xee&b zBc=Tj1~iQ8GQXn^^e3^(I^vWO*oUWIbb><?8P?C+w~;1!*3Upw-wye#?A}2>j5lJ8s43K*Q)5OfLNvr=XYx z+kKZ@L0;sX{66oepv_4|VnoRQX5)10nh1)4BWcs?@EG6^J9GCAc)z{S@-QI{=5GsA<+>2`r=ZEl8st z*V_{($^z}159c`dG#m6=c4wXtjOUvnA-iip?-~f=Q-=PmTzz+^0Oqeikerhhl&5j+ z>Cj)WhdNX)_gM%~&_WZJ*X>RkCNfd^Xtx*zT}U#%+4>UnQRrE@br}l!Hh*F&@EHxe zy1pLySAl~1MYtH9hiI7gIYaTwY7}IXz`DC%h$C*Sy8K30R{a(>9 zzMnVSXi#2$x~uT77hrF^&NCiA1@aaek|Seg@X7no&Nm;G)#Q0H&@++f_V2mEq(Bnh8;E>>uENppaWqaHV$rf 
zz$WQDTzDKPsOnXm*&4kA);w@F$kmOLzzNb{N%f3($tA_*@vqVZZizP^USV zf?l{ruKoi4W{RAQQ4IxoSoi)KJVL_`s(j;Z1UmfA=SM$ad}@z%e)<|lK@yKPHRpHH zus@Ft3|wL;sH-q&^iw?z%RQHS_Id>L_i1CgU=_&E;S(*&Q55u3D)N`;W3XRKxA`22 zqadGw9VfFN(y;bM!|;>I6jXng+^t$n!`x5IwisqnkO^IX{egTM)-r2Klu4%`(~~w) z+d+OqANjZo6;jYK&cfEN>on}~8{e?AfXBB^OX?b&v(W3 zDWJ7}z0KkQeXlL^TJ%H_1zin#dXPVqhFMjJ@QK3vO9TAIgaFW|<{z>I@+rvuEkiX40OGI9YhJB5h*06x_n7LeCG6S@chjGW#OHlqM zRv*RVpx@%1`US#4e%>8h<_CQ@$JF@p54^w76Y!cH^x^1B*TcJ@{h4;f?#lAXt}S%rq( z8swQ>&ZMCC^#ZxvkUvq;=!bj(1?l-6eU3;pj3AVh?_L4=eLIm?1oo>R1d=$9+@+u# ztKAa+^tGePzC@`9pdaph#QgO@zdEu_?|(u;?A%IM{b0QfXWT8P0QB>Hy9(lAy}i=A z5?Xl=yRA)&A^-k~b+R)%=R6|5g|e3)Ylq;K$+1g@Ml);e5d0!pCrsw}jcL)7INyf8<1- ziUoN+=vOn)0r}yc-seym1wCWF8SlXX_G@`qZ#c-SlFc>OIVk^O)2gZsw8wK{*FqBT zKkJ;_tOMkCKE>3>0qb|+yC!}fm@oW2hh=pS1AY0hnFzFRyLL&=E8u5X%_v7=B?WmE z+@z}kc@a5&%__Pc+WYeqQvu|EcD6B+S z@Viq>N(n!7z*QVa(hZaV(pirG-16x`qlMT1iUXF!^VHaAJ+51IKAzi z(BGC&JEtHT7E=FEzjO%Xap`$(!zHi}Ptdx4yr-b_$5F>0fc@B_;`{N}G6jX1TD_8m z{fjemgz)4heN=xv`9bL&8g{2EZ^d`BKC0RfV!QH$h85{LTsLIWM=ty68Oh!MkE=J2 z$MX5&#zmH*Bqg$EElY(qp|M2njoVrZ_u>|k5G|tYNi1nKE6cg{Vkt zSGwGm4`5#hl|q6=sOZYyU3=T=!G1lvp~o*qMKb$kI?QU}JZ2Ono+wL2;zo(DJ!%}V zj@UlsaAhj$jXSFm0{jT^sY9~|sHm;*n1Nq7tml3M;h@7*bldy-2VU6E?^}D#Y8g|} z)`Woc71-|;{7;;(wxJ?f1F}w)6Rek?sy5YTRCKr1TwsO?{-R3nWZZEodUSxVdk+K7 zXDy}urVdp2mq54^c5uK4oX!sfJ5mw(nOnmFI|t0yoy1RarJ}h2rIS-M2RvfkMZDui zMG|L~Jw}25EL2Id#)FD1A9-y~fc3~2;VE{ArJ~#dWcdmFC#~NHxJkFE=%V|@nzt~Y zE}fXxw2!7D*VhJX6kt9%ydhxaNT`2V6K-l@{I1M4nMP32q-XKx1@NzWcU|2zeuIkM zM@;_>g8Jm>Cc#wTj|LrxEmH^oy8dGm73v#1J22}b0se{6SN6~tD%zO-(%%yN!7CEZ z0i|(N^rzB8YkVz?m+HBZoJUj?Mw96gU14G!r~5He52=W;OF%399}`EOjB7fVPDL-y zsZ3sg^*6%Hku9G>MUmTiQzC&c{Jibm7|?-iHR6Y%zU4&k4lAhN++nBuVW>}jmh;)s zJ5wsxMA71^F9^How~c}$S4!rT@p6^nMOsA7g8TyoM+_QMJ^8ynYS`%;S9iLO%3QMcSL&59)hz^hM+PmsIqIvr&flory`g!c^;GD!P}8 zPyQTdVu6yGxy4s7e&_$J?-*j@oJZ3iJPW93u~*{Fi3ukDO6*SSeGYN*M^3T)!%Xby zMccFdgo?74*8g4|VdAZoZ+<&Gqk@0m>$R<)iP=28w>Uug8LeW@JA=^Q;h9bAp}vk= zKYk2`Bjg;c~v=4d!p&cu0- 
zbJo-Xjc5OOj%PD*FH_;J$XhBZ$`-n+mBhqkX85B$tyCoK{Pkma6!ZVa^=pp43zLgr zVu{@gA#EL0)c;~>&B<6MR&LAjwr{7Rvrp^CoFkc7wshc7XEPP$WSaNd1TnGNufjO@ zCMx>Z(9Z7{z{E3pn&zxVDx&QedzTFSq^h6mmp)L@g{8_{w5v>PcGFK}-A5|o*|MRm z%nRmIUi{OZdMZkNb$?eT=3C%7t${(` zdEZ5h9GRHQq*9v%^6(gF@sORF_(IgBHRs=fy~*a%lxH&Wr5*L3Xh0Jl|BSN%n(tzE zmwz1KH!s-n1OLc?L+JW9ROGE%dm@v@#HYerjMaf|TZms4 z1v>h%dEW)l2gAnf0Mi`w!+kk>4CKj8?r!6=Wa4?n!aL7FAKA50&4VYH`15eGo)XMg zv7T{V3qvN(P1)Q382UeS7D+L+nRtD>V(kF%pBA-qX(}*r2Hlw33Hqx-D*M8-or#U? zwn@VlfLvY@Uq%SQeD*Tw>4Np(ZxQ{@cq{0)^URZWm|yuFoEG)ln0OthYt9)c-^XiM z&a;t;ksL33b`cc`1qQ~laWk?1nsvtC;QJR(I`$vm#Ke`CH=bhTQjz>(2yKL)iAxzP zR=Xclk&)yX7gu&BKEL9=APx2<`As;?5hf<+eYk3qK}9L$QhZJG418wuE%8r4|9A8?y&x0y+`^c(AKUpaO1?CN!w&9T4hz+%MUZK!$?v{ zDcBp5L;x-AI|Cco1Rmo^g#C(T`|r#T25#;2{Mc}hiuf<2zUA*?K<-KR21%&jJgg>T zxSoM!D{XbRLH#IGN_TA|1J53}_^p~mMZ)*Wnu5z2xb1$<>J*eub8EOXkf$uBa+z}IDDS#?Z2y3q}C&R zx0{WDHC85@X9KC|)-wuA3HTZ+r#5X2p`!0^bp^gJ&@rv6&6N>KMRC(KZ=G>E9&yj+ zodcS$=0ktrC>`@m;$yk+{glLgH36VCFC1x&3ZtSG_tjGyTj_Y`gU=6i{b77>QWRdk zr{g~6i3ZXID(dPIl)F+y#}lLE>IWWFwA95Z5R^;DL*dd>Yn@@g%K5y;C7X_GOsKhc zoWOpoJ6FqR(cwIgRk_blQLxc0rSCBvkEQLMJK#!1uSmD;R8#5rph{q&2z>v4XuFLO z=Y@gZt$X9SJn&7kwdNV3RKzM*m=**&I{g?|wYMqyL|+z~e?Z3_OKUCte^bztYw2-g zK&x(c-EF8qK}3FL!3fYg&$FfsfTqf2F20VY@b6l6|U(y@~Tjf%_TWk5l1?H zES^T30`Z7K&bS;I9 z{3z)fbE?6W;k}T7-@Xmn6!!$CHu0#?X_B%^?4_Tc^InkR22RcigB z|CWrB94pl}@zQbHtrj!!Mly=s`omCdo`we%!`_kl$f(z&xOx914IA3H8{GRrMmk>A zej)udZ0)HcbN4+N#@G5+8PK;NQA!W$=L*_8^>2`dFG%od-76%cygDpf*hj-^<#vAk z&q4k?FZxlS$BTz6pX8F!SwD5rkX{;A*Yaa;$RMMi%qps2JMiy(Z`cm^ojHzerK&A7 zJohJ+`6QZ*jvCy%(DIRnW3Cr3D*>&1U4FIT0}U(WJ5?pY{p=lG|4n1HG;Go##PjGZ z$fH>GFu0V4_Y0=v{5VBMM*@moHs{fB+cr0!GEFjabx(9oenP`?f88Q`Wyq+Y#$5CA zLmGD9aCBwu7BcdD&L-@hNy9%6glQiaAfsCf&nwbz((tCrMZ>-)B(xm0BaIS9!ymXi zNIR2A==idWVtY6Z*Q|Ntrhk)!{#i5odVThlW2tx>ssTBB6(|1DeAgG#vd(WpO>3gmlzzYcm-% zob58Xt6iOhymEK@HPdNWvB7q9SdD}}zrS*%{WuN#%+$JlQY9hT$Gj_&7Bp<@g9Z4d zNoep5L%W?q!|R90*_$Ou=-+mezZ5bJbJ>wDd2J%0fNi9bZ+bM`zjP~bGd~H{U9R=I 
zfoS-k>+~6`St2UAld=2ZQ5s&e{bG1n|No;K>tuI@0?pUimYro~$Oq`ws%ag3mWVWt>@;JyqG9Ray$5~iM6|eDjm`gh~%maHp86m!9iw+QGL zK_iqlsShWloNZ%3adBCbOGaIGB;2e+t__2K&! zrF~u&G#cI*F7nXNpNQV(orxZ^p<%Y8Nh5N%h{&2?a)S@*uR45H#m{A>y=1EgS+y@^gzC=UFEg)8AP;K^l#q_a~f7{ zpZM|RArW!>el)NQ<1057B6TR2hYWApu&*j~JVnEAij4ngb`ufu z>Dr$M_1#=DFg^W^h?wb1{ZnyG#Je-P0ZcRhDMM;Pyx zfsu(ccEdkPzmXS(yLt z8gx!&eunwLv&60DOT!d49@*FiBAWYLS+C+x!`!@|=|ycs6gh2bTmkiO4nLh-Tu(&z zSJXnguhQ`3{2Hg4N+MELi8Fp0Lc=AR@)Ci-58!{_c>6jH`#GEzD2MsV<7IN|Ago`- zQyu@}dx_{ES5m`WDBr#4&s+oYEN5j!qypV8I}%|7`bdiLrp5=*a9xXEf(Xc;n`?K6 zJ&=Y2{<>(4&qIBqiF5;KpZ!~5*d*v9);;C;d!RY2B)H_Zf_)f&ELU-k25$+UH9ROr zLcXhSV+29|TKf&UNSuT;u8&!4zedAGbsmaORY*wfUEw-mXg}*y>v`=xB;-~v5+)i- z!=6TE2c|ViXj5{{CTXxo_3Z&^m-R`gMd|f6Hn2b6Rbp$ zDG3p-Y0KV^q+$NMHz)sCk`QSoXjgkQ*lU$D`R*r4Xx2he?jh8-P|RC1<3vKbYu(3W zfltX(tR4%vArw1#*YM;$8&t?z`wfqYD(b> z38jaoZJ^(!VVA?6e-42?Ol>TSJDCjQ-K{*L7)U~~K40NyUNqb@U@xu~PC{n)wNi>Q zX!zNldjlRZB(%ZjuY?fr-E{Yht%)Zg5l?&B&sj9gdw!TAnm|IGvb+l6k7+o5ae?0= zf`n@MwEcfP1N$r~-0%U)Hx%q2!p~{AgXh_;Z%}?QqexaUpN30QoYhM*NT^H5E3p~s zW9-hT+XHmdopFuxX*8_Sn~*#Al!P{_tD9;9pXMuG!=Ft;#{8s#LD)Y|{?Qb*%_X6O zVOoTm92)L5Z94s=h=k-hk5~yn{o7_YADac*EHm1880f;|7M88mV1KuBm9ay4P9MDl z_j(daS`HN{dqKncGJZ`j-^2K2YDyk?1@muWF=e@#gvtxo4ki@Puxb3w{c7F7e{xw& zAr;nB{ZN4V8`w{>2E_xQe~GpNb$fBw}E~h4BGzFpCh5Cx3`RFK>M#oKb_kAhlKPh_Km9){%4;amA@Gxq3+@aU5~Q= z>@ioxh9#IEhU&}ji)lE5*X8}U6%ra*OPTHhdHG&)w{b0y(C?kfGZ%pd41#?58Zw$L zxmX$p<;BiyJ0rtQM(fWK3POON!7p^8eKQ%MR?g7Ia#+8v)PEDpBy_Lvu$e>^4O_Vh zx=gPpqeh`qHmX4X5LmOr3HIk%OTJh_B@HJ~9KD_>l979H4p>j%U(NJsS0f`u5t*+c zuW9(xr}#&?N61KQn--_j|v#6B)^$+q8~t0Om&miEeBM z@|PXl`)&x@m$=DFIYUODUCNAAzSHobS=*L!Uox7I;$|HN`V*49AaIS0>V6ygz8|My zU1`m_CBTd6)6M;^1MO>c-*Dv#83|Ro07qxD2G8u@el{eS*rw!_2Y zT?fc0W{etI4|J}Jl2@HL8428N*ti+wyB!?w+AmE;dtBd_xJ}aV@$F5q?OJ4nUk8bJ z0ez|8Mq~l#0b;V163`?5CoAsmCZnvtiJdmHG<;*>`aZV(Wb{_|sK&{EV9z<$doRQI zx)s%ZT>eYLp|eSy!{8roTI{a3UIBlDw&7C*(9Gk){F{I;P-Zi*2I>zwJ*N?{OvA>! 
zGVzVZWF)P|Jx5uh;UmiVeMNvjl*6|+9buP`M3p5-zRNcE9{QH&Y zL#Mw({Yx5qt&Vt;Q5s`|jq@5fAI$MQpo0IZ_GEyi0Ce%u42}Qzdffuvh?9=beJGSc z!Qd|&o_oYxOUHIC6K7K{lhHoPKQRRUy|e5d9@7hCG(>e^#BtDZ+nMAv)gX zrux-3kc|HP<*W@Aq~q+=+7VeVz`sTChV0x5=bfah9lW04kKfnp{2)z-oaQ+F-Pg%T z>r=~)WidKF-u1RF4dj=9Q>VZQ{4PN*yHD_aoN3jFpe!9zRKyn*K)&E(DH-{abUeBD z(Qp9N_wkUThRc8FrOka(!Iv6z0_?$=yMH<1{B-04Q&`lJj0mUHZARdGJDJxi%b>4IWTm<<>*-i4m@(pO zO-5^*59hbQ_Y(E#uKNI=xPL-aTvzyCEcSaN`Z{NXg}cF4eif07tL=`CnJtRvY95xw?4&1 z%l9A|-H2N!)eij$U#AnQ2K60zwR5^#fR5Aatc{ysy{Nw8%)J2oy;ff?KHCNMP4Dt& z*$s5uDerNA4EWQ3(}XhN{1`c@(Wfj=M!YSz%E>UkK00yQd0;Lz5{_2V? z$oI%Y@cIJO|6xr*dzlm&O^ZIL`U$bR1KEnEaYY;$8gZUDgp=A{Z?Tv|w zw|h&_u}S|#N*3%7ZtM(iP8pbg><&)-Q2x)mVO>!TnD5L?dNPa;PuAadML2(F4T^=a zV0^0hjw!hAqvMNOb;NWNGCIb{Wfp4Fac*DR;5^J9uHS>ZrgR|w@L8Pr7S^*VL(etb zn2xXJ_EKtYfIYt8QGN=}`{Du*jphLl?B8_HpUsqxNv~>l8ApLWdKC8~3dlF2^7v{Z z;G3O^^II(Fc>YOeT+?l^r}V>RRuE6ntxYyQ0`}Qpn053ila4okbB{6x`)w$b?xlYk z=BEYWGW8ksU;cw~vNMeD7+zg^LPnA&K0j{oq2n78ewQ?>;XKc$vc}94)~7^uf#63n z3V%EJW$QVZpBWK^$ZoKAezJKj=jm8J;`vR(X)-Dj|E8J@{1vXXcI#Kj=ryOSr7P4Q zDcE$@@fR6Ad6E`4<^t>6NL)JWD;Y`X6>sx&0{Pa+zt3%g_yv{Y)(r67ePmJ{8)5v+ zIYjru_Z+vr>=mg4y!4ajG%b+-q;pzO3&_*Non^iN-~Tbnw9*26Y_V!p;e%6jJRQy4 zxqq09+UcuQZz%8gmvZ;WBpJ!J^uG3Ur{jH@>Gv;k0iG~%;rCbQU;gI7PcDm4UqxKv z8|c58?cW#2M`6FK&Oi7kf{uOV3HK;nWVFt_m~A7(j~Y&lBsu0rU8FRUm;>W2L+K_Uta!GNXJrP<#IhBpWW2NMfMJ`7ZJo} z4k$mlK|;!NnU2qdHIx*CzSV1TPrO;nz};%C2R5vwppp^VNfC(mQ8sSS@!*7bRPU#s zTX-1wgNgpjD8P4ro1uGb=3-#24CdUp2;f;Wkg5B3tK?cs`Ikz_SB($$0<7ol>12|0P zC*WuGgDSqcZD(NHG?kDcBH%|09c%U}Fz}~iJkxT!DabxyyicocRq9YTn+f-t6Hi`3OWqTnf`RS9q6i?!!Lz^e=s7BbIOu}?r`V3E*@rJ|K4vRw05UcfL|T1(yB0KU~&0KyvzXnDU-S* z*@S`Fqt<(U0R2oBKKs02!N4!-4PCB+JWurJN$ac_IJasfh642~b=F;-(qrJmo&PQr zxSYQ#ert8U)Qo{o@*8ZN0KD)su{Mbt&_45IKXXyr!W3A0D2&4Q(7jpCw{>EaUtMal`=;-zJh!$ zeC%?A;S{v1KvMiU^ndYF=hM7s81K##qwnE+xwkvoD}mN|U-GKY4#uxQfA-iV3Yxo= z?7M*u@o}rWv9loGzosKhU5o@3xm`>A|qZvdWlbiGs* z@Hc;tka-DsXYK3O`$W$(uwG-=+1WtAH!Gu^XT2Ht^k%~C6F|4WO<0NmzT)Yg)l85- 
z;h22TexQ#EmJ4UOQ4n`Vrjg@C2Hv^7GUhtSe^`Y+J9z>6b5r}fH|TG-_lc`Bml@dd z^lsh{=Kx>+(c&22CAeQmI6aEPPpI#ZAGX*8_~zN()_4YdU)u3(kMun#PdMv*4B9W-wqNW#;FWC{I@<@}`$re~ z-cUe(k@%GlJ;4ktb2#$fx+uWsA9#P9?G~)JGp2;^p|E~6Z*W|>$-oygqjl>;D9GW> zOyxP?8|(E#OdZC%kLd4$Jqhi6y#^2`*9=iWt@^*QlP)H$M?uRj9}n<7Ci&zXJNdX1#iy+ zUzTU+r3kE-Y`*vc?*y=qO8oEHSrl}CyhWKA$-qjpDQaI}eSc>9p45+HV3&VF7v^An z>O|x&+y&Z-^=#7;tapmf34YIbSbzEwv~aLbFUX0?Yk*eyLI_iW^-g(y@Xj3c_j70z zpAZx7BffjpghPK%=w~_4g8e$uy2LI01dir+`@t`$hEJn9!CW2D}C6RQ7`e`6WFP zNqkpf{k^HO76y3=Lw!F~2QsjI>bGZ|dqE$j+}5q|{kZxKwFM2x?_im@xxsjIP2Na3 z2>Ze83MFR~j3?2#{MgYwkiX=#^S}`3<7k|Uv7QnIHBJQdiGV(O!arWvDMdk&a_{~I z|2Mu7XEKdtA)kah#xorBtrgq5E=e5htGZc zP5oo0G-!`rT&J4{_d_cVc0ZdzK3=;^f>I(B#Fe)6{SlOZERb|bhyYaiqf z*#&%Qe+=WD`3P0D&B1-f7NVzlI+PbgIh=pteAb>h_aTFU=N`WhWC#BuQgwRLFpYt4 z-)jq!=K}nHM#lbN;6H6!t9=RR=Fh@U`m-50&}!T3`3W+ro;@5K{Fs6HpB)n&Ujyqe z;ThFA5B8r=m)bY;!*~R}dmEVx`h<62@2$dl{MR|3T?Gv6`@#LEANXIQp5q$dpTYbc zDzkt51@42)hDx=7uWr7}TC5Mw*CpTP>wtEQsJXdw2K?K~{Ra*efxnQK8L0;H59K_% zY*Wm@vD!ClN5Magi<&u5PzL)K(cE71C)_6;m^l;<{Bt*d+gE>s^OPd}?^pOfUxU`5 z*+xd*?0%PazJdKro?z7T9nOck6@TnsGjJkP^N2IZqvIQS{%Hm5SE1WVh@auSW~TYH z3&Z!!QYY7DGFqN3c2}=vV6r#8dax7P?`?nBRR!Z8K5nj74|Kbt-NHMVe_jXPJ_lOb zIllEd(4rDj2JLUj=uA%K1a|`iC#+uG8e9hFkpnk=1=KR|<|uc=cfj{a@_(+SeFOhWFT;a$kBp*fk3F*b1?z>eSDP~p%DZ>9?4Jj|O?&3G zI5N7PvQ6Ol0@(Y6P*dY%h}Z2^Bq^=Jev;UjeIMdmL2l%wjVoYJbK5R2zX9J*?ms7=MqG^6hi|IV17rqI`9ZW|J^*PDh2&KP?%9a%E0lfUo#ECKCTNF4H^J_ht==DHU;)xL)T!tB7AS}zio)umVzEv zeyT5n@$s=XH+kzoK|2ZuUWk7Lf6h*%vN4E)1m^{1>tO!>tG;-l9sHR+0&(ezr7(WI z?Z30YzuTx{yn!tZ_T!)6|3bc*&-T^4`BdO1CG@xhoqW?P20dipkV`vCy1>7(G^AOY zJ^_42PJ(*z1q!Oz*fJ;y>p?-6?aT}CXU^oOT;MHa;Md`cJI}nJprW8w|F$gfhxTg( ztX5D^`R~W{7NBQpZS2=nfch{>?Za@}n5PX{$WV4D4@VFncT&^2aue=~}@2FT3b$^5`k}W9F+z27vC8dFyMH zNkRPzJN5ivK7Y6wej^9`m)vF(4iOmN;KGA?-x4Xv`$FQj^DrOGe|t`tgFmwGfXAsW z7%%Hf*BcIj|F9{3S6<5^%(p;au^jM^Y(kGe{qTo@pBH{o4TAl?qoteuGmJO$ZtZy+ z8U;lZ9V+>~2Jma)oBn)&{eRY zNzj)vXNSiX+nme?SwjT1iTHhPK1U!+$d4a?8u%ASHZ#Cls{LxcamAl+< 
z{+?{{Pk*P##FAQ;Sy^C@b3S233n?bPah#&G6X?s#80J-JCO&C&W6}xY3tYsKF*1Cg zo;1E<3G3?}w`yCS3KQ=QGtDNpl99E^PGgr{P=A@wf-1xlPHw&3=cWpHJkCw)Cx&2u zOZN7Av73o$%kTD+;rnn4xtT9}nRw#8Zz3J+bJgIk=PPP}zsjc_E(iPkoBuiYqy0?W z#67?m1%5w&^%o)Fn>h6St%UU%Bb28z2K2V6hLa+-5Kq3g%)_t6#0idF&tHK(J-2yh zNW?)VUP*eIbQA3F)dSIaO9ucC=dkVQOR%3|4V`~e4Vd^9{f^n;XAsYP(s`Dt!^G1g zu58_4pK_c^Db>wxZHi|1K@_-U`3=@naDCNA2-lf5Mz;xh-6oFk!s!V__N z)z=|j8@cgTDd@Ap?xAn49~tRRu(WJ|p$OeAw$>oh2ipoY`Lt^zfPZ57&T}&hIVAh4PIVE&kun!2a#@OM?aD5xt>k=+sFv z%5U22qn7rdpJI*5n!DPtU-Z`GZADC6@KG<2ya&dU+v9-~k%=kY0Y;UwWF(Uj zK*)yjBF)c!{*oc1&e}Iux{a9lXNr1EHpC0fkZ<*0&~HNKwCOc5i1+MC4}S`HIv2t3 z>IT9PZ*NK+EjDK2n`ZmcC4mlWq>Agn_enWt&2E5-6Uds>d z{A9#nOA@xDGO_uNIzJ&Em_KRJc|K-LeBz7S{x|GoL{BLXIsPPIdo4Zt@luHljZ zJYWOGeP%Bk8I^r57uX5-!(*lkQs$Mg6&S1`}`m^&qBq zii9%nKRy!a0^<`Vx%c}R2{GNDf8qrC)a#r5V?8AFSid*j{yY;4%J&WJ_)bE5Y6Npm z1D^9aS1_69D+%!5TX*(*GI3CRYh~1D654+3?Cw{WV0`;a&JtSS{!y}ZA_MT9LGe8a z>NO;kuhA~JGXV7E#7wfNBcUyksT(5v0T1|k>%i@|aNkK*CEEru@fU%+0>(gV?;Tys z2Ib|-ZCVwINl1ER*)S}WiK(|dYPntm{^gY5#p}0WeYE`1wuJjunWZB4pFp?7Srufa zlTe3utZH326W7dq>oUzGq3h$nvra@Z@xbLbRRzf;bfkhOT@&!I1x9fy{}SQ;mGVf6 z_a+moC~h?yh5O&7T7qad(7{8`jRfI-IoeIidVdrXv)`?}`Livv8e^{V9ypi{cjxE_E!5tTIzF1YXhoWo(U_AV1|TKVal6GB1@UygEa z2fQvT%Tl4mlZ0ApmaNlLnE1@@jrY`{{3UY>iV@Hq#>(+Kyx_k4r*K41I?R7t|9o?2 zXkRvvy7dvP&)!NaFIQ-<;l;=5Ouz?ASbQml`}ieC)f=n8ADMdpH-iE9=Sv+)m!C4R z*9Vt7X;A)p{fUh@mx;^GY8$`MNocEKtf}Eku-~3<9LM2)eB%kdXehVaoC=Ca!lJ;qC{#(P7K+KHU<~_lCb`OReC3JLM~H;#(#r zK9$T%K1M?Odv0@aR5S7FgTyfneG*a)JldjJ2J&?6N;+gjLdsm!V5xGzqc-~7d3_k@ z;+Jb#Q2x`#S37U$l2Ee3ieC)SN+}^OuK>?rs4{vrsga3|!yST(4w8_)qOLfu~?VJrf^qk|O*7JXDzOw?1`{&#%C@k^{>Bd8TrL4s>4e0j)`H5-NJ| zQ&kM`-Dj68`Z@VXh|OP2Mh@hW4KbPAvPwiVanuKUS^zID)YCo74)_a-`->A$zUZO} zLl3@xd^bo?ycNnnl#dNr3;2*xb;096{}Ua)Gs8K` zn*l#@c1`GB;J1jY5FbJN+84E6r*r_G_zvZDDzv|`O;^7j>R+9B@X-k5H8*n54uJCB zPUo)6ED_OG*%SqrFHG#5aAx7y8WQ5=a;H`FzUUBVrH&K+{7di z<;WRtdGw2kH#;}*M-CCum2G44|0clxkayG<{3as#4Y~E+(@dOb9AB9?1^A=SIefdP zVEIwLus??vDbC!v+y!@PAIwzbStrBB^bpHLw1_{^M}yegfc!$~FjmFoN&1dM-_NcM?(5ow%@1 
zbAZR5xLaxqc%Z&wqq=$c{@3rc?#&}a^eXtY(qG7DuJ&WA> znuwhL6!EPUg!eg?^%|>yZ+==Zgdq(1-YL{Q@qoXZIJ%2#%T{==gX*(N#ei4h8hRHA zv>xwDDQ6iGo$k&&v?2oebwUGE1tmnZA&~X7Q_KM$y=qusQbj}uj&`j1s^EZkwPUXc zz?ZG92r1bK`B2A=BndzA0Doq_n$5MCLNcBV;RV%&iRh+)8ga_Q7~0*UHXb zlZE#dreD96tR|u})2HG$sX_i%1FyopCL(%Yblzzyl=vvW55ga#X5yG`yrp5Q|wx(b2<^-TI|)|Xa@S`6f!vnc)dP~b?9rzzdNrL z&YlmnOVO0JqO}87D)>IOc#nv9D}H*p!uLz%Y)koZfTu)hc}-Bhim#>5?>da%uh*wG zL4A^g)Wn@WMEG}LQ|>hI3zJIHPKLsIOZ_=~7wT(`-CU$~nTVbqY~7)7$^mm--SLz-r5ue*FPZ-?cqYeCZB&ta@V2ref&d#g@c)7@vEyKcemx5>f4i zh7cjh?^^7AY|0PzE9k34?MZofKjRF2Z6)9zk$Ujk4kcLcWx?rB7K!LY*f!j+;DCwp z+G^JqVEuiYUEziGcCPN}p8OTCC!=%oDWdSc)Rf??G2jU;rbR9cNkP8eH^ZbHs6V+< z?fE-#c%O*(+7J)e#|&rJUDl8f_ONEE|HLQ})%>)u4u|!uVfbS2KsVTn^tR4ju-@}x z0y!eU-uP_mwzCHNplM%lnF9FFaI({sK9p}7I_`ZP>Prb69~J@ol2d6g@O^>^c&x3G z;~QZ@Mc%kMw*w4g5w5g(O7WfP49G9$nfzSCMJoF6UE2p!fBDx@d$&#uc3V3VZ^OCG+ z*xwS4OI|huf2kvcU;iTPkG)Yo<$rxih%d&vp$+)N`*%NeoXGod=CEP z<&?b#)Zl!1^}hVIY$o17d#?8%9;WJ$(|dy~CSFn&%bthxVwGYIK^XkuwLjVJZL=dI zekb?nQ{WG4K5p4iOo#KJzpVCg;8#3UBMAV$;%lKYr~5tdFQpUMQm?^#YKclkY~X(@ zPJU}Y77OS3eS$A4fghb#D1(XDJerIB4Cmjo zGmSk<(BHVGw#ZO7z<)&QPPl=;_ zXBynI?4kVeE1PA(-zqTd5gk4a^reopAK;(i;_*P%Wx)6C8%YZU|FE#@Vxx&K#BZPF zoo)lZe4Y*GXOQ0@@3L6SJ2)RjDNKlgd?hSpgI3JM{5DNL?La==H65YtmEbRy|GQiP z{co-akk}9YazI}B->@KvKMJM%x)1e}n5TO24Z!c!QcDr|!w*WF2CNbQU-|yAvK9EB z6)bnFkx;<<>~a2P0sftTvQT0P)PJ>kYBvSe13n%iw(0};EB;}RA>hxY-^k$r`4%rr z6}WVRJnJ-Q8X!N(rpo0~C!A00%tc(h$>^Ig+R6dnpA%?(mBb(;i(52n`>$|bdX?Va zXbbPaQ`7=v%3ZS$)Uy8d|?NO z_w-p``Ud`7{-mGtoC_J{Kb4%50ea$+nN|yoNA#7>z#Q-oHyq7%4!j8Xn7uX?K0qtw zi%Mxhyd$!fz4XdI(0A>-WB>KXZdU$XEE~ML@jrb;r>kF^aas-EL{2hTkLObh?mm)u3n8`VVloAfyNMz z^mszuQ5(v_y9o~%mlYx2`#GbAW$Annnq% zgZSravYwZE0yYA?%01=>voi}fu=z@IrBoDoJG8A<7FFkdt z3G(Un(=#35e)4d}WZ#KMQt5?pFduq_v`6=Xf4vf%XLrv52b}zvMBNYmmGU30;1qaYAtrn`9})b) zqKM~-3GiN;rn-ZhCHMyqo@hLc0{`o!wCQf}ud?%NIfTL-@EP~{K3@+aakpY2$eC*J|kHZu(3( zzf`u^7dXIsXx^Q%zv)C|Q#=};0{)lJi?vuB zOCcc6Eg`Mkf8qV9m|s)Pj|fQQ@~y(%e;_|zVz-&}a{_v^DfwUMqyv^-yXj?gJ^|rt 
zH}p(_&N$a4mr+DO89IAZ&cXbW6!7n?e@Q@59_N&*MnRsp)b1_i1a#ZLXZybIpkKAD zml0(IWaPmkF$A>fp|?b5=>M+7pPRdT;krfW8}CqHzwu`!nLC;qop5$~qI(*AMz=PER)q z=_a6xf->6z&|hTic->qR0qwuOV($aAif8$oc^PcT5j{9SppJ@ zpdFa5f%j0#CpHxQAt1f=(%$IPf9E&hz^y|Bw4vf*aXzfy$w;|R)q@0d`T$>^Imr7{ z`LNAL(2t|JCArrgA3%*=ZgOJkbc}Ui+x5?psIu7kn+)hB}p9P*ZhW6)6>(g|ap?$+r$9ZUPedx6} zr>hCbn03XG0`oh;eu1qW#`k~MLuA+k^R2b;9?rEOb77FriaROZ6W;5XD^~yJ_#NiM zt2zB?u)l3i9XVFx1oXe<8*W?>30j2np1e_`X(a(2eBCap3G+LjuT(AV8O)cO887v9 zEIjQ}n6e$_|HiNNs^a`C96MQ2G#Elae@1Py17uj3(}iqM>_kAnEmO~HiLtQh(p#OS ziv)CMgj*p?n1vZ((Z3_E!+I3N*Di^$@V)gj)^B29JQST2X9QXJ%5Bw}l5_$(XSXLL zf|rHuI!EoK(g?^Y;%#T`1{S_h?iw4ELqO4%4=$Mlf2ma2=nbqF1C#q{+H5RL9*mgd zg7yBt{dv;AJ9+0j7CxLJfA7pbn7<|0d0tntu>P*6KNb}UNM&&1fN&MOH^oyB>@7w> zN0K#HjB8kUU%~SoOrVXdjTH~SW#OpdqO=_w2=HFt2jd@&EWDbK>l+31e87#h0u3y@ zDdpow1|I=+9y+OG3jE;A=8W{U1f+k(tO+->@L);$mb1JB$k*LamDI$-Y4`txm;&E$ zWh5!AorRwd5qYh-2q;IPzg@J0g$r}|_e!vVyy^PeTgO=VWQ{>x<{ARJnswTzwx5MR zSZ`6yWGA3}$In`vJuG}iYPnEqBLVq)&RayZuyDI({JYm6Pqu+g>k_mVm;B~cs{jEx ze;PfR`H6)OB`vA@3W5ATZ?Ol~vhd%4;BUVrV7|MhF>F3U|2156M-&L?fAzOccjlD1 z#Q%F|diMbD!_-D}f`y6e?NSyPqw#s%#yi@mFfBt_{1HtvhlvOb;HIe zOPzlO8yz6t~ix-kwEWZ+kS*RYmAb{*!|PCIAY2N-7OKn zj8SCY(_JMS9PzfZtIUBhV{}|F`*s`9J}0?MNj=7hw*Oo=VUr`Ax04Ew^cy4ItrUS2 zkY``~_hSl;#)!-m-4hD^nUOn`e)x+q`rrP)t%)Z}Ydc~;Z|@gz^&lUmznY@&h-=-4 zpMD2@vuwtQUS#1{m2x5``6fun@8F2(6&4n5>9PKcO%Rvi z-hH*eU+tCTGy80U)_MiM&jR|(WhwdjArtiU(3^md-Y{P$LM}LbhxZiUmYsF*V&SJ3 zpX66}nxJFazi%wLvatKLYU#Z#CdmJvd566_3kTi5#q9xfN4WjAAP*K^r(;zV{m}&3 zTWQ}aImg1Ti`rbe4JPQFNO|Qo7Zxt6EuCKb$prabMr&`u}6gzNbJdmPDU9Cd$Hf z2}|$a#zK38LT*=gv9Qy9fd|w(Cdh!ew1;gUlz+~D;PoXFG`>{&u=@}Tk4TL@9*6fb z(>2)pZI7@p-9<2a87deKgq&`-dI77G8m8l8NZ=b z(~rCK0WUIo&O`(J?;klv{8r|GFMV|0)KP+h*bY7`udoDshfY?&Te#0Z^=;$#6cYx1 zcl1&3Jm5#_efF?xTQjh8-w(y1-SFP8;QN6`W(>?dCu#N!{QLi{U%F3MRL~B7FCJ|6 zH6H4-di+!N)oBLSjm{DD0{oiT^&gbCX8_M}TT9py@M!C}=C@e@eeVU^@i)tWmoWys z<~at=xyawKaS`%4_$@+2fSwOYb>BV-=do7j(!G9wpKdOQmi-R*_p-W|YA*ubL`H7H z@;AgI)J*!H0Y3GH*_E^1fH!y%R`}f2n}LPny0yij{%;Z1S+*A#kPqT#SOfn0S!ubm 
z+Kzxn384tu0RBPv(Vic=K$96i^W*?8=;1)-vY|1sKt`#1HTd7vBJooDnSdYr$MyBj zj{oq(|Ns6ja7kwUh4)OR8GSNbs$}Fbn}4NZExeCoV34094tO4oQKu2WCvmw;D>0QJ zKTB;{su%D{=>rF=zrlU<*HGJo8Swr22KDnl1<7cJ!q%wH1?TDcR4XkZ&_~$X#wBjR zb9~EcmD>UNC#Q^iN$Vk=)?FtPgES)2KV`?S@LbXa6c9M;*q!r zoUeT@_IB|AeuR^_QGXNUhh%-Ra2H_WcAr4hvL5m;cAGXX0Da#ntHdATIXYf6dGRfP zmuR^uz5w^vN#P~lAA})Zwd?#pd7u-5?_wkZ_qm@`41PmCXICbQysQA{-$`@c=&g|7 zB&}K6BM0%g`ozdgi1!e~3a)U913u&BnyD@T`~Q3zGLltE(z4 zu)0?wB%y1Sm6TPeY^7|nvI(JVQn*A$LXn-B5kitx@qPN^_xL>?{(L=N=braDuYJyW zy8MHcKaP z|FNYrV}6q%ZOz1<^#aFH4>qkg_ni&zMNO;&?f8BD8I2dMEQU1Z)Xo46VP(Sc%{Fau z(2uuF)vpcnt>$mX{IX4A36++pXT)dpoI?EfN#@>4J*aH44v$C{>W$N!@?W9;G&9pl z$1|2f#IAL@7xDr9CS;1ukE33>`RbR(*YVa zj%Q;7m9}&+x=|D32`5%FD(2U2p5aQw{a}WQQHV9xA5#10zd_t5&i_43x(@vNzA%nl z8sKL71&J@{*YPaOtKcJr@Gj8rt87O-C1ErbxlWlFwxQdp+lqbxJ9l}JAfHPbcZ`&Q zx662PSZ5>hLnGQ|PNDwQhKJ$camXLRMX{0YqSB^gSnhj*|DR=z9M5-yUTo5i%*f~P z4Y4mx1DwAvw^b1QdlDEzs>Xbc`)#)9?}I(iuIiveKaj-xb?&uXxL*lW*?kB6`9t28 zcMb6Q4LiF^0KX}J8j%9|cVo$u4o5xlqe3r9x*-ZN$L>1B1vp`;=MYB=g}AbiWTgF? 
zN;@t+mhMtVA<_jRUsi+v(UpI1`nOUD<@;~1I8>pYvup1|y*0?^a4J^ci23*M2Up21 zQ-}b%_o;KB_bdM_zt^}wjnxh1v#h4l1esivLNMMctY(M|{*e9toQVcJvtN{33iD;` zm~biki+l{bz`e4-=RRF~#}4}TBKS$W1m-Jzt1sR99q-LQLN6v@eC0&g;y2W5U)?%s zYh8wZ9n=>jTdYs)nDy*U$m3=Xwf#Q0&vl$rHe$f~842KV-|MuyEd6H@mDYKln}4JT z@54!^(vcWH6WC_32mEbmYWi)A`2us=wQF(zZ0fUAAn}e$BQ9LLqVa}8{BS&ZYd7kn zf9X5))+0Z|NX6B981O<>Ym8Vfh478|J}D1+E(l#W5XeJ6fq)ygUZOsno$_Ub$iVyg zDt%h6r$6rxdG`;vbncj_GBFnLaK9etW#DX@J&ODrR`uN>ZqOg09j^LDuz$VT+aLH7xnfr7pWGYpH*0Qa1iQ&n>891mj@}thfd*5q){rZE3V2d ztQYU^w``v}06+A1$X2Y2LJYKoJ=B7Ha`apo*9Sgbk2{~C1N=WC{wA=PLc}=!c>A)4 zN{i^RRgZm5A-ev_hSn}%z8%kaQ~S^##cWj0ca}=KIO=VE67qC|uYmJC@Fz*jp)H?~ ze-c!fb77iFi|?>2Bcr~bpHy;$<2RLdPLDYB@ICTj^2`skp1@bA9j~e>{ zPLcR7{|@|Wzw=3m6Z6Zw{*_{g_XM-Dxv@#q6EE1wkETGsGd(L5j8Xr2iq&YZV;S-t z@(mW3QBUk;_>WwSegoM7>U%^`4=TaP^*6 zDne%fPleCre#7|SwI0n{kgs!RnU=Ir4{Y|nNAlt~3b96oRYMK+o&lVSK0Z`>U2eTTEl9PGOo2TR(^X1V!G5bc zZJx<9p#D{Gc&rKa(^moo4D#TQGisI&_yDih7WbT<7x{Nbt$SxduXU+r%a*V&7X0Vq zAAr9iS4;fw!oRC2s8JrEo}48}uKO$eyIftxXZ>I32hjB~I}rAfQdOwSi1B#>feM{~ z4|BfZng@QJLyUj~J@SiPrg+VPzh=aABk>=F*s%Gca}Daz3&>{MN9VyWPKnxP)XPVE z#Pxn;S0)@{m`o3Jqo2bv`Z=t{`W7x;VTSxF{q1}{FbjX+Dbvagc}(>N))&0STj*QqFOBm1;O}bvd-%O;A%J8^6SLMOO9WF0kpUK` zwgZMVhCeUY3H!pH-nhi>auEK$?>?m^ghIFn?RVu6$NN^odI!#jc%Ng;%2qmpe2^mY z){h>@XDdF}E-8Wd$yTSF1@CQXY3UA6Bypdg7F)y*|2lSm{B(W^|Z3&mw-g!zOXFm?5@AQ8EMFQ(N zz^H8<1o(INxwI$P?}ywQg43AKr`qCuGw7v$5q8Q!W<`Y2Fm+ho|Bw#JwWAro7AOS9wN2%PSH2M8f$a;#!FBVlrY(5r6SM z+U_?9{F~fEJVt30;ykr=Z>%QrojM#mJ6|BbsI8P+_Y(4N`Ai3X!T)^JlUa(!_aB$V zkB3!4p3`Q%?&AB8UUG0W!#?`y99)b7?2&Tg)_e`}qn3qkH-mnu9d=uU@g7SO{57Ks z`tEV?>*KA3{LHx+A0gv=X?iox@UMzJu2muo@%M<7a8o8yo zB!@z{)`pjf9!9>S%=Aek#D60?M=d5GKShnxYxe^m>~48k^|&Fe!dZQJ<^7nnC5>|c z@}2r0^gGp?zj(eL_GQgJnGxnj^cQ+LSTX`Q@6GYl zh)xP2`Jr+4&wj}FIeJT%cECIa>Hh)$d`su81O4bvq+>(bCt^tJ31J9tfWKu``{7Lz zGo-nyF0z%v9PO$JHvAfKx^KvDiT=Ic<&mBsmF zVlk!U zl;Mb`E$H*4U7MK?{$KQByjbHlysuiGFzG`)SMIi~uZR7(Xa)v8%7lOJ*)Dzs=RvP{ z{nEe`*k|`|y@Q~izzlEA1;}5?I(g3Hpx0kv-keM6=x?;sSM(J4!`>ln>H+xOmQSQ- 
zfSKdk12mwontk_&UtoQ|U+GhfW1*i_DfaI02du#pmnpdU`%fMQ4x@<+R7K3o&)gnt@8cR1$~=2tT$&A|T&NlH6^SD_FFT+I7f z0MqGm|7+4l|2tnd$(!(h`dShX3a-H3Y(9`(G5~+JP$3>jf&cKRo>~CR#M^DQK?Cw> ze7>k2<40D>jG)yKcb3GoPz0^_5d+2D^^nQZ4274L&8=_Qb#E?trD zu7LMiS1|O^C1ibm`Sf27Q?UnqRa>PKR3Xo-_agFlA>P<({IZ!3_HFCWt)eUOjJKAO z=)M<-pF3(T8GtVj9rA02{nxIhjj4m*ZrOfa>ryF%Tv2_h2l&?_skG^366F8&pFQP> z-_u1D7ji)l>&oz!Dx4?2nFc1E(gD3z$(xLpf}TC%>(Foi3Twq_GDh@n@)4fU$XV?ezb8o*QKEJ@} zbk_p@FJ1ltuO;9`sW0i#;P3Xy#V^J9US2RK@eKSV(G9Vl0AA8bN}ByC;+Zf~r!e{n zk(Fn4ov@zO>tmXUx|$ca5`SulaNfiD;X`9^Nfi1EY9^iGfWN-+LU!aZ=C$`(zNaMWe^nLPuF8cXN2Z_8rg8gvr zw$BC}A92c;TM_ZrbmYH1`QVqrLU29cMVSOUD)3b?3xV@w3Nc}vACm!mK+|kb9`xH> zl_57I5BC{#!<|2F;QmD6RnF-`&|igQ|LGdeM=g7!eQ>@lKh8e)4*D$ihs)+!0s1?3 zM_8T(JSd+d6i-7xL0+nKE8@|3hV5cYIR9=RQ?p$JzZWaL`|V!fexq?b-2VZEILjSo zeme#CNm0VXv$n9Wr9#Hmpf`IR1FsF_XElF&lbs9tO3?r8c%A2UCd7{}yr-pbURRXL z5x3ij_}e*3=`CQMq(I|eLa^`a#=VcdgZ;VPY;j==^7n=MPcBtbX$J+;h5Z3fpGsyB z1U~%k*H~e83UO38bjTj?K{;AWEAr737h@0Y#C?NxuvUZP2FRzK;PT=3&`0%?R|h!( zN2iT?;=GW-7N-~V?n}h<^Y9AKA*YM4eL>pJQ3U^0sUDjAQG@Y z-mv$Xzw!I7KZR-Yp!XKGl$QN6us^@rM`UoHAu_?F$)Esx*LchKN(J=u=q?^bS)8Z8 ztUJ_&?>~z>DW3-XOkPD+1I8DX)Q9g>gT30dF>M?6BXe{*#SrpKKKA+B8tmUC^5;$& zE$r`y*g@%9_=lU)@;lTKUwu61m{^DVl}VP^bFe4pv>BJ$01y7?3gCtOOi%k?`-u6> zA|upljG=!ar9UKNcngK7)u+^3u`b=tj_~334?(3YMZz~4BKXF~m5eD9#S@Eo)6zYZI z^NscKdsENPdaw2(-V!PKDuMm~pS+Bhi$8k;xHMkKT5k`9sLW>HW)FSrVXbj_#EbnM zjSMh=ez>!Ejk#@uKCIH0FTEmO1>xvdcPD248T|F8wLOVU0k|*Amec%>^$s;Gt!E8G zKUr6I`h0nuf9fa8SVA$r(7e+06opWshA@2qK1lwem%>TJ3xjP>_e7&#=`BMk3E)NR zUN)uxj`bo{(}P}P3KKzRpRM$Ni%4};g*^`HH5dYZiS9?pH^{^4^&HzrPj2vre^@-h zXAF7nwrEw9@L1_@wdyer3;ampVnWtr7I5p zynsca9P6t{)@$O&ey6TziOL;Ce7kD?>Gt$_AwS&53TwApm6D0sWQFpp520_P%-NyH zr*`yN=f(njw9UTELs?|PVnLE#2zc#7PCxB)QLo`%w(#2>{gtzCm)gE06S~=RhpS!D zAGPpoyASfU6XU3o1zuDd$z`V0Hx~Kb?@6P%fDKAEa=i#66QYqDZggNgy&L=1>~Jzs zcz(CE5$O9@Db=JK`T6cs+m*k>9U-E}&i?^HoMgcF&<6fM+`TC=0$985mK$be~K-e{4U|eIN46?=f;3^#lG+ zAv)LaJz>gd+E?UPF`6nz2x0!nY_(yx+xWf<|GEP3Q)GF^To3ZG=0~FL+{EuWW1>EG 
z8IcL9LYc&A{QirrfOk3atys8ytM&snyHfg6OAGnv-Cr(J!2d6jvC&pn$V99`vDdig z%6+Ux^Gg5Z6!~8~I*S)GB)C07H$qCYfKZT+`_IItxM9f0ppf~!*I#0mPd^a+|079X2>R{wJn*~- z^M#II@qL8+viSRxJzp{Y=}75!Kjim2EjMew#(a#EWhD*BN8@;|)jbP*VE@l?{tz-D zMsxZY6pVhuj;HSZL;kU1~sTQ!tPp*#Wk?MY0A zq-45t>DQGfGx?Y2FM z@E3Q3-ba*>iDuc5rjr=YpnSXUb`APP?>^Pc7e%GrAf2hZ)JP_-N_SL=Vm;&rwI(X? z??)?DwGr=a4W26BP){a)c;B~VOvm}CGiboMi%b|C*Paf?ey%0*Py6+dQUCS#kvYB> z_U6z@)1PD_hp+BT74W~=it7u1p`UTlrl)F{|J`M!?ROC$Jg%Sjs?UM{e(rbDjR*Hb zDGja4h`-V_I)im#f6R}Hvk4%cla8IF<dL)(q|p#dxg;qWC9j)SsPrZE5}j{YM2~ zsK40V)fClysnP zKh8QCB~_3KF7Khr7qCwka=z!=q5jF*VBEzX_MH6j@8MnG-;LYdH#H!y+AazoaxfoP z&5gv3kbi|_<>J_C=(AY=ZC!up-yTuPj7soN^37=)wTxfq#6!tFNMi{bbeim&kD=UcbR0a{~HLm)Y~-)po?6EhWrO zuxAs(T}xG<&wY`ZIcwO9{K7+x!-zMv63~Ae`-u|`K3Y8s`w^Nd--!JPhI36t{ek{a z63_R+e|cmsNKDM2U-$O`jj{f-USBWkKz}t^OgIDKZ@5HlLi|9V1Z|r# zGW^Zz{dWg{x^yrV>#@pKvu4BnY@a5hG~|a%#`>`+;@g3`9@C4j&``@xy`G2u6v*dATfrtz9e8`e(O=>U8%>6uwQ&Ibo)4>4FH9IZ1emu*y3fCb zOtkiQwBND9d9pb~bOQRzq|W7A2E6npdfp7w_w4H0&}nrO@1Yq!&kDYg34ZNDwJBS? 
zf06rKqeszyJ%2%W;RfPc*>{^=$6&vW;=*)qgTAJlPtHTX9S+ZC*k4D#=(6Hk7VtAG zJvHZ#F5(~dpYy_>QBQVhchaIB{6prkjE&i3f)!DxLt>0p&O@#eBCw|(m1j7*O;_UCo2-n{sPFqj?)ZHT@rrNDHuhxHlUZ{0 zDd77aai3}@G2i;+^}&`Hf55m+ZVl{jU5<`&sU^;rsxp)Ffa^CN>(4TQz3nh`m??n2 z3cu7RZ4P^kVpq`!&Z5_TDRGDnAeX|L3&WTk!k&DlxI?ROsttp>P?%w>7Wi zMZE+4Ha}^~0(}`@aoJD2g?-(%Dfln&)Mrzs&KWE9jej1O0dMBWaos)%^>eHEZMB}Q zT7HG!YV==g5WmP+)GF10za}PWBd6dmX0Iduxw8^4y$wi%zmk=u{v+&H{P(~867Wx2 z7jysWj?N9{m{h&Dr7Y z%Kq;S;r|#go-RXcP8{n`al`s3m~@$?Ka8u>Ta+4K)dfA6<@{9v*{#SBrn=6ajmk z{+hLE${X>-0fT5M`0FnrfA>q6A|A6X9GHN;_`190U@!8`-BdRWD6Yioo^qK9dWfg8 zC%bRMe!5Mt6we{QJo6C?@6QOt$MK3RhY>G%$|l7G;Cs&l_Kl;RZ QU$^gWMt%t5g;o3eAMPMVWdHyG literal 0 HcmV?d00001 diff --git a/src/ocgis/test/bin/shp/state_boundaries/state_boundaries.shx b/src/ocgis/test/bin/shp/state_boundaries/state_boundaries.shx new file mode 100644 index 0000000000000000000000000000000000000000..ecd94a5a62b1ddc4a087a3330cedbfd40b022faf GIT binary patch literal 508 zcmZvYO(+9k7{`C_#=gu!iJ{~ml#+Hp4w{m7(zbTck{q;@3lfsh+~lA*EQKY9m2#3C z)*5jjNm4%Afs$4(4oLF#zbfzL>G^*=&;NO$*2VwtJ7!BC^- zcf;_4YCnv;2zS7^LOX=d^nhg#Vjl6zIX5Nz92T_8mVu>WaQR_X922Tp>3pzWrQ2p< zYg2k<*r^eI4Z9ZQ55a!7a=pV*SUTN!QLQ;~$Y!+v4({c1ihoL|>h9M{-J`r;`nU2G zOleZ{a+tDZ=@&7Tbx`QX)Q@Vu1=A#t{T9=vJ+1`C{|QB7n8=v;O-xktTmzV)d1ycT E1yOHY4*&oF literal 0 HcmV?d00001 diff --git a/src/ocgis/test/bin/test_csv_calc_conversion_two_calculations.csv b/src/ocgis/test/bin/test_csv_calc_conversion_two_calculations.csv new file mode 100644 index 000000000..da97d2055 --- /dev/null +++ b/src/ocgis/test/bin/test_csv_calc_conversion_two_calculations.csv @@ -0,0 +1,257 @@ +DID,VID,CID,UGID,TID,LID,GID,VARIABLE,ALIAS,CALC_KEY,CALC_ALIAS,TIME,YEAR,MONTH,DAY,LEVEL,VALUE +1,1,1,1,1,1,1,foo,var1,mean,my_mean,2000-03-16 00:00:00,2000,3,16,50,1.0 +1,1,1,1,1,1,2,foo,var1,mean,my_mean,2000-03-16 00:00:00,2000,3,16,50,1.0 +1,1,1,1,1,1,3,foo,var1,mean,my_mean,2000-03-16 00:00:00,2000,3,16,50,2.0 +1,1,1,1,1,1,4,foo,var1,mean,my_mean,2000-03-16 00:00:00,2000,3,16,50,2.0 
+1,1,1,1,1,1,5,foo,var1,mean,my_mean,2000-03-16 00:00:00,2000,3,16,50,1.0 +1,1,1,1,1,1,6,foo,var1,mean,my_mean,2000-03-16 00:00:00,2000,3,16,50,1.0 +1,1,1,1,1,1,7,foo,var1,mean,my_mean,2000-03-16 00:00:00,2000,3,16,50,2.0 +1,1,1,1,1,1,8,foo,var1,mean,my_mean,2000-03-16 00:00:00,2000,3,16,50,2.0 +1,1,1,1,1,1,9,foo,var1,mean,my_mean,2000-03-16 00:00:00,2000,3,16,50,3.0 +1,1,1,1,1,1,10,foo,var1,mean,my_mean,2000-03-16 00:00:00,2000,3,16,50,3.0 +1,1,1,1,1,1,11,foo,var1,mean,my_mean,2000-03-16 00:00:00,2000,3,16,50,4.0 +1,1,1,1,1,1,12,foo,var1,mean,my_mean,2000-03-16 00:00:00,2000,3,16,50,4.0 +1,1,1,1,1,1,13,foo,var1,mean,my_mean,2000-03-16 00:00:00,2000,3,16,50,3.0 +1,1,1,1,1,1,14,foo,var1,mean,my_mean,2000-03-16 00:00:00,2000,3,16,50,3.0 +1,1,1,1,1,1,15,foo,var1,mean,my_mean,2000-03-16 00:00:00,2000,3,16,50,4.0 +1,1,1,1,1,1,16,foo,var1,mean,my_mean,2000-03-16 00:00:00,2000,3,16,50,4.0 +1,1,1,1,1,2,1,foo,var1,mean,my_mean,2000-03-16 00:00:00,2000,3,16,150,1.0 +1,1,1,1,1,2,2,foo,var1,mean,my_mean,2000-03-16 00:00:00,2000,3,16,150,1.0 +1,1,1,1,1,2,3,foo,var1,mean,my_mean,2000-03-16 00:00:00,2000,3,16,150,2.0 +1,1,1,1,1,2,4,foo,var1,mean,my_mean,2000-03-16 00:00:00,2000,3,16,150,2.0 +1,1,1,1,1,2,5,foo,var1,mean,my_mean,2000-03-16 00:00:00,2000,3,16,150,1.0 +1,1,1,1,1,2,6,foo,var1,mean,my_mean,2000-03-16 00:00:00,2000,3,16,150,1.0 +1,1,1,1,1,2,7,foo,var1,mean,my_mean,2000-03-16 00:00:00,2000,3,16,150,2.0 +1,1,1,1,1,2,8,foo,var1,mean,my_mean,2000-03-16 00:00:00,2000,3,16,150,2.0 +1,1,1,1,1,2,9,foo,var1,mean,my_mean,2000-03-16 00:00:00,2000,3,16,150,3.0 +1,1,1,1,1,2,10,foo,var1,mean,my_mean,2000-03-16 00:00:00,2000,3,16,150,3.0 +1,1,1,1,1,2,11,foo,var1,mean,my_mean,2000-03-16 00:00:00,2000,3,16,150,4.0 +1,1,1,1,1,2,12,foo,var1,mean,my_mean,2000-03-16 00:00:00,2000,3,16,150,4.0 +1,1,1,1,1,2,13,foo,var1,mean,my_mean,2000-03-16 00:00:00,2000,3,16,150,3.0 +1,1,1,1,1,2,14,foo,var1,mean,my_mean,2000-03-16 00:00:00,2000,3,16,150,3.0 +1,1,1,1,1,2,15,foo,var1,mean,my_mean,2000-03-16 
00:00:00,2000,3,16,150,4.0 +1,1,1,1,1,2,16,foo,var1,mean,my_mean,2000-03-16 00:00:00,2000,3,16,150,4.0 +1,1,1,1,2,1,1,foo,var1,mean,my_mean,2000-04-16 00:00:00,2000,4,16,50,1.0 +1,1,1,1,2,1,2,foo,var1,mean,my_mean,2000-04-16 00:00:00,2000,4,16,50,1.0 +1,1,1,1,2,1,3,foo,var1,mean,my_mean,2000-04-16 00:00:00,2000,4,16,50,2.0 +1,1,1,1,2,1,4,foo,var1,mean,my_mean,2000-04-16 00:00:00,2000,4,16,50,2.0 +1,1,1,1,2,1,5,foo,var1,mean,my_mean,2000-04-16 00:00:00,2000,4,16,50,1.0 +1,1,1,1,2,1,6,foo,var1,mean,my_mean,2000-04-16 00:00:00,2000,4,16,50,1.0 +1,1,1,1,2,1,7,foo,var1,mean,my_mean,2000-04-16 00:00:00,2000,4,16,50,2.0 +1,1,1,1,2,1,8,foo,var1,mean,my_mean,2000-04-16 00:00:00,2000,4,16,50,2.0 +1,1,1,1,2,1,9,foo,var1,mean,my_mean,2000-04-16 00:00:00,2000,4,16,50,3.0 +1,1,1,1,2,1,10,foo,var1,mean,my_mean,2000-04-16 00:00:00,2000,4,16,50,3.0 +1,1,1,1,2,1,11,foo,var1,mean,my_mean,2000-04-16 00:00:00,2000,4,16,50,4.0 +1,1,1,1,2,1,12,foo,var1,mean,my_mean,2000-04-16 00:00:00,2000,4,16,50,4.0 +1,1,1,1,2,1,13,foo,var1,mean,my_mean,2000-04-16 00:00:00,2000,4,16,50,3.0 +1,1,1,1,2,1,14,foo,var1,mean,my_mean,2000-04-16 00:00:00,2000,4,16,50,3.0 +1,1,1,1,2,1,15,foo,var1,mean,my_mean,2000-04-16 00:00:00,2000,4,16,50,4.0 +1,1,1,1,2,1,16,foo,var1,mean,my_mean,2000-04-16 00:00:00,2000,4,16,50,4.0 +1,1,1,1,2,2,1,foo,var1,mean,my_mean,2000-04-16 00:00:00,2000,4,16,150,1.0 +1,1,1,1,2,2,2,foo,var1,mean,my_mean,2000-04-16 00:00:00,2000,4,16,150,1.0 +1,1,1,1,2,2,3,foo,var1,mean,my_mean,2000-04-16 00:00:00,2000,4,16,150,2.0 +1,1,1,1,2,2,4,foo,var1,mean,my_mean,2000-04-16 00:00:00,2000,4,16,150,2.0 +1,1,1,1,2,2,5,foo,var1,mean,my_mean,2000-04-16 00:00:00,2000,4,16,150,1.0 +1,1,1,1,2,2,6,foo,var1,mean,my_mean,2000-04-16 00:00:00,2000,4,16,150,1.0 +1,1,1,1,2,2,7,foo,var1,mean,my_mean,2000-04-16 00:00:00,2000,4,16,150,2.0 +1,1,1,1,2,2,8,foo,var1,mean,my_mean,2000-04-16 00:00:00,2000,4,16,150,2.0 +1,1,1,1,2,2,9,foo,var1,mean,my_mean,2000-04-16 00:00:00,2000,4,16,150,3.0 
+1,1,1,1,2,2,10,foo,var1,mean,my_mean,2000-04-16 00:00:00,2000,4,16,150,3.0 +1,1,1,1,2,2,11,foo,var1,mean,my_mean,2000-04-16 00:00:00,2000,4,16,150,4.0 +1,1,1,1,2,2,12,foo,var1,mean,my_mean,2000-04-16 00:00:00,2000,4,16,150,4.0 +1,1,1,1,2,2,13,foo,var1,mean,my_mean,2000-04-16 00:00:00,2000,4,16,150,3.0 +1,1,1,1,2,2,14,foo,var1,mean,my_mean,2000-04-16 00:00:00,2000,4,16,150,3.0 +1,1,1,1,2,2,15,foo,var1,mean,my_mean,2000-04-16 00:00:00,2000,4,16,150,4.0 +1,1,1,1,2,2,16,foo,var1,mean,my_mean,2000-04-16 00:00:00,2000,4,16,150,4.0 +1,1,2,1,1,1,1,foo,var1,min,my_min,2000-03-16 00:00:00,2000,3,16,50,1.0 +1,1,2,1,1,1,2,foo,var1,min,my_min,2000-03-16 00:00:00,2000,3,16,50,1.0 +1,1,2,1,1,1,3,foo,var1,min,my_min,2000-03-16 00:00:00,2000,3,16,50,2.0 +1,1,2,1,1,1,4,foo,var1,min,my_min,2000-03-16 00:00:00,2000,3,16,50,2.0 +1,1,2,1,1,1,5,foo,var1,min,my_min,2000-03-16 00:00:00,2000,3,16,50,1.0 +1,1,2,1,1,1,6,foo,var1,min,my_min,2000-03-16 00:00:00,2000,3,16,50,1.0 +1,1,2,1,1,1,7,foo,var1,min,my_min,2000-03-16 00:00:00,2000,3,16,50,2.0 +1,1,2,1,1,1,8,foo,var1,min,my_min,2000-03-16 00:00:00,2000,3,16,50,2.0 +1,1,2,1,1,1,9,foo,var1,min,my_min,2000-03-16 00:00:00,2000,3,16,50,3.0 +1,1,2,1,1,1,10,foo,var1,min,my_min,2000-03-16 00:00:00,2000,3,16,50,3.0 +1,1,2,1,1,1,11,foo,var1,min,my_min,2000-03-16 00:00:00,2000,3,16,50,4.0 +1,1,2,1,1,1,12,foo,var1,min,my_min,2000-03-16 00:00:00,2000,3,16,50,4.0 +1,1,2,1,1,1,13,foo,var1,min,my_min,2000-03-16 00:00:00,2000,3,16,50,3.0 +1,1,2,1,1,1,14,foo,var1,min,my_min,2000-03-16 00:00:00,2000,3,16,50,3.0 +1,1,2,1,1,1,15,foo,var1,min,my_min,2000-03-16 00:00:00,2000,3,16,50,4.0 +1,1,2,1,1,1,16,foo,var1,min,my_min,2000-03-16 00:00:00,2000,3,16,50,4.0 +1,1,2,1,1,2,1,foo,var1,min,my_min,2000-03-16 00:00:00,2000,3,16,150,1.0 +1,1,2,1,1,2,2,foo,var1,min,my_min,2000-03-16 00:00:00,2000,3,16,150,1.0 +1,1,2,1,1,2,3,foo,var1,min,my_min,2000-03-16 00:00:00,2000,3,16,150,2.0 +1,1,2,1,1,2,4,foo,var1,min,my_min,2000-03-16 00:00:00,2000,3,16,150,2.0 
+1,1,2,1,1,2,5,foo,var1,min,my_min,2000-03-16 00:00:00,2000,3,16,150,1.0 +1,1,2,1,1,2,6,foo,var1,min,my_min,2000-03-16 00:00:00,2000,3,16,150,1.0 +1,1,2,1,1,2,7,foo,var1,min,my_min,2000-03-16 00:00:00,2000,3,16,150,2.0 +1,1,2,1,1,2,8,foo,var1,min,my_min,2000-03-16 00:00:00,2000,3,16,150,2.0 +1,1,2,1,1,2,9,foo,var1,min,my_min,2000-03-16 00:00:00,2000,3,16,150,3.0 +1,1,2,1,1,2,10,foo,var1,min,my_min,2000-03-16 00:00:00,2000,3,16,150,3.0 +1,1,2,1,1,2,11,foo,var1,min,my_min,2000-03-16 00:00:00,2000,3,16,150,4.0 +1,1,2,1,1,2,12,foo,var1,min,my_min,2000-03-16 00:00:00,2000,3,16,150,4.0 +1,1,2,1,1,2,13,foo,var1,min,my_min,2000-03-16 00:00:00,2000,3,16,150,3.0 +1,1,2,1,1,2,14,foo,var1,min,my_min,2000-03-16 00:00:00,2000,3,16,150,3.0 +1,1,2,1,1,2,15,foo,var1,min,my_min,2000-03-16 00:00:00,2000,3,16,150,4.0 +1,1,2,1,1,2,16,foo,var1,min,my_min,2000-03-16 00:00:00,2000,3,16,150,4.0 +1,1,2,1,2,1,1,foo,var1,min,my_min,2000-04-16 00:00:00,2000,4,16,50,1.0 +1,1,2,1,2,1,2,foo,var1,min,my_min,2000-04-16 00:00:00,2000,4,16,50,1.0 +1,1,2,1,2,1,3,foo,var1,min,my_min,2000-04-16 00:00:00,2000,4,16,50,2.0 +1,1,2,1,2,1,4,foo,var1,min,my_min,2000-04-16 00:00:00,2000,4,16,50,2.0 +1,1,2,1,2,1,5,foo,var1,min,my_min,2000-04-16 00:00:00,2000,4,16,50,1.0 +1,1,2,1,2,1,6,foo,var1,min,my_min,2000-04-16 00:00:00,2000,4,16,50,1.0 +1,1,2,1,2,1,7,foo,var1,min,my_min,2000-04-16 00:00:00,2000,4,16,50,2.0 +1,1,2,1,2,1,8,foo,var1,min,my_min,2000-04-16 00:00:00,2000,4,16,50,2.0 +1,1,2,1,2,1,9,foo,var1,min,my_min,2000-04-16 00:00:00,2000,4,16,50,3.0 +1,1,2,1,2,1,10,foo,var1,min,my_min,2000-04-16 00:00:00,2000,4,16,50,3.0 +1,1,2,1,2,1,11,foo,var1,min,my_min,2000-04-16 00:00:00,2000,4,16,50,4.0 +1,1,2,1,2,1,12,foo,var1,min,my_min,2000-04-16 00:00:00,2000,4,16,50,4.0 +1,1,2,1,2,1,13,foo,var1,min,my_min,2000-04-16 00:00:00,2000,4,16,50,3.0 +1,1,2,1,2,1,14,foo,var1,min,my_min,2000-04-16 00:00:00,2000,4,16,50,3.0 +1,1,2,1,2,1,15,foo,var1,min,my_min,2000-04-16 00:00:00,2000,4,16,50,4.0 
+1,1,2,1,2,1,16,foo,var1,min,my_min,2000-04-16 00:00:00,2000,4,16,50,4.0 +1,1,2,1,2,2,1,foo,var1,min,my_min,2000-04-16 00:00:00,2000,4,16,150,1.0 +1,1,2,1,2,2,2,foo,var1,min,my_min,2000-04-16 00:00:00,2000,4,16,150,1.0 +1,1,2,1,2,2,3,foo,var1,min,my_min,2000-04-16 00:00:00,2000,4,16,150,2.0 +1,1,2,1,2,2,4,foo,var1,min,my_min,2000-04-16 00:00:00,2000,4,16,150,2.0 +1,1,2,1,2,2,5,foo,var1,min,my_min,2000-04-16 00:00:00,2000,4,16,150,1.0 +1,1,2,1,2,2,6,foo,var1,min,my_min,2000-04-16 00:00:00,2000,4,16,150,1.0 +1,1,2,1,2,2,7,foo,var1,min,my_min,2000-04-16 00:00:00,2000,4,16,150,2.0 +1,1,2,1,2,2,8,foo,var1,min,my_min,2000-04-16 00:00:00,2000,4,16,150,2.0 +1,1,2,1,2,2,9,foo,var1,min,my_min,2000-04-16 00:00:00,2000,4,16,150,3.0 +1,1,2,1,2,2,10,foo,var1,min,my_min,2000-04-16 00:00:00,2000,4,16,150,3.0 +1,1,2,1,2,2,11,foo,var1,min,my_min,2000-04-16 00:00:00,2000,4,16,150,4.0 +1,1,2,1,2,2,12,foo,var1,min,my_min,2000-04-16 00:00:00,2000,4,16,150,4.0 +1,1,2,1,2,2,13,foo,var1,min,my_min,2000-04-16 00:00:00,2000,4,16,150,3.0 +1,1,2,1,2,2,14,foo,var1,min,my_min,2000-04-16 00:00:00,2000,4,16,150,3.0 +1,1,2,1,2,2,15,foo,var1,min,my_min,2000-04-16 00:00:00,2000,4,16,150,4.0 +1,1,2,1,2,2,16,foo,var1,min,my_min,2000-04-16 00:00:00,2000,4,16,150,4.0 +2,1,1,1,1,1,1,foo,var2,mean,my_mean,2000-03-16 00:00:00,2000,3,16,50,1.0 +2,1,1,1,1,1,2,foo,var2,mean,my_mean,2000-03-16 00:00:00,2000,3,16,50,1.0 +2,1,1,1,1,1,3,foo,var2,mean,my_mean,2000-03-16 00:00:00,2000,3,16,50,2.0 +2,1,1,1,1,1,4,foo,var2,mean,my_mean,2000-03-16 00:00:00,2000,3,16,50,2.0 +2,1,1,1,1,1,5,foo,var2,mean,my_mean,2000-03-16 00:00:00,2000,3,16,50,1.0 +2,1,1,1,1,1,6,foo,var2,mean,my_mean,2000-03-16 00:00:00,2000,3,16,50,1.0 +2,1,1,1,1,1,7,foo,var2,mean,my_mean,2000-03-16 00:00:00,2000,3,16,50,2.0 +2,1,1,1,1,1,8,foo,var2,mean,my_mean,2000-03-16 00:00:00,2000,3,16,50,2.0 +2,1,1,1,1,1,9,foo,var2,mean,my_mean,2000-03-16 00:00:00,2000,3,16,50,3.0 +2,1,1,1,1,1,10,foo,var2,mean,my_mean,2000-03-16 00:00:00,2000,3,16,50,3.0 
+2,1,1,1,1,1,11,foo,var2,mean,my_mean,2000-03-16 00:00:00,2000,3,16,50,4.0 +2,1,1,1,1,1,12,foo,var2,mean,my_mean,2000-03-16 00:00:00,2000,3,16,50,4.0 +2,1,1,1,1,1,13,foo,var2,mean,my_mean,2000-03-16 00:00:00,2000,3,16,50,3.0 +2,1,1,1,1,1,14,foo,var2,mean,my_mean,2000-03-16 00:00:00,2000,3,16,50,3.0 +2,1,1,1,1,1,15,foo,var2,mean,my_mean,2000-03-16 00:00:00,2000,3,16,50,4.0 +2,1,1,1,1,1,16,foo,var2,mean,my_mean,2000-03-16 00:00:00,2000,3,16,50,4.0 +2,1,1,1,1,2,1,foo,var2,mean,my_mean,2000-03-16 00:00:00,2000,3,16,150,1.0 +2,1,1,1,1,2,2,foo,var2,mean,my_mean,2000-03-16 00:00:00,2000,3,16,150,1.0 +2,1,1,1,1,2,3,foo,var2,mean,my_mean,2000-03-16 00:00:00,2000,3,16,150,2.0 +2,1,1,1,1,2,4,foo,var2,mean,my_mean,2000-03-16 00:00:00,2000,3,16,150,2.0 +2,1,1,1,1,2,5,foo,var2,mean,my_mean,2000-03-16 00:00:00,2000,3,16,150,1.0 +2,1,1,1,1,2,6,foo,var2,mean,my_mean,2000-03-16 00:00:00,2000,3,16,150,1.0 +2,1,1,1,1,2,7,foo,var2,mean,my_mean,2000-03-16 00:00:00,2000,3,16,150,2.0 +2,1,1,1,1,2,8,foo,var2,mean,my_mean,2000-03-16 00:00:00,2000,3,16,150,2.0 +2,1,1,1,1,2,9,foo,var2,mean,my_mean,2000-03-16 00:00:00,2000,3,16,150,3.0 +2,1,1,1,1,2,10,foo,var2,mean,my_mean,2000-03-16 00:00:00,2000,3,16,150,3.0 +2,1,1,1,1,2,11,foo,var2,mean,my_mean,2000-03-16 00:00:00,2000,3,16,150,4.0 +2,1,1,1,1,2,12,foo,var2,mean,my_mean,2000-03-16 00:00:00,2000,3,16,150,4.0 +2,1,1,1,1,2,13,foo,var2,mean,my_mean,2000-03-16 00:00:00,2000,3,16,150,3.0 +2,1,1,1,1,2,14,foo,var2,mean,my_mean,2000-03-16 00:00:00,2000,3,16,150,3.0 +2,1,1,1,1,2,15,foo,var2,mean,my_mean,2000-03-16 00:00:00,2000,3,16,150,4.0 +2,1,1,1,1,2,16,foo,var2,mean,my_mean,2000-03-16 00:00:00,2000,3,16,150,4.0 +2,1,1,1,2,1,1,foo,var2,mean,my_mean,2000-04-16 00:00:00,2000,4,16,50,1.0 +2,1,1,1,2,1,2,foo,var2,mean,my_mean,2000-04-16 00:00:00,2000,4,16,50,1.0 +2,1,1,1,2,1,3,foo,var2,mean,my_mean,2000-04-16 00:00:00,2000,4,16,50,2.0 +2,1,1,1,2,1,4,foo,var2,mean,my_mean,2000-04-16 00:00:00,2000,4,16,50,2.0 
+2,1,1,1,2,1,5,foo,var2,mean,my_mean,2000-04-16 00:00:00,2000,4,16,50,1.0 +2,1,1,1,2,1,6,foo,var2,mean,my_mean,2000-04-16 00:00:00,2000,4,16,50,1.0 +2,1,1,1,2,1,7,foo,var2,mean,my_mean,2000-04-16 00:00:00,2000,4,16,50,2.0 +2,1,1,1,2,1,8,foo,var2,mean,my_mean,2000-04-16 00:00:00,2000,4,16,50,2.0 +2,1,1,1,2,1,9,foo,var2,mean,my_mean,2000-04-16 00:00:00,2000,4,16,50,3.0 +2,1,1,1,2,1,10,foo,var2,mean,my_mean,2000-04-16 00:00:00,2000,4,16,50,3.0 +2,1,1,1,2,1,11,foo,var2,mean,my_mean,2000-04-16 00:00:00,2000,4,16,50,4.0 +2,1,1,1,2,1,12,foo,var2,mean,my_mean,2000-04-16 00:00:00,2000,4,16,50,4.0 +2,1,1,1,2,1,13,foo,var2,mean,my_mean,2000-04-16 00:00:00,2000,4,16,50,3.0 +2,1,1,1,2,1,14,foo,var2,mean,my_mean,2000-04-16 00:00:00,2000,4,16,50,3.0 +2,1,1,1,2,1,15,foo,var2,mean,my_mean,2000-04-16 00:00:00,2000,4,16,50,4.0 +2,1,1,1,2,1,16,foo,var2,mean,my_mean,2000-04-16 00:00:00,2000,4,16,50,4.0 +2,1,1,1,2,2,1,foo,var2,mean,my_mean,2000-04-16 00:00:00,2000,4,16,150,1.0 +2,1,1,1,2,2,2,foo,var2,mean,my_mean,2000-04-16 00:00:00,2000,4,16,150,1.0 +2,1,1,1,2,2,3,foo,var2,mean,my_mean,2000-04-16 00:00:00,2000,4,16,150,2.0 +2,1,1,1,2,2,4,foo,var2,mean,my_mean,2000-04-16 00:00:00,2000,4,16,150,2.0 +2,1,1,1,2,2,5,foo,var2,mean,my_mean,2000-04-16 00:00:00,2000,4,16,150,1.0 +2,1,1,1,2,2,6,foo,var2,mean,my_mean,2000-04-16 00:00:00,2000,4,16,150,1.0 +2,1,1,1,2,2,7,foo,var2,mean,my_mean,2000-04-16 00:00:00,2000,4,16,150,2.0 +2,1,1,1,2,2,8,foo,var2,mean,my_mean,2000-04-16 00:00:00,2000,4,16,150,2.0 +2,1,1,1,2,2,9,foo,var2,mean,my_mean,2000-04-16 00:00:00,2000,4,16,150,3.0 +2,1,1,1,2,2,10,foo,var2,mean,my_mean,2000-04-16 00:00:00,2000,4,16,150,3.0 +2,1,1,1,2,2,11,foo,var2,mean,my_mean,2000-04-16 00:00:00,2000,4,16,150,4.0 +2,1,1,1,2,2,12,foo,var2,mean,my_mean,2000-04-16 00:00:00,2000,4,16,150,4.0 +2,1,1,1,2,2,13,foo,var2,mean,my_mean,2000-04-16 00:00:00,2000,4,16,150,3.0 +2,1,1,1,2,2,14,foo,var2,mean,my_mean,2000-04-16 00:00:00,2000,4,16,150,3.0 +2,1,1,1,2,2,15,foo,var2,mean,my_mean,2000-04-16 
00:00:00,2000,4,16,150,4.0 +2,1,1,1,2,2,16,foo,var2,mean,my_mean,2000-04-16 00:00:00,2000,4,16,150,4.0 +2,1,2,1,1,1,1,foo,var2,min,my_min,2000-03-16 00:00:00,2000,3,16,50,1.0 +2,1,2,1,1,1,2,foo,var2,min,my_min,2000-03-16 00:00:00,2000,3,16,50,1.0 +2,1,2,1,1,1,3,foo,var2,min,my_min,2000-03-16 00:00:00,2000,3,16,50,2.0 +2,1,2,1,1,1,4,foo,var2,min,my_min,2000-03-16 00:00:00,2000,3,16,50,2.0 +2,1,2,1,1,1,5,foo,var2,min,my_min,2000-03-16 00:00:00,2000,3,16,50,1.0 +2,1,2,1,1,1,6,foo,var2,min,my_min,2000-03-16 00:00:00,2000,3,16,50,1.0 +2,1,2,1,1,1,7,foo,var2,min,my_min,2000-03-16 00:00:00,2000,3,16,50,2.0 +2,1,2,1,1,1,8,foo,var2,min,my_min,2000-03-16 00:00:00,2000,3,16,50,2.0 +2,1,2,1,1,1,9,foo,var2,min,my_min,2000-03-16 00:00:00,2000,3,16,50,3.0 +2,1,2,1,1,1,10,foo,var2,min,my_min,2000-03-16 00:00:00,2000,3,16,50,3.0 +2,1,2,1,1,1,11,foo,var2,min,my_min,2000-03-16 00:00:00,2000,3,16,50,4.0 +2,1,2,1,1,1,12,foo,var2,min,my_min,2000-03-16 00:00:00,2000,3,16,50,4.0 +2,1,2,1,1,1,13,foo,var2,min,my_min,2000-03-16 00:00:00,2000,3,16,50,3.0 +2,1,2,1,1,1,14,foo,var2,min,my_min,2000-03-16 00:00:00,2000,3,16,50,3.0 +2,1,2,1,1,1,15,foo,var2,min,my_min,2000-03-16 00:00:00,2000,3,16,50,4.0 +2,1,2,1,1,1,16,foo,var2,min,my_min,2000-03-16 00:00:00,2000,3,16,50,4.0 +2,1,2,1,1,2,1,foo,var2,min,my_min,2000-03-16 00:00:00,2000,3,16,150,1.0 +2,1,2,1,1,2,2,foo,var2,min,my_min,2000-03-16 00:00:00,2000,3,16,150,1.0 +2,1,2,1,1,2,3,foo,var2,min,my_min,2000-03-16 00:00:00,2000,3,16,150,2.0 +2,1,2,1,1,2,4,foo,var2,min,my_min,2000-03-16 00:00:00,2000,3,16,150,2.0 +2,1,2,1,1,2,5,foo,var2,min,my_min,2000-03-16 00:00:00,2000,3,16,150,1.0 +2,1,2,1,1,2,6,foo,var2,min,my_min,2000-03-16 00:00:00,2000,3,16,150,1.0 +2,1,2,1,1,2,7,foo,var2,min,my_min,2000-03-16 00:00:00,2000,3,16,150,2.0 +2,1,2,1,1,2,8,foo,var2,min,my_min,2000-03-16 00:00:00,2000,3,16,150,2.0 +2,1,2,1,1,2,9,foo,var2,min,my_min,2000-03-16 00:00:00,2000,3,16,150,3.0 +2,1,2,1,1,2,10,foo,var2,min,my_min,2000-03-16 00:00:00,2000,3,16,150,3.0 
+2,1,2,1,1,2,11,foo,var2,min,my_min,2000-03-16 00:00:00,2000,3,16,150,4.0 +2,1,2,1,1,2,12,foo,var2,min,my_min,2000-03-16 00:00:00,2000,3,16,150,4.0 +2,1,2,1,1,2,13,foo,var2,min,my_min,2000-03-16 00:00:00,2000,3,16,150,3.0 +2,1,2,1,1,2,14,foo,var2,min,my_min,2000-03-16 00:00:00,2000,3,16,150,3.0 +2,1,2,1,1,2,15,foo,var2,min,my_min,2000-03-16 00:00:00,2000,3,16,150,4.0 +2,1,2,1,1,2,16,foo,var2,min,my_min,2000-03-16 00:00:00,2000,3,16,150,4.0 +2,1,2,1,2,1,1,foo,var2,min,my_min,2000-04-16 00:00:00,2000,4,16,50,1.0 +2,1,2,1,2,1,2,foo,var2,min,my_min,2000-04-16 00:00:00,2000,4,16,50,1.0 +2,1,2,1,2,1,3,foo,var2,min,my_min,2000-04-16 00:00:00,2000,4,16,50,2.0 +2,1,2,1,2,1,4,foo,var2,min,my_min,2000-04-16 00:00:00,2000,4,16,50,2.0 +2,1,2,1,2,1,5,foo,var2,min,my_min,2000-04-16 00:00:00,2000,4,16,50,1.0 +2,1,2,1,2,1,6,foo,var2,min,my_min,2000-04-16 00:00:00,2000,4,16,50,1.0 +2,1,2,1,2,1,7,foo,var2,min,my_min,2000-04-16 00:00:00,2000,4,16,50,2.0 +2,1,2,1,2,1,8,foo,var2,min,my_min,2000-04-16 00:00:00,2000,4,16,50,2.0 +2,1,2,1,2,1,9,foo,var2,min,my_min,2000-04-16 00:00:00,2000,4,16,50,3.0 +2,1,2,1,2,1,10,foo,var2,min,my_min,2000-04-16 00:00:00,2000,4,16,50,3.0 +2,1,2,1,2,1,11,foo,var2,min,my_min,2000-04-16 00:00:00,2000,4,16,50,4.0 +2,1,2,1,2,1,12,foo,var2,min,my_min,2000-04-16 00:00:00,2000,4,16,50,4.0 +2,1,2,1,2,1,13,foo,var2,min,my_min,2000-04-16 00:00:00,2000,4,16,50,3.0 +2,1,2,1,2,1,14,foo,var2,min,my_min,2000-04-16 00:00:00,2000,4,16,50,3.0 +2,1,2,1,2,1,15,foo,var2,min,my_min,2000-04-16 00:00:00,2000,4,16,50,4.0 +2,1,2,1,2,1,16,foo,var2,min,my_min,2000-04-16 00:00:00,2000,4,16,50,4.0 +2,1,2,1,2,2,1,foo,var2,min,my_min,2000-04-16 00:00:00,2000,4,16,150,1.0 +2,1,2,1,2,2,2,foo,var2,min,my_min,2000-04-16 00:00:00,2000,4,16,150,1.0 +2,1,2,1,2,2,3,foo,var2,min,my_min,2000-04-16 00:00:00,2000,4,16,150,2.0 +2,1,2,1,2,2,4,foo,var2,min,my_min,2000-04-16 00:00:00,2000,4,16,150,2.0 +2,1,2,1,2,2,5,foo,var2,min,my_min,2000-04-16 00:00:00,2000,4,16,150,1.0 
+2,1,2,1,2,2,6,foo,var2,min,my_min,2000-04-16 00:00:00,2000,4,16,150,1.0 +2,1,2,1,2,2,7,foo,var2,min,my_min,2000-04-16 00:00:00,2000,4,16,150,2.0 +2,1,2,1,2,2,8,foo,var2,min,my_min,2000-04-16 00:00:00,2000,4,16,150,2.0 +2,1,2,1,2,2,9,foo,var2,min,my_min,2000-04-16 00:00:00,2000,4,16,150,3.0 +2,1,2,1,2,2,10,foo,var2,min,my_min,2000-04-16 00:00:00,2000,4,16,150,3.0 +2,1,2,1,2,2,11,foo,var2,min,my_min,2000-04-16 00:00:00,2000,4,16,150,4.0 +2,1,2,1,2,2,12,foo,var2,min,my_min,2000-04-16 00:00:00,2000,4,16,150,4.0 +2,1,2,1,2,2,13,foo,var2,min,my_min,2000-04-16 00:00:00,2000,4,16,150,3.0 +2,1,2,1,2,2,14,foo,var2,min,my_min,2000-04-16 00:00:00,2000,4,16,150,3.0 +2,1,2,1,2,2,15,foo,var2,min,my_min,2000-04-16 00:00:00,2000,4,16,150,4.0 +2,1,2,1,2,2,16,foo,var2,min,my_min,2000-04-16 00:00:00,2000,4,16,150,4.0 diff --git a/src/ocgis/test/test_ocgis/test_util/test_shp_process.py b/src/ocgis/test/test_ocgis/test_util/test_shp_process.py index 5cbc433d6..438442002 100644 --- a/src/ocgis/test/test_ocgis/test_util/test_shp_process.py +++ b/src/ocgis/test/test_ocgis/test_util/test_shp_process.py @@ -11,16 +11,16 @@ class TestShpProcess(TestBase): _test_path = '/home/ben.koziol/Dropbox/NESII/project/ocg/bin/test_data/test_shp_process' def test_shp_process(self): - copy_path = os.path.join(self.current_dir_output,'test_shp_process') - shutil.copytree(self._test_path,copy_path) - shp_path = os.path.join(copy_path,'wc_4326.shp') + copy_path = os.path.join(self.current_dir_output, 'test_shp_process') + shutil.copytree(self._test_path, copy_path) + shp_path = os.path.join(copy_path, 'wc_4326.shp') out_folder = tempfile.mkdtemp(dir=self.current_dir_output) - sp = ShpProcess(shp_path,out_folder) - sp.process(key='world_countries',ugid=None) - + sp = ShpProcess(shp_path, out_folder) + sp.process(key='world_countries', ugid=None) + sc = ShpCabinet(path=out_folder) - select_ugid = [33,126,199] - geoms = list(sc.iter_geoms('world_countries',select_ugid=select_ugid)) - 
self.assertEqual(len(geoms),3) + select_ugid = [33, 126, 199] + geoms = list(sc.iter_geoms('world_countries', select_ugid=select_ugid)) + self.assertEqual(len(geoms), 3) names = [item['properties']['NAME'] for item in geoms] - self.assertEqual(set(names),set(['Canada','Mexico','United States'])) + self.assertEqual(set(names), set(['Canada', 'Mexico', 'United States'])) diff --git a/src/ocgis/test/test_simple/run_simple.py b/src/ocgis/test/test_simple/run_simple.py new file mode 100644 index 000000000..a97436b83 --- /dev/null +++ b/src/ocgis/test/test_simple/run_simple.py @@ -0,0 +1,37 @@ +from unittest import TestSuite, TestLoader, TestResult + + +def main(): + modules = ['test_360', 'test_dependencies', 'test_optional_dependencies', 'test_simple'] + simple_suite = TestSuite() + loader = TestLoader() + result = TestResult() + for module in modules: + suite = loader.loadTestsFromName(module) + simple_suite.addTest(suite) + + print + print 'Running simple test suite...' + print + + simple_suite.run(result) + + if len(result.errors) > 0: + print + print '#########################################################' + print 'There are {0} errors. See below for tracebacks:'.format(len(result.errors)) + print '#########################################################' + print + for error in result.errors: + print error[1] + print + print '#########################################################' + print 'There are {0} errors. See above for tracebacks.'.format(len(result.errors)) + print '#########################################################' + else: + print + print 'All tests passed.' 
+ print + +if __name__ == '__main__': + main() \ No newline at end of file diff --git a/src/ocgis/test/test_simple/test_360.py b/src/ocgis/test/test_simple/test_360.py index 9320ecfbc..2a92ffb58 100644 --- a/src/ocgis/test/test_simple/test_360.py +++ b/src/ocgis/test/test_simple/test_360.py @@ -6,157 +6,151 @@ from shapely.geometry.polygon import Polygon from ocgis import env from ocgis.api.interpreter import OcgInterpreter -from ocgis.util.inspect import Inspect import os from ocgis.test.base import TestBase import ocgis from ocgis.util.shp_cabinet import ShpCabinetIterator -from ocgis import RequestDataset class NcSpatial(object): - - def __init__(self,resolution,lat_bnds,lon_bnds): + + def __init__(self, resolution, lat_bnds, lon_bnds): self.resolution = resolution self.lat_bnds = lat_bnds self.lon_bnds = lon_bnds - + @property def shift(self): - return(self.resolution/2.0) - + return self.resolution / 2.0 + @property def lat_values(self): - return(self.get_centroids(self.lat_bnds)) - + return self.get_centroids(self.lat_bnds) + @property def lon_values(self): - return(self.get_centroids(self.lon_bnds)) - + return self.get_centroids(self.lon_bnds) + @property def latb_values(self): - return(self.make_bounds(self.lat_values)) - + return self.make_bounds(self.lat_values) + @property def lonb_values(self): - return(self.make_bounds(self.lon_values)) - - def get_centroids(self,bounds): - return(np.arange(bounds[0]+self.shift,bounds[1]+self.shift,self.resolution,dtype=float)) - - def make_bounds(self,arr): + return self.make_bounds(self.lon_values) + + def get_centroids(self, bounds): + return np.arange(bounds[0] + self.shift, bounds[1] + self.shift, self.resolution, dtype=float) + + def make_bounds(self, arr): lower = arr - self.shift upper = arr + self.shift - bounds = np.hstack((lower.reshape(-1,1),upper.reshape(-1,1))) - return(bounds) + bounds = np.hstack((lower.reshape(-1, 1), upper.reshape(-1, 1))) + return bounds class Test360(TestBase): - + def 
test_high_res(self): ocgis.env.OVERWRITE = True - nc_spatial = NcSpatial(0.5,(-90.0,90.0),(0.0,360.0)) + nc_spatial = NcSpatial(0.5, (-90.0, 90.0), (0.0, 360.0)) path = self.make_data(nc_spatial) - - dataset = {'uri':path,'variable':'foo'} + + dataset = {'uri': path, 'variable': 'foo'} output_format = 'nc' snippet = True geom = self.nebraska - - for s_abstraction in ['point','polygon']: - interface = {'s_abstraction':s_abstraction} - ops = OcgOperations(dataset=dataset,output_format=output_format, - geom=geom,snippet=snippet,abstraction=s_abstraction) - ret = OcgInterpreter(ops).execute() + + for s_abstraction in ['point', 'polygon']: + interface = {'s_abstraction': s_abstraction} + ops = OcgOperations(dataset=dataset, output_format=output_format, geom=geom, snippet=snippet, + abstraction=s_abstraction) + OcgInterpreter(ops).execute() def test_low_res(self): """Test data is appropriate wrapped with a 360 dataset at low resolution.""" ocgis.env.OVERWRITE = True - nc_spatial = NcSpatial(5.0,(-90.0,90.0),(0.0,360.0)) + nc_spatial = NcSpatial(5.0, (-90.0, 90.0), (0.0, 360.0)) path = self.make_data(nc_spatial) - - dataset = {'uri':path,'variable':'foo'} + + dataset = {'uri': path, 'variable': 'foo'} output_format = 'shp' geom = self.nebraska - for s_abstraction in ['point','polygon']: + for s_abstraction in ['point', 'polygon']: ops = OcgOperations(dataset=dataset, output_format=output_format, geom=geom, abstraction=s_abstraction) - ret = OcgInterpreter(ops).execute() - + OcgInterpreter(ops).execute() + @property def nebraska(self): - sci = ShpCabinetIterator('state_boundaries',select_ugid=[16]) + path = os.path.join(self.path_bin, 'shp', 'state_boundaries', 'state_boundaries.shp') + sci = ShpCabinetIterator(path=path, select_ugid=[16]) geom = list(sci) - return(geom) - - def transform_to_360(self,polygon): - + return geom + + def transform_to_360(self, polygon): + def _transform_lon_(ctup): lon = ctup[0] if lon < 180: lon += 360 - return([lon,ctup[1]]) - - transformed 
= map(_transform_lon_,polygon.exterior.coords) + return [lon, ctup[1]] + + transformed = map(_transform_lon_, polygon.exterior.coords) new_polygon = Polygon(transformed) - return(new_polygon) - - def make_variable(self,varname,arr,dimensions,bounds=None): - var = self.ds.createVariable(varname,arr.dtype,dimensions=dimensions) + return new_polygon + + def make_variable(self, varname, arr, dimensions, bounds=None): + var = self.ds.createVariable(varname, arr.dtype, dimensions=dimensions) var[:] = arr if bounds is not None: var.bounds = bounds - return(var) + return var - def make_data(self,nc_spatial): - path = os.path.join(env.DIR_OUTPUT,'test360 {0}.nc'.format(datetime.now())) + def make_data(self, nc_spatial): + path = os.path.join(env.DIR_OUTPUT, 'test360 {0}.nc'.format(datetime.now())) calendar = 'standard' units = 'days since 0000-01-01' - time_values = [datetime(2000,m,15) for m in range(1,13)] - time_values = nc.date2num(time_values,units,calendar=calendar) - - level_values = np.array([100,200]) - - values = np.empty((len(time_values),len(level_values),len(nc_spatial.lat_values),len(nc_spatial.lon_values))) - col_values = np.arange(0,len(nc_spatial.lon_values)) - for ii in range(0,len(nc_spatial.lat_values)): - values[:,:,ii,:] = col_values - values = np.ma.array(values,mask=False,fill_value=1e20) - - self.ds = nc.Dataset(path,'w') + time_values = [datetime(2000, m, 15) for m in range(1, 13)] + time_values = nc.date2num(time_values, units, calendar=calendar) + + level_values = np.array([100, 200]) + + values = np.empty((len(time_values), len(level_values), len(nc_spatial.lat_values), len(nc_spatial.lon_values))) + col_values = np.arange(0, len(nc_spatial.lon_values)) + for ii in range(0, len(nc_spatial.lat_values)): + values[:, :, ii, :] = col_values + values = np.ma.array(values, mask=False, fill_value=1e20) + + self.ds = nc.Dataset(path, 'w') ds = self.ds - - ds.createDimension('d_lat',size=len(nc_spatial.lat_values)) - 
ds.createDimension('d_lon',size=len(nc_spatial.lon_values)) - ds.createDimension('d_bnds',size=2) - ds.createDimension('d_level',size=len(level_values)) - ds.createDimension('d_time',size=len(time_values)) - - self.make_variable('lat',nc_spatial.lat_values,'d_lat',bounds='lat_bnds') - self.make_variable('lon',nc_spatial.lon_values,'d_lon',bounds='lon_bnds') - self.make_variable('lat_bnds',nc_spatial.latb_values,('d_lat','d_bnds')) - self.make_variable('lon_bnds',nc_spatial.lonb_values,('d_lon','d_bnds')) - - v_time = self.make_variable('time',time_values,'d_time') + + ds.createDimension('d_lat', size=len(nc_spatial.lat_values)) + ds.createDimension('d_lon', size=len(nc_spatial.lon_values)) + ds.createDimension('d_bnds', size=2) + ds.createDimension('d_level', size=len(level_values)) + ds.createDimension('d_time', size=len(time_values)) + + self.make_variable('lat', nc_spatial.lat_values, 'd_lat', bounds='lat_bnds') + self.make_variable('lon', nc_spatial.lon_values, 'd_lon', bounds='lon_bnds') + self.make_variable('lat_bnds', nc_spatial.latb_values, ('d_lat', 'd_bnds')) + self.make_variable('lon_bnds', nc_spatial.lonb_values, ('d_lon', 'd_bnds')) + + v_time = self.make_variable('time', time_values, 'd_time') v_time.calendar = calendar v_time.units = units - - self.make_variable('level',level_values,'d_level') - - self.make_variable('foo',values,('d_time','d_level','d_lat','d_lon')) - - self.ds.close() - - return(path) + self.make_variable('level', level_values, 'd_level') + + self.make_variable('foo', values, ('d_time', 'd_level', 'd_lat', 'd_lon')) + + self.ds.close() -if __name__ == "__main__": -# import sys;sys.argv = ['', 'Test360.test_high_res'] - unittest.main() \ No newline at end of file + return path diff --git a/src/ocgis/test/test_simple/test_cfunits.py b/src/ocgis/test/test_simple/test_cfunits.py deleted file mode 100644 index d227dbe3c..000000000 --- a/src/ocgis/test/test_simple/test_cfunits.py +++ /dev/null @@ -1,22 +0,0 @@ -from cfunits import Units 
-from ocgis import RequestDataset -from ocgis.test.test_simple.make_test_data import SimpleNcMultivariate -from ocgis.test.test_simple.test_simple import TestSimpleBase -import numpy as np - - -class TestSimpleMultivariate(TestSimpleBase): - base_value = np.array([[1.0, 1.0, 2.0, 2.0], - [1.0, 1.0, 2.0, 2.0], - [3.0, 3.0, 4.0, 4.0], - [3.0, 3.0, 4.0, 4.0]]) - nc_factory = SimpleNcMultivariate - fn = 'test_simple_multivariate_01.nc' - var = ['foo', 'foo2'] - - def test_variable_has_appropriate_units(self): - """Test multiple variables loaded from a netCDF file are assigned the appropriate units.""" - - field = RequestDataset(**self.get_dataset()).get() - self.assertDictEqual({v.name: v.cfunits for v in field.variables.itervalues()}, - {'foo': Units('K'), 'foo2': Units('mm/s')}) \ No newline at end of file diff --git a/src/ocgis/test/test_simple/test_dependencies.py b/src/ocgis/test/test_simple/test_dependencies.py new file mode 100644 index 000000000..0fc4f0282 --- /dev/null +++ b/src/ocgis/test/test_simple/test_dependencies.py @@ -0,0 +1,9 @@ +from ocgis import CoordinateReferenceSystem +from ocgis.test.base import TestBase + + +class TestDependencies(TestBase): + + def test_osr(self): + crs = CoordinateReferenceSystem(epsg=4326) + self.assertNotEqual(crs.value, {}) diff --git a/src/ocgis/test/test_simple/test_optional_dependencies.py b/src/ocgis/test/test_simple/test_optional_dependencies.py new file mode 100644 index 000000000..87f226844 --- /dev/null +++ b/src/ocgis/test/test_simple/test_optional_dependencies.py @@ -0,0 +1,34 @@ +from copy import deepcopy +from shapely.geometry import Point +from ocgis import RequestDataset, OcgOperations +from ocgis.test.test_simple.make_test_data import SimpleNcNoLevel +from ocgis.test.test_simple.test_simple import TestSimpleBase + + +class TestOptionalDependencies(TestSimpleBase): + nc_factory = SimpleNcNoLevel + fn = 'test_simple_spatial_no_level_01.nc' + + def test_cfunits(self): + from cfunits import Units + + units = 
Units('K') + self.assertEqual(str(units), 'K') + + def test_esmf(self): + rd1 = RequestDataset(**self.get_dataset()) + rd2 = deepcopy(rd1) + ops = OcgOperations(dataset=rd1, regrid_destination=rd2, output_format='nc') + ret = ops.execute() + ignore_attributes = {'time_bnds': ['units', 'calendar'], + 'global': ['history']} + self.assertNcEqual(ret, rd1.uri, ignore_attributes=ignore_attributes) + + def test_rtree(self): + from ocgis.util.spatial.index import SpatialIndex + + geom_mapping = {1: Point(1, 2)} + si = SpatialIndex() + si.add(1, Point(1, 2)) + ret = list(si.iter_intersects(Point(1, 2), geom_mapping)) + self.assertEqual(ret, [1]) \ No newline at end of file From ba8e1d9783d0b6542a797a46d92b93efc9896101 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Thu, 16 Oct 2014 17:05:29 -0600 Subject: [PATCH 07/71] Added "misc" folder to collect things like release checklists. Prefer the "conf" suffix for configuration files so renamed those. --- RELEASE.md => misc/RELEASE.md | 0 RELEASE_CHECKLIST.org => misc/RELEASE_CHECKLIST.org | 0 ocgis.cfg.TEMPLATE => ocgis.conf.TEMPLATE | 0 setup.cfg => setup.conf | 0 4 files changed, 0 insertions(+), 0 deletions(-) rename RELEASE.md => misc/RELEASE.md (100%) rename RELEASE_CHECKLIST.org => misc/RELEASE_CHECKLIST.org (100%) rename ocgis.cfg.TEMPLATE => ocgis.conf.TEMPLATE (100%) rename setup.cfg => setup.conf (100%) diff --git a/RELEASE.md b/misc/RELEASE.md similarity index 100% rename from RELEASE.md rename to misc/RELEASE.md diff --git a/RELEASE_CHECKLIST.org b/misc/RELEASE_CHECKLIST.org similarity index 100% rename from RELEASE_CHECKLIST.org rename to misc/RELEASE_CHECKLIST.org diff --git a/ocgis.cfg.TEMPLATE b/ocgis.conf.TEMPLATE similarity index 100% rename from ocgis.cfg.TEMPLATE rename to ocgis.conf.TEMPLATE diff --git a/setup.cfg b/setup.conf similarity index 100% rename from setup.cfg rename to setup.conf From c663069487c72ac6e7fba9ed51c66c0e338f2483 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Fri, 17 Oct 2014 
10:49:32 -0600 Subject: [PATCH 08/71] added "size" property to test data collection --- src/ocgis/test/base.py | 20 ++++++++++++++++++-- src/ocgis/test/test_base.py | 13 +++++++------ 2 files changed, 25 insertions(+), 8 deletions(-) diff --git a/src/ocgis/test/base.py b/src/ocgis/test/base.py index ff034f8df..318a0d694 100644 --- a/src/ocgis/test/base.py +++ b/src/ocgis/test/base.py @@ -13,6 +13,7 @@ import numpy as np from ocgis.api.request.base import RequestDataset import netCDF4 as nc +from ocgis.util.helpers import get_iter class ToTest(Exception): @@ -311,6 +312,21 @@ def tearDown(self): class TestData(OrderedDict): + @property + def size(self): + """ + :returns: Size of test data in bytes. + :rtype: int + """ + + total = 0 + for key in self.keys(): + path = self.get_uri(key) + # path is returned as a sequence...sometimes + for element in get_iter(path): + total += os.path.getsize(element) + return total + def copy_files(self, dest, verbose=False): """ Copy test files from their source to the base directory ``dest``. The folder hierarchy will be recreated under @@ -370,8 +386,8 @@ def get_rd(self, key, kwds=None): def get_uri(self, key): """ :param str key: The unique identifier to the test dataset. - :returns: The full URI to a dataset. - :rtype: str + :returns: A sequence of URIs for the test dataset selected by key. 
+ :rtype: list[str,] or str :raises: OSError, ValueError """ diff --git a/src/ocgis/test/test_base.py b/src/ocgis/test/test_base.py index d517fa98f..ff123dcf5 100644 --- a/src/ocgis/test/test_base.py +++ b/src/ocgis/test/test_base.py @@ -1,4 +1,3 @@ -import unittest from ocgis.test.base import TestBase import ocgis from unittest.case import SkipTest @@ -14,7 +13,7 @@ def skip(*args): raise SkipTest("long-running test") skip.__name__ = f.__name__ ret = skip - return(ret) + return ret def dev(f): @@ -25,7 +24,7 @@ def skip(*args): raise SkipTest("development-only test") skip.__name__ = f.__name__ ret = skip - return(ret) + return ret class Test(TestBase): @@ -72,6 +71,8 @@ def test_multifile(self): self.assertEqual(len(rd.uri),2) -if __name__ == "__main__": - #import sys;sys.argv = ['', 'Test.testName'] - unittest.main() \ No newline at end of file +class TestTestData(TestBase): + + def test_size(self): + size = self.test_data_nc.size + self.assertGreater(size, 1138333) From 327da8fcaf4dc354a711168063dcbe4651341530 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Fri, 24 Oct 2014 12:06:31 -0600 Subject: [PATCH 09/71] Conflicts: src/ocgis/test/test_ocgis/test_api/test_subset.py --- doc/conf.py | 4 +- doc/install.rst | 8 +- .../install_dependencies_ubuntu.sh | 2 +- .../v0.06b/combinations/parameters.py | 2 +- fabfile/base.py | 161 +++++++++ fabfile/fabfile.py | 310 +++++++++--------- fabfile/helpers.py | 40 +++ fabfile/packages.py | 251 ++++++++++++++ ocgis.conf.TEMPLATE | 29 +- src/ocgis/constants.py | 2 +- src/ocgis/conv/meta.py | 2 +- src/ocgis/conv/nc.py | 2 +- src/ocgis/test/base.py | 169 ++++------ src/ocgis/test/test_base.py | 20 +- src/ocgis/test/test_misc/test_conversion.py | 18 +- .../test_misc/test_dependency_versions.py | 36 ++ .../test_ocgis/test_api/test_interpreter.py | 2 +- .../test_ocgis/test_api/test_operations.py | 100 +++--- .../test_api/test_parms/test_definition.py | 4 +- .../test_api/test_request/test_base.py | 78 +++-- 
.../test_request/test_driver/test_base.py | 8 +- .../test_request/test_driver/test_nc.py | 67 ++-- .../test/test_ocgis/test_api/test_subset.py | 65 ++-- .../test_ocgis/test_calc/test_calc_general.py | 16 +- .../test/test_ocgis/test_calc/test_engine.py | 12 +- .../test_calc/test_eval_function.py | 14 +- .../test_library/test_index/test_duration.py | 76 ++--- .../test_dynamic_kernel_percentile.py | 8 +- .../test_index/test_heat_index.py | 4 +- .../test_calc/test_library/test_math.py | 2 +- .../test_calc/test_library/test_statistics.py | 8 +- .../test_contrib/test_library_icclim.py | 36 +- .../test/test_ocgis/test_conv/test_base.py | 10 +- .../test/test_ocgis/test_conv/test_csv_shp.py | 8 +- .../test/test_ocgis/test_conv/test_fiona_.py | 4 +- .../test/test_ocgis/test_conv/test_meta.py | 2 +- .../test/test_ocgis/test_conv/test_nc.py | 2 +- .../test_interface/test_base/test_crs.py | 12 +- .../test_base/test_dimension/test_spatial.py | 11 +- .../test_base/test_dimension/test_temporal.py | 2 +- .../test_interface/test_base/test_field.py | 2 +- .../test_interface/test_metadata.py | 2 +- .../test/test_ocgis/test_regrid/test_base.py | 18 +- .../test_ocgis/test_util/test_environment.py | 9 +- .../test/test_ocgis/test_util/test_helpers.py | 4 +- .../test_ocgis/test_util/test_large_array.py | 16 +- .../test_util/test_logging_ocgis.py | 2 +- .../test_ocgis/test_util/test_shp_cabinet.py | 2 +- .../test_ocgis/test_util/test_shp_process.py | 6 +- .../test_shp_scanner/test_shp_scanner.py | 4 +- .../test_util/test_spatial/test_index.py | 99 +++--- .../test_spatial/test_spatial_subset.py | 18 +- .../test/test_ocgis/test_util/test_units.py | 4 +- .../test/test_ocgis/test_util/test_zipper.py | 2 +- src/ocgis/test/test_real_data/test_cf.py | 2 +- .../test/test_real_data/test_cf_exceptions.py | 10 +- .../test_real_data/test_multiple_datasets.py | 14 +- src/ocgis/test/test_real_data/test_narccap.py | 22 +- src/ocgis/test/test_real_data/test_package.py | 4 +- 
.../test_real_data/test_random_datasets.py | 221 ++++++------- src/ocgis/util/environment.py | 1 + 61 files changed, 1242 insertions(+), 827 deletions(-) create mode 100644 fabfile/base.py create mode 100644 fabfile/helpers.py create mode 100644 fabfile/packages.py create mode 100644 src/ocgis/test/test_misc/test_dependency_versions.py diff --git a/doc/conf.py b/doc/conf.py index 4b756ccc1..8b0a63c7e 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -51,9 +51,9 @@ # # The short X.Y version. import ocgis -version = str(ocgis.__VER__) +version = str(ocgis.__version__) # The full version, including alpha/beta/rc tags. -release = str(ocgis.__RELEASE__) +release = str(ocgis.__release__) # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/doc/install.rst b/doc/install.rst index 47ae78193..6363c4e06 100644 --- a/doc/install.rst +++ b/doc/install.rst @@ -11,9 +11,9 @@ Required Dependencies Package Name Version URL ============== ======= ======================================================================= Python 2.7.6 http://www.python.org/download/releases/2.7.6/ -``osgeo`` 1.9.1 https://pypi.python.org/pypi/GDAL/ -``shapely`` 1.2 https://pypi.python.org/pypi/Shapely -``fiona`` 1.4.4 https://pypi.python.org/pypi/Fiona +``osgeo`` 1.11.1 https://pypi.python.org/pypi/GDAL/ +``shapely`` 1.4.3 https://pypi.python.org/pypi/Shapely +``fiona`` 1.4.5 https://pypi.python.org/pypi/Fiona ``numpy`` 1.8.2 http://sourceforge.net/projects/numpy/files/NumPy/1.8.2/ ``netCDF4`` 1.1.1 http://unidata.github.io/netcdf4-python/ ============== ======= ======================================================================= @@ -26,7 +26,7 @@ There are two optional dependencies. 
OpenClimateGIS will still operate without t ============= ======== ====================================================== ================================================================================================================================= Package Name Version URL Usage ============= ======== ====================================================== ================================================================================================================================= -``rtree`` 2.7.2 https://pypi.python.org/pypi/Rtree/ Constructs spatial indexes at runtime. Useful for complicated GIS operations (i.e. large or complex polygons for subsetting) +``rtree`` 0.8.0 https://pypi.python.org/pypi/Rtree/ Constructs spatial indexes at runtime. Useful for complicated GIS operations (i.e. large or complex polygons for subsetting) ``cfunits`` 0.9.6 https://code.google.com/p/cfunits-python/ Allows unit transformations for ``conform_units_to`` argument to :class:`~ocgis.RequestDataset` or :class:`~ocgis.OcgOperations`. ``ESMPy`` 6.3.0rp1 https://www.earthsystemcog.org/projects/esmpy/releases Supports regridding operations. 
============= ======== ====================================================== ================================================================================================================================= diff --git a/doc/sphinx_examples/install_dependencies_ubuntu.sh b/doc/sphinx_examples/install_dependencies_ubuntu.sh index f60c9f7ee..ca9cf130d 100644 --- a/doc/sphinx_examples/install_dependencies_ubuntu.sh +++ b/doc/sphinx_examples/install_dependencies_ubuntu.sh @@ -76,6 +76,6 @@ sudo -E make install ## ESMPy install ## -cd $ESMF_INSTALL_PREFIX/esmf/src/addon/ESMPy +cd $ESMF_SRCDIR/esmf/src/addon/ESMPy python setup.py build --ESMFMKFILE=$ESMF_INSTALL_PREFIX/lib/esmf.mk sudo python setup.py install diff --git a/examples/filed/v0.06b/test_scripts/v0.06b/combinations/parameters.py b/examples/filed/v0.06b/test_scripts/v0.06b/combinations/parameters.py index f505248e9..bd499d3b6 100644 --- a/examples/filed/v0.06b/test_scripts/v0.06b/combinations/parameters.py +++ b/examples/filed/v0.06b/test_scripts/v0.06b/combinations/parameters.py @@ -93,7 +93,7 @@ def values(self): # values = [] # -# tdata = TestBase.get_tst_data_nc() +# tdata = TestBase.get_tst_data() # # rd = tdata.get_rd('cancm4_tasmax_2001') # diff --git a/fabfile/base.py b/fabfile/base.py new file mode 100644 index 000000000..5361d1fe8 --- /dev/null +++ b/fabfile/base.py @@ -0,0 +1,161 @@ +import abc +from helpers import fcmd +from fabfile import parser +import os +from fabric.context_managers import cd +from fabric.contrib.files import exists +from fabric.operations import sudo, run + + +class AbstractInstaller(object): + __metaclass__ = abc.ABCMeta + #: Set to a list of string package names to install using apt-get. 
+ apt_packages = None + + @abc.abstractproperty + def prefix(self): + """String prefix for the software.""" + + def __init__(self, version): + self.version = str(version) + + def execute(self): + self.initialize() + self.install() + self.finalize() + self.validate() + + def initialize(self): + if self.apt_packages is not None: + cmd = ['apt-get', '-y', 'install'] + cmd += self.apt_packages + fcmd(sudo, cmd) + + @abc.abstractmethod + def install(self): + """Install the software.""" + + @abc.abstractmethod + def validate(self): + """Command to validate the installation.""" + + def finalize(self): + """Any last steps with the installation.""" + + def _get_filled_path_(self, base_dir): + ret = os.path.join(base_dir, self.prefix, 'v{0}'.format(self.version)) + return ret + + +class AbstractMakeInstaller(AbstractInstaller): + __metaclass__ = abc.ABCMeta + dir_install = parser.get('server', 'dir_install') + dir_src = parser.get('server', 'dir_src') + configure_options = None + make_check = True + j = parser.get('server', 'j') + + @abc.abstractproperty + def template_uncompressed_dir(self): + """The folder name when the WGET file is decompressed.""" + + @abc.abstractproperty + def template_wget_url(self): + """String with prefix and version used to get the WGET target URL.""" + + @property + def configure_cmd(self): + cmd = ['./configure', '--prefix={0}'.format(self.path_install)] + if self.configure_options is not None: + cmd += self.configure_options + return cmd + + @property + def wget_url(self): + return self.template_wget_url.format(version=self.version) + + @property + def path_install(self): + return self._get_filled_path_(self.dir_install) + + @property + def path_src(self): + return self._get_filled_path_(self.dir_src) + + @property + def path_uncompressed_dir(self): + return self.template_uncompressed_dir.format(version=self.version) + + @property + def path_configure(self): + return os.path.join(self.path_src, self.path_uncompressed_dir) + + def 
initialize(self): + super(AbstractMakeInstaller, self).initialize() + + self._make_dir_src_() + + with cd(self.path_src): + # download the source code + cmd = ['wget', self.wget_url] + fcmd(run, cmd) + # uncompress + fn = os.path.split(self.wget_url)[1] + self.uncompress(fn) + + def configure(self): + fcmd(run, self.configure_cmd) + + def install(self): + with cd(self.path_configure): + self.configure() + cmd = ['make', '-j', self.j] + fcmd(run, cmd) + if self.make_check: + fcmd(run, ['make', 'check']) + fcmd(sudo, ['make', 'install']) + + @staticmethod + def uncompress(fn): + cmd = ['tar', '-xzvf', fn] + fcmd(run, cmd) + + def validate(self): + """Checking is implicit to ``install`` command.""" + + def _make_dir_src_(self): + # create the source directory. this should not exists to avoid duplicating installations. + assert not exists(self.path_src) + cmd = ['mkdir', '-p', self.path_src] + fcmd(run, cmd) + + +class AbstractPythonInstaller(AbstractInstaller): + __metaclass__ = abc.ABCMeta + package_name = None + + def validate(self): + cmd = '"import {0}"'.format(self.package_name or self.prefix) + fcmd(run, ['python', '-c', cmd]) + + +class AbstractPipInstaller(AbstractPythonInstaller): + __metaclass__ = abc.ABCMeta + + def install(self): + cmd = ['pip', 'install', '{0}=={1}'.format(self.prefix, self.version)] + fcmd(sudo, cmd) + + +class AbstractSetupInstaller(AbstractPythonInstaller, AbstractMakeInstaller): + __metaclass__ = abc.ABCMeta + + def execute(self): + self.initialize() + self.install() + self.finalize() + self.validate() + + def install(self): + with cd(self.path_configure): + fcmd(sudo, ['python', 'setup.py', 'install']) diff --git a/fabfile/fabfile.py b/fabfile/fabfile.py index f4a78fd08..6515d0ec6 100644 --- a/fabfile/fabfile.py +++ b/fabfile/fabfile.py @@ -1,171 +1,187 @@ -from fabric.contrib.files import append +from ConfigParser import SafeConfigParser +from fabric.contrib.project import rsync_project from fabric.state import env from 
fabric.decorators import task -from fabric.operations import sudo, run +from fabric.operations import sudo, run, put from fabric.context_managers import cd +import os +from helpers import set_rwx_permissions, set_rx_permisions, fcmd, parser +import packages -env.user = 'ubuntu' -env.hosts = ['ec2-54-186-255-159.us-west-2.compute.amazonaws.com'] -env.key_filename = '/home/local/WX/ben.koziol/.ssh/ocgis-bwk.pem' +@task +def deploy_test_local_copy(dest): + from ocgis.test.base import TestBase -env.dir_src = '/usr/local/src' -env.ver_hdf5 = 'hdf5-1.8.12' -env.ver_netcdf4 = 'netcdf-4.3.1.1' -env.ver_netcdf4_python = 'v1.0.8rel' -env.ver_cfunits_python = 'cfunits-0.9.6' -env.ver_ocgis = 'v0.07.1b' + dest = os.path.expanduser(dest) + if not os.path.exists(dest): + os.path.makedirs(dest) + tdata = TestBase.get_tst_data() + tdata.copy_files(dest, verbose=True) -def tfs(sequence): - return(' '.join(sequence)) +@task() +def deploy_test_rsync(): + remote_dir = parser.get('server', 'dir_data') + local_dir = os.getenv('OCGIS_DIR_TEST_DATA') + # we want to create the local test directory on the remote server: + # http://docs.fabfile.org/en/latest/api/contrib/project.html#fabric.contrib.project.rsync_project + assert not local_dir.endswith('/') -def install_pip_package(pip_name): + # update permissions so files may be copied + local_dir_name = os.path.split(local_dir)[1] + test_data_path = os.path.join(remote_dir, local_dir_name) + set_rwx_permissions(test_data_path) + try: + # synchronize the project + rsync_project(remote_dir, local_dir=local_dir) + finally: + # remove write permissions on the files/directories + set_rx_permisions(test_data_path) +@task +def ebs_mkfs(): + """Make a file system on a newly attached device.""" + + cmd = ['mkfs', '-t', 'ext4', parser.get('aws', 'ebs_mount_name')] + fcmd(sudo, cmd) -def install_apt_package(name): - """ - :type name: str - :type name: list - """ - base = ['apt-get', '-y', 'install'] - if isinstance(name, basestring): - 
base.append(name) - else: - base += list(name) - sudo(tfs(base)) - - -@task(default=False) -def deploy_build_from_source(): - upgrade() - install_apt_packages() - install_pip_python_libraries() - install_hdf5() - install_netcdf4() - install_netcdf4_python() - install_cfunits_python() - install_icclim() - install_ocgis() @task -def deploy_simple(): - upgrade() - install_apt_packages_simple() +def ebs_mount(): + """Mount an EBS volume.""" + + cmd = ['mount', parser.get('aws', 'ebs_mount_name'), parser.get('server', 'dir_data')] + fcmd(sudo, cmd) @task -def upgrade(): - sudo('apt-get update') - sudo('apt-get upgrade -y') - -@task -def install_hdf5(): - url = 'http://www.hdfgroup.org/ftp/HDF5/current/src/{0}.tar.gz'.format(env.ver_hdf5) - tar = '{0}.tar.gz'.format(env.ver_hdf5) - sudo('mkdir -p '+env.dir_src) - with cd(env.dir_src): - sudo('wget '+url) - sudo('tar -xzvf '+tar) - with cd(env.ver_hdf5): - sudo('./configure --prefix=/usr/local --enable-shared --enable-hl') - sudo('make') - sudo('make install') - -@task -def install_netcdf4(): - url = 'ftp://ftp.unidata.ucar.edu/pub/netcdf/{0}.tar.gz'.format(env.ver_netcdf4) - tar = '{0}.tar.gz'.format(env.ver_netcdf4) - sudo('mkdir -p '+env.dir_src) - with cd(env.dir_src): - sudo('wget '+url) - sudo('tar -xzvf '+tar) - with cd(env.ver_netcdf4): - sudo('LDFLAGS=-L/usr/local/lib CPPFLAGS=-I/usr/local/include') - sudo('./configure --enable-netcdf-4 --enable-dap --enable-shared --prefix=/usr/local') - sudo('make') - sudo('make install') -# sudo('make check') - -@task -def install_netcdf4_python(): - url = 'https://github.com/Unidata/netcdf4-python/archive/{0}.tar.gz'.format(env.ver_netcdf4_python) - tar = '{0}.tar.gz'.format(env.ver_netcdf4_python) - sudo('mkdir -p '+env.dir_src) - with cd(env.dir_src): - sudo('wget '+url) - sudo('tar -xzvf '+tar) - with cd('netcdf4-python-{0}'.format(env.ver_netcdf4_python[1:])): - sudo('ldconfig') - sudo('python setup.py install') - -@task -def install_cfunits_python(): - sudo('apt-get 
install -y libudunits2-0') - sudo('mkdir -p '+env.dir_src) - url = 'https://cfunits-python.googlecode.com/files/{0}.tar.gz'.format(env.ver_cfunits_python) - tar = '{0}.tar.gz'.format(env.ver_cfunits_python) - with cd(env.dir_src): - sudo('wget '+url) - sudo('tar -xzvf '+tar) - with cd(env.ver_cfunits_python): - sudo('python setup.py install') - sudo('cp -r cfunits/etc /usr/local/lib/python2.7/dist-packages/cfunits') - -@task -def install_pip_python_libraries(): - sudo('pip install shapely') - sudo('pip install fiona') - sudo('pip install nose') - +def list_storage(): + """List storage size of connected devices.""" + + fcmd(run, ['lsblk']) + @task -def install_ocgis(): - sudo('mkdir -p '+env.dir_src) - with cd(env.dir_src): - sudo('git clone https://github.com/NCPP/ocgis.git') - with cd('ocgis'): - sudo('python setup.py install') - +def put_file(local_path, remote_path): + put(local_path=local_path, remote_path=remote_path) + + @task -def install_icclim(): - sudo('mkdir -p '+env.dir_src) - with cd(env.dir_src): - sudo('git clone https://github.com/tatarinova/icclim.git') - with cd('icclim'): - sudo('gcc -fPIC -g -c -Wall ./icclim/libC.c -o ./icclim/libC.o') - sudo('gcc -shared -o ./icclim/libC.so ./icclim/libC.o') - sudo('python setup.py install') +def remove_dir(path, use_sudo='false'): + """Remove the source directory.""" + + cmd = ['rm', '-r', path] + if use_sudo == 'true': + fmeth = sudo + elif use_sudo == 'false': + fmeth = run + else: + raise NotImplementedError(use_sudo) + + fcmd(fmeth, cmd) + @task -def install_apt_packages(): - cmd = ['apt-get','-y','install','g++','libz-dev','curl','wget','python-dev', - 'python-pip','libgdal-dev','ipython','python-gdal','git'] - sudo(' '.join(cmd)) +def run_tests(target='all', branch='next', failed='false'): + """ + Run unit tests on remote server. + + :param str target: The test target. Options are: + * 'all' = Run all tests. + * 'simple' = Run simple test suite. + :param str branch: The target GitHub branch. 
+ :param str failed: If ``'true'``, run only failed tests. + :raises: NotImplementedError + """ + + path = os.path.join(parser.get('server', 'dir_clone'), parser.get('git', 'name')) + + if target == 'simple': + test_target = os.path.join(path, 'src', 'ocgis', 'test', 'test_simple') + elif target == 'all': + test_target = os.path.join(path, 'src', 'ocgis', 'test') + else: + raise NotImplementedError(target) + + with cd(path): + fcmd(run, ['git', 'pull']) + fcmd(run, ['git', 'checkout', branch]) + fcmd(run, ['git', 'pull']) + + cmd = ['nosetests', '-sv', '--with-id', test_target] + if failed == 'true': + cmd.insert(-1, '--failed') + elif failed == 'false': + pass + else: + raise NotImplementedError(failed) + + fcmd(run, cmd) + @task -def install_virtual_environment(): - install_apt_package('python-dev') - install_apt_package('python-pip') - install_pip_package('virtualenv') - install_pip_package('virtualenvwrapper') - - ## append environment information to profile file - lines = [ - '', - '# Set the location where the virtual environments are stored', - 'export WORKON_HOME=~/.virtualenvs', - '# Use the virtual environment wrapper scripts', - 'source /usr/local/bin/virtualenvwrapper.sh', - '# Tell pip to only run if there is a virtualenv currently activated', - 'export PIP_REQUIRE_VIRTUALENV=false', - '# Tell pip to automatically use the currently active virtualenv', - 'export PIP_RESPECT_VIRTUALENV=true', - '# Tell pip to use virtual environment wrapper storage location', - 'export PIP_VIRTUALENV_BASE=$WORKON_HOME', - ] - append('~/.profile', lines) - run(tfs(['source', '~/.profile'])) - - run(tfs(['mkvirtualenv', env.cfg['venv_name']])) \ No newline at end of file +def install_dependencies(): + # packages.NumpyInstaller('1.8.2').execute() + + hdf5 = packages.HDF5Installer('1.8.13') + # hdf5.execute() + + netcdf4 = packages.NetCDF4Installer('4.3.2', hdf5) + # netcdf4.execute() + + # packages.NetCDF4PythonInstaller('1.1.1', netcdf4).execute() + + geos = 
packages.GeosInstaller('3.4.2') + # geos.execute() + + proj4 = packages.Proj4Installer('4.8.0', '1.5') + # proj4.execute() + + gdal = packages.GDALInstaller('1.11.1', geos, proj4) + # gdal.execute() + + # packages.CythonInstaller('0.21.1').execute() + + # packages.ShapelyInstaller('1.4.3', geos).execute() + + # packages.FionaInstaller('1.4.5', gdal).execute() + + # packages.RtreeInstaller('0.8.0').execute() + + # packages.CFUnitsInstaller('0.9.6').execute() + + esmf = packages.ESMFInstaller('6.3.0rp1') + # esmf.execute() + + # packages.ESMPyInstaller(esmf).execute() + + packages.IcclimInstaller(branch_name='master').execute() + + +# @task +# def install_virtual_environment(): +# install_apt_package('python-dev') +# install_apt_package('python-pip') +# install_pip_package('virtualenv') +# install_pip_package('virtualenvwrapper') +# +# ## append environment information to profile file +# lines = [ +# '', +# '# Set the location where the virtual environments are stored', +# 'export WORKON_HOME=~/.virtualenvs', +# '# Use the virtual environment wrapper scripts', +# 'source /usr/local/bin/virtualenvwrapper.sh', +# '# Tell pip to only run if there is a virtualenv currently activated', +# 'export PIP_REQUIRE_VIRTUALENV=false', +# '# Tell pip to automatically use the currently active virtualenv', +# 'export PIP_RESPECT_VIRTUALENV=true', +# '# Tell pip to use virtual environment wrapper storage location', +# 'export PIP_VIRTUALENV_BASE=$WORKON_HOME', +# ] +# append('~/.profile', lines) +# run(tfs(['source', '~/.profile'])) +# +# run(tfs(['mkvirtualenv', env.cfg['venv_name']])) diff --git a/fabfile/helpers.py b/fabfile/helpers.py new file mode 100644 index 000000000..ec8e6ec25 --- /dev/null +++ b/fabfile/helpers.py @@ -0,0 +1,40 @@ +from ConfigParser import SafeConfigParser +from fabric.operations import local, sudo +from fabric.state import env +import os + + +conf_path = os.getenv('OCGIS_CONF_PATH') +parser = SafeConfigParser() +parser.read(conf_path) + +env.user = 
parser.get('fabric', 'user') +env.hosts = [parser.get('fabric', 'hosts')] +env.key_filename = parser.get('fabric', 'key_filename') + + +def tfs(sequence): + return ' '.join(sequence) + + +def fcmd(name, cmd): + return name(tfs(cmd)) + + +def fecho(msg): + local('echo "{0}"'.format(msg), capture=True) + + +def set_rwx_permissions(path): + # set the owner for the files to the environment user + fcmd(sudo, ['chown', '-R', env.user, path]) + # set read, write, execute... + fcmd(sudo, ['chmod', '-R', 'u=rwx', path]) + + +def set_rx_permisions(path): + # set the owner for the files to the environment user + fcmd(sudo, ['chown', '-R', env.user, path]) + # set read, execute... + fcmd(sudo, ['chmod', '-R', 'a-rwx', path]) + fcmd(sudo, ['chmod', '-R', 'ug=rwx', path]) \ No newline at end of file diff --git a/fabfile/packages.py b/fabfile/packages.py new file mode 100644 index 000000000..6e2f2f85d --- /dev/null +++ b/fabfile/packages.py @@ -0,0 +1,251 @@ +import os + +from fabric.context_managers import cd, shell_env, quiet, path +from fabric.contrib.files import append +from fabric.operations import sudo, run, require, prompt + +from base import AbstractMakeInstaller, AbstractPipInstaller, AbstractSetupInstaller, AbstractInstaller + +from helpers import fcmd, parser + + +class NumpyInstaller(AbstractPipInstaller): + prefix = 'numpy' + apt_packages = ['python-pip', 'python-dev'] + + +class HDF5Installer(AbstractMakeInstaller): + prefix = 'hdf5' + apt_packages = ['zlib1g-dev'] + configure_options = ['--enable-shared', '--enable-hl'] + template_wget_url = 'http://www.hdfgroup.org/ftp/HDF5/releases/hdf5-{version}/src/hdf5-{version}.tar.gz' + template_uncompressed_dir = 'hdf5-{version}' + + +class NetCDF4Installer(AbstractMakeInstaller): + prefix = 'netcdf4' + apt_packages = ['libcurl4-openssl-dev'] + configure_options = ['--enable-netcdf-4', '--enable-dap', '--enable-utilities'] + template_wget_url = 'ftp://ftp.unidata.ucar.edu/pub/netcdf/netcdf-{version}.tar.gz' + 
template_uncompressed_dir = 'netcdf-{version}' + + def __init__(self, version, hdf5): + super(NetCDF4Installer, self).__init__(version) + self.hdf5 = hdf5 + + def configure(self): + kwds = {'CPPFLAGS': '-I{0}'.format(os.path.join(self.hdf5.path_install, 'include')), + 'LDFLAGS': '-L{0}'.format(os.path.join(self.hdf5.path_install, 'lib'))} + with shell_env(**kwds): + super(NetCDF4Installer, self).configure() + + +class NetCDF4PythonInstaller(AbstractPipInstaller): + prefix = 'netCDF4' + + def __init__(self, version, netcdf4): + super(NetCDF4PythonInstaller, self).__init__(version) + self.netcdf4 = netcdf4 + + def install(self): + kwds = {'HDF5_DIR': self.netcdf4.hdf5.path_install, + 'NETCDF4_DIR': self.netcdf4.path_install} + with shell_env(**kwds): + super(NetCDF4PythonInstaller, self).install() + + +class GeosInstaller(AbstractMakeInstaller): + prefix = 'geos' + template_wget_url = 'http://download.osgeo.org/geos/geos-{version}.tar.bz2' + template_uncompressed_dir = 'geos-{version}' + + @staticmethod + def uncompress(fn): + cmd = ['tar', '-xjf', fn] + fcmd(run, cmd) + + +class Proj4Installer(AbstractMakeInstaller): + prefix = 'proj4' + template_wget_url = 'http://download.osgeo.org/proj/proj-{version}.tar.gz' + template_wget_url_datum_grid = 'http://download.osgeo.org/proj/proj-datumgrid-{0}.tar.gz' + template_uncompressed_dir = 'proj-{version}' + + def __init__(self, version, version_datum_grid): + super(Proj4Installer, self).__init__(version) + self.version_datum_grid = version_datum_grid + + def initialize(self): + super(Proj4Installer, self).initialize() + with cd(self.path_src): + wget_url_datum_grid = self.template_wget_url_datum_grid.format(self.version_datum_grid) + cmd = ['wget', wget_url_datum_grid] + fcmd(run, cmd) + cmd = ['tar', '-xzvf', os.path.split(wget_url_datum_grid)[1], '-C', os.path.join(self.path_uncompressed_dir, 'nad')] + fcmd(run, cmd) + + +class GDALInstaller(AbstractMakeInstaller): + prefix = 'gdal' + configure_options = ['--with-python'] 
+ template_wget_url = 'http://download.osgeo.org/gdal/{version}/gdal-{version}.tar.gz' + template_uncompressed_dir = 'gdal-{version}' + make_check = False + + def __init__(self, version, geos, proj4): + super(GDALInstaller, self).__init__(version) + self.geos = geos + self.proj4 = proj4 + self._ld_preload = False + + @property + def configure_cmd(self): + ret = super(GDALInstaller, self).configure_cmd + with_geos = os.path.join(self.geos.path_install, 'bin', 'geos-config') + static_proj4 = self.proj4.path_install + ret += ['--with-geos={0}'.format(with_geos), '--with-static-proj4={0}'.format(static_proj4)] + return ret + + def finalize(self): + try: + with quiet(): + require('LD_PRELOAD') + raise NotImplementedError + except SystemExit: + # add the gdal library to the LD_PRELOAD_PATH to ensure osgeo may find the shared library + lib_path = os.path.join(self.path_install, 'lib', 'libgdal.so.1') + append('~/.bashrc', '\n# for some reason, the python osgeo library needs this to link properly') + append('~/.bashrc', 'export LD_PRELOAD={0}\n'.format(lib_path)) + prompt('The .bashrc file on the remote server needs to be sourced before osgeo is available. 
Press Enter to continue.') + + +class CythonInstaller(AbstractPipInstaller): + prefix = 'cython' + + +class ShapelyInstaller(AbstractPipInstaller): + prefix = 'shapely' + + def __init__(self, version, geos): + self.version = version + self.geos = geos + + def install(self): + kwds = {'CPPFLAGS': '-I{0}'.format(os.path.join(self.geos.path_install, 'include')), + 'LDFLAGS': '-L{0}'.format(os.path.join(self.geos.path_install, 'lib'))} + with shell_env(**kwds): + super(ShapelyInstaller, self).install() + + +class FionaInstaller(AbstractPipInstaller): + prefix = 'fiona' + + def __init__(self, version, gdal): + self.version = version + self.gdal = gdal + + def install(self): + kwds = {'CPPFLAGS': '-I{0}'.format(os.path.join(self.gdal.path_install, 'include')), + # 'LDFLAGS': '-L{0}'.format(os.path.join(self.geos.path_install, 'lib')) + } + with path(os.path.join(self.gdal.path_install, 'bin')): + with shell_env(**kwds): + super(FionaInstaller, self).install() + + def validate(self): + """Requires the LD_PRELOAD path to gdal/lib/libgdal.so.1 be set.""" + + +class RtreeInstaller(AbstractPipInstaller): + prefix = 'rtree' + apt_packages = ['libspatialindex-dev'] + + +class CFUnitsInstaller(AbstractSetupInstaller): + prefix = 'cfunits' + apt_packages = ['libudunits2-0'] + template_wget_url = 'https://cfunits-python.googlecode.com/files/cfunits-{version}.tar.gz' + template_uncompressed_dir = 'cfunits-{version}' + cfunits_install_dir = '/usr/local/lib/python2.7/dist-packages/cfunits' + + def finalize(self): + src = os.path.join(self.path_configure, 'cfunits', 'etc') + cmd = ['cp', '-r', src, self.cfunits_install_dir] + fcmd(sudo, cmd) + + +class ESMFInstaller(AbstractMakeInstaller): + prefix = 'esmf' + apt_packages = ['gfortran', 'g++'] + template_esmf_targz = '/home/ubuntu/htmp/esmf_{version}_src.tar.gz' + template_uncompressed_dir = 'esmf' + template_wget_url = None + + @property + def path_esmf_targz(self): + return 
self.template_esmf_targz.format(version=self.version.replace('.', '_')) + + def initialize(self): + AbstractInstaller.initialize(self) + self._make_dir_src_() + with cd(self.path_src): + cmd = ['cp', self.path_esmf_targz, self.path_src] + fcmd(run, cmd) + tar_name = os.path.split(self.path_esmf_targz)[1] + self.uncompress(tar_name) + + def install(self): + kwds = {'ESMF_DIR': self.path_configure, + 'ESMF_INSTALL_PREFIX': self.path_install, + 'ESMF_INSTALL_LIBDIR': os.path.join(self.path_install, 'lib')} + with shell_env(**kwds): + with cd(self.path_configure): + cmd = ['make', '-j', self.j] + fcmd(run, cmd) + cmd = ['sudo', '-E', 'make', 'install'] + fcmd(run, cmd) + + +class ESMPyInstaller(AbstractInstaller): + prefix = 'ESMF' + + def __init__(self, esmf): + super(ESMPyInstaller, self).__init__(esmf.version) + self.esmf = esmf + + def install(self): + path_esmpy_src = os.path.join(self.esmf.path_src, 'esmf', 'src', 'addon', 'ESMPy') + esmfmkfile = os.path.join(self.esmf.path_install, 'lib', 'esmf.mk') + with cd(path_esmpy_src): + cmd = ['python', 'setup.py', 'build', '--ESMFMKFILE={0}'.format(esmfmkfile)] + fcmd(run, cmd) + cmd = ['python', 'setup.py', 'install'] + fcmd(sudo, cmd) + + def validate(self): + cmd = ['python', '-c', '"import {0}"'.format(self.prefix)] + fcmd(run, cmd) + + +class IcclimInstaller(AbstractInstaller): + prefix = 'icclim' + git_url = 'https://github.com/tatarinova/icclim.git' + apt_packages = ['git'] + + def __init__(self, branch_name='master'): + self.branch_name = branch_name + + def install(self): + with cd(parser.get('server', 'dir_clone')): + cmd = ['git', 'clone', self.git_url] + fcmd(run, cmd) + with cd(self.prefix): + run('git checkout {0}'.format(self.branch_name)) + run('git pull') + sudo('gcc -fPIC -g -c -Wall ./icclim/libC.c -o ./icclim/libC.o') + sudo('gcc -shared -o ./icclim/libC.so ./icclim/libC.o') + sudo('python setup.py install') + + def validate(self): + cmd = ['python', '-c', '"import {0}"'.format(self.prefix)] + 
fcmd(run, cmd) diff --git a/ocgis.conf.TEMPLATE b/ocgis.conf.TEMPLATE index 413e64bf0..743b2779c 100644 --- a/ocgis.conf.TEMPLATE +++ b/ocgis.conf.TEMPLATE @@ -1,21 +1,28 @@ [fabric] -host = +hosts = key_filename = user = + +[server] venv_name = -clone_dir = +dir_clone = +dir_src = +dir_data = +j = 2 [git] url = https://github.com/NCPP/ocgis.git branch = master +name = ocgis [aws] -instance_name = -instance_id = -aws_access_key_id = -aws_secret_access_key = -key_name = -region = -image_id = -security_group = -instance_type = +instance_name = +instance_id = +aws_access_key_id = +aws_secret_access_key = +key_name = +region = +image_id = +security_group = +instance_type = +ebs_mount_name = \ No newline at end of file diff --git a/src/ocgis/constants.py b/src/ocgis/constants.py index 56cf5a89f..35c41848b 100644 --- a/src/ocgis/constants.py +++ b/src/ocgis/constants.py @@ -54,5 +54,5 @@ prime_meridian = 179.9999999999999 -test_run_long_tests = True +test_run_long_tests = False test_run_dev_tests = False diff --git a/src/ocgis/conv/meta.py b/src/ocgis/conv/meta.py index c18971b5f..a7cc63356 100644 --- a/src/ocgis/conv/meta.py +++ b/src/ocgis/conv/meta.py @@ -34,7 +34,7 @@ def __init__(self,ops): self.ops = ops def get_rows(self): - lines = ['OpenClimateGIS v{0} Metadata File'.format(ocgis.__RELEASE__)] + lines = ['OpenClimateGIS v{0} Metadata File'.format(ocgis.__release__)] lines.append(' Generated (UTC): {0}'.format(datetime.datetime.utcnow())) lines.append('') if self.ops.output_format != 'meta': diff --git a/src/ocgis/conv/nc.py b/src/ocgis/conv/nc.py index 1840fb53f..774897e66 100644 --- a/src/ocgis/conv/nc.py +++ b/src/ocgis/conv/nc.py @@ -206,7 +206,7 @@ def _make_spatial_variable_(ds,name,values,dimension_tuple,meta): arch.spatial.crs.write_to_rootgrp(ds, meta) ## append to the history attribute - history_str = '\n{dt} UTC ocgis-{release}'.format(dt=datetime.datetime.utcnow(), release=ocgis.__RELEASE__) + history_str = '\n{dt} UTC 
ocgis-{release}'.format(dt=datetime.datetime.utcnow(), release=ocgis.__release__) if self.ops is not None: history_str += ': {0}'.format(self.ops) original_history_str = ds.__dict__.get('history', '') diff --git a/src/ocgis/test/base.py b/src/ocgis/test/base.py index 318a0d694..4b2e477e6 100644 --- a/src/ocgis/test/base.py +++ b/src/ocgis/test/base.py @@ -37,7 +37,7 @@ class TestBase(unittest.TestCase): _prefix_path_test = 'ocgis_test_' def __init__(self, *args, **kwargs): - self.test_data_nc = self.get_tst_data_nc() + self.test_data = self.get_tst_data() self.current_dir_output = None self.ToTest = ToTest super(TestBase, self).__init__(*args, **kwargs) @@ -245,7 +245,7 @@ def get_temporary_output_directory(self): return tempfile.mkdtemp(prefix=self._prefix_path_test) @staticmethod - def get_tst_data_nc(): + def get_tst_data(): """ :returns: A dictionary-like object with special access methods for test files. :rtype: :class:`ocgis.test.base.TestData` @@ -253,36 +253,49 @@ def get_tst_data_nc(): test_data = TestData() - test_data.update(['CMIP3'], 'Tavg', 'Extraction_Tavg.nc', key='cmip3_extraction') - test_data.update(['CanCM4'], 'rhs', 'rhs_day_CanCM4_decadal2010_r2i1p1_20110101-20201231.nc', key='cancm4_rhs') - test_data.update(['CanCM4'], 'rhsmax', 'rhsmax_day_CanCM4_decadal2010_r2i1p1_20110101-20201231.nc', key='cancm4_rhsmax') - test_data.update(['CanCM4'], 'tas', 'tas_day_CanCM4_decadal2000_r2i1p1_20010101-20101231.nc', key='cancm4_tas') - test_data.update(['CanCM4'], 'tasmax', 'tasmax_day_CanCM4_decadal2000_r2i1p1_20010101-20101231.nc', key='cancm4_tasmax_2001') - test_data.update(['CanCM4'], 'tasmax', 'tasmax_day_CanCM4_decadal2010_r2i1p1_20110101-20201231.nc', key='cancm4_tasmax_2011') - test_data.update(['CanCM4'], 'tasmin', 'tasmin_day_CanCM4_decadal2000_r2i1p1_20010101-20101231.nc', key='cancm4_tasmin_2001') - test_data.update(['daymet'], 'tmax', 'tmax.nc', key='daymet_tmax') - test_data.update(['maurer', '2010'], 'pr', 
['nldas_met_update.obs.daily.pr.1990.nc', 'nldas_met_update.obs.daily.pr.1991.nc'], key='maurer_2010_pr') - test_data.update(['maurer', '2010'], 'tas', ['nldas_met_update.obs.daily.tas.1990.nc', 'nldas_met_update.obs.daily.tas.1991.nc'], key='maurer_2010_tas') - test_data.update(['maurer', '2010'], 'tasmax', ['nldas_met_update.obs.daily.tasmax.1990.nc', 'nldas_met_update.obs.daily.tasmax.1991.nc'], key='maurer_2010_tasmax') - test_data.update(['maurer', '2010'], 'tasmin', ['nldas_met_update.obs.daily.tasmin.1990.nc', 'nldas_met_update.obs.daily.tasmin.1991.nc'], key='maurer_2010_tasmin') - test_data.update(['maurer', 'bccr'], 'Prcp', 'bccr_bcm2_0.1.sresa1b.monthly.Prcp.1950.nc', key='maurer_bccr_1950') - test_data.update(['misc', 'month_in_time_units'], 'clt', 'clt.nc', key='clt_month_units') - test_data.update(['misc', 'rotated_pole'], 'pr', 'pr_EUR-11_CNRM-CERFACS-CNRM-CM5_historical_r1i1p1_CLMcom-CCLM4-8-17_v1_mon_198101-199012.nc', key='rotated_pole_cnrm_cerfacs') - test_data.update(['misc', 'rotated_pole'], 'tas', 'tas_EUR-44_CCCma-CanESM2_rcp85_r1i1p1_SMHI-RCA4_v1_sem_209012-210011.nc', key='rotated_pole_cccma') - test_data.update(['misc', 'rotated_pole'], 'tas', 'tas_EUR-44_ICHEC-EC-EARTH_historical_r12i1p1_SMHI-RCA4_v1_day_19710101-19751231.nc', key='rotated_pole_ichec') - test_data.update(['misc', 'subset_test'], 'Prcp', 'sresa2.ncar_pcm1.3.monthly.Prcp.RAW.1950-2099.nc', key='subset_test_Prcp') - test_data.update(['misc', 'subset_test'], 'Tavg', 'Tavg_bccr_bcm2_0.1.sresa2.nc', key='subset_test_Tavg') - test_data.update(['misc', 'subset_test'], 'Tavg', 'sresa2.bccr_bcm2_0.1.monthly.Tavg.RAW.1950-2099.nc', key='subset_test_Tavg_sresa2') - test_data.update(['narccap'], 'pr', 'pr_CRCM_ccsm_1981010103.nc', key='narccap_crcm') - test_data.update(['narccap'], 'pr', 'pr_CRCM_ccsm_1981010103.nc', key='narccap_polar_stereographic') - test_data.update(['narccap'], 'pr', 'pr_HRM3_gfdl_1981010103.nc', key='narccap_hrm3') - test_data.update(['narccap'], 'pr', 
'pr_RCM3_gfdl_1981010103.nc', key='narccap_rcm3') - test_data.update(['narccap'], 'pr', 'pr_WRFG_ccsm_1986010103.nc', key='narccap_lambert_conformal') - test_data.update(['narccap'], 'pr', 'pr_WRFG_ccsm_1986010103.nc', key='narccap_wrfg') - test_data.update(['narccap'], 'pr', ['pr_WRFG_ncep_1981010103.nc', 'pr_WRFG_ncep_1986010103.nc'], key='narccap_pr_wrfg_ncep') - test_data.update(['narccap'], 'tas', 'tas_HRM3_gfdl_1981010103.nc', key='narccap_rotated_pole') - test_data.update(['narccap'], 'tas', 'tas_RCM3_gfdl_1981010103.nc', key='narccap_tas_rcm3_gfdl') - test_data.update(['snippets'], 'dtr', 'snippet_Maurer02new_OBS_dtr_daily.1971-2000.nc', key='snippet_maurer_dtr') + test_data.update(['nc', 'CMIP3'], 'Tavg', 'Extraction_Tavg.nc', key='cmip3_extraction') + test_data.update(['nc', 'CanCM4'], 'rhs', 'rhs_day_CanCM4_decadal2010_r2i1p1_20110101-20201231.nc', key='cancm4_rhs') + test_data.update(['nc', 'CanCM4'], 'rhsmax', 'rhsmax_day_CanCM4_decadal2010_r2i1p1_20110101-20201231.nc', key='cancm4_rhsmax') + test_data.update(['nc', 'CanCM4'], 'tas', 'tas_day_CanCM4_decadal2000_r2i1p1_20010101-20101231.nc', key='cancm4_tas') + test_data.update(['nc', 'CanCM4'], 'tasmax', 'tasmax_day_CanCM4_decadal2000_r2i1p1_20010101-20101231.nc', key='cancm4_tasmax_2001') + test_data.update(['nc', 'CanCM4'], 'tasmax', 'tasmax_day_CanCM4_decadal2010_r2i1p1_20110101-20201231.nc', key='cancm4_tasmax_2011') + test_data.update(['nc', 'CanCM4'], 'tasmin', 'tasmin_day_CanCM4_decadal2000_r2i1p1_20010101-20101231.nc', key='cancm4_tasmin_2001') + test_data.update(['nc', 'daymet'], 'tmax', 'tmax.nc', key='daymet_tmax') + test_data.update(['nc', 'maurer', '2010'], 'pr', ['nldas_met_update.obs.daily.pr.1990.nc', 'nldas_met_update.obs.daily.pr.1991.nc'], key='maurer_2010_pr') + test_data.update(['nc', 'maurer', '2010'], 'tas', ['nldas_met_update.obs.daily.tas.1990.nc', 'nldas_met_update.obs.daily.tas.1991.nc'], key='maurer_2010_tas') + test_data.update(['nc', 'maurer', '2010'], 'tasmax', 
['nldas_met_update.obs.daily.tasmax.1990.nc', 'nldas_met_update.obs.daily.tasmax.1991.nc'], key='maurer_2010_tasmax') + test_data.update(['nc', 'maurer', '2010'], 'tasmin', ['nldas_met_update.obs.daily.tasmin.1990.nc', 'nldas_met_update.obs.daily.tasmin.1991.nc'], key='maurer_2010_tasmin') + test_data.update(['nc', 'maurer', '2010-concatenated'], 'tasmax', 'Maurer02new_OBS_tasmax_daily.1971-2000.nc', key='maurer_2010_concatenated_tasmax') + test_data.update(['nc', 'maurer', '2010-concatenated'], 'tasmin', 'Maurer02new_OBS_tasmin_daily.1971-2000.nc', key='maurer_2010_concatenated_tasmin') + test_data.update(['nc', 'maurer', '2010-concatenated'], 'tas', 'Maurer02new_OBS_tas_daily.1971-2000.nc', key='maurer_2010_concatenated_tas') + test_data.update(['nc', 'maurer', '2010-concatenated'], 'pr', 'Maurer02new_OBS_pr_daily.1971-2000.nc', key='maurer_2010_concatenated_pr') + test_data.update(['nc', 'maurer', 'bcca'], 'tasmax', 'gridded_obs.tasmax.OBS_125deg.daily.1991.nc', key='maurer_bcca_1991') + test_data.update(['nc', 'maurer', 'bccr'], 'Prcp', 'bccr_bcm2_0.1.sresa1b.monthly.Prcp.1950.nc', key='maurer_bccr_1950') + test_data.update(['nc', 'misc', 'month_in_time_units'], 'clt', 'clt.nc', key='clt_month_units') + test_data.update(['nc', 'misc', 'rotated_pole'], 'pr', 'pr_EUR-11_CNRM-CERFACS-CNRM-CM5_historical_r1i1p1_CLMcom-CCLM4-8-17_v1_mon_198101-199012.nc', key='rotated_pole_cnrm_cerfacs') + test_data.update(['nc', 'misc', 'rotated_pole'], 'tas', 'tas_EUR-44_CCCma-CanESM2_rcp85_r1i1p1_SMHI-RCA4_v1_sem_209012-210011.nc', key='rotated_pole_cccma') + test_data.update(['nc', 'misc', 'rotated_pole'], 'tas', 'tas_EUR-44_ICHEC-EC-EARTH_historical_r12i1p1_SMHI-RCA4_v1_day_19710101-19751231.nc', key='rotated_pole_ichec') + test_data.update(['nc', 'misc', 'subset_test'], 'Prcp', 'sresa2.ncar_pcm1.3.monthly.Prcp.RAW.1950-2099.nc', key='subset_test_Prcp') + test_data.update(['nc', 'misc', 'subset_test'], 'Tavg', 'Tavg_bccr_bcm2_0.1.sresa2.nc', key='subset_test_Tavg') + 
test_data.update(['nc', 'misc', 'subset_test'], 'Tavg', 'sresa2.bccr_bcm2_0.1.monthly.Tavg.RAW.1950-2099.nc', key='subset_test_Tavg_sresa2') + test_data.update(['nc', 'narccap'], 'pr', 'pr_CRCM_ccsm_1981010103.nc', key='narccap_crcm') + test_data.update(['nc', 'narccap'], 'pr', 'pr_CRCM_ccsm_1981010103.nc', key='narccap_polar_stereographic') + test_data.update(['nc', 'narccap'], 'pr', 'pr_HRM3_gfdl_1981010103.nc', key='narccap_hrm3') + test_data.update(['nc', 'narccap'], 'pr', 'pr_RCM3_gfdl_1981010103.nc', key='narccap_rcm3') + test_data.update(['nc', 'narccap'], 'pr', 'pr_WRFG_ccsm_1986010103.nc', key='narccap_lambert_conformal') + test_data.update(['nc', 'narccap'], 'pr', 'pr_WRFG_ccsm_1986010103.nc', key='narccap_wrfg') + test_data.update(['nc', 'narccap'], 'pr', ['pr_WRFG_ncep_1981010103.nc', 'pr_WRFG_ncep_1986010103.nc'], key='narccap_pr_wrfg_ncep') + test_data.update(['nc', 'narccap'], 'tas', 'tas_HRM3_gfdl_1981010103.nc', key='narccap_rotated_pole') + test_data.update(['nc', 'narccap'], 'tas', 'tas_RCM3_gfdl_1981010103.nc', key='narccap_tas_rcm3_gfdl') + test_data.update(['nc', 'QED-2013'], 'climatology_TNn_monthly_max', 'climatology_TNn_monthly_max.nc', key='qed_2013_TNn_monthly_max') + test_data.update(['nc', 'QED-2013'], 'climatology_TNn_annual_min', 'climatology_TNn_annual_min.nc', key='qed_2013_TNn_annual_min') + test_data.update(['nc', 'QED-2013'], 'climatology_TasMin_seasonal_max_of_seasonal_means', 'climatology_TasMin_seasonal_max_of_seasonal_means.nc', key='qed_2013_TasMin_seasonal_max_of_seasonal_means') + test_data.update(['nc', 'QED-2013'], 'climatology_Tas_annual_max_of_annual_means', 'climatology_Tas_annual_max_of_annual_means.nc', key='qed_2013_climatology_Tas_annual_max_of_annual_means') + test_data.update(['nc', 'QED-2013', 'multifile'], 'txxmmedm', 'maurer02v2_median_txxmmedm_january_1971-2000.nc', key='qed_2013_maurer02v2_median_txxmmedm_january_1971-2000') + test_data.update(['nc', 'QED-2013', 'multifile'], 'txxmmedm', 
'maurer02v2_median_txxmmedm_february_1971-2000.nc', key='qed_2013_maurer02v2_median_txxmmedm_february_1971-2000') + test_data.update(['nc', 'QED-2013', 'multifile'], 'txxmmedm', 'maurer02v2_median_txxmmedm_march_1971-2000.nc', key='qed_2013_maurer02v2_median_txxmmedm_march_1971-2000') + test_data.update(['nc', 'snippets'], 'dtr', 'snippet_Maurer02new_OBS_dtr_daily.1971-2000.nc', key='snippet_maurer_dtr') + test_data.update(['nc', 'snippets'], 'bias', 'seasonalbias.nc', key='snippet_seasonalbias') return test_data @@ -327,34 +340,6 @@ def size(self): total += os.path.getsize(element) return total - def copy_files(self, dest, verbose=False): - """ - Copy test files from their source to the base directory ``dest``. The folder hierarchy will be recreated under - ``dest``. - - :param str dest: The base directory. The directory must exist. - :raises: IOError - """ - - if not os.path.exists(dest): - raise (IOError('Copy destination does not exist: {0}'.format(dest))) - for k, v in self.iteritems(): - uri = self.get_uri(k) - if isinstance(uri, basestring): - to_copy = [uri] - else: - to_copy = uri - for to_copy_uri in to_copy: - dest_dir = os.path.join(*([dest] + v['collection'])) - dst = os.path.join(dest_dir, os.path.split(to_copy_uri)[1]) - if not os.path.exists(dest_dir): - os.makedirs(dest_dir) - if verbose: - print 'copying: {0}...'.format(dst) - shutil.copy2(to_copy_uri, dst) - if verbose: - print 'copy completed' - def copy_file(self, key, dest): """ Copy a single files with unique test key identifier ``key`` to the full path ``dest``. @@ -383,6 +368,16 @@ def get_rd(self, key, kwds=None): rd = RequestDataset(**kwds) return rd + def get_relative_dir(self, key): + """ + :returns: The relative directory with no starting slash. + :rtype: str + """ + + value = self[key] + path = os.path.join(*value['collection']) + return path + def get_uri(self, key): """ :param str key: The unique identifier to the test dataset. 
@@ -394,9 +389,10 @@ def get_uri(self, key): ref = self[key] coll = deepcopy(ref['collection']) if env.DIR_TEST_DATA is None: - raise (ValueError('The TestDataset object requires env.DIR_TEST_DATA have a path value.')) + raise ValueError('The TestDataset object requires env.DIR_TEST_DATA have a path value.') coll.insert(0, env.DIR_TEST_DATA) - ## determine if the filename is a string or a sequence of paths + + # determine if the filename is a string or a sequence of paths filename = ref['filename'] if isinstance(filename, basestring): coll.append(filename) @@ -407,47 +403,14 @@ def get_uri(self, key): copy_coll = copy(coll) copy_coll.append(part) uri.append(os.path.join(*copy_coll)) - ## ensure the uris exist, if not, we may need to download - try: - if isinstance(uri, basestring): - assert (os.path.exists(uri)) - else: - for element in uri: - assert (os.path.exists(element)) - except AssertionError: - if isinstance(uri, basestring): - download_uris = [uri] - else: - download_uris = uri - try: - os.makedirs(env.DIR_TEST_DATA) - except OSError: - if os.path.exists(env.DIR_TEST_DATA): - warn('Target download location exists. 
Files will be written to the existing location: {0}'.format( - env.DIR_TEST_DATA)) - else: - raise - for download_uri in download_uris: - wget_url = ocgis.constants.test_data_download_url_prefix + '/'.join(ref['collection']) + '/' + \ - os.path.split(download_uri)[1] - wget_dest = os.path.join(*([env.DIR_TEST_DATA] + ref['collection'] + [download_uri])) - try: - os.makedirs(os.path.split(wget_dest)[0]) - except OSError: - if os.path.exists(os.path.split(wget_dest)[0]): - warn('Data download directory exists: {0}'.format(os.path.split(wget_dest)[0])) - else: - raise - try: - if env.DEBUG: - cmd = ['wget', '-O', wget_dest, wget_url] - else: - cmd = ['wget', '--quiet', '-O', wget_dest, wget_url] - subprocess.check_call(cmd) - except CalledProcessError: - raise (ValueError( - '"wget" was unable to fetch the test data URL ({0}) to the destination location: {1}. The command list was: {2}'.format( - wget_url, wget_dest, cmd))) + + # ensure the uris exist, if not, we may need to download + if isinstance(uri, basestring): + assert (os.path.exists(uri)) + else: + for element in uri: + assert (os.path.exists(element)) + return uri def update(self, collection, variable, filename, key=None): diff --git a/src/ocgis/test/test_base.py b/src/ocgis/test/test_base.py index ff123dcf5..a8a216258 100644 --- a/src/ocgis/test/test_base.py +++ b/src/ocgis/test/test_base.py @@ -1,4 +1,4 @@ -from ocgis.test.base import TestBase +from ocgis.test.base import TestBase, TestData import ocgis from unittest.case import SkipTest from ocgis import constants @@ -44,9 +44,9 @@ def test_assertNumpyAll_type_differs(self): @dev def test_data_download(self): ocgis.env.DIR_TEST_DATA = self.current_dir_output - rd1 = self.test_data_nc.get_rd('cancm4_tas') + rd1 = self.test_data.get_rd('cancm4_tas') ocgis.env.reset() - rd2 = self.test_data_nc.get_rd('cancm4_tas') + rd2 = self.test_data.get_rd('cancm4_tas') self.assertEqual(rd1,rd2) @dev @@ -54,25 +54,29 @@ def test_multifile_data_download(self): 
ocgis.env.DIR_TEST_DATA = self.current_dir_output ocgis.env.DEBUG = True constants.test_data_download_url_prefix = 'https://dl.dropboxusercontent.com/u/867854/test_data_download/' - rd = self.test_data_nc.get_rd('narccap_pr_wrfg_ncep') + rd = self.test_data.get_rd('narccap_pr_wrfg_ncep') @dev def test_entirely_bad_location(self): ocgis.env.DIR_TEST_DATA = self.current_dir_output with self.assertRaises(ValueError): - self.test_data_nc.get_rd('cancm4_tasmax_2011') + self.test_data.get_rd('cancm4_tasmax_2011') @dev def test_copy_files(self): - self.test_data_nc.copy_files('/home/local/WX/ben.koziol/htmp/transfer') + self.test_data.copy_files('/home/local/WX/ben.koziol/htmp/transfer') def test_multifile(self): - rd = self.test_data_nc.get_rd('narccap_pr_wrfg_ncep') + rd = self.test_data.get_rd('narccap_pr_wrfg_ncep') self.assertEqual(len(rd.uri),2) class TestTestData(TestBase): + def test_get_relative_path(self): + ret = self.test_data.get_relative_dir('clt_month_units') + self.assertEqual(ret, 'nc/misc/month_in_time_units') + def test_size(self): - size = self.test_data_nc.size + size = self.test_data.size self.assertGreater(size, 1138333) diff --git a/src/ocgis/test/test_misc/test_conversion.py b/src/ocgis/test/test_misc/test_conversion.py index 00fddd9c2..848943ca7 100644 --- a/src/ocgis/test/test_misc/test_conversion.py +++ b/src/ocgis/test/test_misc/test_conversion.py @@ -9,15 +9,15 @@ class Test(TestBase): def test_nc_projection_writing(self): - rd = self.test_data_nc.get_rd('daymet_tmax') + rd = self.test_data.get_rd('daymet_tmax') ops = ocgis.OcgOperations(dataset=rd,snippet=True,output_format='nc') ret = ops.execute() ds = nc.Dataset(ret) self.assertTrue('lambert_conformal_conic' in ds.variables) def test_csv_plus(self): - rd1 = self.test_data_nc.get_rd('cancm4_tasmax_2011') - rd2 = self.test_data_nc.get_rd('maurer_bccr_1950') + rd1 = self.test_data.get_rd('cancm4_tasmax_2011') + rd2 = self.test_data.get_rd('maurer_bccr_1950') ops = 
ocgis.OcgOperations(dataset=[rd1,rd2],snippet=True,output_format='csv+', geom='state_boundaries',agg_selection=True, select_ugid=[32]) @@ -29,8 +29,8 @@ def test_csv_plus(self): self.assertTrue(len(lines) > 50) def test_csv_plus_custom_headers(self): - rd1 = self.test_data_nc.get_rd('cancm4_tasmax_2011') - rd2 = self.test_data_nc.get_rd('maurer_bccr_1950') + rd1 = self.test_data.get_rd('cancm4_tasmax_2011') + rd2 = self.test_data.get_rd('maurer_bccr_1950') headers = ['did','ugid','gid','alias','value','time'] ops = ocgis.OcgOperations(dataset=[rd1,rd2],snippet=True,output_format='csv+', geom='state_boundaries',agg_selection=True, @@ -43,8 +43,8 @@ def test_csv_plus_custom_headers(self): self.assertEqual(fheaders,[h.upper() for h in headers]) def test_shp_custom_headers(self): - rd1 = self.test_data_nc.get_rd('cancm4_tasmax_2011') - rd2 = self.test_data_nc.get_rd('maurer_bccr_1950') + rd1 = self.test_data.get_rd('cancm4_tasmax_2011') + rd2 = self.test_data.get_rd('maurer_bccr_1950') headers = ['did','ugid','gid','alias','value','time'] ops = ocgis.OcgOperations(dataset=[rd1,rd2],snippet=True,output_format='shp', geom='state_boundaries',agg_selection=True, @@ -55,14 +55,14 @@ def test_shp_custom_headers(self): self.assertEqual(f.meta['schema']['properties'].keys(),[h.upper() for h in headers]) def test_meta(self): - rd = self.test_data_nc.get_rd('cancm4_tasmax_2011') + rd = self.test_data.get_rd('cancm4_tasmax_2011') ops = ocgis.OcgOperations(dataset=rd,snippet=True,output_format='meta', geom='state_boundaries',agg_selection=True) ret = ops.execute() self.assertTrue(isinstance(ret,basestring)) def test_meta_with_source(self): - rd = self.test_data_nc.get_rd('cancm4_tasmax_2011') + rd = self.test_data.get_rd('cancm4_tasmax_2011') ops = ocgis.OcgOperations(dataset=rd,snippet=True,output_format='csv', geom='state_boundaries',agg_selection=True) ret = ops.execute() diff --git a/src/ocgis/test/test_misc/test_dependency_versions.py 
b/src/ocgis/test/test_misc/test_dependency_versions.py new file mode 100644 index 000000000..44267df31 --- /dev/null +++ b/src/ocgis/test/test_misc/test_dependency_versions.py @@ -0,0 +1,36 @@ +import fiona +import numpy +import osgeo +import shapely +import netCDF4 +from ocgis.test.base import TestBase + + +class TestVersions(TestBase): + + def test_cfunits(self): + import cfunits + self.assertEqual(cfunits.__version__, '0.9.6') + + def test_esmf(self): + import ESMF + self.assertEqual(ESMF.__release__, 'ESMF_6_3_0rp1') + + def test_fiona(self): + self.assertEqual(fiona.__version__, '1.4.5') + + def test_netCDF4(self): + self.assertEqual(netCDF4.__version__, '1.1.1') + + def test_numpy(self): + self.assertEqual(numpy.__version__, '1.8.2') + + def test_osgeo(self): + self.assertEqual(osgeo.__version__, '1.11.1') + + def test_rtree(self): + import rtree + self.assertEqual(rtree.__version__, '0.8.0') + + def test_shapely(self): + self.assertEqual(shapely.__version__, '1.4.3') \ No newline at end of file diff --git a/src/ocgis/test/test_ocgis/test_api/test_interpreter.py b/src/ocgis/test/test_ocgis/test_api/test_interpreter.py index 8e9c91188..1a38b6df2 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_interpreter.py +++ b/src/ocgis/test/test_ocgis/test_api/test_interpreter.py @@ -11,7 +11,7 @@ def test_execute_directory(self): """Test that the output directory is removed appropriately following an operations failure.""" kwds = dict(add_auxiliary_files=[True, False]) - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') ## this geometry is outside the domain and will result in an exception geom = [1000, 1000, 1100, 1100] diff --git a/src/ocgis/test/test_ocgis/test_api/test_operations.py b/src/ocgis/test/test_ocgis/test_api/test_operations.py index e4fc34b8a..109e5bd4e 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_operations.py +++ b/src/ocgis/test/test_ocgis/test_api/test_operations.py @@ -22,23 +22,17 @@ class 
TestOcgOperations(TestBase): def setUp(self): TestBase.setUp(self) - env.DIR_DATA = os.path.join(env.DIR_TEST_DATA,'CanCM4') - - ## data may need to be pulled from remote repository - self.test_data_nc.get_rd('cancm4_tasmin_2001') - self.test_data_nc.get_rd('cancm4_tasmax_2011') - self.test_data_nc.get_rd('cancm4_tas') - - uris = [ - 'tasmin_day_CanCM4_decadal2000_r2i1p1_20010101-20101231.nc', - 'tasmax_day_CanCM4_decadal2010_r2i1p1_20110101-20201231.nc', - 'tas_day_CanCM4_decadal2000_r2i1p1_20010101-20101231.nc' - ] - vars = ['tasmin','tasmax','tas'] - time_range = [dt(2000,1,1),dt(2000,12,31)] - level_range = [2,2] - self.datasets = [{'uri':uri,'variable':var,'time_range':time_range,'level_range':level_range} for uri,var in zip(uris,vars)] - self.datasets_no_range = [{'uri':uri,'variable':var} for uri,var in zip(uris,vars)] + + # data may need to be pulled from remote repository + rds = [self.test_data.get_rd('cancm4_tasmin_2001'), self.test_data.get_rd('cancm4_tasmax_2011'), + self.test_data.get_rd('cancm4_tas')] + + time_range = [dt(2000, 1, 1), dt(2000, 12, 31)] + level_range = [2, 2] + + self.datasets = [{'uri': rd.uri, 'variable': rd.variable, 'time_range': time_range, 'level_range': level_range} + for rd in rds] + self.datasets_no_range = [{'uri': rd.uri, 'variable': rd.variable} for rd in rds] def test_init(self): with self.assertRaises(DefinitionValidationError): @@ -50,13 +44,13 @@ def test_init(self): def test_regrid_destination(self): """Test regridding not allowed with clip operation.""" - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') with self.assertRaises(DefinitionValidationError): OcgOperations(dataset=rd, regrid_destination=rd, spatial_operation='clip') def test_conform_units_to(self): - rd1 = self.test_data_nc.get_rd('cancm4_tas') - rd2 = self.test_data_nc.get_rd('cancm4_tas') + rd1 = self.test_data.get_rd('cancm4_tas') + rd2 = self.test_data.get_rd('cancm4_tas') rd2.alias = 'foo' ops = 
OcgOperations(dataset=[rd1, rd2], conform_units_to='celsius') for ds in ops.dataset.itervalues(): @@ -68,20 +62,20 @@ def test_conform_units_to(self): self.assertEqual(ds.conform_units_to, 'fahrenheit') def test_conform_units_to_bad_units(self): - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') with self.assertRaises(RequestValidationError): OcgOperations(dataset=rd, conform_units_to='crap') def test_no_calc_grouping_with_string_expression(self): calc = 'es=tas*3' calc_grouping = ['month'] - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') with self.assertRaises(DefinitionValidationError): OcgOperations(dataset=rd,calc=calc,calc_grouping=calc_grouping) def test_time_range(self): - rd = self.test_data_nc.get_rd('cancm4_tas') - rd2 = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') + rd2 = self.test_data.get_rd('cancm4_tas') rd.alias = 'foo' tr = [datetime.datetime(2002,1,1),datetime.datetime(2002,3,1)] ops = ocgis.OcgOperations(dataset=[rd,rd2],time_range=tr) @@ -96,8 +90,8 @@ def test_time_range(self): self.assertEqual(r.time_range,tuple(tr)) def test_time_region(self): - rd = self.test_data_nc.get_rd('cancm4_tas') - rd2 = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') + rd2 = self.test_data.get_rd('cancm4_tas') rd.alias = 'foo' tr = {'month':[6],'year':[2005]} ops = ocgis.OcgOperations(dataset=[rd,rd2],time_region=tr) @@ -112,8 +106,8 @@ def test_time_region(self): self.assertEqual(r.time_region,tr) def test_level_range(self): - rd = self.test_data_nc.get_rd('cancm4_tas') - rd2 = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') + rd2 = self.test_data.get_rd('cancm4_tas') rd.alias = 'foo' lr = [1,2] ops = ocgis.OcgOperations(dataset=[rd,rd2],level_range=lr) @@ -128,8 +122,8 @@ def test_level_range(self): self.assertEqual(r.level_range,tuple(lr)) def 
test_nc_package_validation_raised_first(self): - rd = self.test_data_nc.get_rd('cancm4_tas') - rd2 = self.test_data_nc.get_rd('rotated_pole_ichec',kwds={'alias':'tas2'}) + rd = self.test_data.get_rd('cancm4_tas') + rd2 = self.test_data.get_rd('rotated_pole_ichec',kwds={'alias':'tas2'}) try: ocgis.OcgOperations(dataset=[rd,rd2],output_format='nc') except DefinitionValidationError as e: @@ -144,8 +138,8 @@ def callback(perc,msg,app=app): app.append((perc,msg)) # print(perc,msg) - rd = self.test_data_nc.get_rd('cancm4_tas') - rd2 = self.test_data_nc.get_rd('cancm4_tasmax_2011') + rd = self.test_data.get_rd('cancm4_tas') + rd2 = self.test_data.get_rd('cancm4_tasmax_2011') dataset = [rd,rd2] for ds in dataset: ds.time_region = {'month':[6]} @@ -158,7 +152,7 @@ def callback(perc,msg,app=app): self.assertEqual(app[-1][0],100.0) def test_get_base_request_size(self): - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') ops = OcgOperations(dataset=rd) size = ops.get_base_request_size() self.assertEqual(size,{'variables': {'tas': {'level': {'kb': 0.0, 'shape': None, 'dtype': None}, 'temporal': {'kb': 28.515625, 'shape': (3650,), 'dtype': dtype('float64')}, 'value': {'kb': 116800.0, 'shape': (1, 3650, 1, 64, 128), 'dtype': dtype('float32')}, 'realization': {'kb': 0.0, 'shape': None, 'dtype': None}, 'col': {'kb': 1.0, 'shape': (128,), 'dtype': dtype('float64')}, 'row': {'kb': 0.5, 'shape': (64,), 'dtype': dtype('float64')}}}, 'total': 116830.015625}) @@ -167,49 +161,49 @@ def test_get_base_request_size(self): OcgOperations(dataset=rd, regrid_destination=rd).get_base_request_size() def test_get_base_request_size_with_geom(self): - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') ops = OcgOperations(dataset=rd,geom='state_boundaries',select_ugid=[23]) size = ops.get_base_request_size() self.assertEqual(size,{'variables': {'tas': {'level': {'kb': 0.0, 'shape': None, 'dtype': None}, 'temporal': {'kb': 
28.515625, 'shape': (3650,), 'dtype': dtype('float64')}, 'value': {'kb': 171.09375, 'shape': (1, 3650, 1, 4, 3), 'dtype': dtype('float32')}, 'realization': {'kb': 0.0, 'shape': None, 'dtype': None}, 'col': {'kb': 0.0234375, 'shape': (3,), 'dtype': dtype('float64')}, 'row': {'kb': 0.03125, 'shape': (4,), 'dtype': dtype('float64')}}}, 'total': 199.6640625}) def test_get_base_request_size_multifile(self): - rd1 = self.test_data_nc.get_rd('cancm4_tas') - rd2 = self.test_data_nc.get_rd('narccap_pr_wrfg_ncep') + rd1 = self.test_data.get_rd('cancm4_tas') + rd2 = self.test_data.get_rd('narccap_pr_wrfg_ncep') rds = [rd1,rd2] ops = OcgOperations(dataset=rds) size = ops.get_base_request_size() self.assertEqual({'variables': {'pr': {'level': {'kb': 0.0, 'shape': None, 'dtype': None}, 'temporal': {'kb': 228.25, 'shape': (29216,), 'dtype': dtype('float64')}, 'value': {'kb': 1666909.75, 'shape': (1, 29216, 1, 109, 134), 'dtype': dtype('float32')}, 'realization': {'kb': 0.0, 'shape': None, 'dtype': None}, 'col': {'kb': 1.046875, 'shape': (134,), 'dtype': dtype('float64')}, 'row': {'kb': 0.8515625, 'shape': (109,), 'dtype': dtype('float64')}}, 'tas': {'level': {'kb': 0.0, 'shape': None, 'dtype': None}, 'temporal': {'kb': 28.515625, 'shape': (3650,), 'dtype': dtype('float64')}, 'value': {'kb': 116800.0, 'shape': (1, 3650, 1, 64, 128), 'dtype': dtype('float32')}, 'realization': {'kb': 0.0, 'shape': None, 'dtype': None}, 'col': {'kb': 1.0, 'shape': (128,), 'dtype': dtype('float64')}, 'row': {'kb': 0.5, 'shape': (64,), 'dtype': dtype('float64')}}}, 'total': 1783969.9140625},size) def test_get_base_request_size_multifile_with_geom(self): - rd1 = self.test_data_nc.get_rd('cancm4_tas') - rd2 = self.test_data_nc.get_rd('narccap_pr_wrfg_ncep') + rd1 = self.test_data.get_rd('cancm4_tas') + rd2 = self.test_data.get_rd('narccap_pr_wrfg_ncep') rds = [rd1,rd2] ops = OcgOperations(dataset=rds,geom='state_boundaries',select_ugid=[23]) size = ops.get_base_request_size() 
self.assertEqual(size,{'variables': {'pr': {'level': {'kb': 0.0, 'shape': None, 'dtype': None}, 'temporal': {'kb': 228.25, 'shape': (29216,), 'dtype': dtype('float64')}, 'value': {'kb': 21341.375, 'shape': (1, 29216, 1, 17, 11), 'dtype': dtype('float32')}, 'realization': {'kb': 0.0, 'shape': None, 'dtype': None}, 'col': {'kb': 0.0859375, 'shape': (11,), 'dtype': dtype('float64')}, 'row': {'kb': 0.1328125, 'shape': (17,), 'dtype': dtype('float64')}}, 'tas': {'level': {'kb': 0.0, 'shape': None, 'dtype': None}, 'temporal': {'kb': 28.515625, 'shape': (3650,), 'dtype': dtype('float64')}, 'value': {'kb': 171.09375, 'shape': (1, 3650, 1, 4, 3), 'dtype': dtype('float32')}, 'realization': {'kb': 0.0, 'shape': None, 'dtype': None}, 'col': {'kb': 0.0234375, 'shape': (3,), 'dtype': dtype('float64')}, 'row': {'kb': 0.03125, 'shape': (4,), 'dtype': dtype('float64')}}}, 'total': 21769.5078125}) def test_get_base_request_size_test_data(self): - for key in self.test_data_nc.keys(): - rd = self.test_data_nc.get_rd(key) + for key in self.test_data.keys(): + rd = self.test_data.get_rd(key) try: ops = OcgOperations(dataset=rd) ## the project cmip data may raise an exception since projection is ## not associated with a variable except DimensionNotFound: - rd = self.test_data_nc.get_rd(key,kwds=dict(dimension_map={'R':'projection','T':'time','X':'longitude','Y':'latitude'})) + rd = self.test_data.get_rd(key,kwds=dict(dimension_map={'R':'projection','T':'time','X':'longitude','Y':'latitude'})) ops = OcgOperations(dataset=rd) ret = ops.get_base_request_size() self.assertTrue(ret['total'] > 1) def test_get_base_request_size_with_calculation(self): - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') ops = OcgOperations(dataset=rd,calc=[{'func':'mean','name':'mean'}], calc_grouping=['month']) size = ops.get_base_request_size() self.assertEqual(size['variables']['tas']['temporal']['shape'][0],3650) def test_str(self): - rd = 
self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') ops = OcgOperations(dataset=rd) ret = str(ops) self.assertTrue(str(ret).startswith('OcgOperations')) @@ -277,7 +271,7 @@ def test_geom(self): self.assertEqual(g._shp_key,'mi_watersheds') def test_geom_having_changed_select_ugid(self): - ops = OcgOperations(dataset=self.test_data_nc.get_rd('cancm4_tas'), + ops = OcgOperations(dataset=self.test_data.get_rd('cancm4_tas'), geom='state_boundaries') self.assertEqual(len(list(ops.geom)),51) ops.select_ugid = [16,17] @@ -317,7 +311,7 @@ def test_calc_grouping_none_date_parts(self): self.assertEqual(obj.value,('day',)) ## only month, year, and day combinations are currently supported - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') calcs = [None,[{'func':'mean','name':'mean'}]] acceptable = ['day','month','year'] for calc in calcs: @@ -336,7 +330,7 @@ def test_calc_grouping_none_date_parts(self): def test_calc_grouping_seasonal_with_year(self): calc_grouping = [[1,2,3],'year'] calc = [{'func':'mean','name':'mean'}] - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') ops = OcgOperations(dataset=rd,calc=calc,calc_grouping=calc_grouping, geom='state_boundaries',select_ugid=[25]) ret = ops.execute() @@ -345,7 +339,7 @@ def test_calc_grouping_seasonal_with_year(self): def test_calc_grouping_seasonal_with_unique(self): calc_grouping = [[12,1,2],'unique'] calc = [{'func':'mean','name':'mean'}] - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') ops = ocgis.OcgOperations(dataset=rd,calc_grouping=calc_grouping,geom='state_boundaries', select_ugid=[27],output_format='nc',calc=calc) ret = ops.execute() @@ -357,7 +351,7 @@ def test_calc_grouping_seasonal_with_unique(self): def test_dataset(self): env.DIR_DATA = ocgis.env.DIR_TEST_DATA - reference_rd = self.test_data_nc.get_rd('cancm4_tas') + reference_rd = self.test_data.get_rd('cancm4_tas') 
rd = RequestDataset(reference_rd.uri,reference_rd.variable) ds = definition.Dataset(rd) self.assertEqual(ds.value,RequestDatasetCollection([rd])) @@ -365,7 +359,7 @@ def test_dataset(self): dsa = {'uri':reference_rd.uri,'variable':reference_rd.variable} ds = definition.Dataset(dsa) - reference_rd2 = self.test_data_nc.get_rd('narccap_crcm') + reference_rd2 = self.test_data.get_rd('narccap_crcm') dsb = [dsa,{'uri':reference_rd2.uri,'variable':reference_rd2.variable,'alias':'knight'}] ds = definition.Dataset(dsb) @@ -394,8 +388,8 @@ def test_spatial_operation(self): def test_regridding_to_nc(self): """Write regridded data to netCDF.""" - rd1 = self.test_data_nc.get_rd('cancm4_tas') - rd2 = self.test_data_nc.get_rd('cancm4_tas') + rd1 = self.test_data.get_rd('cancm4_tas') + rd2 = self.test_data.get_rd('cancm4_tas') ops = OcgOperations(dataset=rd1, regrid_destination=rd2, output_format='nc', snippet=True, geom='state_boundaries', select_ugid=[25]) @@ -408,8 +402,8 @@ def test_regridding_to_nc(self): def test_regridding_to_shp_vector_wrap(self): """Test writing to shapefile with different vector wrap options.""" - rd1 = self.test_data_nc.get_rd('cancm4_tas') - rd2 = self.test_data_nc.get_rd('cancm4_tas') + rd1 = self.test_data.get_rd('cancm4_tas') + rd2 = self.test_data.get_rd('cancm4_tas') for vector_wrap in [True, False]: ops = OcgOperations(dataset=rd1, regrid_destination=rd2, output_format='shp', snippet=True, diff --git a/src/ocgis/test/test_ocgis/test_api/test_parms/test_definition.py b/src/ocgis/test/test_ocgis/test_api/test_parms/test_definition.py index 1d7c5dec3..c959d28fa 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_parms/test_definition.py +++ b/src/ocgis/test/test_ocgis/test_api/test_parms/test_definition.py @@ -177,7 +177,7 @@ def test_calc_grouping_seasonal_aggregation_with_bad_flag(self): CalcGrouping([[1,2,3],[4,5,6],'fod']) def test_dataset(self): - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') dd = 
Dataset(rd) with open('/tmp/dd.pkl','w') as f: @@ -466,7 +466,7 @@ def possible_datasets(self): return datasets def get_rd(self, **kwargs): - rd = self.test_data_nc.get_rd('cancm4_tas', kwds=kwargs) + rd = self.test_data.get_rd('cancm4_tas', kwds=kwargs) return rd def test_init(self): diff --git a/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py b/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py index 0279efc9e..8f84422d7 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py +++ b/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py @@ -45,10 +45,9 @@ class TestRequestDataset(TestBase): def setUp(self): TestBase.setUp(self) ## download test data - self.test_data_nc.get_rd('cancm4_rhs') - self.uri = os.path.join(ocgis.env.DIR_TEST_DATA, 'CanCM4', - 'rhs_day_CanCM4_decadal2010_r2i1p1_20110101-20201231.nc') - self.variable = 'rhs' + rd = self.test_data.get_rd('cancm4_rhs') + self.uri = rd.uri + self.variable = rd.variable def test_init(self): rd = RequestDataset(uri=self.uri) @@ -61,7 +60,7 @@ def test_init(self): self.assertTrue(rd._has_assigned_coordinate_system) def test_str(self): - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') ss = str(rd) self.assertTrue(ss.startswith('RequestDataset')) self.assertTrue('crs' in ss) @@ -69,18 +68,18 @@ def test_str(self): def test_crs_overload(self): kwds = {'crs': CoordinateReferenceSystem(epsg=4362)} - rd = self.test_data_nc.get_rd('cancm4_tas', kwds=kwds) + rd = self.test_data.get_rd('cancm4_tas', kwds=kwds) field = rd.get() self.assertDictEqual(kwds['crs'].value, field.spatial.crs.value) def test_uri_cannot_be_set(self): - rd = self.test_data_nc.get_rd('cancm4_tas') - other_uri = self.test_data_nc.get_uri('cancm4_rhs') + rd = self.test_data.get_rd('cancm4_tas') + other_uri = self.test_data.get_uri('cancm4_rhs') with self.assertRaises(AttributeError): rd.uri = other_uri def get_multiple_variable_request_dataset_dictionary(self): - 
rd_orig = self.test_data_nc.get_rd('cancm4_tas') + rd_orig = self.test_data.get_rd('cancm4_tas') dest_uri = os.path.join(self.current_dir_output, os.path.split(rd_orig.uri)[1]) shutil.copy2(rd_orig.uri, dest_uri) with nc_scope(dest_uri, 'a') as ds: @@ -93,7 +92,7 @@ def get_multiple_variable_request_dataset_dictionary(self): return {'uri': dest_uri, 'variable': ['tas', 'tasmax']} def test_alias_change_after_init_one_variable(self): - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') self.assertEqual(rd.name, 'tas') rd.alias = 'foo' self.assertEqual(rd.name, 'foo') @@ -141,7 +140,7 @@ def test_init_multiple_variables_with_alias_wrong_count(self): RequestDataset(uri=self.uri, variable=['tas', 'tasmax'], alias='tas_what') def test_init_combinations(self): - rd_orig = self.test_data_nc.get_rd('cancm4_tas') + rd_orig = self.test_data.get_rd('cancm4_tas') dest_uri = os.path.join(self.current_dir_output, os.path.split(rd_orig.uri)[1]) shutil.copy2(rd_orig.uri, dest_uri) with nc_scope(dest_uri, 'a') as ds: @@ -243,20 +242,20 @@ def itr_products_keywords(keywords): raise def test_variable_not_found(self): - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') rd_bad = RequestDataset(uri=rd.uri, variable='crap') with self.assertRaises(VariableNotFoundError): rd_bad.get() def test_level_subset_without_level(self): lr = [1, 2] - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') rd.level_range = lr with self.assertRaises(ValueError): rd.get() def test_source_dictionary_is_deepcopied(self): - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') field = rd.get() self.assertEqual(rd.source_metadata, field.meta) ## the source metadata dictionary should be deepcopied prior to passing @@ -265,41 +264,41 @@ def test_source_dictionary_is_deepcopied(self): self.assertNotEqual(rd.source_metadata, field.meta) def 
test_source_index_matches_constant_value(self): - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') field = rd.get() self.assertEqual(field.temporal._src_idx.dtype, constants.np_int) def test_with_units(self): units = 'celsius' - rd = self.test_data_nc.get_rd('cancm4_tas', kwds={'units': units}) + rd = self.test_data.get_rd('cancm4_tas', kwds={'units': units}) self.assertEqual(rd.units, 'celsius') def test_without_units_attempting_conform(self): ## this will work because the units read from the metadata are equivalent - self.test_data_nc.get_rd('cancm4_tas', kwds={'conform_units_to': 'celsius'}) + self.test_data.get_rd('cancm4_tas', kwds={'conform_units_to': 'celsius'}) ## this will not work because the units are not equivalent - rd = self.test_data_nc.get_rd('cancm4_tas', kwds={'conform_units_to': 'coulomb'}) + rd = self.test_data.get_rd('cancm4_tas', kwds={'conform_units_to': 'coulomb'}) with self.assertRaises(RequestValidationError): rd.get() def test_with_bad_units_attempting_conform(self): ## pass bad units to the init and an attempt a conform. values from ## the source dataset are not used for overload. 
- rd = self.test_data_nc.get_rd('cancm4_tas', kwds={'conform_units_to': 'celsius', 'units': 'coulomb'}) + rd = self.test_data.get_rd('cancm4_tas', kwds={'conform_units_to': 'celsius', 'units': 'coulomb'}) with self.assertRaises(RequestValidationError): rd.get() def test_nonsense_units(self): with self.assertRaises(RequestValidationError): - self.test_data_nc.get_rd('cancm4_tas', kwds={'units': 'nonsense', 'conform_units_to': 'celsius'}) + self.test_data.get_rd('cancm4_tas', kwds={'units': 'nonsense', 'conform_units_to': 'celsius'}) def test_with_bad_units_passing_to_field(self): - rd = self.test_data_nc.get_rd('cancm4_tas', kwds={'units': 'celsius'}) + rd = self.test_data.get_rd('cancm4_tas', kwds={'units': 'celsius'}) field = rd.get() self.assertEqual(field.variables['tas'].units, 'celsius') def test_get_field_with_overloaded_units(self): - rd = self.test_data_nc.get_rd('cancm4_tas', kwds={'conform_units_to': 'celsius'}) + rd = self.test_data.get_rd('cancm4_tas', kwds={'conform_units_to': 'celsius'}) preload = [False, True] for pre in preload: field = rd.get() @@ -321,7 +320,7 @@ def test_get_field_with_overloaded_units(self): self.assertNumpyAll(to_test, value) def test_get_field_nonequivalent_units_in_source_data(self): - new_path = self.test_data_nc.copy_file('cancm4_tas', self.current_dir_output) + new_path = self.test_data.copy_file('cancm4_tas', self.current_dir_output) ## put non-equivalent units on the source data and attempto to conform with nc_scope(new_path, 'a') as ds: @@ -378,15 +377,12 @@ def test_inspect_as_dct(self): self.assertEqual(ref.keys(), ['Start Date', 'End Date', 'Calendar', 'Units', 'Resolution (Days)', 'Count', 'Has Bounds', 'Spatial Reference', 'Proj4 String', 'Extent', 'Interface Type', 'Resolution']) def test_env_dir_data(self): - ## test setting the var to a single directory + """Test setting the data directory to a single directory.""" + env.DIR_DATA = ocgis.env.DIR_TEST_DATA - rd = self.test_data_nc.get_rd('cancm4_rhs') - target = 
os.path.join(env.DIR_DATA, 'CanCM4', 'rhs_day_CanCM4_decadal2010_r2i1p1_20110101-20201231.nc') - try: - self.assertEqual(rd.uri, target) - ## attempt to normalize the path - except AssertionError: - self.assertEqual(rd.uid, os.path.normpath(target)) + rd = self.test_data.get_rd('cancm4_rhs') + target = os.path.join(env.DIR_DATA, 'nc', 'CanCM4', 'rhs_day_CanCM4_decadal2010_r2i1p1_20110101-20201231.nc') + self.assertEqual(rd.uri, target) ## test none and not finding the data env.DIR_DATA = None @@ -415,7 +411,7 @@ def test_level_range(self): self.assertEqual(rd.level_range, tuple([1, 1])) def test_multiple_uris(self): - rd = self.test_data_nc.get_rd('narccap_pr_wrfg_ncep') + rd = self.test_data.get_rd('narccap_pr_wrfg_ncep') self.assertEqual(len(rd.uri), 2) rd.inspect() @@ -432,8 +428,8 @@ def test_time_region(self): class TestRequestDatasetCollection(TestBase): def test_init(self): - rd1 = self.test_data_nc.get_rd('cancm4_tas') - rd2 = self.test_data_nc.get_rd('cancm4_rhs') + rd1 = self.test_data.get_rd('cancm4_tas') + rd2 = self.test_data.get_rd('cancm4_rhs') keywords = dict(request_datasets=[None, rd1, [rd1], [rd1, rd2], {'uri': rd1.uri, 'variable': rd1.variable}]) @@ -445,15 +441,15 @@ def test_init(self): self.assertEqual(len(rdc), 0) def test_str(self): - rd1 = self.test_data_nc.get_rd('cancm4_tas') - rd2 = self.test_data_nc.get_rd('cancm4_rhs') + rd1 = self.test_data.get_rd('cancm4_tas') + rd2 = self.test_data.get_rd('cancm4_rhs') rdc = RequestDatasetCollection(request_datasets=[rd1, rd2]) ss = str(rdc) self.assertTrue(ss.startswith('RequestDatasetCollection')) self.assertGreater(len(ss), 900) def test_name_attribute_used_for_keys(self): - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') rd.name = 'hi_there' rdc = RequestDatasetCollection(request_datasets=[rd]) self.assertEqual(rdc.keys(), ['hi_there']) @@ -461,8 +457,8 @@ def test_name_attribute_used_for_keys(self): def test(self): env.DIR_DATA = ocgis.env.DIR_TEST_DATA - 
daymet = self.test_data_nc.get_rd('daymet_tmax') - tas = self.test_data_nc.get_rd('cancm4_tas') + daymet = self.test_data.get_rd('daymet_tmax') + tas = self.test_data.get_rd('cancm4_tas') uris = [daymet.uri, tas.uri] @@ -493,7 +489,7 @@ def test(self): self.assertIsInstance(rdc['a2'], RequestDataset) def test_with_overloads(self): - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') field = rd.get() ## loaded calendar should match file metadata self.assertEqual(field.temporal.calendar, '365_day') @@ -529,7 +525,7 @@ def test_with_overloads(self): def test_with_overloads_real_data(self): ## copy the test file as the calendar attribute will be modified - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') filename = os.path.split(rd.uri)[1] dest = os.path.join(self.current_dir_output, filename) shutil.copy2(rd.uri, dest) diff --git a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_base.py b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_base.py index e120e4024..6e864aec6 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_base.py +++ b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_base.py @@ -9,7 +9,7 @@ class TestAbstractDriver(TestBase): def test_get_field(self): # test updating of regrid source flag - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') driver = DriverNetcdf(rd) field = driver.get_field() self.assertTrue(field._should_regrid) @@ -19,17 +19,17 @@ def test_get_field(self): self.assertFalse(field._should_regrid) # test flag with an assigned coordinate system - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') driver = DriverNetcdf(rd) field = driver.get_field() self.assertFalse(field._has_assigned_coordinate_system) - rd = self.test_data_nc.get_rd('cancm4_tas', kwds={'crs': CFWGS84()}) + rd = self.test_data.get_rd('cancm4_tas', 
kwds={'crs': CFWGS84()}) driver = DriverNetcdf(rd) field = driver.get_field() self.assertTrue(field._has_assigned_coordinate_system) def test_eq(self): - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') d = DriverNetcdf(rd) d2 = deepcopy(d) self.assertEqual(d, deepcopy(d)) diff --git a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py index e7372cceb..f2cbd6e80 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py +++ b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py @@ -24,6 +24,7 @@ from importlib import import_module from collections import OrderedDict from ocgis.util.logging_ocgis import ocgis_lh +from ocgis import ShpCabinet class TestDriverNetcdf(TestBase): @@ -31,7 +32,9 @@ class TestDriverNetcdf(TestBase): def get_2d_state_boundaries(self): geoms = [] build = True - with fiona.open('/home/ben.koziol/Dropbox/NESII/project/ocg/bin/shp/state_boundaries/state_boundaries.shp','r') as source: + sc = ShpCabinet() + path = sc.get_shp_path('state_boundaries') + with fiona.open(path,'r') as source: for ii,row in enumerate(source): if build: nrows = len(source) @@ -58,7 +61,7 @@ def get_2d_state_boundaries_sdim(self): return(sdim) def test_get_dimensioned_variables_one_variable_in_target_dataset(self): - uri = self.test_data_nc.get_uri('cancm4_tas') + uri = self.test_data.get_uri('cancm4_tas') rd = RequestDataset(uri=uri) driver = DriverNetcdf(rd) ret = driver.get_dimensioned_variables() @@ -66,7 +69,7 @@ def test_get_dimensioned_variables_one_variable_in_target_dataset(self): self.assertEqual(rd._variable, ('tas',)) def test_get_dimensioned_variables_two_variables_in_target_dataset(self): - rd_orig = self.test_data_nc.get_rd('cancm4_tas') + rd_orig = self.test_data.get_rd('cancm4_tas') dest_uri = os.path.join(self.current_dir_output, os.path.split(rd_orig.uri)[1]) 
shutil.copy2(rd_orig.uri, dest_uri) with nc_scope(dest_uri, 'a') as ds: @@ -79,7 +82,7 @@ def test_get_dimensioned_variables_two_variables_in_target_dataset(self): self.assertEqual(rd.variable, rd.alias) def test_load_dtype_on_dimensions(self): - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') field = rd.get() with nc_scope(rd.uri) as ds: test_dtype_temporal = ds.variables['time'].dtype @@ -89,8 +92,8 @@ def test_load_dtype_on_dimensions(self): self.assertEqual(field.temporal.dtype,np.float64) def test_load(self): - ref_test = self.test_data_nc['cancm4_tas'] - uri = self.test_data_nc.get_uri('cancm4_tas') + ref_test = self.test_data['cancm4_tas'] + uri = self.test_data.get_uri('cancm4_tas') rd = RequestDataset(variable=ref_test['variable'],uri=uri) field = rd.get() ds = nc.Dataset(uri,'r') @@ -116,14 +119,14 @@ def test_load(self): ds.close() def test_multifile_load(self): - uri = self.test_data_nc.get_uri('narccap_pr_wrfg_ncep') + uri = self.test_data.get_uri('narccap_pr_wrfg_ncep') rd = RequestDataset(uri,'pr') field = rd.get() self.assertEqual(field.temporal.extent_datetime,(datetime.datetime(1981, 1, 1, 0, 0), datetime.datetime(1991, 1, 1, 0, 0))) self.assertAlmostEqual(field.temporal.resolution,0.125) def test_load_dtype_fill_value(self): - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') field = rd.get() ## dtype and fill_value should be read from metadata. when accessed they ## should not load the value. 
@@ -132,8 +135,8 @@ def test_load_dtype_fill_value(self): self.assertEqual(field.variables['tas']._value,None) def test_load_datetime_slicing(self): - ref_test = self.test_data_nc['cancm4_tas'] - uri = self.test_data_nc.get_uri('cancm4_tas') + ref_test = self.test_data['cancm4_tas'] + uri = self.test_data.get_uri('cancm4_tas') rd = RequestDataset(variable=ref_test['variable'],uri=uri) field = rd.get() @@ -145,24 +148,24 @@ def test_load_datetime_slicing(self): self.assertNumpyAll(slced.temporal.bounds_datetime,np.array([dt(2001,8,28),dt(2001,8,29)]).reshape(1, 2)) def test_load_value_datetime_after_slicing(self): - ref_test = self.test_data_nc['cancm4_tas'] - uri = self.test_data_nc.get_uri('cancm4_tas') + ref_test = self.test_data['cancm4_tas'] + uri = self.test_data.get_uri('cancm4_tas') rd = RequestDataset(variable=ref_test['variable'],uri=uri) field = rd.get() slced = field[:,10:130,:,4:7,100:37] self.assertEqual(slced.temporal.value_datetime.shape,(120,)) def test_load_bounds_datetime_after_slicing(self): - ref_test = self.test_data_nc['cancm4_tas'] - uri = self.test_data_nc.get_uri('cancm4_tas') + ref_test = self.test_data['cancm4_tas'] + uri = self.test_data.get_uri('cancm4_tas') rd = RequestDataset(variable=ref_test['variable'],uri=uri) field = rd.get() slced = field[:,10:130,:,4:7,100:37] self.assertEqual(slced.temporal.bounds_datetime.shape,(120,2)) def test_load_slice(self): - ref_test = self.test_data_nc['cancm4_tas'] - uri = self.test_data_nc.get_uri('cancm4_tas') + ref_test = self.test_data['cancm4_tas'] + uri = self.test_data.get_uri('cancm4_tas') rd = RequestDataset(variable=ref_test['variable'],uri=uri) field = rd.get() ds = nc.Dataset(uri,'r') @@ -184,8 +187,8 @@ def test_load_slice(self): ds.close() def test_load_time_range(self): - ref_test = self.test_data_nc['cancm4_tas'] - uri = self.test_data_nc.get_uri('cancm4_tas') + ref_test = self.test_data['cancm4_tas'] + uri = self.test_data.get_uri('cancm4_tas') rd = 
RequestDataset(variable=ref_test['variable'],uri=uri,time_range=[dt(2005,2,15),dt(2007,4,18)]) field = rd.get() self.assertEqual(field.temporal.value_datetime[0],dt(2005, 2, 15, 12, 0)) @@ -193,8 +196,8 @@ def test_load_time_range(self): self.assertEqual(field.shape,(1,793,1,64,128)) def test_load_time_region(self): - ref_test = self.test_data_nc['cancm4_tas'] - uri = self.test_data_nc.get_uri('cancm4_tas') + ref_test = self.test_data['cancm4_tas'] + uri = self.test_data.get_uri('cancm4_tas') ds = nc.Dataset(uri,'r') rd = RequestDataset(variable=ref_test['variable'],uri=uri,time_region={'month':[8]}) field = rd.get() @@ -215,8 +218,8 @@ def test_load_time_region(self): ds.close() def test_load_time_region_with_years(self): - ref_test = self.test_data_nc['cancm4_tas'] - uri = self.test_data_nc.get_uri('cancm4_tas') + ref_test = self.test_data['cancm4_tas'] + uri = self.test_data.get_uri('cancm4_tas') ds = nc.Dataset(uri,'r') rd = RequestDataset(variable=ref_test['variable'],uri=uri,time_region={'month':[8],'year':[2008,2010]}) field = rd.get() @@ -237,8 +240,8 @@ def test_load_time_region_with_years(self): ds.close() def test_load_geometry_subset(self): - ref_test = self.test_data_nc['cancm4_tas'] - uri = self.test_data_nc.get_uri('cancm4_tas') + ref_test = self.test_data['cancm4_tas'] + uri = self.test_data.get_uri('cancm4_tas') states = self.get_2d_state_boundaries_sdim() ca = states[:,states.properties['STATE_NAME'] == 'California'] @@ -270,8 +273,8 @@ def test_load_geometry_subset(self): import_module('rtree') def test_load_time_region_slicing(self): - ref_test = self.test_data_nc['cancm4_tas'] - uri = self.test_data_nc.get_uri('cancm4_tas') + ref_test = self.test_data['cancm4_tas'] + uri = self.test_data.get_uri('cancm4_tas') rd = RequestDataset(variable=ref_test['variable'],uri=uri,alias='foo', time_region={'month':[1,10],'year':[2011,2013]}) @@ -311,7 +314,7 @@ def test_load_remote(self): ds.close() def test_load_with_projection(self): - uri = 
self.test_data_nc.get_uri('narccap_wrfg') + uri = self.test_data.get_uri('narccap_wrfg') rd = RequestDataset(uri,'pr') field = rd.get() self.assertIsInstance(field.spatial.crs,CFLambertConformal) @@ -327,7 +330,7 @@ def test_load_with_projection(self): self.assertAlmostEqual(field.spatial.geom.point.value[0,100].y,21.4615681252577) def test_load_projection_axes(self): - uri = self.test_data_nc.get_uri('cmip3_extraction') + uri = self.test_data.get_uri('cmip3_extraction') variable = 'Tavg' rd = RequestDataset(uri,variable) with self.assertRaises(DimensionNotFound): @@ -346,7 +349,7 @@ def test_load_projection_axes(self): ds.close() def test_load_projection_axes_slicing(self): - uri = self.test_data_nc.get_uri('cmip3_extraction') + uri = self.test_data.get_uri('cmip3_extraction') variable = 'Tavg' rd = RequestDataset(uri,variable,dimension_map={'R':'projection','T':'time','X':'longitude','Y':'latitude'}) field = rd.get() @@ -359,7 +362,7 @@ def test_load_projection_axes_slicing(self): ds.close() def test_load_climatology_bounds(self): - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') ops = ocgis.OcgOperations(dataset=rd,output_format='nc',geom='state_boundaries', select_ugid=[27],calc=[{'func':'mean','name':'mean'}], calc_grouping=['month']) @@ -374,7 +377,7 @@ class Test(TestBase): def test_get_dimension_map_1(self): """Test dimension dictionary returned correctly.""" - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') dim_map = get_dimension_map('tas', rd.source_metadata) self.assertDictEqual(dim_map, {'Y': {'variable': u'lat', 'bounds': u'lat_bnds', 'dimension': u'lat', 'pos': 1}, 'X': {'variable': u'lon', 'bounds': u'lon_bnds', 'dimension': u'lon', 'pos': 2}, @@ -400,7 +403,7 @@ def test_get_dimension_map_3(self): try: # remove the bounds variable from a standard metadata dictionary - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') metadata = 
deepcopy(rd.source_metadata) metadata['variables'].pop('lat_bnds') dim_map = get_dimension_map('tas', metadata) diff --git a/src/ocgis/test/test_ocgis/test_api/test_subset.py b/src/ocgis/test/test_ocgis/test_api/test_subset.py index eff49fefd..2a290a7cc 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_subset.py +++ b/src/ocgis/test/test_ocgis/test_api/test_subset.py @@ -20,14 +20,14 @@ class TestSubsetOperation(TestBase): def get_operations(self): - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') slc = [None, [0, 100], None, [0, 10], [0, 10]] ops = ocgis.OcgOperations(dataset=rd, slice=slc) return ops def get_subset_operation(self): geom = TestGeom.get_geometry_dictionaries() - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') ops = ocgis.OcgOperations(dataset=rd, geom=geom, select_nearest=True) subset = SubsetOperation(ops) return subset @@ -39,13 +39,13 @@ def test_init(self): self.assertIsInstance(coll, SpatialCollection) self.assertEqual(ii, 0) - def test_bounding_box_wrapped(self): + def test_regridding_bounding_box_wrapped(self): """Test subsetting with a wrapped bounding box with the target as a 0-360 global grid.""" bbox = [-104, 36, -95, 44] - rd_global = self.test_data_nc.get_rd('cancm4_tas') - uri = os.path.expanduser('~/climate_data/maurer/bcca/obs/tasmax/1_8deg/gridded_obs.tasmax.OBS_125deg.daily.1991.nc') - rd_downscaled = ocgis.RequestDataset(uri=uri) + rd_global = self.test_data.get_rd('cancm4_tas') + rd_downscaled = self.test_data.get_rd('maurer_bcca_1991') + ops = ocgis.OcgOperations(dataset=rd_global, regrid_destination=rd_downscaled, geom=bbox, output_format='nc', snippet=True) ret = ops.execute() @@ -72,7 +72,7 @@ def test_regridding_same_field(self): #todo: what happens with multivariate calculations #todo: test with all masked values - rd_dest = self.test_data_nc.get_rd('cancm4_tas') + rd_dest = self.test_data.get_rd('cancm4_tas') keywords = 
dict(regrid_destination=[rd_dest, rd_dest.get().spatial, rd_dest.get()], geom=['state_boundaries']) @@ -81,8 +81,8 @@ def test_regridding_same_field(self): for ctr, k in enumerate(itr_products_keywords(keywords, as_namedtuple=True)): - rd1 = self.test_data_nc.get_rd('cancm4_tas') - rd2 = self.test_data_nc.get_rd('cancm4_tas', kwds={'alias': 'tas2'}) + rd1 = self.test_data.get_rd('cancm4_tas') + rd2 = self.test_data.get_rd('cancm4_tas', kwds={'alias': 'tas2'}) # print ctr # if ctr != 1: continue @@ -95,8 +95,8 @@ def test_regridding_same_field(self): # so the subsetting comes out okay. k = deepcopy(k) k.regrid_destination.crs = Spherical() - rd1 = self.test_data_nc.get_rd('cancm4_tas', kwds={'crs': Spherical()}) - rd2 = self.test_data_nc.get_rd('cancm4_tas', kwds={'alias': 'tas2', 'crs': Spherical()}) + rd1 = self.test_data.get_rd('cancm4_tas', kwds={'crs': Spherical()}) + rd2 = self.test_data.get_rd('cancm4_tas', kwds={'alias': 'tas2', 'crs': Spherical()}) ops = ocgis.OcgOperations(dataset=[rd1, rd2], geom=k.geom, regrid_destination=k.regrid_destination, time_region={'month': [1], 'year': [2002]}, select_ugid=select_ugid) @@ -117,19 +117,10 @@ def test_regridding_same_field(self): field.spatial.grid.corners, field.spatial.geom.polygon.value]: self.assertIsNotNone(to_check) - def test_regridding_same_field_bad_bounds_raises(self): - """Test a regridding error is raised with bad bounds.""" - - rd1 = self.test_data_nc.get_rd('cancm4_tas') - ops = ocgis.OcgOperations(dataset=rd1, regrid_destination=rd1, snippet=True) - subset = SubsetOperation(ops) - with self.assertRaises(ValueError): - list(subset) - def test_regridding_same_field_bad_bounds_without_corners(self): """Test bad bounds may be regridded with_corners as False.""" - rd1 = self.test_data_nc.get_rd('cancm4_tas') + rd1 = self.test_data.get_rd('cancm4_tas') ops = ocgis.OcgOperations(dataset=rd1, regrid_destination=rd1, snippet=True, regrid_options={'with_corners': False}) subset = SubsetOperation(ops) @@ 
-145,8 +136,8 @@ def test_regridding_same_field_bad_bounds_without_corners(self): def test_regridding_same_field_value_mask(self): """Test with a value_mask.""" - rd1 = self.test_data_nc.get_rd('cancm4_tas') - rd2 = self.test_data_nc.get_rd('cancm4_tas', kwds={'alias': 'tas2'}) + rd1 = self.test_data.get_rd('cancm4_tas') + rd2 = self.test_data.get_rd('cancm4_tas', kwds={'alias': 'tas2'}) value_mask = np.zeros(rd2.get().spatial.shape, dtype=bool) value_mask[30, 45] = True regrid_options = {'value_mask': value_mask, 'with_corners': False} @@ -157,8 +148,8 @@ def test_regridding_same_field_value_mask(self): def test_regridding_different_fields_requiring_wrapping(self): """Test with fields requiring wrapping.""" - rd1 = self.test_data_nc.get_rd('cancm4_tas') - rd2 = self.test_data_nc.get_rd('maurer_2010_tas') + rd1 = self.test_data.get_rd('cancm4_tas') + rd2 = self.test_data.get_rd('maurer_2010_tas') geom = 'state_boundaries' select_ugid = [25] @@ -175,8 +166,8 @@ def test_regridding_different_fields_requiring_wrapping(self): def test_regridding_different_fields_variable_regrid_targets(self): """Test with a request dataset having regrid_source as False.""" - rd1 = self.test_data_nc.get_rd('cancm4_tas') - rd2 = self.test_data_nc.get_rd('maurer_2010_tas', kwds={'time_region': {'year': [1990], 'month': [2]}}) + rd1 = self.test_data.get_rd('cancm4_tas') + rd2 = self.test_data.get_rd('maurer_2010_tas', kwds={'time_region': {'year': [1990], 'month': [2]}}) rd2.regrid_source = False rd3 = deepcopy(rd2) rd3.regrid_source = True @@ -215,16 +206,16 @@ def test_regridding_update_crs(self): if k.assign_source_crs: # assign the coordinate system changes some regridding behavior. in this case the value read from file # is the same as the assigned value. 
- rd1 = self.test_data_nc.get_rd('narccap_lambert_conformal', kwds={'crs': rd1.crs}) + rd1 = self.test_data.get_rd('narccap_lambert_conformal', kwds={'crs': rd1.crs}) else: - rd1 = self.test_data_nc.get_rd('narccap_lambert_conformal') + rd1 = self.test_data.get_rd('narccap_lambert_conformal') if k.assign_destination_crs: # assign the coordinate system changes some regridding behavior. in this case the value read from file # is the same as the assigned value. - rd2 = self.test_data_nc.get_rd('cancm4_tas', kwds={'crs': CFWGS84()}) + rd2 = self.test_data.get_rd('cancm4_tas', kwds={'crs': CFWGS84()}) else: - rd2 = self.test_data_nc.get_rd('cancm4_tas') + rd2 = self.test_data.get_rd('cancm4_tas') if k.destination_type == 'rd': destination = rd2 @@ -248,8 +239,8 @@ def test_regridding_update_crs(self): # swap source and destination grids # test regridding lambert conformal to 0 to 360 grid - rd1 = self.test_data_nc.get_rd('cancm4_tas') - rd2 = self.test_data_nc.get_rd('narccap_lambert_conformal') + rd1 = self.test_data.get_rd('cancm4_tas') + rd2 = self.test_data.get_rd('narccap_lambert_conformal') actual = np.ma.array([[[[[0.0, 0.0, 0.0, 0.0, 289.309326171875, 288.7110290527344, 287.92108154296875, 287.1899108886719, 286.51715087890625, 285.9024658203125, 0.0, 0.0, 0.0], [0.0, 288.77825927734375, 288.62823486328125, 288.3404541015625, 287.9151611328125, 287.32000732421875, 286.633544921875, 286.0067138671875, 285.43914794921875, 284.93060302734375, 284.48077392578125, 0.0, 0.0], [288.4192199707031, 288.18804931640625, 287.8165588378906, 287.30499267578125, 286.65362548828125, 285.86676025390625, 285.28515625, 284.7640686035156, 284.30316162109375, 283.90216064453125, 283.560791015625, 0.0, 0.0], [288.19488525390625, 287.74169921875, 287.14593505859375, 286.4078063964844, 285.52752685546875, 284.5051574707031, 283.87457275390625, 283.4606628417969, 283.1078186035156, 282.8158264160156, 282.58441162109375, 0.0, 0.0], [288.023193359375, 287.4422607421875, 286.6193542480469, 
285.65179443359375, 284.5396728515625, 283.2830505371094, 282.4002685546875, 282.09503173828125, 281.8517761230469, 281.6702575683594, 281.55029296875, 0.0, 0.0], [287.8075866699219, 287.2928771972656, 286.2398986816406, 285.0399475097656, 283.6930236816406, 282.19915771484375, 280.86077880859375, 280.66571044921875, 280.5335388183594, 280.4640808105469, 280.4613952636719, 280.4708251953125, 0.0], [287.591552734375, 287.296875, 286.0108337402344, 284.5754089355469, 282.99066162109375, 281.2564392089844, 279.47003173828125, 279.34307861328125, 279.3382263183594, 279.3432922363281, 279.3581848144531, 279.3829040527344, 0.0], [287.3750305175781, 287.322265625, 285.8916931152344, 284.12139892578125, 282.3462829589844, 280.566162109375, 278.7807922363281, 278.1846618652344, 278.1950988769531, 278.2154846191406, 278.24578857421875, 278.2860107421875, 0.0], [286.864013671875, 286.48724365234375, 285.2509460449219, 283.4699401855469, 281.6840515136719, 279.8930358886719, 278.0966796875, 277.01617431640625, 277.0421142578125, 277.07806396484375, 277.1240234375, 277.1799621582031, 0.0], [286.0535583496094, 285.5471496582031, 284.6158752441406, 282.8240661621094, 281.0272521972656, 279.2252197265625, 277.4177551269531, 275.8373107910156, 275.8789367675781, 275.9306945800781, 275.9925231933594, 276.0644226074219, 0.0], [285.3349609375, 284.69732666015625, 283.9648132324219, 282.183837890625, 280.3759765625, 278.56280517578125, 276.74407958984375, 274.91961669921875, 274.7053527832031, 274.77313232421875, 274.85107421875, 274.9391784667969, 275.0374450683594], [284.7100830078125, 283.93963623046875, 283.07275390625, 281.54925537109375, 279.730224609375, 277.90576171875, 276.07568359375, 274.2397155761719, 273.5210266113281, 273.6050720214844, 273.69940185546875, 273.9654235839844, 274.24139404296875], [284.1809387207031, 283.27606201171875, 282.2731018066406, 280.9204406738281, 279.090087890625, 277.25421142578125, 275.41259765625, 273.7033996582031, 272.687744140625, 
272.9641418457031, 273.2394104003906, 273.5135498046875, 273.78662109375], [283.7496337890625, 282.70855712890625, 281.56787109375, 280.3042907714844, 278.54541015625, 276.8524169921875, 275.22515869140625, 273.6634826660156, 272.24554443359375, 272.5191955566406, 272.7915954589844, 273.0628662109375, 273.3330078125], [283.39312744140625, 282.1578369140625, 280.91937255859375, 279.67755126953125, 278.1316223144531, 276.5411071777344, 275.017333984375, 273.56024169921875, 272.16973876953125, 272.07550048828125, 272.3450622558594, 272.6134338378906, 272.8805847167969], [282.7581481933594, 281.516845703125, 280.27227783203125, 279.0242614746094, 277.64892578125, 276.16229248046875, 274.743408203125, 273.3922424316406, 272.1087646484375, 271.63311767578125, 271.8998107910156, 272.1651916503906, 272.4293518066406], [282.1268615722656, 280.87945556640625, 279.6286926269531, 278.3744201660156, 277.095703125, 275.7143249511719, 274.4017639160156, 273.157958984375, 271.98297119140625, 271.1920471191406, 271.4557800292969, 271.71820068359375, 271.97930908203125], [281.499267578125, 280.24566650390625, 278.9886779785156, 277.7280578613281, 276.4637145996094, 275.19561767578125, 273.9908142089844, 272.85589599609375, 271.7908630371094, 270.79583740234375, 271.0130615234375, 271.26873779296875, 271.4607238769531], [280.8753662109375, 279.6155090332031, 278.3522033691406, 277.085205078125, 275.81439208984375, 274.6044921875, 273.50897216796875, 272.4844055175781, 271.58203125, 270.6971130371094, 270.5581359863281, 270.7032165527344, 270.7939453125], [280.25518798828125, 278.989013671875, 277.71929931640625, 276.4458312988281, 275.1974182128906, 274.173828125, 273.29473876953125, 272.4113464355469, 271.5235290527344, 270.63116455078125, 270.1499938964844, 270.1932678222656, 270.1813049316406], [0.0, 278.4078063964844, 277.3578186035156, 276.3003234863281, 275.2351379394531, 274.162109375, 273.2556457519531, 272.36480712890625, 271.4695129394531, 270.569580078125, 
269.8001403808594, 269.7401428222656, 269.6240234375], [0.0, 278.4853820800781, 277.42474365234375, 276.3564758300781, 275.2804260253906, 274.1964416503906, 273.2213134765625, 272.3229675292969, 271.4200744628906, 270.5124816894531, 269.6001281738281, 269.3451232910156, 269.1233825683594], [0.0, 278.5711669921875, 277.49969482421875, 276.4205017089844, 275.33343505859375, 274.23834228515625, 273.1918640136719, 272.2858581542969, 271.3752746582031, 270.4599304199219, 269.53973388671875, 269.00958251953125, 268.6806335449219], [0.0, 0.0, 277.5827941894531, 276.4925537109375, 275.3943176269531, 274.2879638671875, 273.1732482910156, 272.25360107421875, 271.335205078125, 270.4120178222656, 269.48388671875, 268.73492431640625, 0.0]]]]], mask=[[[[[True, True, True, True, True, True, True, True, True, False, False, False, True], [True, True, True, True, True, True, True, False, False, False, False, False, True], [True, True, True, True, True, True, True, False, False, False, False, False, True], [True, True, True, True, True, True, False, False, False, False, False, False, False], [True, True, True, True, True, False, False, False, False, False, False, False, False], [True, True, True, True, False, False, False, False, False, False, False, False, True], [True, True, False, False, False, False, False, False, False, False, False, False, True], [True, True, False, False, False, False, False, False, False, False, False, True, True], [True, True, False, False, False, False, False, False, False, False, True, True, True], [True, True, False, False, False, False, False, False, False, False, True, True, True], [True, False, False, False, False, False, False, False, False, True, True, True, True], [True, False, False, False, False, False, False, False, True, True, True, True, True], [True, False, False, False, False, False, False, False, True, True, True, True, True], [True, False, False, False, False, False, False, True, True, True, True, True, True], [True, False, False, False, 
False, False, False, True, True, True, True, True, True], [False, False, False, False, False, False, True, True, True, True, True, True, True], [False, False, False, False, False, False, True, True, True, True, True, True, True], [False, False, False, False, False, False, True, True, True, True, True, True, True], [False, False, False, False, False, False, False, True, True, True, True, True, True], [False, False, False, False, False, False, False, True, True, True, True, True, True], [False, False, False, False, False, False, False, True, True, True, True, True, True], [False, False, False, False, False, False, False, False, True, True, True, True, True], [True, False, False, False, False, False, True, True, True, True, True, True, True], [True, False, False, True, True, True, True, True, True, True, True, True, True]]]]], @@ -274,8 +265,8 @@ def test_regridding_update_crs(self): def test_regridding_with_output_crs(self): """Test with an output coordinate system.""" - rd1 = self.test_data_nc.get_rd('narccap_lambert_conformal') - rd2 = self.test_data_nc.get_rd('cancm4_tas') + rd1 = self.test_data.get_rd('narccap_lambert_conformal') + rd2 = self.test_data.get_rd('cancm4_tas') ops = ocgis.OcgOperations(dataset=rd2, regrid_destination=rd1, output_crs=rd1.get().spatial.crs, geom='state_boundaries', select_ugid=[16, 25]) ret = ops.execute() @@ -290,8 +281,8 @@ def test_regridding_with_output_crs(self): def test_regridding_two_projected_coordinate_systems(self): """Test with two coordinate systems not in spherical coordinates.""" - rd1 = self.test_data_nc.get_rd('narccap_lambert_conformal') - rd2 = self.test_data_nc.get_rd('narccap_polar_stereographic') + rd1 = self.test_data.get_rd('narccap_lambert_conformal') + rd2 = self.test_data.get_rd('narccap_polar_stereographic') self.assertIsInstance(rd2.crs, CFPolarStereographic) diff --git a/src/ocgis/test/test_ocgis/test_calc/test_calc_general.py b/src/ocgis/test/test_ocgis/test_calc/test_calc_general.py index 
2befe955a..115d6fa31 100644 --- a/src/ocgis/test/test_ocgis/test_calc/test_calc_general.py +++ b/src/ocgis/test/test_ocgis/test_calc/test_calc_general.py @@ -28,7 +28,7 @@ def run_standard_operations(self,calc,capture=False,output_format=None): aggregate,calc_grouping,output_format = tup if aggregate is True and output_format == 'nc': continue - rd = self.test_data_nc.get_rd('cancm4_tas',kwds={'time_region':{'year':[2001,2002]}}) + rd = self.test_data.get_rd('cancm4_tas',kwds={'time_region':{'year':[2001,2002]}}) try: ops = OcgOperations(dataset=rd,geom='state_boundaries',select_ugid=[25], calc=calc,calc_grouping=calc_grouping,output_format=output_format, @@ -77,7 +77,7 @@ class Test(AbstractCalcBase): def test_date_groups_all(self): calc = [{'func':'mean','name':'mean'}] - rd = self.test_data_nc.get_rd('cancm4_tasmax_2011') + rd = self.test_data.get_rd('cancm4_tasmax_2011') calc_grouping = 'all' ops = OcgOperations(dataset=rd,calc=calc,calc_grouping=calc_grouping, @@ -91,7 +91,7 @@ def test_date_groups_all(self): def test_time_region(self): kwds = {'time_region':{'year':[2011]}} - rd = self.test_data_nc.get_rd('cancm4_tasmax_2011',kwds=kwds) + rd = self.test_data.get_rd('cancm4_tasmax_2011',kwds=kwds) calc = [{'func':'mean','name':'mean'}] calc_grouping = ['year','month'] @@ -104,7 +104,7 @@ def test_time_region(self): self.assertEqual(tgroup['month'][-1],12) kwds = {'time_region':{'year':[2011,2013],'month':[8]}} - rd = self.test_data_nc.get_rd('cancm4_tasmax_2011',kwds=kwds) + rd = self.test_data.get_rd('cancm4_tasmax_2011',kwds=kwds) calc = [{'func':'threshold','name':'threshold','kwds':{'threshold':0.0,'operation':'gte'}}] calc_grouping = ['month'] aggregate = True @@ -120,7 +120,7 @@ def test_time_region(self): self.assertEqual(threshold.flatten()[0],62) def test_computational_nc_output(self): - rd = self.test_data_nc.get_rd('cancm4_tasmax_2011',kwds={'time_range':[datetime.datetime(2011,1,1),datetime.datetime(2011,12,31)]}) + rd = 
self.test_data.get_rd('cancm4_tasmax_2011',kwds={'time_range':[datetime.datetime(2011,1,1),datetime.datetime(2011,12,31)]}) calc = [{'func':'mean','name':'tasmax_mean'}] calc_grouping = ['month','year'] @@ -186,7 +186,7 @@ def test_frequency_percentiles(self): def test_date_groups(self): calc = [{'func':'mean','name':'mean'}] - rd = self.test_data_nc.get_rd('cancm4_tasmax_2011') + rd = self.test_data.get_rd('cancm4_tasmax_2011') calc_grouping = ['month'] ops = OcgOperations(dataset=rd,calc=calc,calc_grouping=calc_grouping, @@ -237,7 +237,7 @@ def test_date_groups(self): rdt = ref.value_datetime self.assertEqual(rdt[0],dt(2011,1,1,12)) - rd = self.test_data_nc.get_rd('cancm4_tasmax_2011',kwds={'time_region':{'month':[1],'year':[2011]}}) + rd = self.test_data.get_rd('cancm4_tasmax_2011',kwds={'time_region':{'month':[1],'year':[2011]}}) field = rd.get() calc_grouping = ['month','day','year'] ops = OcgOperations(dataset=rd,calc=calc,calc_grouping=calc_grouping, @@ -256,7 +256,7 @@ def get_collection(self,aggregate=False): spatial_operation = 'clip' else: spatial_operation = 'intersects' - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') ops = OcgOperations(dataset=rd,geom='state_boundaries',select_ugid=[25], spatial_operation=spatial_operation,aggregate=aggregate) ret = ops.execute() diff --git a/src/ocgis/test/test_ocgis/test_calc/test_engine.py b/src/ocgis/test/test_ocgis/test_calc/test_engine.py index f568f51a8..4fbb5a948 100644 --- a/src/ocgis/test/test_ocgis/test_calc/test_engine.py +++ b/src/ocgis/test/test_ocgis/test_calc/test_engine.py @@ -30,7 +30,7 @@ def get_engine(self,kwds=None,funcs=None,grouping="None"): def test_with_eval_function_one_variable(self): funcs = [{'func':'tas2=tas+4','ref':EvalFunction}] engine = self.get_engine(funcs=funcs,grouping=None) - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') coll = 
ocgis.OcgOperations(dataset=rd,slice=[None,[0,700],None,[0,10],[0,10]]).execute() to_test = deepcopy(coll) engine.execute(coll) @@ -39,8 +39,8 @@ def test_with_eval_function_one_variable(self): def test_with_eval_function_two_variables(self): funcs = [{'func':'tas_out=tas+tas2','ref':EvalFunction}] engine = self.get_engine(funcs=funcs,grouping=None) - rd = self.test_data_nc.get_rd('cancm4_tas') - rd2 = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') + rd2 = self.test_data.get_rd('cancm4_tas') rd2.alias = 'tas2' field = rd.get() field2 = rd2.get() @@ -59,14 +59,14 @@ def test_constructor(self): self.get_engine(kwds=kwds) def test_execute(self): - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') coll = ocgis.OcgOperations(dataset=rd,slice=[None,[0,700],None,[0,10],[0,10]]).execute() engine = self.get_engine() ret = engine.execute(coll) self.assertEqual(ret[1]['tas'].shape,(1, 12, 1, 10, 10)) def test_execute_tgd(self): - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') coll = ocgis.OcgOperations(dataset=rd,slice=[None,[0,700],None,[0,10],[0,10]], calc=self.funcs,calc_grouping=self.grouping).execute() coll_data = ocgis.OcgOperations(dataset=rd,slice=[None,[0,700],None,[0,10],[0,10]]).execute() @@ -78,7 +78,7 @@ def test_execute_tgd(self): self.assertFalse(np.may_share_memory(coll.gvu(1,'mean'),coll_engine.gvu(1,'mean'))) def test_execute_tgd_malformed(self): - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') coll = ocgis.OcgOperations(dataset=rd,slice=[None,[0,700],None,[0,10],[0,10]], calc=self.funcs,calc_grouping=['month','year']).execute() tgds = {'tas':coll[1]['tas'].temporal} diff --git a/src/ocgis/test/test_ocgis/test_calc/test_eval_function.py b/src/ocgis/test/test_ocgis/test_calc/test_eval_function.py index 019191060..556152a02 100644 --- a/src/ocgis/test/test_ocgis/test_calc/test_eval_function.py +++ 
b/src/ocgis/test/test_ocgis/test_calc/test_eval_function.py @@ -80,7 +80,7 @@ def test_get_eval_string_bad_string(self): EvalFunction._get_eval_string_(expr,{'tas':'var.value'}) def test_calculation_one_variable_exp_only(self): - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') field = rd.get() field = field[:,0:10,:,:,:] expr = 'es=6.1078*exp(17.08085*(tas-273.16)/(234.175+(tas-273.16)))' @@ -97,8 +97,8 @@ def test_calculation_one_variable_exp_only(self): self.assertNumpyAll(ret['es'].value,actual_value) def test_calculation_two_variables_exp_only(self): - rd = self.test_data_nc.get_rd('cancm4_tas') - rd2 = self.test_data_nc.get_rd('cancm4_tasmax_2001') + rd = self.test_data.get_rd('cancm4_tas') + rd2 = self.test_data.get_rd('cancm4_tasmax_2001') field = rd.get() field2 = rd2.get() field.variables.add_variable(field2.variables['tasmax'],assign_new_uid=True) @@ -115,7 +115,7 @@ def test_calculation_two_variables_exp_only(self): self.assertNumpyAll(ret['foo'].value,actual_value) def test_calculation_one_variable_exp_and_log(self): - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') field = rd.get() field = field[:,0:10,:,:,:] expr = 'es=6.1078*exp(log(17.08085)*(tas-273.16)/(234.175+(tas-273.16)))' @@ -126,7 +126,7 @@ def test_calculation_one_variable_exp_and_log(self): self.assertNumpyAll(ret['es'].value,actual_value) def test_calculation_file_only_one_variable(self): - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') field = rd.get() field = field[:,0:10,:,:,:] expr = 'es=6.1078*exp(17.08085*(tas-273.16)/(234.175+(tas-273.16)))' @@ -137,8 +137,8 @@ def test_calculation_file_only_one_variable(self): self.assertEqual(ret['es'].fill_value,field.variables['tas'].fill_value) def test_calculation_file_only_two_variables(self): - rd = self.test_data_nc.get_rd('cancm4_tas') - rd2 = self.test_data_nc.get_rd('cancm4_tasmax_2001') + rd = 
self.test_data.get_rd('cancm4_tas') + rd2 = self.test_data.get_rd('cancm4_tasmax_2001') field = rd.get() field2 = rd2.get() field.variables.add_variable(field2.variables['tasmax'],assign_new_uid=True) diff --git a/src/ocgis/test/test_ocgis/test_calc/test_library/test_index/test_duration.py b/src/ocgis/test/test_ocgis/test_calc/test_library/test_index/test_duration.py index 1255c86c1..ac8c8d68c 100644 --- a/src/ocgis/test/test_ocgis/test_calc/test_library/test_index/test_duration.py +++ b/src/ocgis/test/test_ocgis/test_calc/test_library/test_index/test_duration.py @@ -96,54 +96,54 @@ def test_calculate(self): @longrunning def test_real_data_multiple_datasets(self): - ocgis.env.DIR_DATA = ocgis.env.DIR_TEST_DATA - - rd_tasmax = RequestDataset(uri='Maurer02new_OBS_tasmax_daily.1971-2000.nc', - variable='tasmax', - time_region={'year':[1991],'month':[7]}) - rd_tasmin = RequestDataset(uri='Maurer02new_OBS_tasmin_daily.1971-2000.nc', - variable='tasmin', - time_region={'year':[1991],'month':[7]}) - - ops = OcgOperations(dataset=[rd_tasmax,rd_tasmin], + kwds = {'time_region': {'year': [1991], 'month': [7]}} + rd_tasmax = self.test_data.get_rd('maurer_2010_concatenated_tasmax', kwds=kwds) + rd_tasmin = self.test_data.get_rd('maurer_2010_concatenated_tasmin', kwds=kwds) + + ops = OcgOperations(dataset=[rd_tasmax, rd_tasmin], output_format='csv+', - calc=[{'name': 'Frequency Duration', 'func': 'freq_duration', 'kwds': {'threshold': 25.0, 'operation': 'gte'}}], - calc_grouping=['month','year'], - geom='us_counties',select_ugid=[2778],aggregate=True, - calc_raw=False,spatial_operation='clip', - headers=['did', 'ugid', 'gid', 'year', 'month', 'day', 'variable', 'calc_key', 'value'],) + calc=[{'name': 'Frequency Duration', 'func': 'freq_duration', + 'kwds': {'threshold': 25.0, 'operation': 'gte'}}], + calc_grouping=['month', 'year'], + geom='us_counties', select_ugid=[2778], aggregate=True, + calc_raw=False, spatial_operation='clip', + headers=['did', 'ugid', 'gid', 'year', 
'month', 'day', 'variable', 'calc_key', 'value'], ) ret = ops.execute() - - with open(ret,'r') as f: + + with open(ret, 'r') as f: reader = csv.DictReader(f) variables = [row['VARIABLE'] for row in reader] - self.assertEqual(set(variables),set(['tasmax','tasmin'])) + self.assertEqual(set(variables), set(['tasmax', 'tasmin'])) def test_real_data(self): - uri = 'Maurer02new_OBS_tasmax_daily.1971-2000.nc' - variable = 'tasmax' - ocgis.env.DIR_DATA = ocgis.env.DIR_TEST_DATA - - for output_format in ['numpy','csv+','shp','csv']: - ops = OcgOperations(dataset={'uri':uri, - 'variable':variable, - 'time_region':{'year':[1991],'month':[7]}}, - output_format=output_format,prefix=output_format, - calc=[{'name': 'Frequency Duration', 'func': 'freq_duration', 'kwds': {'threshold': 15.0, 'operation': 'gte'}}], - calc_grouping=['month','year'], - geom='us_counties',select_ugid=[2778],aggregate=True, - calc_raw=False,spatial_operation='clip', - headers=['did', 'ugid', 'gid', 'year', 'month', 'day', 'variable', 'calc_key', 'value'],) + """Test calculations on real data.""" + + rd = self.test_data.get_rd('maurer_2010_concatenated_tasmax', kwds={'time_region': {'year': [1991], + 'month': [7]}}) + for output_format in ['numpy', 'csv+', 'shp', 'csv']: + ops = OcgOperations(dataset=rd, + output_format=output_format, prefix=output_format, + calc=[{'name': 'Frequency Duration', + 'func': 'freq_duration', + 'kwds': {'threshold': 15.0, 'operation': 'gte'}}], + calc_grouping=['month', 'year'], + geom='us_counties', select_ugid=[2778], aggregate=True, + calc_raw=False, spatial_operation='clip', + headers=['did', 'ugid', 'gid', 'year', 'month', 'day', 'variable', 'calc_key', + 'value'], ) ret = ops.execute() if output_format == 'numpy': ref = ret[2778]['tasmax'].variables['Frequency Duration'].value - self.assertEqual(ref.compressed()[0].shape,(2,)) - + self.assertEqual(ref.compressed()[0].shape, (2,)) + if output_format == 'csv+': - real = [{'COUNT': '1', 'UGID': '2778', 'DID': '1', 
'CALC_KEY': 'freq_duration', 'MONTH': '7', 'DURATION': '7', 'GID': '2778', 'YEAR': '1991', 'VARIABLE': 'tasmax', 'DAY': '16'}, {'COUNT': '1', 'UGID': '2778', 'DID': '1', 'CALC_KEY': 'freq_duration', 'MONTH': '7', 'DURATION': '23', 'GID': '2778', 'YEAR': '1991', 'VARIABLE': 'tasmax', 'DAY': '16'}] - with open(ret,'r') as f: + real = [{'COUNT': '1', 'UGID': '2778', 'DID': '1', 'CALC_KEY': 'freq_duration', 'MONTH': '7', + 'DURATION': '7', 'GID': '2778', 'YEAR': '1991', 'VARIABLE': 'tasmax', 'DAY': '16'}, + {'COUNT': '1', 'UGID': '2778', 'DID': '1', 'CALC_KEY': 'freq_duration', 'MONTH': '7', + 'DURATION': '23', 'GID': '2778', 'YEAR': '1991', 'VARIABLE': 'tasmax', 'DAY': '16'}] + with open(ret, 'r') as f: reader = csv.DictReader(f) rows = list(reader) - for row,real_row in zip(rows,real): - self.assertDictEqual(row,real_row) + for row, real_row in zip(rows, real): + self.assertDictEqual(row, real_row) diff --git a/src/ocgis/test/test_ocgis/test_calc/test_library/test_index/test_dynamic_kernel_percentile.py b/src/ocgis/test/test_ocgis/test_calc/test_library/test_index/test_dynamic_kernel_percentile.py index b37381990..e26f6ab8d 100644 --- a/src/ocgis/test/test_ocgis/test_calc/test_library/test_index/test_dynamic_kernel_percentile.py +++ b/src/ocgis/test/test_ocgis/test_calc/test_library/test_index/test_dynamic_kernel_percentile.py @@ -20,7 +20,7 @@ def get_percentile_reference(self): for year,day in itertools.product(years,days): dates.append(datetime.datetime(year,6,day,12)) - ds = nc.Dataset(self.test_data_nc.get_uri('cancm4_tas')) + ds = nc.Dataset(self.test_data.get_uri('cancm4_tas')) try: calendar = ds.variables['time'].calendar units = ds.variables['time'].units @@ -59,7 +59,7 @@ def test_constructor(self): def test_calculate(self): ## daily data for three years is wanted for the test. subset a CMIP5 ## decadal simulation to use for input into the computation. 
- rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') field = rd.get() field = field.get_between('temporal',datetime.datetime(2001,1,1),datetime.datetime(2003,12,31,23,59)) ## the calculation will be for months and years. set the temporal grouping. @@ -85,7 +85,7 @@ def test_calculate(self): @longrunning def test_operations(self): - uri = self.test_data_nc.get_uri('cancm4_tas') + uri = self.test_data.get_uri('cancm4_tas') rd = RequestDataset(uri=uri, variable='tas', # time_range=[datetime.datetime(2001,1,1),datetime.datetime(2003,12,31,23,59)] @@ -103,7 +103,7 @@ def test_operations(self): @longrunning def test_operations_two_steps(self): ## get the request dataset to use as the basis for the percentiles - uri = self.test_data_nc.get_uri('cancm4_tas') + uri = self.test_data.get_uri('cancm4_tas') variable = 'tas' rd = RequestDataset(uri=uri,variable=variable) ## this is the underly OCGIS dataset object diff --git a/src/ocgis/test/test_ocgis/test_calc/test_library/test_index/test_heat_index.py b/src/ocgis/test/test_ocgis/test_calc/test_library/test_index/test_heat_index.py index 49f32aedf..e987e456e 100644 --- a/src/ocgis/test/test_ocgis/test_calc/test_library/test_index/test_heat_index.py +++ b/src/ocgis/test/test_ocgis/test_calc/test_library/test_index/test_heat_index.py @@ -10,7 +10,7 @@ class TestHeatIndex(AbstractTestField): def test_units_raise_exception(self): kwds = {'time_range':[dt(2011,1,1),dt(2011,12,31,23,59,59)]} - ds = [self.test_data_nc.get_rd('cancm4_tasmax_2011',kwds=kwds),self.test_data_nc.get_rd('cancm4_rhsmax',kwds=kwds)] + ds = [self.test_data.get_rd('cancm4_tasmax_2011',kwds=kwds),self.test_data.get_rd('cancm4_rhsmax',kwds=kwds)] calc = [{'func':'heat_index','name':'heat_index','kwds':{'tas':'tasmax','rhs':'rhsmax'}}] ops = ocgis.OcgOperations(dataset=ds,calc=calc,slice=[0,0,0,0,0]) self.assertEqual(ops.calc_grouping,None) @@ -20,7 +20,7 @@ def test_units_raise_exception(self): def test_units_conform_to(self): 
ocgis.env.OVERWRITE = True kwds = {'time_range':[dt(2011,1,1),dt(2011,12,31,23,59,59)]} - ds = [self.test_data_nc.get_rd('cancm4_tasmax_2011',kwds=kwds),self.test_data_nc.get_rd('cancm4_rhsmax',kwds=kwds)] + ds = [self.test_data.get_rd('cancm4_tasmax_2011',kwds=kwds),self.test_data.get_rd('cancm4_rhsmax',kwds=kwds)] ## set the conform to units ds[0].conform_units_to = 'fahrenheit' diff --git a/src/ocgis/test/test_ocgis/test_calc/test_library/test_math.py b/src/ocgis/test/test_ocgis/test_calc/test_library/test_math.py index 0e9677878..5f750b690 100644 --- a/src/ocgis/test/test_ocgis/test_calc/test_library/test_math.py +++ b/src/ocgis/test/test_ocgis/test_calc/test_library/test_math.py @@ -202,7 +202,7 @@ def test_execute_valid(self): def test_execute_valid_through_operations(self): """Test executing a "valid" convolution mode through operations ensuring the data is appropriately truncated.""" - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') calc = [{'func': 'convolve_1d', 'name': 'convolve', 'kwds': {'v': np.array([1, 1, 1, 1, 1]), 'mode': 'valid'}}] ops = ocgis.OcgOperations(dataset=rd, calc=calc, slice=[None, [0, 365], None, [0, 10], [0, 10]]) ret = ops.execute() diff --git a/src/ocgis/test/test_ocgis/test_calc/test_library/test_statistics.py b/src/ocgis/test/test_ocgis/test_calc/test_library/test_statistics.py index 7cb3de520..5cb742451 100644 --- a/src/ocgis/test/test_ocgis/test_calc/test_library/test_statistics.py +++ b/src/ocgis/test/test_ocgis/test_calc/test_library/test_statistics.py @@ -47,7 +47,7 @@ def test_execute(self): def test_execute_valid_through_operations(self): """Test executing a "valid" convolution mode through operations ensuring the data is appropriately truncated.""" - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') calc = [{'func': 'moving_window', 'name': 'ma', 'kwds': {'k': 5, 'mode': 'valid', 'operation': 'mean'}}] ops = ocgis.OcgOperations(dataset=rd, 
calc=calc, slice=[None, [0, 365], None, [0, 10], [0, 10]]) ret = ops.execute() @@ -200,7 +200,7 @@ def test_two_variables_sample_size(self): set(ret.keys())) def test_file_only(self): - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') field = rd.get() field = field[:,10:20,:,20:30,40:50] grouping = ['month'] @@ -219,7 +219,7 @@ def test_file_only(self): def test_output_datatype(self): ## ensure the output data type is the same as the input data type of ## the variable. - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') ops = ocgis.OcgOperations(dataset=rd,calc=[{'func':'mean','name':'mean'}], calc_grouping=['month'],geom='state_boundaries', select_ugid=[27]) @@ -229,7 +229,7 @@ def test_output_datatype(self): self.assertEqual(ret[27]['tas'].variables['mean'].dtype,var_dtype) def test_file_only_by_operations(self): - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') ops = ocgis.OcgOperations(dataset=rd,calc=[{'func':'mean','name':'mean'}], calc_grouping=['month'],geom='state_boundaries', select_ugid=[27],file_only=True,output_format='nc') diff --git a/src/ocgis/test/test_ocgis/test_contrib/test_library_icclim.py b/src/ocgis/test/test_ocgis/test_contrib/test_library_icclim.py index 6e67e9335..828d0f0fe 100644 --- a/src/ocgis/test/test_ocgis/test_contrib/test_library_icclim.py +++ b/src/ocgis/test/test_ocgis/test_contrib/test_library_icclim.py @@ -36,12 +36,12 @@ def test_standard_AbstractIcclimFunction(self): for cg in CalcGrouping.iter_possible(): calc = [{'func':subclass.key,'name':subclass.key.split('_')[1]}] if klass == AbstractIcclimUnivariateSetFunction: - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') rd.time_region = {'year':[2001,2002]} calc = [{'func':subclass.key,'name':subclass.key.split('_')[1]}] else: - tasmin = self.test_data_nc.get_rd('cancm4_tasmin_2001') - tasmax = 
self.test_data_nc.get_rd('cancm4_tasmax_2001') + tasmin = self.test_data.get_rd('cancm4_tasmin_2001') + tasmax = self.test_data.get_rd('cancm4_tasmax_2001') rd = [tasmin,tasmax] for r in rd: r.time_region = {'year':[2001,2002]} @@ -90,8 +90,8 @@ def test_bad_icclim_key_to_operations(self): class TestDTR(TestBase): def test_calculate(self): - tasmin = self.test_data_nc.get_rd('cancm4_tasmin_2001') - tasmax = self.test_data_nc.get_rd('cancm4_tasmax_2001') + tasmin = self.test_data.get_rd('cancm4_tasmin_2001') + tasmax = self.test_data.get_rd('cancm4_tasmax_2001') field = tasmin.get() field.variables.add_variable(deepcopy(tasmax.get().variables['tasmax']), assign_new_uid=True) field = field[:,0:600,:,25:50,25:50] @@ -101,8 +101,8 @@ def test_calculate(self): self.assertEqual(ret['icclim_DTR'].value.shape,(1, 12, 1, 25, 25)) def test_bad_keyword_mapping(self): - tasmin = self.test_data_nc.get_rd('cancm4_tasmin_2001') - tas = self.test_data_nc.get_rd('cancm4_tas') + tasmin = self.test_data.get_rd('cancm4_tasmin_2001') + tas = self.test_data.get_rd('cancm4_tas') rds = [tasmin,tas] calc = [{'func':'icclim_DTR','name':'DTR','kwds':{'tas':'tasmin','tasmax':'tasmax'}}] with self.assertRaises(DefinitionValidationError): @@ -118,9 +118,9 @@ def test_calculation_operations(self): ## note the kwds must contain a map of the required variables to their ## associated aliases. 
calc = [{'func':'icclim_DTR','name':'DTR','kwds':{'tasmin':'tasmin','tasmax':'tasmax'}}] - tasmin = self.test_data_nc.get_rd('cancm4_tasmin_2001') + tasmin = self.test_data.get_rd('cancm4_tasmin_2001') tasmin.time_region = {'year':[2002]} - tasmax = self.test_data_nc.get_rd('cancm4_tasmax_2001') + tasmax = self.test_data.get_rd('cancm4_tasmax_2001') tasmax.time_region = {'year':[2002]} rds = [tasmin,tasmax] ops = ocgis.OcgOperations(dataset=rds,calc=calc,calc_grouping=['month'], @@ -131,8 +131,8 @@ def test_calculation_operations(self): class TestETR(TestBase): def test_calculate(self): - tasmin = self.test_data_nc.get_rd('cancm4_tasmin_2001') - tasmax = self.test_data_nc.get_rd('cancm4_tasmax_2001') + tasmin = self.test_data.get_rd('cancm4_tasmin_2001') + tasmax = self.test_data.get_rd('cancm4_tasmax_2001') field = tasmin.get() field.variables.add_variable(tasmax.get().variables['tasmax'], assign_new_uid=True) field = field[:,0:600,:,25:50,25:50] @@ -142,7 +142,7 @@ def test_calculate(self): self.assertEqual(ret['icclim_ETR'].value.shape,(1, 12, 1, 25, 25)) def test_calculate_rotated_pole(self): - tasmin_fake = self.test_data_nc.get_rd('rotated_pole_ichec') + tasmin_fake = self.test_data.get_rd('rotated_pole_ichec') tasmin_fake.alias = 'tasmin' tasmax_fake = deepcopy(tasmin_fake) tasmax_fake.alias = 'tasmax' @@ -163,7 +163,7 @@ def test_calculate_rotated_pole(self): class TestTx(TestBase): def test_calculate_operations(self): - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') slc = [None,None,None,[0,10],[0,10]] calc_icclim = [{'func':'icclim_TG','name':'TG'}] calc_ocgis = [{'func':'mean','name':'mean'}] @@ -179,7 +179,7 @@ def test_calculate_operations(self): ret_icclim[1]['tas'].variables['TG'].value) def test_calculation_operations_to_nc(self): - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') slc = [None,None,None,[0,10],[0,10]] ops_ocgis = 
OcgOperations(calc=[{'func':'icclim_TG','name':'TG'}], calc_grouping=['month'], @@ -199,7 +199,7 @@ def test_calculation_operations_to_nc(self): self.assertEqual(dict(var.__dict__),{'_FillValue':np.float32(1e20),u'units': u'K', u'standard_name': AbstractIcclimFunction.standard_name, u'long_name': u'Mean of daily mean temperature'}) def test_calculate(self): - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') field = rd.get() field = field[:,:,:,0:10,0:10] klasses = [IcclimTG,IcclimTN,IcclimTX] @@ -217,7 +217,7 @@ def test_calculate(self): class TestSU(TestBase): def test_calculate(self): - rd = self.test_data_nc.get_rd('cancm4_tasmax_2011') + rd = self.test_data.get_rd('cancm4_tasmax_2011') field = rd.get() field = field[:,:,:,0:10,0:10] for calc_grouping in [['month'],['month','year']]: @@ -229,14 +229,14 @@ def test_calculate(self): self.assertNumpyAll(ret_icclim['icclim_SU'].value,ret_ocgis['threshold'].value) def test_calculation_operations_bad_units(self): - rd = self.test_data_nc.get_rd('daymet_tmax') + rd = self.test_data.get_rd('daymet_tmax') calc_icclim = [{'func':'icclim_SU','name':'SU'}] ops_icclim = OcgOperations(calc=calc_icclim,calc_grouping=['month','year'],dataset=rd) with self.assertRaises(UnitsValidationError): ops_icclim.execute() def test_calculation_operations_to_nc(self): - rd = self.test_data_nc.get_rd('cancm4_tasmax_2011') + rd = self.test_data.get_rd('cancm4_tasmax_2011') slc = [None,None,None,[0,10],[0,10]] ops_ocgis = OcgOperations(calc=[{'func':'icclim_SU','name':'SU'}], calc_grouping=['month'], diff --git a/src/ocgis/test/test_ocgis/test_conv/test_base.py b/src/ocgis/test/test_ocgis/test_conv/test_base.py index 60c924e79..d756413ae 100644 --- a/src/ocgis/test/test_ocgis/test_conv/test_base.py +++ b/src/ocgis/test/test_ocgis/test_conv/test_base.py @@ -16,7 +16,7 @@ class AbstractTestConverter(TestBase): def get_spatial_collection(self, field=None): - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = 
self.test_data.get_rd('cancm4_tas') field = field or rd.get()[:, 0, :, 0, 0] coll = SpatialCollection() coll.add_field(1, None, field) @@ -28,7 +28,7 @@ class TestAbstractConverter(AbstractTestConverter): def run_auxiliary_file_tst(self,Converter,file_list,auxiliary_file_list=None): auxiliary_file_list = auxiliary_file_list or self._auxiliary_file_list - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') ops = ocgis.OcgOperations(dataset=rd,output_format='numpy',slice=[None,0,None,[0,10],[0,10]]) coll = ops.execute() @@ -56,7 +56,7 @@ def run_auxiliary_file_tst(self,Converter,file_list,auxiliary_file_list=None): self.assertEqual(set(files),set(to_test)) def run_overwrite_true_tst(self,Converter,include_ops=False): - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') _ops = ocgis.OcgOperations(dataset=rd,output_format='numpy',slice=[None,0,None,[0,10],[0,10]]) coll = _ops.execute() @@ -73,7 +73,7 @@ def run_overwrite_true_tst(self,Converter,include_ops=False): def test_multiple_variables(self): conv_klasses = [CsvConverter, NcConverter] - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') field = rd.get() var2 = deepcopy(field.variables['tas']) var2.alias = 'tas2' @@ -94,7 +94,7 @@ def test_multiple_variables(self): self.assertNumpyAll(ds.variables['tas'][:], ds.variables['tas2'][:]) def test_overwrite_false_csv(self): - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') ops = ocgis.OcgOperations(dataset=rd, output_format='numpy', slice=[None, 0, None, [0, 10], [0, 10]]) coll = ops.execute() diff --git a/src/ocgis/test/test_ocgis/test_conv/test_csv_shp.py b/src/ocgis/test/test_ocgis/test_conv/test_csv_shp.py index c9fd691ca..b6afacaf5 100644 --- a/src/ocgis/test/test_ocgis/test_conv/test_csv_shp.py +++ b/src/ocgis/test/test_ocgis/test_conv/test_csv_shp.py @@ -12,8 +12,8 @@ class Test(TestBase): def 
test_geometries_not_duplicated_with_equivalent_ugid(self): ## if geometries are equivalent, they should not have duplicates in the ## output shapefile. - rd = self.test_data_nc.get_rd('cancm4_tas') - rd2 = self.test_data_nc.get_rd('cancm4_tasmax_2011') + rd = self.test_data.get_rd('cancm4_tas') + rd2 = self.test_data.get_rd('cancm4_tasmax_2011') ops = OcgOperations(dataset=[rd,rd2],geom='state_boundaries',select_ugid=[16], output_format='csv+',snippet=True) ops.execute() @@ -32,8 +32,8 @@ def test_geometries_different_ugid(self): row = list(ShpCabinetIterator(key='state_boundaries', select_ugid=[16])) row.append(deepcopy(row[0])) row[1]['properties']['UGID'] = 17 - rd = self.test_data_nc.get_rd('cancm4_tas') - rd2 = self.test_data_nc.get_rd('cancm4_tasmax_2011') + rd = self.test_data.get_rd('cancm4_tas') + rd2 = self.test_data.get_rd('cancm4_tasmax_2011') ops = OcgOperations(dataset=[rd, rd2], geom=row, output_format='csv+', snippet=True) ops.execute() diff --git a/src/ocgis/test/test_ocgis/test_conv/test_fiona_.py b/src/ocgis/test/test_ocgis/test_conv/test_fiona_.py index b3166538f..0167351cc 100644 --- a/src/ocgis/test/test_ocgis/test_conv/test_fiona_.py +++ b/src/ocgis/test/test_ocgis/test_conv/test_fiona_.py @@ -12,7 +12,7 @@ class TestShpConverter(TestBase): def get_subset_operation(self): geom = TestGeom.get_geometry_dictionaries() - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') ops = ocgis.OcgOperations(dataset=rd, geom=geom, select_nearest=True, snippet=True) subset = SubsetOperation(ops) return subset @@ -32,7 +32,7 @@ def test_attributes_copied(self): def test_none_geom(self): """Test a NoneType geometry will pass through the Fiona converter.""" - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') slc = [None, 0, None, [10, 20], [10, 20]] ops = ocgis.OcgOperations(dataset=rd, slice=slc) subset = SubsetOperation(ops) diff --git a/src/ocgis/test/test_ocgis/test_conv/test_meta.py 
b/src/ocgis/test/test_ocgis/test_conv/test_meta.py index 4d7dc0290..01cff7f0d 100644 --- a/src/ocgis/test/test_ocgis/test_conv/test_meta.py +++ b/src/ocgis/test/test_ocgis/test_conv/test_meta.py @@ -6,7 +6,7 @@ class TestMetaConverter(TestBase): def test_init(self): - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') ops = OcgOperations(dataset=rd) conv = MetaConverter(ops) self.assertTrue(len(conv.write()) > 4000) diff --git a/src/ocgis/test/test_ocgis/test_conv/test_nc.py b/src/ocgis/test/test_ocgis/test_conv/test_nc.py index 7552f6e24..071168836 100644 --- a/src/ocgis/test/test_ocgis/test_conv/test_nc.py +++ b/src/ocgis/test/test_ocgis/test_conv/test_nc.py @@ -23,7 +23,7 @@ def test_fill_value_modified(self): self.assertEqual(var._FillValue,np.ma.array([],dtype=np.dtype('int32')).fill_value) def test_fill_value_copied(self): - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') with nc_scope(rd.uri) as ds: fill_value_test = ds.variables['tas']._FillValue ops = ocgis.OcgOperations(dataset=rd,snippet=True,output_format='nc') diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_crs.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_crs.py index c3a63c628..e90300352 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_crs.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_crs.py @@ -97,7 +97,7 @@ def test_place_prime_meridian_array(self): def test_wrap_unwrap_with_mask(self): """Test wrapped and unwrapped geometries with a mask ensuring that masked values are wrapped and unwrapped.""" - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') ops = ocgis.OcgOperations(dataset=rd, geom='state_boundaries', select_ugid=[23]) ret = ops.execute() sdim = ret[23]['tas'].spatial @@ -244,7 +244,7 @@ def test_bad_parms(self): class TestCFLambertConformalConic(TestBase): def test_load_from_metadata(self): - uri = 
self.test_data_nc.get_uri('narccap_wrfg') + uri = self.test_data.get_uri('narccap_wrfg') ds = nc.Dataset(uri,'r') meta = NcMetadata(ds) crs = CFLambertConformal.load_from_metadata('pr',meta) @@ -259,16 +259,16 @@ def test_load_from_metadata(self): class TestCFRotatedPole(TestBase): def test_load_from_metadata(self): - rd = self.test_data_nc.get_rd('rotated_pole_ichec') + rd = self.test_data.get_rd('rotated_pole_ichec') self.assertIsInstance(rd.get().spatial.crs, CFRotatedPole) def test_equal(self): - rd = self.test_data_nc.get_rd('rotated_pole_ichec') + rd = self.test_data.get_rd('rotated_pole_ichec') rd2 = deepcopy(rd) self.assertEqual(rd.get().spatial.crs, rd2.get().spatial.crs) def test_in_operations(self): - rd = self.test_data_nc.get_rd('rotated_pole_ichec') + rd = self.test_data.get_rd('rotated_pole_ichec') rd2 = deepcopy(rd) rd2.alias = 'tas2' # # these projections are equivalent so it is okay to write them to a @@ -279,7 +279,7 @@ def test_in_operations(self): def test_get_rotated_pole_transformation(self): """Test SpatialDimension objects are appropriately transformed.""" - rd = self.test_data_nc.get_rd('rotated_pole_ichec') + rd = self.test_data.get_rd('rotated_pole_ichec') field = rd.get() field = field[:, 10:20, :, 40:55, 55:65] spatial = field.spatial diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py index 1e7a6a293..8d8e84229 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py @@ -2,7 +2,7 @@ import unittest import itertools import numpy as np -from ocgis import constants +from ocgis import constants, ShpCabinet from shapely import wkt from ocgis.interface.base.dimension.spatial import SpatialDimension,\ SpatialGeometryDimension, SpatialGeometryPolygonDimension,\ @@ -130,7 +130,8 @@ def 
test_init(self): class TestSpatialDimension(TestSpatialBase): def get_records(self): - path = '/home/ben.koziol/Dropbox/NESII/project/ocg/bin/shp/state_boundaries/state_boundaries.shp' + sc = ShpCabinet() + path = sc.get_shp_path('state_boundaries') with fiona.open(path, 'r') as source: records = list(source) meta = source.meta @@ -807,7 +808,7 @@ def test_update_crs_general_error(self): def test_update_crs_rotated_pole(self): """Test moving between rotated pole and WGS84.""" - rd = self.test_data_nc.get_rd('rotated_pole_ichec') + rd = self.test_data.get_rd('rotated_pole_ichec') field = rd.get() """:type: ocgis.interface.base.field.Field""" self.assertIsInstance(field.spatial.crs, CFRotatedPole) @@ -892,7 +893,9 @@ def test_empty(self): def test_geoms_only(self): geoms = [] - with fiona.open('/home/ben.koziol/Dropbox/NESII/project/ocg/bin/shp/state_boundaries/state_boundaries.shp','r') as source: + sc = ShpCabinet() + path = sc.get_shp_path('state_boundaries') + with fiona.open(path, 'r') as source: for row in source: geoms.append(shape(row['geometry'])) geoms = np.atleast_2d(geoms) diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_temporal.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_temporal.py index 8a309e6e2..a75fed33a 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_temporal.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_temporal.py @@ -114,7 +114,7 @@ def test_seasonal_get_grouping(self): self.assertEqual(tg.value[0],dt(2012,12,16)) ## grab real data - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') field = rd.get() td = TemporalDimension(value=field.temporal.value_datetime) tg = td.get_grouping([[3,4,5]]) diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py index 84bd74898..6916a1f18 100644 --- 
a/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py @@ -116,7 +116,7 @@ def test_should_regrid(self): def test_loading_from_source_spatial_bounds(self): """Test row bounds may be set to None when loading from source.""" - field = self.test_data_nc.get_rd('cancm4_tas').get() + field = self.test_data.get_rd('cancm4_tas').get() field.spatial.grid.row.bounds field.spatial.grid.row.bounds = None self.assertIsNone(field.spatial.grid.row.bounds) diff --git a/src/ocgis/test/test_ocgis/test_interface/test_metadata.py b/src/ocgis/test/test_ocgis/test_interface/test_metadata.py index 02a310847..8f1a9d223 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_metadata.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_metadata.py @@ -7,7 +7,7 @@ class TestNcMetadata(TestBase): def setUp(self): - uri = self.test_data_nc.get_rd('cancm4_tasmax_2001').uri + uri = self.test_data.get_rd('cancm4_tasmax_2001').uri self.rootgrp = nc.Dataset(uri) def tearDown(self): diff --git a/src/ocgis/test/test_ocgis/test_regrid/test_base.py b/src/ocgis/test/test_ocgis/test_regrid/test_base.py index 3cd3843a4..212f868c9 100644 --- a/src/ocgis/test/test_ocgis/test_regrid/test_base.py +++ b/src/ocgis/test/test_ocgis/test_regrid/test_base.py @@ -48,7 +48,7 @@ def get_coords(mpoly): all_coords = np.array(all_coords) return all_coords - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') field = rd.get() field.spatial.crs = Spherical() odd = field[:, :, :, 32, 64] @@ -73,7 +73,7 @@ def get_coords(mpoly): import ipdb;ipdb.set_trace() def atest_to_spherical(self): - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') coll = ocgis.OcgOperations(dataset=rd, vector_wrap=True).execute() #, geom='state_boundaries', agg_selection=True).execute() field = coll[1]['tas'] grid_original = deepcopy(field.spatial.grid) @@ -186,7 +186,7 @@ def 
test_check_fields_for_regridding(self): def test_iter_regridded_fields_different_grid_shapes(self): """Test regridding a downscaled dataset to GCM output. The input and output grids have different shapes.""" - downscaled = self.test_data_nc.get_rd('maurer_2010_tas') + downscaled = self.test_data.get_rd('maurer_2010_tas') downscaled.time_region = {'month': [2], 'year': [1990]} downscaled = downscaled.get() poly = make_poly([37, 43], [-104, -94]) @@ -194,7 +194,7 @@ def test_iter_regridded_fields_different_grid_shapes(self): downscaled.spatial.unwrap() downscaled.spatial.crs = Spherical() - gcm = self.test_data_nc.get_rd('cancm4_tas') + gcm = self.test_data.get_rd('cancm4_tas') gcm = gcm.get() poly = make_poly([37, 43], [-104+360, -94+360]) gcm = gcm.get_intersects(poly) @@ -226,7 +226,7 @@ def test_iter_regridded_fields_different_grid_shapes(self): def test_iter_regridded_fields_problem_bounds(self): """Test a dataset with crap bounds will work when with_corners is False.""" - dst = self.test_data_nc.get_rd('cancm4_tas').get()[:, :, :, 20:25, 30:35] + dst = self.test_data.get_rd('cancm4_tas').get()[:, :, :, 20:25, 30:35] dst.spatial.crs = Spherical() src = deepcopy(dst[0, 0, 0, :, :]) @@ -354,7 +354,7 @@ def test_iter_regridded_fields_value_mask(self): def test_iter_regridded_fields_nonoverlapping_extents(self): """Test regridding with fields that do not spatially overlap.""" - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') # nebraska and california coll = ocgis.OcgOperations(dataset=rd, geom='state_boundaries', select_ugid=[16, 25], snippet=True, vector_wrap=False).execute() @@ -369,7 +369,7 @@ def test_iter_regridded_fields_nonoverlapping_extents(self): def test_iter_regridded_fields_partial_extents(self): """Test regridding with fields that partially overlap.""" - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') # california and nevada coll = ocgis.OcgOperations(dataset=rd, 
geom='state_boundaries', select_ugid=[23, 25], snippet=True, vector_wrap=False).execute() @@ -449,7 +449,7 @@ def test_get_sdim_from_esmf_grid(self): def test_get_esmf_grid_from_sdim_with_mask(self): """Test with masked data.""" - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') ops = ocgis.OcgOperations(dataset=rd, geom='state_boundaries', select_ugid=[23], snippet=True, vector_wrap=False) ret = ops.execute() field = ret[23]['tas'] @@ -508,7 +508,7 @@ def test_get_esmf_grid_from_sdim(self): def test_get_esmf_grid_from_sdim_real_data(self): """Test creating ESMF field from real data using an OCGIS spatial dimension.""" - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') field = rd.get() egrid = get_esmf_grid_from_sdim(field.spatial) diff --git a/src/ocgis/test/test_ocgis/test_util/test_environment.py b/src/ocgis/test/test_ocgis/test_util/test_environment.py index b1183b4b3..2256c3306 100644 --- a/src/ocgis/test/test_ocgis/test_util/test_environment.py +++ b/src/ocgis/test/test_ocgis/test_util/test_environment.py @@ -29,7 +29,7 @@ def test_environment_variable(self): self.assertEqual(pm.value,True) -class Test(TestBase): +class TestEnvironment(TestBase): reset_env = False def get_is_available(self,module_name): @@ -39,7 +39,10 @@ def get_is_available(self,module_name): except ImportError: av = False return(av) - + + def test_conf_path(self): + env.CONF_PATH + def test_import_attributes(self): ## with both modules installed, these are expected to be true self.assertEqual(env.USE_CFUNITS,self.get_is_available('cfunits')) @@ -99,7 +102,7 @@ def test_env_overload(self): try: env.DIR_OUTPUT = out env.PREFIX = 'my_prefix' - rd = self.test_data_nc.get_rd('daymet_tmax') + rd = self.test_data.get_rd('daymet_tmax') ops = OcgOperations(dataset=rd,snippet=True) self.assertEqual(env.DIR_OUTPUT,ops.dir_output) self.assertEqual(env.PREFIX,ops.prefix) diff --git 
a/src/ocgis/test/test_ocgis/test_util/test_helpers.py b/src/ocgis/test/test_ocgis/test_util/test_helpers.py index 42491b4d4..734863b73 100644 --- a/src/ocgis/test/test_ocgis/test_util/test_helpers.py +++ b/src/ocgis/test/test_ocgis/test_util/test_helpers.py @@ -13,8 +13,8 @@ class Test(TestBase): def test_get_sorted_uris_by_time_dimension(self): - rd_2001 = self.test_data_nc.get_rd('cancm4_tasmax_2001') - rd_2011 = self.test_data_nc.get_rd('cancm4_tasmax_2011') + rd_2001 = self.test_data.get_rd('cancm4_tasmax_2001') + rd_2011 = self.test_data.get_rd('cancm4_tasmax_2011') not_sorted = [rd_2011.uri, rd_2001.uri] actual = ['tasmax_day_CanCM4_decadal2000_r2i1p1_20010101-20101231.nc', diff --git a/src/ocgis/test/test_ocgis/test_util/test_large_array.py b/src/ocgis/test/test_ocgis/test_util/test_large_array.py index 045975e3c..58d79872b 100644 --- a/src/ocgis/test/test_ocgis/test_util/test_large_array.py +++ b/src/ocgis/test/test_ocgis/test_util/test_large_array.py @@ -20,7 +20,7 @@ def test_with_callback(self): def callback(a, b): percentages.append(a) - rd = self.test_data_nc.get_rd('cancm4_tas', kwds={'time_region': {'month': [3]}}) + rd = self.test_data.get_rd('cancm4_tas', kwds={'time_region': {'month': [3]}}) ops = ocgis.OcgOperations(dataset=rd, calc=[{'func': 'mean', 'name': 'mean'}], calc_grouping=['month'], output_format='nc', geom='state_boundaries', @@ -40,7 +40,7 @@ def test_timing_use_optimizations(self): for use_optimizations in [True,False]: for ii in n: t1 = time.time() - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') ops = ocgis.OcgOperations(dataset=rd,calc=[{'func':'mean','name':'mean'}], calc_grouping=['month'],output_format='nc', geom='state_boundaries', @@ -54,7 +54,7 @@ def test_timing_use_optimizations(self): self.assertTrue(tmean[True]['mean'] < tmean[False]['mean']) def test_multivariate_computation(self): - rd = self.test_data_nc.get_rd('cancm4_tas',kwds={'time_region':{'month':[3]}}) + rd = 
self.test_data.get_rd('cancm4_tas',kwds={'time_region':{'month':[3]}}) rd2 = deepcopy(rd) rd2.alias = 'tas2' calc = [{'func':'divide','name':'ln','kwds':{'arr1':'tas','arr2':'tas2'}}] @@ -70,7 +70,7 @@ def test_multivariate_computation(self): self.assertNcEqual(ret,ret_ocgis,ignore_attributes={'global': ['history']}) def test_with_no_calc_grouping(self): - rd = self.test_data_nc.get_rd('cancm4_tas',kwds={'time_region':{'month':[3]}}) + rd = self.test_data.get_rd('cancm4_tas',kwds={'time_region':{'month':[3]}}) ops = ocgis.OcgOperations(dataset=rd,calc=[{'func':'ln','name':'ln'}], calc_grouping=None,output_format='nc', geom='state_boundaries', @@ -83,7 +83,7 @@ def test_with_no_calc_grouping(self): self.assertNcEqual(ret,ret_ocgis,ignore_attributes={'global': ['history']}) def test_compute_with_time_region(self): - rd = self.test_data_nc.get_rd('cancm4_tas',kwds={'time_region':{'month':[3]}}) + rd = self.test_data.get_rd('cancm4_tas',kwds={'time_region':{'month':[3]}}) ops = ocgis.OcgOperations(dataset=rd,calc=[{'func':'mean','name':'mean'}], calc_grouping=['month'],output_format='nc', geom='state_boundaries', @@ -96,7 +96,7 @@ def test_compute_with_time_region(self): self.assertNcEqual(ret,ret_ocgis,ignore_attributes={'global': ['history']}) def test_compute_with_geom(self): - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') ops = ocgis.OcgOperations(dataset=rd,calc=[{'func':'mean','name':'mean'}], calc_grouping=['month'],output_format='nc', geom='state_boundaries', @@ -109,7 +109,7 @@ def test_compute_with_geom(self): self.assertNcEqual(ret,ret_ocgis,ignore_attributes={'global': ['history']}) def test_compute_small(self): - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') ## use a smaller netCDF as target ops = ocgis.OcgOperations(dataset=rd, @@ -144,7 +144,7 @@ def test_compute_large(self): n_tile_dimensions = 1 tile_range = [100, 100] - rd = 
RequestDatasetCollection(self.test_data_nc.get_rd('cancm4_tasmax_2011')) + rd = RequestDatasetCollection(self.test_data.get_rd('cancm4_tasmax_2011')) calc = [{'func': 'mean', 'name': 'my_mean'}, {'func': 'freq_perc', 'name': 'perc_90', 'kwds': {'percentile': 90}}, diff --git a/src/ocgis/test/test_ocgis/test_util/test_logging_ocgis.py b/src/ocgis/test/test_ocgis/test_util/test_logging_ocgis.py index e9523c30f..7bd7cc3b9 100644 --- a/src/ocgis/test/test_ocgis/test_util/test_logging_ocgis.py +++ b/src/ocgis/test/test_ocgis/test_util/test_logging_ocgis.py @@ -145,7 +145,7 @@ def test_exc(self): ocgis_lh('something happened',exc=e) def test_writing(self): - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') ops = ocgis.OcgOperations(dataset=rd,snippet=True,output_format='csv') ret = ops.execute() folder = os.path.split(ret)[0] diff --git a/src/ocgis/test/test_ocgis/test_util/test_shp_cabinet.py b/src/ocgis/test/test_ocgis/test_util/test_shp_cabinet.py index 4004cb056..61157fa34 100644 --- a/src/ocgis/test/test_ocgis/test_util/test_shp_cabinet.py +++ b/src/ocgis/test/test_ocgis/test_util/test_shp_cabinet.py @@ -49,7 +49,7 @@ def test_select_ugids_absent_raises_exception(self): with self.assertRaises(ValueError): list(sci) - ops = ocgis.OcgOperations(dataset=self.test_data_nc.get_rd('cancm4_tas'), + ops = ocgis.OcgOperations(dataset=self.test_data.get_rd('cancm4_tas'), geom='state_boundaries', select_ugid=[9999]) with self.assertRaises(ValueError): diff --git a/src/ocgis/test/test_ocgis/test_util/test_shp_process.py b/src/ocgis/test/test_ocgis/test_util/test_shp_process.py index 438442002..aa369ba5f 100644 --- a/src/ocgis/test/test_ocgis/test_util/test_shp_process.py +++ b/src/ocgis/test/test_ocgis/test_util/test_shp_process.py @@ -8,11 +8,13 @@ class TestShpProcess(TestBase): - _test_path = '/home/ben.koziol/Dropbox/NESII/project/ocg/bin/test_data/test_shp_process' def test_shp_process(self): copy_path = 
os.path.join(self.current_dir_output, 'test_shp_process') - shutil.copytree(self._test_path, copy_path) + sc = ShpCabinet() + test_path = os.path.split(sc.get_shp_path('wc_4326'))[0] + shutil.copytree(test_path, copy_path) + shp_path = os.path.join(copy_path, 'wc_4326.shp') out_folder = tempfile.mkdtemp(dir=self.current_dir_output) sp = ShpProcess(shp_path, out_folder) diff --git a/src/ocgis/test/test_ocgis/test_util/test_shp_scanner/test_shp_scanner.py b/src/ocgis/test/test_ocgis/test_util/test_shp_scanner/test_shp_scanner.py index 2e5a31afb..8c2da337f 100644 --- a/src/ocgis/test/test_ocgis/test_util/test_shp_scanner/test_shp_scanner.py +++ b/src/ocgis/test/test_ocgis/test_util/test_shp_scanner/test_shp_scanner.py @@ -28,13 +28,13 @@ def get_geometry(self,select_ugid): return(geoms[0]['geom']) def get_subset_rd(self): - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') ret = OcgOperations(dataset=rd,geom=self.nevada,snippet=True,output_format='nc').execute() rd_sub = RequestDataset(uri=ret,variable='tas') return(rd_sub) def test_get_does_intersect_true(self): - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') for geom in [self.nevada,self.new_york]: self.assertTrue(get_does_intersect(rd,geom)) diff --git a/src/ocgis/test/test_ocgis/test_util/test_spatial/test_index.py b/src/ocgis/test/test_ocgis/test_util/test_spatial/test_index.py index 00eda988c..08151b084 100644 --- a/src/ocgis/test/test_ocgis/test_util/test_spatial/test_index.py +++ b/src/ocgis/test/test_ocgis/test_util/test_spatial/test_index.py @@ -1,113 +1,94 @@ -import unittest from shapely import wkt import numpy as np import itertools from shapely.geometry.point import Point from shapely.geometry.geo import mapping -from unittest.case import SkipTest -from importlib import import_module +from ocgis.test.base import TestBase +from ocgis.util.spatial.index import SpatialIndex -try: - from ocgis.util.spatial.index import SpatialIndex 
-except ImportError: - pass -class TestSpatialIndex(unittest.TestCase): - - def setUp(self): - try: - import_module('rtree') - super(TestSpatialIndex,self).setUp() - except ImportError: - raise(SkipTest('Spatial index tests only relevant when "rtree" is available for import.')) - +class TestSpatialIndex(TestBase): + @property def geom_michigan(self): wkt_txt = 'MULTIPOLYGON(((-88.497527 48.173795,-88.625327 48.033167,-88.901547 47.960248,-89.028622 47.850655,-89.139885 47.824076,-89.192916 47.844613,-89.201787 47.883857,-89.156099 47.939228,-88.497527 48.173795)),((-88.500681 47.290180,-88.437901 47.355896,-88.211392 47.447835,-87.788120 47.470793,-87.704383 47.415950,-87.737510 47.393024,-87.917042 47.358007,-88.222279 47.200752,-88.412843 46.988094,-88.470664 47.111472,-88.594262 47.134765,-88.595632 47.243593,-88.500681 47.290180)),((-85.859844 45.969469,-85.914955 45.957978,-85.917104 45.918192,-86.067891 45.964210,-86.259319 45.946929,-86.315638 45.905682,-86.343795 45.834396,-86.458275 45.762747,-86.529390 45.748961,-86.522010 45.724094,-86.576124 45.710174,-86.629784 45.621233,-86.685053 45.650048,-86.696919 45.692511,-86.584735 45.813879,-86.761469 45.826067,-86.901624 45.714778,-87.123759 45.696246,-87.260707 45.554802,-87.332227 45.423942,-87.583864 45.162733,-87.592514 45.108501,-87.672814 45.140672,-87.729669 45.176604,-87.736200 45.199072,-87.721628 45.211672,-87.719668 45.236771,-87.705142 45.247086,-87.704471 45.272205,-87.645362 45.348169,-87.643684 45.361856,-87.689598 45.391269,-87.760038 45.352897,-87.828008 45.358321,-87.841282 45.346149,-87.862096 45.370165,-87.868535 45.372072,-87.873974 45.362085,-87.883610 45.365854,-87.849531 45.406117,-87.860267 45.445098,-87.813614 45.466460,-87.789385 45.499067,-87.805141 45.544525,-87.828602 45.568591,-87.786312 45.568519,-87.775075 45.600387,-87.776045 45.613200,-87.819938 45.654450,-87.817054 45.665390,-87.780945 45.675915,-87.777473 45.684101,-87.801156 45.701324,-87.801553 45.711391,-87.842362 
45.722418,-87.873629 45.750699,-87.969179 45.766448,-87.990070 45.795046,-88.051639 45.786112,-88.088734 45.791532,-88.129949 45.819402,-88.121786 45.834878,-88.065421 45.873642,-88.095764 45.891803,-88.093850 45.920615,-88.111390 45.926287,-88.150438 45.936293,-88.180194 45.953516,-88.214992 45.947901,-88.257168 45.967055,-88.299152 45.961944,-88.321323 45.966712,-88.369938 45.994587,-88.403522 45.983422,-88.454319 46.000760,-88.483814 45.999151,-88.494083 46.012960,-88.515613 46.018609,-88.548358 46.019300,-88.575357 46.008959,-88.597536 46.015516,-88.615502 45.994120,-88.643669 45.993388,-88.677384 46.020144,-88.703605 46.018923,-88.726409 46.029581,-88.773017 46.021147,-88.777480 46.032614,-88.793815 46.036360,-88.804397 46.026804,-88.925195 46.073601,-88.985301 46.100391,-89.099806 46.145642,-89.925136 46.304025,-90.111659 46.340429,-90.115177 46.365155,-90.141797 46.393899,-90.161391 46.442380,-90.211526 46.506295,-90.258401 46.508789,-90.269785 46.522480,-90.300181 46.525051,-90.302393 46.544296,-90.313708 46.551563,-90.385525 46.539657,-90.408200 46.568610,-90.018864 46.678633,-89.886252 46.768935,-89.791244 46.824713,-89.386718 46.850208,-89.214592 46.923378,-89.125187 46.996606,-88.994875 46.997103,-88.929688 47.030926,-88.884832 47.104554,-88.629500 47.225812,-88.618104 47.131114,-88.511215 47.106506,-88.512995 47.032589,-88.441164 46.990734,-88.445964 46.928304,-88.476523 46.855151,-88.446617 46.799396,-88.177827 46.945890,-88.189188 46.900958,-88.036685 46.911865,-87.900654 46.909761,-87.663766 46.836851,-87.371539 46.507991,-87.110679 46.501473,-87.006402 46.536293,-86.871382 46.444359,-86.759495 46.486631,-86.638220 46.422263,-86.462392 46.561085,-86.148109 46.673053,-86.096739 46.655268,-85.857536 46.694815,-85.503850 46.674174,-85.230094 46.756785,-84.954759 46.770951,-85.026971 46.694339,-85.018975 46.549024,-85.051655 46.505576,-85.016639 46.476444,-84.931320 46.487843,-84.803653 46.444054,-84.629815 46.482943,-84.572667 46.407926,-84.415967 
46.480658,-84.311614 46.488669,-84.181646 46.248720,-84.273134 46.207309,-84.247031 46.171447,-84.119735 46.176108,-84.029578 46.128943,-84.061981 46.094470,-83.989501 46.025985,-83.901952 46.005902,-83.906460 45.960239,-84.113272 45.978538,-84.354485 45.999190,-84.501635 45.978342,-84.616845 46.038230,-84.689022 46.035918,-84.731732 45.855679,-84.851100 45.890636,-85.061629 46.024751,-85.378243 46.100047,-85.509546 46.101911,-85.655381 45.972870,-85.859844 45.969469)),((-83.854680 46.014031,-83.801105 45.988412,-83.756420 46.027338,-83.673592 46.036192,-83.680314 46.071794,-83.732448 46.084108,-83.649887 46.103971,-83.589498 46.088518,-83.533991 46.011790,-83.473189 45.987547,-83.516159 45.925714,-83.579813 45.917501,-83.629705 45.953596,-83.804881 45.936764,-83.852810 45.997449,-83.885891 45.970852,-83.854680 46.014031)),((-86.834829 41.765504,-86.617592 41.907448,-86.498833 42.126446,-86.374278 42.249421,-86.284980 42.422324,-86.217854 42.774825,-86.273837 43.121045,-86.463201 43.475166,-86.541301 43.663187,-86.447811 43.772665,-86.404345 43.766642,-86.434101 43.781458,-86.428814 43.820123,-86.459548 43.950184,-86.438147 43.945592,-86.518602 44.053619,-86.386423 44.183204,-86.271954 44.351228,-86.238038 44.522273,-86.258627 44.700731,-86.108484 44.734442,-86.082918 44.777929,-86.097964 44.850612,-86.067454 44.898257,-85.795756 44.985974,-85.610215 45.196527,-85.565514 45.180560,-85.653006 44.958362,-85.638039 44.778435,-85.526081 44.763162,-85.451351 44.860540,-85.384869 45.010603,-85.390244 45.211593,-85.373253 45.273541,-85.305475 45.320383,-85.092862 45.370225,-84.985893 45.373178,-84.921674 45.409899,-85.081815 45.464650,-85.120447 45.569779,-85.078019 45.630185,-84.983412 45.683713,-84.972038 45.737745,-84.724186 45.780304,-84.465275 45.653637,-84.321458 45.665607,-84.205560 45.630905,-84.135229 45.571343,-84.105907 45.498749,-83.922892 45.491773,-83.782809 45.409449,-83.712318 45.412394,-83.592363 45.349502,-83.495832 45.360802,-83.489598 
45.328937,-83.394019 45.272907,-83.420761 45.257182,-83.398695 45.213641,-83.312707 45.098620,-83.444441 45.052773,-83.433972 45.011128,-83.464903 44.997883,-83.429355 44.926297,-83.319724 44.860646,-83.280812 44.703183,-83.320036 44.515460,-83.356963 44.335133,-83.529150 44.261274,-83.568237 44.170118,-83.598404 44.070493,-83.704802 43.997165,-83.873615 43.962842,-83.918376 43.916997,-83.938121 43.698283,-83.699164 43.599642,-83.654615 43.607420,-83.530909 43.725943,-83.494248 43.702841,-83.466408 43.745740,-83.367163 43.844452,-83.326026 43.940459,-82.940154 44.069959,-82.805978 44.033564,-82.727902 43.972506,-82.618487 43.787866,-82.605738 43.694568,-82.503820 43.172253,-82.419836 42.972465,-82.471952 42.898682,-82.473238 42.762896,-82.518179 42.634052,-82.645877 42.631728,-82.634015 42.669382,-82.729806 42.681226,-82.820407 42.635794,-82.802361 42.612926,-82.888138 42.495756,-82.874907 42.458067,-82.929389 42.363040,-83.107588 42.292705,-83.193873 42.115749,-83.190066 42.033979,-83.482691 41.725130,-83.763954 41.717042,-83.868639 41.715993,-84.359208 41.708039,-84.384393 41.707150,-84.790377 41.697494,-84.788478 41.760959,-84.826008 41.761875,-85.193140 41.762867,-85.297209 41.763581,-85.659459 41.762627,-85.799227 41.763535,-86.068302 41.764628,-86.234565 41.764864,-86.525181 41.765540,-86.834829 41.765504)))' - return(wkt.loads(wkt_txt)) - + return wkt.loads(wkt_txt) + @property def geom_michigan_point_grid(self): n = 10 bounds = self.geom_michigan.bounds - x = np.linspace(bounds[2]+1,bounds[0]-1,n) - y = np.linspace(bounds[1]-1,bounds[3]+1,n) + x = np.linspace(bounds[2] + 1, bounds[0] - 1, n) + y = np.linspace(bounds[1] - 1, bounds[3] + 1, n) ret = {} - for ii,(ix,iy) in enumerate(itertools.product(x,y)): - ret[ii] = Point(ix,iy) - return(ret) - + for ii, (ix, iy) in enumerate(itertools.product(x, y)): + ret[ii] = Point(ix, iy) + return ret + def test_constructor(self): SpatialIndex() - + def test_add_polygon(self): si = SpatialIndex() - 
si.add(1,self.geom_michigan) - self.assertEqual(tuple(si._index.bounds),self.geom_michigan.bounds) - + si.add(1, self.geom_michigan) + self.assertEqual(tuple(si._index.bounds), self.geom_michigan.bounds) + def test_add_point(self): pt = self.geom_michigan.centroid si = SpatialIndex() - si.add(1,pt) - self.assertEqual(tuple(si._index.bounds),pt.bounds) + si.add(1, pt) + self.assertEqual(tuple(si._index.bounds), pt.bounds) def test_add_sequence(self): si = SpatialIndex() - ids = [1,2] - geoms = [self.geom_michigan,self.geom_michigan] - si.add(ids,geoms) + ids = [1, 2] + geoms = [self.geom_michigan, self.geom_michigan] + si.add(ids, geoms) ids = list(si._index.intersection(self.geom_michigan.bounds)) - self.assertEqual([1,2],ids) - + self.assertEqual([1, 2], ids) + def test_get_intersection_rtree(self): points = self.geom_michigan_point_grid -# print self.write_geom_dict(points) si = SpatialIndex() ids = points.keys() geoms = [points[i] for i in ids] - si.add(ids,geoms) + si.add(ids, geoms) ids = list(si._get_intersection_rtree_(self.geom_michigan)) - self.assertEqual(set(ids),set([12, 13, 14, 15, 16, 17, 22, 23, 24, 25, 26, 27, 32, 33, 34, 35, 36, 37, 42, 43, 44, 45, 46, 47, 52, 53, 54, 55, 56, 57, 62, 63, 64, 65, 66, 67, 72, 73, 74, 75, 76, 77, 82, 83, 84, 85, 86, 87])) -# new_dct = {i:points[i] for i in ids} -# print self.write_geom_dict(new_dct) + self.assertEqual(set(ids), set( + [12, 13, 14, 15, 16, 17, 22, 23, 24, 25, 26, 27, 32, 33, 34, 35, 36, 37, 42, 43, 44, 45, 46, 47, 52, 53, 54, + 55, 56, 57, 62, 63, 64, 65, 66, 67, 72, 73, 74, 75, 76, 77, 82, 83, 84, 85, 86, 87])) def test_iter_intersects(self): points = self.geom_michigan_point_grid si = SpatialIndex() ids = points.keys() geoms = [points[i] for i in ids] - si.add(ids,geoms) - intersects_ids = list(si.iter_intersects(self.geom_michigan,points)) -# new_geoms = {i:points[i] for i in intersects_ids} -# print self.write_geom_dict(new_geoms) - self.assertEqual(set(intersects_ids),set([22, 23, 24, 32, 33, 34, 
35, 36, 42, 43, 44, 46, 56, 66, 67, 76])) + si.add(ids, geoms) + intersects_ids = list(si.iter_intersects(self.geom_michigan, points)) + self.assertEqual(set(intersects_ids), set([22, 23, 24, 32, 33, 34, 35, 36, 42, 43, 44, 46, 56, 66, 67, 76])) def test_keep_touches(self): points = self.geom_michigan_point_grid si = SpatialIndex() ids = points.keys() geoms = [points[i] for i in ids] - si.add(ids,geoms) + si.add(ids, geoms) touch_geom = Point(*mapping(self.geom_michigan)['coordinates'][0][0][3]) - si.add(1000,touch_geom) + si.add(1000, touch_geom) points[1000] = touch_geom - for keep_touches in [True,False]: - intersects_ids = list(si.iter_intersects(self.geom_michigan,points,keep_touches=keep_touches)) + for keep_touches in [True, False]: + intersects_ids = list(si.iter_intersects(self.geom_michigan, points, keep_touches=keep_touches)) if keep_touches: - self.assertIn(1000,intersects_ids) + self.assertIn(1000, intersects_ids) else: - self.assertNotIn(1000,intersects_ids) - + self.assertNotIn(1000, intersects_ids) + def test_iter_intersects_with_polygon(self): polygon = self.geom_michigan[1] -# print 'michigan',self.write_geom_dict({1:polygon},geometry_type='Polygon') si = SpatialIndex() points = self.geom_michigan_point_grid ids = points.keys() geoms = [points[i] for i in ids] - si.add(ids,geoms) - intersects_ids = list(si.iter_intersects(polygon,points)) -# new_geoms = {i:points[i] for i in intersects_ids} -# print 'points',self.write_geom_dict(new_geoms) - self.assertEqual(intersects_ids,[67]) - \ No newline at end of file + si.add(ids, geoms) + intersects_ids = list(si.iter_intersects(polygon, points)) + self.assertEqual(intersects_ids, [67]) diff --git a/src/ocgis/test/test_ocgis/test_util/test_spatial/test_spatial_subset.py b/src/ocgis/test/test_ocgis/test_util/test_spatial/test_spatial_subset.py index c030ee7b2..e46854e20 100644 --- a/src/ocgis/test/test_ocgis/test_util/test_spatial/test_spatial_subset.py +++ 
b/src/ocgis/test/test_ocgis/test_util/test_spatial/test_spatial_subset.py @@ -51,7 +51,7 @@ def nebraska(self): @property def rd_rotated_pole(self): - rd = self.test_data_nc.get_rd('rotated_pole_cccma') + rd = self.test_data.get_rd('rotated_pole_cccma') return rd @property @@ -86,7 +86,7 @@ def get_subset_sdim(self): def get_target(self): # 1: standard input file - geographic coordinate system, unwrapped - rd_standard = self.test_data_nc.get_rd('cancm4_tas') + rd_standard = self.test_data.get_rd('cancm4_tas') # 2: standard field - geographic coordinate system field_standard = rd_standard.get() @@ -95,7 +95,7 @@ def get_target(self): field_rotated_pole = self.rd_rotated_pole.get() # 4: field with lambert conformal coordinate system - rd = self.test_data_nc.get_rd('narccap_lambert_conformal') + rd = self.test_data.get_rd('narccap_lambert_conformal') field_lambert = rd.get() # 5: standard input field - geographic coordinate system, wrapped @@ -156,7 +156,7 @@ def test_get_buffered_subset_sdim(self): def test_get_should_wrap(self): # a 360 dataset - field_360 = self.test_data_nc.get_rd('cancm4_tas').get() + field_360 = self.test_data.get_rd('cancm4_tas').get() ss = SpatialSubsetOperation(field_360, wrap=True) self.assertTrue(ss._get_should_wrap_(ss.target)) ss = SpatialSubsetOperation(field_360, wrap=False) @@ -218,7 +218,7 @@ def test_get_spatial_subset_circular_geometries(self): """Test circular geometries. 
They were causing wrapping errors.""" geoms = TestGeom.get_geometry_dictionaries() - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') ss = SpatialSubsetOperation(rd, wrap=True) buffered = [element['geom'].buffer(rd.get().spatial.grid.resolution*2) for element in geoms] for buff in buffered: @@ -234,7 +234,7 @@ def test_get_spatial_subset_output_crs(self): proj4 = '+proj=aea +lat_1=20 +lat_2=60 +lat_0=40 +lon_0=-96 +x_0=0 +y_0=0 +ellps=GRS80 +datum=NAD83 +units=m +no_defs' output_crs = CoordinateReferenceSystem(proj4=proj4) subset_sdim = SpatialDimension.from_records([self.nebraska]) - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') ss = SpatialSubsetOperation(rd, output_crs=output_crs) ret = ss.get_spatial_subset('intersects', subset_sdim) self.assertEqual(ret.spatial.crs, output_crs) @@ -261,7 +261,7 @@ def test_get_spatial_subset_wrap(self): """Test subsetting with wrap set to a boolean value.""" subset_sdim = SpatialDimension.from_records([self.nebraska]) - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') self.assertTrue(rd.get().spatial.is_unwrapped) ss = SpatialSubsetOperation(rd, wrap=True) ret = ss.get_spatial_subset('intersects', subset_sdim) @@ -303,7 +303,7 @@ def test_prepare_subset_sdim(self): # test nebraska against an unwrapped dataset specifically nebraska = SpatialDimension.from_records([self.nebraska]) - field = self.test_data_nc.get_rd('cancm4_tas').get() + field = self.test_data.get_rd('cancm4_tas').get() ss = SpatialSubsetOperation(field) prepared = ss._prepare_subset_sdim_(nebraska) self.assertTrue(prepared.is_unwrapped) @@ -314,7 +314,7 @@ def test_sdim(self): def test_should_update_crs(self): # no output crs provided - target = self.test_data_nc.get_rd('cancm4_tas') + target = self.test_data.get_rd('cancm4_tas') ss = SpatialSubsetOperation(target) self.assertFalse(ss.should_update_crs) diff --git 
a/src/ocgis/test/test_ocgis/test_util/test_units.py b/src/ocgis/test/test_ocgis/test_util/test_units.py index eea1bfeae..b962b52ed 100644 --- a/src/ocgis/test/test_ocgis/test_util/test_units.py +++ b/src/ocgis/test/test_ocgis/test_util/test_units.py @@ -11,12 +11,12 @@ class TestField(TestBase): def test_units_read_from_file(self): - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') field = rd.get() self.assertEqual(field.variables['tas'].cfunits,Units('K')) def test_units_conform_from_file(self): - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') field = rd.get() sub = field.get_time_region({'month':[5],'year':[2005]}) sub.variables['tas'].cfunits_conform(Units('celsius')) diff --git a/src/ocgis/test/test_ocgis/test_util/test_zipper.py b/src/ocgis/test/test_ocgis/test_util/test_zipper.py index 1aa7f4785..18a047cba 100644 --- a/src/ocgis/test/test_ocgis/test_util/test_zipper.py +++ b/src/ocgis/test/test_ocgis/test_util/test_zipper.py @@ -10,7 +10,7 @@ def test(): - tdata = TestBase.get_tst_data_nc() + tdata = TestBase.get_tst_data() rd = tdata.get_rd('cancm4_tas') output_formats = [ diff --git a/src/ocgis/test/test_real_data/test_cf.py b/src/ocgis/test/test_real_data/test_cf.py index e60b342d1..fcd48d27d 100644 --- a/src/ocgis/test/test_real_data/test_cf.py +++ b/src/ocgis/test/test_real_data/test_cf.py @@ -10,7 +10,7 @@ class Test(TestBase): def test_missing_bounds(self): - rd = self.test_data_nc.get_rd('snippet_maurer_dtr') + rd = self.test_data.get_rd('snippet_maurer_dtr') ip = rd.inspect_as_dct() def test_climatology(self): diff --git a/src/ocgis/test/test_real_data/test_cf_exceptions.py b/src/ocgis/test/test_real_data/test_cf_exceptions.py index df900e7bb..5fcda1eab 100644 --- a/src/ocgis/test/test_real_data/test_cf_exceptions.py +++ b/src/ocgis/test/test_real_data/test_cf_exceptions.py @@ -8,7 +8,7 @@ class Test(TestBase): def test_months_in_units(self): - rd = 
self.test_data_nc.get_rd('clt_month_units') + rd = self.test_data.get_rd('clt_month_units') field = rd.get() self.assertEqual(field.temporal.units,'months since 1979-1-1 0') self.assertEqual(field.temporal.value_datetime[50],datetime.datetime(1983,3,16)) @@ -16,7 +16,7 @@ def test_months_in_units(self): self.assertEqual(field.temporal.shape,(120,)) def test_months_in_units_time_range_subsets(self): - rd = self.test_data_nc.get_rd('clt_month_units') + rd = self.test_data.get_rd('clt_month_units') field = rd.get() time_range = [field.temporal.value_datetime[0], field.temporal.value_datetime[0]] ops = ocgis.OcgOperations(dataset=rd, time_range=time_range) @@ -24,7 +24,7 @@ def test_months_in_units_time_range_subsets(self): self.assertEqual((1, 1, 1, 46, 72), ret[1]['clt'].shape) def test_months_in_units_convert_to_shapefile(self): - uri = self.test_data_nc.get_uri('clt_month_units') + uri = self.test_data.get_uri('clt_month_units') variable = 'clt' ## select the month of may for two years time_region = {'month':[5],'year':[1982,1983]} @@ -38,7 +38,7 @@ def test_months_in_units_convert_to_shapefile(self): self.assertEqual(len(source),6624) def test_months_in_units_convert_to_netcdf(self): - uri = self.test_data_nc.get_uri('clt_month_units') + uri = self.test_data.get_uri('clt_month_units') variable = 'clt' rd = ocgis.RequestDataset(uri=uri,variable=variable) ## subset the clt dataset by the state of nevada and write to netcdf @@ -53,7 +53,7 @@ def test_months_in_units_convert_to_netcdf(self): self.assertNumpyAll(field.temporal.value,field2.temporal.value) def test_months_in_units_calculation(self): - rd = self.test_data_nc.get_rd('clt_month_units') + rd = self.test_data.get_rd('clt_month_units') calc = [{'func': 'mean', 'name': 'mean'}] calc_grouping = ['month'] ops = ocgis.OcgOperations(dataset=rd, calc=calc, calc_grouping=calc_grouping) diff --git a/src/ocgis/test/test_real_data/test_multiple_datasets.py b/src/ocgis/test/test_real_data/test_multiple_datasets.py index 
07217ee77..489f2df27 100644 --- a/src/ocgis/test/test_real_data/test_multiple_datasets.py +++ b/src/ocgis/test/test_real_data/test_multiple_datasets.py @@ -16,10 +16,10 @@ class Test(TestBase): def setUp(self): TestBase.setUp(self) - self.maurer = self.test_data_nc.get_rd('maurer_bccr_1950') - self.cancm4 = self.test_data_nc.get_rd('cancm4_tasmax_2001') - self.tasmin = self.test_data_nc.get_rd('cancm4_tasmin_2001') -# self.albisccp = self.test_data_nc.get_rd('ccsm4') + self.maurer = self.test_data.get_rd('maurer_bccr_1950') + self.cancm4 = self.test_data.get_rd('cancm4_tasmax_2001') + self.tasmin = self.test_data.get_rd('cancm4_tasmin_2001') +# self.albisccp = self.test_data.get_rd('ccsm4') @property def california(self): @@ -68,7 +68,7 @@ def test_vector_wrap(self): for key in keys: prev_value = None for vector_wrap in [True,False]: - rd = self.test_data_nc.get_rd(key[0]) + rd = self.test_data.get_rd(key[0]) prefix = 'vw_{0}_{1}'.format(vector_wrap,rd.variable) ops = ocgis.OcgOperations(dataset=rd,geom=geom,snippet=False, vector_wrap=vector_wrap,prefix=prefix) @@ -152,9 +152,9 @@ def assert_projection(path,check_ugid=True): for src in source: src.close() - rd1 = self.test_data_nc.get_rd('narccap_rcm3') + rd1 = self.test_data.get_rd('narccap_rcm3') rd1.alias = 'rcm3' - rd2 = self.test_data_nc.get_rd('narccap_crcm') + rd2 = self.test_data.get_rd('narccap_crcm') rd2.alias = 'crcm' rd = [ rd1, diff --git a/src/ocgis/test/test_real_data/test_narccap.py b/src/ocgis/test/test_real_data/test_narccap.py index 257020a41..06be8a344 100644 --- a/src/ocgis/test/test_real_data/test_narccap.py +++ b/src/ocgis/test/test_real_data/test_narccap.py @@ -16,17 +16,17 @@ class TestRotatedPole(TestBase): def test_validation(self): ## CFRotatedPole is not an appropriate output crs. 
it may also not be ## transformed to anything but WGS84 - rd = self.test_data_nc.get_rd('narccap_rotated_pole') + rd = self.test_data.get_rd('narccap_rotated_pole') with self.assertRaises(DefinitionValidationError): OcgOperations(dataset=rd,output_crs=CFRotatedPole(grid_north_pole_latitude=5, grid_north_pole_longitude=5)) ## this is an okay output coordinate system for the two input coordinate ## systems - rd2 = self.test_data_nc.get_rd('narccap_lambert_conformal') + rd2 = self.test_data.get_rd('narccap_lambert_conformal') OcgOperations(dataset=[rd,rd2],output_crs=CFWGS84()) def test_calculation(self): - rd = self.test_data_nc.get_rd('narccap_rotated_pole',kwds=dict(time_region={'month':[12],'year':[1982]})) + rd = self.test_data.get_rd('narccap_rotated_pole',kwds=dict(time_region={'month':[12],'year':[1982]})) calc = [{'func':'mean','name':'mean'}] calc_grouping = ['month'] ops = OcgOperations(dataset=rd,calc=calc,calc_grouping=calc_grouping, @@ -37,7 +37,7 @@ def test_calculation(self): self.assertEqual(field.shape,(1,1,1,130,155)) def test_intersects(self): - rd = self.test_data_nc.get_rd('narccap_rotated_pole',kwds=dict(time_region={'month':[12],'year':[1982]})) + rd = self.test_data.get_rd('narccap_rotated_pole',kwds=dict(time_region={'month':[12],'year':[1982]})) ops = OcgOperations(dataset=rd,geom='state_boundaries',select_ugid=[16]) ret = ops.execute() ref = ret.gvu(16,'tas') @@ -53,7 +53,7 @@ def test_intersects(self): [True,False,False,False,False,False,False,False,False,False,True,True,True,True,True]],dtype=bool)) def test_clip_aggregate(self): - rd = self.test_data_nc.get_rd('narccap_rotated_pole',kwds=dict(time_region={'month':[12],'year':[1982]})) + rd = self.test_data.get_rd('narccap_rotated_pole',kwds=dict(time_region={'month':[12],'year':[1982]})) ops = OcgOperations(dataset=rd,geom='state_boundaries',select_ugid=[16], spatial_operation='clip',aggregate=True,output_format='numpy') ## the output CRS should be automatically updated for this 
operation @@ -64,12 +64,12 @@ def test_clip_aggregate(self): self.assertAlmostEqual(ret.mean(),269.83058215725805) def test_read(self): - rd = self.test_data_nc.get_rd('narccap_rotated_pole') + rd = self.test_data.get_rd('narccap_rotated_pole') field = rd.get() self.assertIsInstance(field.spatial.crs,CFRotatedPole) def test_to_netcdf(self): - rd = self.test_data_nc.get_rd('narccap_rotated_pole',kwds=dict(time_region={'month':[12],'year':[1982]})) + rd = self.test_data.get_rd('narccap_rotated_pole',kwds=dict(time_region={'month':[12],'year':[1982]})) ## it does not care about slices or no geometries ops = OcgOperations(dataset=rd,output_format='nc') ret = ops.execute() @@ -77,7 +77,7 @@ def test_to_netcdf(self): self.assertEqual(rd2.get().temporal.extent,(5444.0,5474.875)) def test_to_netcdf_with_geometry(self): - rd = self.test_data_nc.get_rd('narccap_rotated_pole') + rd = self.test_data.get_rd('narccap_rotated_pole') ## this bounding box covers the entire spatial domain. the software will ## move between rotated pole and CFWGS84 using this operation. it can then ## be compared against the "null" result which just does a snippet. 
@@ -96,7 +96,7 @@ def test_to_netcdf_with_geometry(self): self.assertTrue(diff.max() <= 1.02734374963e-06) def test_to_netcdf_with_slice(self): - rd = self.test_data_nc.get_rd('narccap_rotated_pole') + rd = self.test_data.get_rd('narccap_rotated_pole') ops = OcgOperations(dataset=rd, output_format='nc', slice=[None,[0,10],None,[0,10],[0,10]], @@ -109,7 +109,7 @@ def test_to_netcdf_with_slice(self): class Test(TestBase): def test_cf_lambert_conformal(self): - rd = self.test_data_nc.get_rd('narccap_lambert_conformal') + rd = self.test_data.get_rd('narccap_lambert_conformal') field = rd.get() crs = field.spatial.crs self.assertDictEqual(crs.value,{'lon_0': -97, 'ellps': 'WGS84', 'y_0': 2700000, 'no_defs': True, 'proj': 'lcc', 'x_0': 3325000, 'units': 'm', 'lat_2': 60, 'lat_1': 30, 'lat_0': 47.5}) @@ -118,7 +118,7 @@ def test_cf_lambert_conformal(self): def test_read_write_projections(self): """Test NARCCAP coordinate systems may be appropriately read and written to NetCDF.""" - data_dir = os.path.join(ocgis.env.DIR_TEST_DATA, 'narccap') + data_dir = os.path.join(ocgis.env.DIR_TEST_DATA, 'nc', 'narccap') ocgis.env.DIR_DATA = data_dir ocgis.env.OVERWRITE = True diff --git a/src/ocgis/test/test_real_data/test_package.py b/src/ocgis/test/test_real_data/test_package.py index 3055c7245..6515f9d60 100644 --- a/src/ocgis/test/test_real_data/test_package.py +++ b/src/ocgis/test/test_real_data/test_package.py @@ -7,8 +7,8 @@ class Test(TestBase): def test_nc(self): - rd1 = self.test_data_nc.get_rd('cancm4_tas') - rd2 = self.test_data_nc.get_rd('cancm4_rhsmax') + rd1 = self.test_data.get_rd('cancm4_tas') + rd2 = self.test_data.get_rd('cancm4_rhsmax') rd = [rd1,rd2] for output_format in ['shp','csv','csv+','nc']: if output_format == 'nc': diff --git a/src/ocgis/test/test_real_data/test_random_datasets.py b/src/ocgis/test/test_real_data/test_random_datasets.py index 3de2db699..79d711f98 100644 --- a/src/ocgis/test/test_real_data/test_random_datasets.py +++ 
b/src/ocgis/test/test_real_data/test_random_datasets.py @@ -23,7 +23,7 @@ class TestCMIP3Masking(TestBase): @longrunning def test_many_request_datasets(self): - rd_base = self.test_data_nc.get_rd('subset_test_Prcp') + rd_base = self.test_data.get_rd('subset_test_Prcp') geom = [-74.0, 40.0, -72.0, 42.0] rds = [deepcopy(rd_base) for ii in range(500)] for rd in rds: @@ -33,7 +33,7 @@ def test_many_request_datasets(self): def test(self): for key in ['subset_test_Prcp','subset_test_Tavg_sresa2','subset_test_Tavg']: ## test method to return a RequestDataset - rd = self.test_data_nc.get_rd(key) + rd = self.test_data.get_rd(key) geoms = [[-74.0, 40.0, -72.0, 42.0], [-74.0, 38.0, -72.0, 40.0]] for geom in geoms: @@ -55,7 +55,7 @@ class TestCnrmCerfacs(TestBase): @property def rd(self): - return self.test_data_nc.get_rd('rotated_pole_cnrm_cerfacs') + return self.test_data.get_rd('rotated_pole_cnrm_cerfacs') def test_subset(self): """Test data may be subsetted and that coordinate transformations return the same value arrays.""" @@ -97,7 +97,7 @@ def test_cccma_rotated_pole(self): ## with rotated pole, the uid mask was not being updated correctly following ## a transformation back to rotated pole. 
this needed to be updated explicitly ## in subset.py - rd = self.test_data_nc.get_rd('rotated_pole_cccma') + rd = self.test_data.get_rd('rotated_pole_cccma') geom = (5.87161922454834, 47.26985931396479, 15.03811264038086, 55.05652618408209) ops = ocgis.OcgOperations(dataset=rd,output_format='shp',geom=geom, select_ugid=[1],snippet=True) @@ -112,7 +112,7 @@ def test_cccma_rotated_pole(self): def test_ichec_rotated_pole(self): ## this point is far outside the domain ocgis.env.OVERWRITE = True - rd = self.test_data_nc.get_rd('rotated_pole_ichec') + rd = self.test_data.get_rd('rotated_pole_ichec') for geom in [[-100.,45.],[-100,45,-99,46]]: ops = ocgis.OcgOperations(dataset=rd,output_format='nc', calc=[{'func':'mean','name':'mean'}], @@ -122,8 +122,8 @@ def test_ichec_rotated_pole(self): ops.execute() def test_narccap_cancm4_point_subset_no_abstraction(self): - rd = self.test_data_nc.get_rd('cancm4_tas') - rd2 = self.test_data_nc.get_rd('narccap_tas_rcm3_gfdl') + rd = self.test_data.get_rd('cancm4_tas') + rd2 = self.test_data.get_rd('narccap_tas_rcm3_gfdl') rd.alias = 'tas_narccap' rds = [rd,rd2] geom = [-105.2751,39.9782] @@ -134,8 +134,8 @@ def test_narccap_cancm4_point_subset_no_abstraction(self): ops.execute() def test_narccap_cancm4_point_subset_with_abstraction(self): - rd = self.test_data_nc.get_rd('cancm4_tas') - rd2 = self.test_data_nc.get_rd('narccap_tas_rcm3_gfdl') + rd = self.test_data.get_rd('cancm4_tas') + rd2 = self.test_data.get_rd('narccap_tas_rcm3_gfdl') rd2.alias = 'tas_narccap' rds = [ rd, @@ -157,8 +157,8 @@ def test_narccap_cancm4_point_subset_with_abstraction(self): self.assertTrue(ret.geoms[1].area > ret.geoms[2].area) def test_narccap_cancm4_point_subset_with_abstraction_to_csv_shp(self): - rd = self.test_data_nc.get_rd('cancm4_tas') - rd2 = self.test_data_nc.get_rd('narccap_tas_rcm3_gfdl') + rd = self.test_data.get_rd('cancm4_tas') + rd2 = self.test_data.get_rd('narccap_tas_rcm3_gfdl') rd.alias = 'tas_narccap' rds = [ rd, @@ -175,7 +175,7 @@ 
def test_narccap_cancm4_point_subset_with_abstraction_to_csv_shp(self): self.assertEqual(set([row['properties']['UGID'] for row in rows]),set([1,2])) def test_collection_field_geometries_equivalent(self): - rd = self.test_data_nc.get_rd('cancm4_tas',kwds=dict(time_region={'month':[6,7,8]})) + rd = self.test_data.get_rd('cancm4_tas',kwds=dict(time_region={'month':[6,7,8]})) geom = ['state_boundaries',[{'properties':{'UGID':16},'geom':Point([-99.80780059778753,41.52315831343389])}]] for vw,g in itertools.product([True,False],geom): ops = ocgis.OcgOperations(dataset=rd,select_ugid=[16,32],geom=g, @@ -189,7 +189,7 @@ def test_collection_field_geometries_equivalent(self): def test_empty_subset_multi_geometry_wrapping(self): ## adjacent state boundaries were causing an error with wrapping where ## a reference to the source field was being updated. - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') ops = ocgis.OcgOperations(dataset=rd,geom='state_boundaries',select_ugid=[5,6,7]) ret = ops.execute() self.assertEqual(set(ret.keys()),set([5,6,7])) @@ -197,7 +197,7 @@ def test_empty_subset_multi_geometry_wrapping(self): def test_seasonal_calc(self): calc = [{'func':'mean','name':'my_mean'},{'func':'std','name':'my_std'}] calc_grouping = [[3,4,5]] - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') ops = ocgis.OcgOperations(dataset=rd,calc=calc,calc_grouping=calc_grouping, calc_sample_size=True,geom='state_boundaries', select_ugid=[23]) @@ -207,7 +207,7 @@ def test_seasonal_calc(self): calc = [{'func':'mean','name':'my_mean'},{'func':'std','name':'my_std'}] calc_grouping = [[12,1,2],[3,4,5],[6,7,8],[9,10,11]] - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') ops = ocgis.OcgOperations(dataset=rd,calc=calc,calc_grouping=calc_grouping, calc_sample_size=True,geom='state_boundaries', select_ugid=[23]) @@ -217,7 +217,7 @@ def test_seasonal_calc(self): calc = 
[{'func':'mean','name':'my_mean'},{'func':'std','name':'my_std'}] calc_grouping = [[12,1],[2,3]] - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') ops = ocgis.OcgOperations(dataset=rd,calc=calc,calc_grouping=calc_grouping, calc_sample_size=True,geom='state_boundaries', select_ugid=[23]) @@ -229,7 +229,7 @@ def test_seasonal_calc_dkp(self): key = 'dynamic_kernel_percentile_threshold' calc = [{'func':key,'name':'dkp','kwds':{'operation':'lt','percentile':90,'width':5}}] calc_grouping = [[3,4,5]] - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') ops = ocgis.OcgOperations(dataset=rd,calc=calc,calc_grouping=calc_grouping, calc_sample_size=False,geom='state_boundaries', select_ugid=[23]) @@ -241,7 +241,7 @@ def test_seasonal_calc_dkp(self): self.assertNumpyAll(to_test,reference) def test_selecting_single_value(self): - rd = self.test_data_nc.get_rd('cancm4_tas') + rd = self.test_data.get_rd('cancm4_tas') lat_index = 32 lon_index = 97 with nc_scope(rd.uri) as ds: @@ -297,52 +297,26 @@ def test_value_conversion(self): ops.execute() def test_qed_multifile(self): - ddir = os.path.join(ocgis.env.DIR_TEST_DATA, 'QED-2013', 'multifile') - variable = 'txxmmedm' - ocgis.env.DIR_DATA = ddir + """Test concatenating three single time slice climatological files.""" - uri = ['maurer02v2_median_txxmmedm_january_1971-2000.nc', - 'maurer02v2_median_txxmmedm_february_1971-2000.nc', - 'maurer02v2_median_txxmmedm_march_1971-2000.nc'] - - rd = ocgis.RequestDataset(uri,variable) + key = ['qed_2013_maurer02v2_median_txxmmedm_january_1971-2000', + 'qed_2013_maurer02v2_median_txxmmedm_february_1971-2000', + 'qed_2013_maurer02v2_median_txxmmedm_march_1971-2000'] + uri = [self.test_data.get_uri(k) for k in key] + rd = ocgis.RequestDataset(uri=uri, variable='txxmmedm') field = rd.get() + self.assertEqual(field.shape, (1, 3, 1, 222, 462)) @longrunning def test_maurer_concatenated_shp(self): """Test Maurer concatenated data 
may be appropriately subsetted.""" - ocgis.env.DIR_DATA = '/usr/local/climate_data/maurer/2010-concatenated' - ocgis.env.DIR_DATA = os.path.join(ocgis.env.DIR_TEST_DATA, 'maurer', '2010-concatenated') - # ocgis.env.VERBOSE = True - # ocgis.env.DEBUG = True - - names = [ - # [u'Maurer02new_OBS_dtr_daily.1971-2000.nc'], - [u'Maurer02new_OBS_tas_daily.1971-2000.nc'], - [u'Maurer02new_OBS_tasmin_daily.1971-2000.nc'], - [u'Maurer02new_OBS_pr_daily.1971-2000.nc'], - [u'Maurer02new_OBS_tasmax_daily.1971-2000.nc'] - ] - variables = [ - # u'dtr', - u'tas', - u'tasmin', - u'pr', - u'tasmax' - ] - # time_range = [datetime.datetime(1971, 1, 1, 0, 0),datetime.datetime(2000, 12, 31, 0, 0)] - - # rd = RequestDataset(uri=names[0], variable='tas') - # field = rd.get() - # # ops = OcgOperations(dataset=rd, output_format='shp', snippet=True) - # # print ops.execute() - # import ipdb;ipdb.set_trace() - - time_range = None + variables = [u'tas', u'tasmin', u'pr', u'tasmax'] + key_template = 'maurer_2010_concatenated_{0}' + keys = [key_template.format(v) for v in variables] + time_region = {'month': [6, 7, 8], 'year': None} - rds = [ocgis.RequestDataset(name, variable, time_range=time_range, - time_region=time_region) for name, variable in zip(names, variables)] + rds = [self.test_data.get_rd(key, kwds={'time_region': time_region, 'time_range': None}) for key in keys] ops = ocgis.OcgOperations(dataset=rds, calc=[{'name': 'Standard Deviation', 'func': 'std', 'kwds': {}}], calc_grouping=['month'], calc_raw=False, geom='us_counties', select_ugid=[286], @@ -358,42 +332,43 @@ def test_maurer_concatenated_shp(self): self.assertEqual(variables, set([u'pr', u'tasmax', u'tasmin', u'tas'])) def test_point_shapefile_subset(self): - _output_format = ['numpy','nc','csv','csv+'] + """Test subsetting using a point shapefile.""" + + _output_format = ['numpy', 'nc', 'csv', 'csv+'] for output_format in _output_format: - rd = self.test_data_nc.get_rd('cancm4_tas') - ops = 
OcgOperations(dataset=rd,geom='qed_city_centroids',output_format=output_format, + rd = self.test_data.get_rd('cancm4_tas') + ops = OcgOperations(dataset=rd, geom='qed_city_centroids', output_format=output_format, prefix=output_format) ret = ops.execute() if output_format == 'numpy': - self.assertEqual(len(ret),4) + self.assertEqual(len(ret), 4) @longrunning def test_maurer_concatenated_tasmax_region(self): - ocgis.env.DIR_DATA = os.path.join(ocgis.env.DIR_TEST_DATA, 'maurer', '2010-concatenated') - filename = 'Maurer02new_OBS_tasmax_daily.1971-2000.nc' - variable = 'tasmax' -# ocgis.env.VERBOSE = True - - rd = ocgis.RequestDataset(filename,variable) - ops = ocgis.OcgOperations(dataset=rd,geom='us_counties',select_ugid=[2778], + rd = self.test_data.get_rd('maurer_2010_concatenated_tasmax') + ops = ocgis.OcgOperations(dataset=rd, geom='us_counties', select_ugid=[2778], output_format='numpy') ret = ops.execute() ref = ret[2778]['tasmax'] years = np.array([dt.year for dt in ret[2778]['tasmax'].temporal.value_datetime]) months = np.array([dt.month for dt in ret[2778]['tasmax'].temporal.value_datetime]) - select = np.array([dt.month in (6,7,8) and dt.year in (1990,1991,1992,1993,1994,1995,1996,1997,1998,1999) for dt in ret[2778]['tasmax'].temporal.value_datetime]) - time_subset = ret[2778]['tasmax'].variables['tasmax'].value[:,select,:,:,:] + select = np.array( + [dt.month in (6, 7, 8) and dt.year in (1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999) for dt in + ret[2778]['tasmax'].temporal.value_datetime]) + time_subset = ret[2778]['tasmax'].variables['tasmax'].value[:, select, :, :, :] time_values = ref.temporal.value[select] - - rd = ocgis.RequestDataset(filename,variable,time_region={'month':[6,7,8],'year':[1990,1991,1992,1993,1994,1995,1996,1997,1998,1999]}) - ops = ocgis.OcgOperations(dataset=rd,geom='us_counties',select_ugid=[2778], + + kwds = { + 'time_region': {'month': [6, 7, 8], 'year': [1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999]}} + 
rd = self.test_data.get_rd('maurer_2010_concatenated_tasmax', kwds=kwds) + ops = ocgis.OcgOperations(dataset=rd, geom='us_counties', select_ugid=[2778], output_format='numpy') ret2 = ops.execute() ref2 = ret2[2778]['tasmax'] - - self.assertEqual(time_values.shape,ref2.temporal.shape) - self.assertEqual(time_subset.shape,ref2.variables['tasmax'].value.shape) - self.assertNumpyAll(time_subset,ref2.variables['tasmax'].value) + + self.assertEqual(time_values.shape, ref2.temporal.shape) + self.assertEqual(time_subset.shape, ref2.variables['tasmax'].value.shape) + self.assertNumpyAll(time_subset, ref2.variables['tasmax'].value) self.assertFalse(np.any(ref2.variables['tasmax'].value < 0)) def test_time_region_subset(self): @@ -402,7 +377,7 @@ def test_time_region_subset(self): _year = [[2011],None,[2012],[2011,2013]] def run_test(month,year): - rd = self.test_data_nc.get_rd('cancm4_rhs',kwds={'time_region':{'month':month,'year':year}}) + rd = self.test_data.get_rd('cancm4_rhs',kwds={'time_region':{'month':month,'year':year}}) ops = ocgis.OcgOperations(dataset=rd,geom='state_boundaries', select_ugid=[25]) @@ -425,7 +400,7 @@ def test_time_range_time_region_subset(self): time_range = [dt(2013,1,1),dt(2015,12,31)] time_region = {'month':[6,7,8],'year':[2013,2014]} kwds = {'time_range':time_range,'time_region':time_region} - rd = self.test_data_nc.get_rd('cancm4_rhs',kwds=kwds) + rd = self.test_data.get_rd('cancm4_rhs',kwds=kwds) ops = ocgis.OcgOperations(dataset=rd,geom='state_boundaries',select_ugid=[25]) ret = ops.execute() ref = ret[25]['rhs'] @@ -437,7 +412,7 @@ def test_time_range_time_region_do_not_overlap(self): time_region = {'month':[6,7,8],'year':[2013,2014,2018]} kwds = {'time_range':time_range,'time_region':time_region} with self.assertRaises(RequestValidationError): - self.test_data_nc.get_rd('cancm4_rhs',kwds=kwds) + self.test_data.get_rd('cancm4_rhs',kwds=kwds) @longrunning def test_maurer_2010(self): @@ -446,7 +421,7 @@ def test_maurer_2010(self): calc = 
[{'func':'mean','name':'mean'},{'func':'median','name':'median'}] calc_grouping = ['month'] for key in keys: - rd = self.test_data_nc.get_rd(key) + rd = self.test_data.get_rd(key) dct = rd.inspect_as_dct() self.assertEqual(dct['derived']['Count'],'102564') @@ -471,14 +446,14 @@ def test_maurer_2010(self): def test_clip_aggregate(self): ## this geometry was hanging - rd = self.test_data_nc.get_rd('cancm4_tas',kwds={'time_region':{'year':[2003]}}) + rd = self.test_data.get_rd('cancm4_tas',kwds={'time_region':{'year':[2003]}}) ops = OcgOperations(dataset=rd,geom='state_boundaries',select_ugid=[14,16], aggregate=False,spatial_operation='clip',output_format='csv+') ret = ops.execute() @longrunning def test_narccap_point_subset_small(self): - rd = self.test_data_nc.get_rd('narccap_pr_wrfg_ncep') + rd = self.test_data.get_rd('narccap_pr_wrfg_ncep') geom = [-97.74278,30.26694] # ocgis.env.VERBOSE = True # ocgis.env.DEBUG = True @@ -496,76 +471,64 @@ def test_narccap_point_subset_small(self): self.assertEqual(set(ref.variables.keys()),set(['mean', 'median', 'max', 'min'])) def test_bad_time_dimension(self): - ocgis.env.DIR_DATA = ocgis.env.DIR_TEST_DATA - uri = 'seasonalbias.nc' - variable = 'bias' - for output_format in [ - 'numpy', - 'csv', - 'csv+','shp', - 'nc' - ]: - - dataset = RequestDataset(uri=uri,variable=variable) - ops = OcgOperations(dataset=dataset,output_format=output_format, - format_time=False,prefix=output_format) + """Test not formatting the time dimension.""" + + for output_format in ['numpy', 'csv', 'csv+', 'shp', 'nc']: + + dataset = self.test_data.get_rd('snippet_seasonalbias') + ops = OcgOperations(dataset=dataset, output_format=output_format, format_time=False, prefix=output_format) ret = ops.execute() - + if output_format == 'numpy': self.assertNumpyAll(ret[1]['bias'].temporal.value, - np.array([-712208.5,-712117. ,-712025. 
,-711933.5])) + np.array([-712208.5, -712117., -712025., -711933.5])) self.assertNumpyAll(ret[1]['bias'].temporal.bounds, - np.array([[-712254.,-712163.],[-712163.,-712071.],[-712071.,-711979.],[-711979.,-711888.]])) - + np.array([[-712254., -712163.], [-712163., -712071.], [-712071., -711979.], + [-711979., -711888.]])) + if output_format == 'csv': with open(ret) as f: reader = DictReader(f) for row in reader: - self.assertTrue(all([row[k] == '' for k in ['YEAR','MONTH','DAY']])) + self.assertTrue(all([row[k] == '' for k in ['YEAR', 'MONTH', 'DAY']])) self.assertTrue(float(row['TIME']) < -50000) - + if output_format == 'nc': - self.assertNcEqual(dataset.uri,ret,check_types=False,ignore_attributes={'global': ['history']}) + self.assertNcEqual(dataset.uri, ret, check_types=False, ignore_attributes={'global': ['history']}) def test_time_region_climatology(self): - ocgis.env.DIR_DATA = ocgis.env.DIR_TEST_DATA - - uri = 'climatology_TNn_monthly_max.nc' - variable = 'climatology_TNn_monthly_max' - rd = ocgis.RequestDataset(uri,variable,time_region={'year':[1989],'month':[6]}) - ops = ocgis.OcgOperations(dataset=rd,geom='state_boundaries',select_ugid=[16]) + """Test for reading metadata from QED 2013 climate data files.""" + + rd = self.test_data.get_rd('qed_2013_TNn_monthly_max', kwds={'time_region': {'year': [1989], 'month': [6]}}) + ops = ocgis.OcgOperations(dataset=rd, geom='state_boundaries', select_ugid=[16]) ret = ops.execute() ref = ret[16]['climatology_TNn_monthly_max'] - self.assertEqual(set([6]),set([dt.month for dt in ref.temporal.value_datetime])) - self.assertNumpyAll(np.array([[ 151., 10774.]]),ref.temporal.bounds) - - uri = 'climatology_TNn_monthly_max.nc' - variable = 'climatology_TNn_monthly_max' - rd = ocgis.RequestDataset(uri,variable,time_region={'year':None,'month':[6]}) - ops = ocgis.OcgOperations(dataset=rd,geom='state_boundaries',select_ugid=[16]) + self.assertEqual(set([6]), set([dt.month for dt in ref.temporal.value_datetime])) + 
self.assertNumpyAll(np.array([[151., 10774.]]), ref.temporal.bounds) + + rd = self.test_data.get_rd('qed_2013_TNn_monthly_max', kwds={'time_region': {'year': None, 'month': [6]}}) + ops = ocgis.OcgOperations(dataset=rd, geom='state_boundaries', select_ugid=[16]) ret = ops.execute() ref = ret[16]['climatology_TNn_monthly_max'] - self.assertEqual(set([6]),set([dt.month for dt in ref.temporal.value_datetime])) - - rd = ocgis.RequestDataset('climatology_TNn_annual_min.nc','climatology_TNn_annual_min') - ops = ocgis.OcgOperations(dataset=rd,geom='state_boundaries',select_ugid=[16]) + self.assertEqual(set([6]), set([dt.month for dt in ref.temporal.value_datetime])) + + rd = self.test_data.get_rd('qed_2013_TNn_annual_min') + ops = ocgis.OcgOperations(dataset=rd, geom='state_boundaries', select_ugid=[16]) ret = ops.execute() ref = ret[16]['climatology_TNn_annual_min'] - - rd = ocgis.RequestDataset('climatology_TasMin_seasonal_max_of_seasonal_means.nc','climatology_TasMin_seasonal_max_of_seasonal_means')#,time_region={'year':[1989]}) - ops = ocgis.OcgOperations(dataset=rd,geom='state_boundaries',select_ugid=[16]) + + rd = self.test_data.get_rd('qed_2013_TasMin_seasonal_max_of_seasonal_means') + ops = ocgis.OcgOperations(dataset=rd, geom='state_boundaries', select_ugid=[16]) ret = ops.execute() ref = ret[16]['climatology_TasMin_seasonal_max_of_seasonal_means'] - - uri = 'climatology_Tas_annual_max_of_annual_means.nc' - variable = 'climatology_Tas_annual_max_of_annual_means' - rd = ocgis.RequestDataset(uri,variable) - ops = ocgis.OcgOperations(dataset=rd,geom='state_boundaries',select_ugid=[16]) + + rd = self.test_data.get_rd('qed_2013_climatology_Tas_annual_max_of_annual_means') + ops = ocgis.OcgOperations(dataset=rd, geom='state_boundaries', select_ugid=[16]) ret = ops.execute() - ref = ret[16][variable] + ref = ret[16]['climatology_Tas_annual_max_of_annual_means'] def test_mfdataset_to_nc(self): - rd = self.test_data_nc.get_rd('maurer_2010_pr') + rd = 
self.test_data.get_rd('maurer_2010_pr') ops = OcgOperations(dataset=rd,output_format='nc',calc=[{'func':'mean','name':'my_mean'}], calc_grouping=['year'],geom='state_boundaries',select_ugid=[23]) ret = ops.execute() diff --git a/src/ocgis/util/environment.py b/src/ocgis/util/environment.py index 1c4e6105d..fed21b7fe 100644 --- a/src/ocgis/util/environment.py +++ b/src/ocgis/util/environment.py @@ -20,6 +20,7 @@ def __init__(self): self.DIR_BIN = EnvParm('DIR_BIN',None) self.USE_SPATIAL_INDEX = EnvParmImport('USE_SPATIAL_INDEX',None,'rtree') self.USE_CFUNITS = EnvParmImport('USE_CFUNITS',None,'cfunits') + self.CONF_PATH = EnvParm('CONF_PATH', os.path.expanduser('~/.config/ocgis.conf')) self.ops = None self._optimize_store = {} From decd019224042769dac8a570042abb7adc27b02a Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Tue, 4 Nov 2014 17:36:57 -0700 Subject: [PATCH 10/71] 1. Removed raised exceptions with polygon.value and grid.corners. They now return None to be consistent with other spatial objects. 2. Added new functionality for extrapolating corners from 2-d grids. 3. Added new wrapped state code and comparison functions. 
--- doc/utility.rst | 2 +- src/ocgis/api/subset.py | 15 +- src/ocgis/constants.py | 9 +- src/ocgis/exc.py | 16 +- src/ocgis/interface/base/crs.py | 130 ++++++-- src/ocgis/interface/base/dimension/base.py | 11 +- src/ocgis/interface/base/dimension/spatial.py | 313 ++++++++++-------- src/ocgis/interface/base/field.py | 1 - src/ocgis/regrid/base.py | 20 +- .../test_request/test_driver/test_nc.py | 7 +- .../test/test_ocgis/test_api/test_subset.py | 20 +- .../test_interface/test_base/test_crs.py | 71 +++- .../test_base/test_dimension/test_base.py | 91 ++--- .../test_base/test_dimension/test_spatial.py | 291 ++++++++++------ .../test/test_ocgis/test_regrid/test_base.py | 20 +- .../test_ocgis/test_util/test_environment.py | 3 + .../test/test_ocgis/test_util/test_helpers.py | 193 ++++++++++- .../test_spatial/test_spatial_subset.py | 14 +- src/ocgis/test/test_simple/test_simple.py | 9 +- src/ocgis/util/environment.py | 3 + src/ocgis/util/helpers.py | 198 +++++++++-- 21 files changed, 1024 insertions(+), 413 deletions(-) diff --git a/doc/utility.rst b/doc/utility.rst index 2e2b76009..b3fd6c94a 100644 --- a/doc/utility.rst +++ b/doc/utility.rst @@ -6,7 +6,7 @@ Utility Functions :members: format_return .. automodule:: ocgis.util.helpers - :members: get_interpolated_bounds, get_sorted_uris_by_time_dimension + :members: get_bounds_from_1d, get_sorted_uris_by_time_dimension .. 
automodule:: ocgis.util.large_array :members: compute diff --git a/src/ocgis/api/subset.py b/src/ocgis/api/subset.py index b682dc97a..310d11919 100644 --- a/src/ocgis/api/subset.py +++ b/src/ocgis/api/subset.py @@ -1,7 +1,6 @@ from ocgis.calc.engine import OcgCalculationEngine from ocgis import env, constants -from ocgis.exc import EmptyData, ExtentError, MaskedDataError, EmptySubsetError, ImproperPolygonBoundsError, \ - VariableInCollectionError +from ocgis.exc import EmptyData, ExtentError, MaskedDataError, EmptySubsetError, VariableInCollectionError from ocgis.interface.base.field import Field from ocgis.util.logging_ocgis import ocgis_lh, ProgressOcgOperations import logging @@ -287,14 +286,10 @@ def _assert_abstraction_available_(self, field): """ if self.ops.abstraction is not None: - try: - getattr(field.spatial.geom, self.ops.abstraction) - except ImproperPolygonBoundsError: - msg = 'A "polygon" spatial abstraction is not available without the presence of bounds.' - exc = ImproperPolygonBoundsError(msg) - ocgis_lh(exc=exc, logger='subset') - except Exception as e: - ocgis_lh(exc=e, logger='subset') + attr = getattr(field.spatial.geom, self.ops.abstraction) + if attr is None: + msg = 'A "{0}" spatial abstraction is not available.'.format(self.ops.abstraction) + ocgis_lh(exc=ValueError(msg), logger='subset') def _get_slice_or_snippet_(self, field): """ diff --git a/src/ocgis/constants.py b/src/ocgis/constants.py index 35c41848b..17cb0e1e8 100644 --- a/src/ocgis/constants.py +++ b/src/ocgis/constants.py @@ -1,8 +1,6 @@ import numpy as np #: Standard bounds name used when none is available from the input data. -from ocgis.interface.base.crs import CFWGS84 - ocgis_bounds = 'bounds' #: Default netCDF4 output file type @@ -47,11 +45,8 @@ #: NumPy functions enabled for functions evaluated from string representations. enabled_numpy_ufuncs = ['exp','log','abs'] -#: The default coordinate system to use if none is provided. 
-default_coordinate_system = CFWGS84() - -#: The value for the prime meridian to use when wrapping. -prime_meridian = 179.9999999999999 +#: The value for the 180th meridian to use when wrapping. +meridian_180th = 179.9999999999999 test_run_long_tests = False diff --git a/src/ocgis/exc.py b/src/ocgis/exc.py index 4ed0792ec..04be223ef 100644 --- a/src/ocgis/exc.py +++ b/src/ocgis/exc.py @@ -25,6 +25,18 @@ def __str__(self): return msg +class ShapeError(OcgException): + """ + Raised when an array has an incompatible shape with an operation. + """ + + +class SingleElementError(ShapeError): + """ + Raised when an operation requires more than a single element. + """ + + class CalculationException(OcgException): def __init__(self, function_klass, message=None): @@ -150,7 +162,7 @@ class SpatialWrappingError(OcgException): pass -class ImproperPolygonBoundsError(OcgException): +class _ImproperPolygonBoundsError(OcgException): pass @@ -272,7 +284,7 @@ def __str__(self): return message -class CornersUnavailable(OcgException): +class _CornersUnavailable(OcgException): """Raised when grid corners may not be constructed.""" pass \ No newline at end of file diff --git a/src/ocgis/interface/base/crs.py b/src/ocgis/interface/base/crs.py index 42d6a4008..f48fa95cf 100644 --- a/src/ocgis/interface/base/crs.py +++ b/src/ocgis/interface/base/crs.py @@ -4,9 +4,12 @@ from osgeo.osr import SpatialReference from fiona.crs import from_string, to_string import numpy as np +from shapely.geometry import Point, Polygon +from shapely.geometry.base import BaseMultipartGeometry +from ocgis import constants from ocgis.util.logging_ocgis import ocgis_lh from ocgis.exc import SpatialWrappingError, ProjectionCoordinateNotFound,\ - ProjectionDoesNotMatch, ImproperPolygonBoundsError, CornersUnavailable + ProjectionDoesNotMatch from ocgis.util.spatial.wrap import Wrapper from ocgis.util.helpers import iter_array from shapely.geometry.multipolygon import MultiPolygon @@ -82,7 +85,52 @@ def sr(self): class 
WrappableCoordinateReferenceSystem(object): - """Mean to be used in mixin class that can be wrapped.""" + """Meant to be used in mixin classes for coordinate systems that can be wrapped.""" + + _flag_wrapped = 'wrapped' + _flag_unwrapped = 'unwrapped' + _flag_unknown = 'unknown' + + _flag_action_wrap = 'wrap' + _flag_action_unwrap = 'unwrap' + + @classmethod + def get_wrap_action(cls, state_src, state_dst): + """ + :param str state_src: The wrapped state of the source dataset. + :param str state_dst: The wrapped state of the destination dataset. + :returns: The wrapping action to perform on ``state_src``. + :rtype: str + :raises: NotImplementedError, ValueError + """ + + possible = [cls._flag_wrapped, cls._flag_unwrapped, cls._flag_unknown] + has_issue = None + if state_src not in possible: + has_issue = 'source' + if state_dst not in possible: + has_issue = 'destination' + if has_issue is not None: + msg = 'The wrapped state on "{0}" is not recognized.'.format(has_issue) + raise ValueError(msg) + + # the default action is to do nothing. + ret = None + # if the wrapped state of the destination is unknown, then there is no appropriate wrapping action suitable for + # the source. + if state_dst == cls._flag_unknown: + ret = None + # if the destination is wrapped and src is unwrapped, then wrap the src. + elif state_dst == cls._flag_wrapped: + if state_src == cls._flag_unwrapped: + ret = cls._flag_action_wrap + # if the destination is unwrapped and the src is wrapped, the source needs to be unwrapped. 
+ elif state_dst == cls._flag_unwrapped: + if state_src == cls._flag_wrapped: + ret = cls._flag_action_unwrap + else: + raise NotImplementedError(state_dst) + return ret @classmethod def get_is_360(cls, spatial): @@ -104,17 +152,17 @@ def get_is_360(cls, spatial): # column dimension is likely missing try: if spatial.grid.col is None: - try: + if spatial.grid.corners is not None: check = spatial.grid.corners[1] - except CornersUnavailable: + else: check = spatial.grid.value[1, :, :] else: ocgis_lh(exc=e) except AttributeError: # there may be no grid, access the geometries directly - try: + if spatial.geom.polygon is not None: geoms_to_check = spatial.geom.polygon.value - except ImproperPolygonBoundsError: + else: geoms_to_check = spatial.geom.point.value geoms_to_check = geoms_to_check.compressed() @@ -171,11 +219,9 @@ def unwrap(self, spatial): ref[select] += 360 # attempt to to unwrap the grid corners if they exist - try: + if spatial.grid.corners is not None: select = spatial.grid.corners[1] < 0 spatial.grid.corners[1][select] += 360 - except CornersUnavailable: - pass else: ocgis_lh(exc=SpatialWrappingError('Data already has a 0 to 360 coordinate system.')) @@ -229,7 +275,7 @@ def wrap(self,spatial): ref[select] -= 360 # attempt to wrap the grid corners if they exist - try: + if spatial.grid.corners is not None: ref = spatial.grid.corners.data if bounds_cross_meridian: spatial.grid._corners = None @@ -244,24 +290,70 @@ def wrap(self,spatial): else: select = ref[1] > 180 ref[1][select] -= 360 - except CornersUnavailable: - pass else: ocgis_lh(exc=SpatialWrappingError('Data does not have a 0 to 360 coordinate system.')) - def _get_to_wrap_(self,spatial): + @staticmethod + def _get_to_wrap_(spatial): ret = [] ret.append(spatial.geom.point) - try: + if spatial.geom.polygon is not None: ret.append(spatial.geom.polygon) - except ImproperPolygonBoundsError: - pass - return(ret) + return ret + + @classmethod + def _get_wrapped_state_from_array_(cls, arr): + """ + :param 
arr: Input n-dimensional array. + :type arr: :class:`numpy.ndarray` + :returns: A string flag. See class level ``_flag_*`` attributes for values. + :rtype: str + """ + + gt_m180 = arr > constants.meridian_180th + lt_pm = arr < 0 + + if np.any(lt_pm): + ret = cls._flag_wrapped + elif np.any(gt_m180): + ret = cls._flag_unwrapped + else: + ret = cls._flag_unknown + + return ret + + @classmethod + def _get_wrapped_state_from_geometry_(cls, geom): + """ + :param geom: The input geometry. + :type geom: :class:`~shapely.geometry.point.Point`, :class:`~shapely.geometry.point.Polygon`, + :class:`~shapely.geometry.multipoint.MultiPoint`, :class:`~shapely.geometry.multipolygon.MultiPolygon` + :returns: A string flag. See class level ``_flag_*`` attributes for values. + :rtype: str + :raises: NotImplementedError + """ + + if isinstance(geom, BaseMultipartGeometry): + itr = geom + else: + itr = [geom] + + app = np.array([]) + for element in itr: + if isinstance(element, Point): + element_arr = [np.array(element)[0]] + elif isinstance(element, Polygon): + element_arr = np.array(element.exterior.coords)[:, 0] + else: + raise NotImplementedError(type(element)) + app = np.append(app, element_arr) + + return cls._get_wrapped_state_from_array_(app) @staticmethod def _place_prime_meridian_array_(arr): """ - Replace any 180 degree values with the value of :attribute:`ocgis.constants.prime_meridian`. + Replace any 180 degree values with the value of :attribute:`ocgis.constants.meridian_180th`. :param arr: The target array to modify inplace. 
:type arr: :class:`numpy.array` @@ -272,7 +364,7 @@ def _place_prime_meridian_array_(arr): # find the values that are 180 select = arr == 180 # replace the values that are 180 with the constant value - np.place(arr, select, constants.prime_meridian) + np.place(arr, select, constants.meridian_180th) # return the mask used for the replacement return select diff --git a/src/ocgis/interface/base/dimension/base.py b/src/ocgis/interface/base/dimension/base.py index f094aa7f8..7cac238cd 100644 --- a/src/ocgis/interface/base/dimension/base.py +++ b/src/ocgis/interface/base/dimension/base.py @@ -2,7 +2,7 @@ import numpy as np from ocgis import constants from ocgis.util.helpers import get_none_or_1d, get_none_or_2d, get_none_or_slice,\ - get_formatted_slice, assert_raise, get_interpolated_bounds + get_formatted_slice, assert_raise, get_bounds_from_1d from copy import copy, deepcopy from ocgis.exc import EmptySubsetError, ResolutionError from operator import mul @@ -167,7 +167,7 @@ def bounds(self): # if the bounds are None, check if an attempt should be made to interpolate bounds from the value itself. if self._interpolate_bounds and self._bounds is None: - self._bounds = get_interpolated_bounds(self.value) + self.set_extrapolated_bounds() self._has_interpolated_bounds = True # if no error is encountered, then the bounds should have been set during loading from source. 
simply return the @@ -329,7 +329,12 @@ def get_iter(self): yld.update({ref_name_bounds_lower:None, ref_name_bounds_upper:None}) yield(ii,yld) - + + def set_extrapolated_bounds(self): + """Set the bounds variable using extrapolation.""" + + self.bounds = get_bounds_from_1d(self.value) + def _format_private_value_(self,value): return(self._get_none_or_array_(value,masked=False)) diff --git a/src/ocgis/interface/base/dimension/spatial.py b/src/ocgis/interface/base/dimension/spatial.py index 4d63fed65..77c7b516b 100644 --- a/src/ocgis/interface/base/dimension/spatial.py +++ b/src/ocgis/interface/base/dimension/spatial.py @@ -7,15 +7,14 @@ get_formatted_slice, get_reduced_slice, get_trimmed_array_by_mask,\ get_added_slice, make_poly from shapely.geometry.point import Point -from ocgis import constants +from ocgis import constants, env import itertools from shapely.geometry.polygon import Polygon from copy import copy from shapely.prepared import prep from shapely.geometry.multipoint import MultiPoint from shapely.geometry.multipolygon import MultiPolygon -from ocgis.exc import ImproperPolygonBoundsError, EmptySubsetError, SpatialWrappingError, MultipleElementsFound, \ - CornersUnavailable +from ocgis.exc import EmptySubsetError, SpatialWrappingError, MultipleElementsFound from osgeo.ogr import CreateGeometryFromWkb, Geometry, wkbGeometryCollection, wkbPoint from shapely import wkb import fiona @@ -78,11 +77,10 @@ class SpatialDimension(base.AbstractUidDimension): _axis = 'SPATIAL' _attrs_slice = ('uid','grid','_geom') - def __init__(self, *args, **kwds): - self.grid = kwds.pop('grid', None) - self.crs = kwds.pop('crs', None) - self.abstraction = kwds.pop('abstraction', 'polygon') - self._geom = kwds.pop('geom', None) + def __init__(self, *args, **kwargs): + self.grid = kwargs.pop('grid', None) + self.crs = kwargs.pop('crs', None) + self._geom = kwargs.pop('geom', None) # convert the input crs to CFWGS84 if they are equivalent if self.crs == CFWGS84(): @@ -90,12 +88,12 
@@ def __init__(self, *args, **kwds): # remove row and col dimension keywords if they are present. we do not want to pass them to the superclass # constructor. - row = kwds.pop('row', None) - col = kwds.pop('col', None) + row = kwargs.pop('row', None) + col = kwargs.pop('col', None) ## attempt to build the geometry dimension - point = kwds.pop('point', None) - polygon = kwds.pop('polygon', None) + point = kwargs.pop('point', None) + polygon = kwargs.pop('polygon', None) geom_kwds = dict(point=point, polygon=polygon) if any([g != None for g in geom_kwds.values()]): self._geom = SpatialGeometryDimension(**geom_kwds) @@ -104,17 +102,23 @@ def __init__(self, *args, **kwds): if self._grid is None and self._geom is None: self.grid = SpatialGridDimension(row=row, col=col) - super(SpatialDimension, self).__init__(*args, **kwds) + self._abstraction = kwargs.pop('abstraction', None) + self.abstraction = self._abstraction + + super(SpatialDimension, self).__init__(*args, **kwargs) + + @property + def abstraction(self): + return self.geom.abstraction + + @abstraction.setter + def abstraction(self, value): + self._abstraction = value + self.geom.abstraction = value - assert self.abstraction in ('point', 'polygon', None) - @property def abstraction_geometry(self): - if self.abstraction is None: - ret = self.geom.get_highest_order_abstraction() - else: - ret = getattr(self.geom, self.abstraction) - return ret + return self.geom.get_highest_order_abstraction() @property def geom(self): @@ -123,7 +127,7 @@ def geom(self): msg = 'At least a grid is required to construct a geometry dimension.' 
raise ValueError(msg) else: - self._geom = SpatialGeometryDimension(grid=self.grid, uid=self.grid.uid) + self._geom = SpatialGeometryDimension(grid=self.grid, uid=self.grid.uid, abstraction=self._abstraction) return self._geom @property @@ -165,10 +169,10 @@ def weights(self): if self.geom is None: ret = self.grid.weights else: - try: - ret = self.geom.polygon.weights - except ImproperPolygonBoundsError: + if self.geom.polygon is None: ret = self.geom.point.weights + else: + ret = self.geom.polygon.weights return ret def assert_uniform_mask(self): @@ -199,14 +203,14 @@ def from_records(cls, records, crs=None): :param records: A sequence of records returned from an Fiona file object. :type records: sequence - :param crs: If ``None``, default to :attr:`~ocgis.constants.default_coordinate_system`. + :param crs: If ``None``, default to :attr:`~ocgis.env.DEFAULT_COORDSYS`. :type crs: dict or :class:`ocgis.interface.base.crs.CoordinateReferenceSystem` :rtype: :class:`ocgis.interface.base.dimension.SpatialDimension` """ if not isinstance(crs, CoordinateReferenceSystem): # if there is no crs dictionary passed, assume WGS84 - crs = crs or constants.default_coordinate_system.value + crs = crs or env.DEFAULT_COORDSYS.value crs = CoordinateReferenceSystem(value=crs) # these are mappings used to construct the SpatialDimension @@ -287,25 +291,25 @@ def from_records(cls, records, crs=None): sdim = SpatialDimension(geom=dim_geom, uid=uid, properties=properties, crs=crs, abstraction=mapping_kwds[klass]) return sdim - + def get_clip(self, polygon, return_indices=False, use_spatial_index=True, select_nearest=False): assert(type(polygon) in (Polygon, MultiPolygon)) - + ret, slc = self.get_intersects(polygon, return_indices=True, use_spatial_index=use_spatial_index, select_nearest=select_nearest) - + ## clipping with points is okay... 
- try: + if ret.geom.polygon is not None: ref_value = ret.geom.polygon.value - except ImproperPolygonBoundsError: + else: ref_value = ret.geom.point.value for (row_idx, col_idx), geom in iter_array(ref_value, return_value=True): ref_value[row_idx, col_idx] = geom.intersection(polygon) - + if return_indices: ret = (ret, slc) - + return(ret) - + def get_intersects(self, polygon, return_indices=False, use_spatial_index=True, select_nearest=False): """ :param polygon: The subset geometry objec to use for the intersects operation. @@ -336,25 +340,23 @@ def get_intersects(self, polygon, return_indices=False, use_spatial_index=True, if self.grid is None: raise NotImplementedError else: - # reset the geometries - ret._geom = None - # subset the grid by its bounding box ret.grid, slc = self.grid.get_subset_bbox(minx, miny, maxx, maxy, return_indices=True, use_bounds=use_bounds) + + # slice the geometries if they are available + if ret._geom is not None: + ret._geom = ret._geom[slc[0], slc[1]] + # update the unique identifier to copy the grid uid ret.uid = ret.grid.uid assert not self.uid.mask.any() - # attempt to mask the polygons - try: - # only use the polygons if the abstraction indicates as much - if self.abstraction == 'point': - raise ImproperPolygonBoundsError - else: - ret._geom._polygon = ret.geom.polygon.get_intersects_masked(polygon, - use_spatial_index=use_spatial_index) - grid_mask = ret.geom.polygon.value.mask - except ImproperPolygonBoundsError: + # attempt to mask the polygons if the abstraction is point or none + if self.geom.polygon is not None and self.abstraction in ['polygon', None]: + ret._geom._polygon = ret.geom.polygon.get_intersects_masked(polygon, + use_spatial_index=use_spatial_index) + grid_mask = ret.geom.polygon.value.mask + else: ret._geom._point = ret.geom.point.get_intersects_masked(polygon, use_spatial_index=use_spatial_index) grid_mask = ret.geom.point.value.mask assert not self.uid.mask.any() @@ -380,12 +382,9 @@ def get_intersects(self, 
polygon, return_indices=False, use_spatial_index=True, ret = ret[adjust['row'], adjust['col']] if select_nearest: - try: - if self.abstraction == 'point': - raise ImproperPolygonBoundsError - else: - target_geom = ret.geom.polygon.value - except ImproperPolygonBoundsError: + if self.geom.polygon is not None and self.abstraction in ['polygon', None]: + target_geom = ret.geom.polygon.value + else: target_geom = ret.geom.point.value distances = {} centroid = polygon.centroid @@ -405,36 +404,54 @@ def get_intersects(self, polygon, return_indices=False, use_spatial_index=True, ret = (ret, tuple(ret_slc)) return ret - - def get_geom_iter(self,target=None,as_multipolygon=True): + + def get_geom_iter(self, target=None, as_multipolygon=True): + """ + :param str target: The target geometry. One of "point" or "polygon". If ``None``, return the highest order + abstraction. + :param bool as_multipolygon: If ``True``, convert all polygons to multipolygons. + :returns: An iterator yielding a tuple: (int row index, int column index, Shapely geometry, int unique id) + :rtype: tuple + :raises: AttributeError + """ + target = target or self.abstraction if target is None: value = self.geom.get_highest_order_abstraction().value else: - value = getattr(self.geom,target).value + try: + value = getattr(self.geom, target).value + except AttributeError: + msg = 'The target abstraction "{0}" is not available.'.format(target) + raise ValueError(msg) - ## no need to attempt and convert to MultiPolygon if we are working with - ## point data. + # no need to attempt and convert to MultiPolygon if we are working with point data. 
if as_multipolygon and target == 'point': as_multipolygon = False r_uid = self.uid - for (row_idx,col_idx),geom in iter_array(value,return_value=True): + for (row_idx, col_idx), geom in iter_array(value, return_value=True): if as_multipolygon: - if isinstance(geom,Polygon): + if isinstance(geom, Polygon): geom = MultiPolygon([geom]) - uid = r_uid[row_idx,col_idx] - yield(row_idx,col_idx,geom,uid) + uid = r_uid[row_idx, col_idx] + yield (row_idx, col_idx, geom, uid) def get_mask(self): + """ + :returns: A deepcopy of a the boolean mask used on the spatial dimension. + :rtype: :class:`numpy.ndarray` + :raises: ValueError + """ + if self.grid is None: if self.geom.point is None: ret = self.geom.polygon.value.mask else: ret = self.geom.point.value.mask else: - ret = self.grid.value.mask[0,:,:] - return(ret.copy()) + ret = self.grid.value.mask[0, :, :] + return ret.copy() def set_mask(self, mask): """ @@ -490,13 +507,11 @@ def update_crs(self, to_crs): value_col = self.grid.value.data[1].reshape(-1) self._update_crs_with_geometry_collection_(to_sr, value_row, value_col)# update corners - try: + if self.grid.corners is not None: # update the corners corner_row = self.grid.corners.data[0].reshape(-1) corner_col = self.grid.corners.data[1].reshape(-1) self._update_crs_with_geometry_collection_(to_sr, corner_row, corner_col) - except CornersUnavailable: - pass self.grid.row = None self.grid.col = None @@ -599,7 +614,7 @@ def __init__(self, *args, **kwargs): if self._value is None: if self.row is None or self.col is None: msg = 'Without a value, a row and column dimension are required.' - raise(ValueError(msg)) + raise ValueError(msg) def __getitem__(self,slc): slc = get_formatted_slice(slc,2) @@ -647,30 +662,27 @@ def corners(self): if self._corners is None: if self.row is None or self.col is None: - msg = 'Row and/or column not available.' 
- raise CornersUnavailable(msg) + pass + elif self.row.bounds is None or self.col.bounds is None: + pass else: - if self.row.bounds is None or self.col.bounds is None: - msg = 'Row and/or column bounds not available.' - raise CornersUnavailable(msg) - else: - fill = np.zeros([2]+list(self.shape)+[4], dtype=self.row.value.dtype) - col_bounds = self.col.bounds - row_bounds = self.row.bounds - for ii, jj in itertools.product(range(self.shape[0]), range(self.shape[1])): - fill_element = fill[:, ii, jj] - fill_element[:, 0] = row_bounds[ii, 0], col_bounds[jj, 0] - fill_element[:, 1] = row_bounds[ii, 0], col_bounds[jj, 1] - fill_element[:, 2] = row_bounds[ii, 1], col_bounds[jj, 1] - fill_element[:, 3] = row_bounds[ii, 1], col_bounds[jj, 0] - - mask_value = self.value.mask - mask_fill = np.zeros(fill.shape, dtype=bool) - for (ii, jj), m in iter_array(mask_value[0, :, :], return_value=True): - mask_fill[:, ii, jj, :] = m - fill = np.ma.array(fill, mask=mask_fill) - - self._corners = fill + fill = np.zeros([2]+list(self.shape)+[4], dtype=self.row.value.dtype) + col_bounds = self.col.bounds + row_bounds = self.row.bounds + for ii, jj in itertools.product(range(self.shape[0]), range(self.shape[1])): + fill_element = fill[:, ii, jj] + fill_element[:, 0] = row_bounds[ii, 0], col_bounds[jj, 0] + fill_element[:, 1] = row_bounds[ii, 0], col_bounds[jj, 1] + fill_element[:, 2] = row_bounds[ii, 1], col_bounds[jj, 1] + fill_element[:, 3] = row_bounds[ii, 1], col_bounds[jj, 0] + + mask_value = self.value.mask + mask_fill = np.zeros(fill.shape, dtype=bool) + for (ii, jj), m in iter_array(mask_value[0, :, :], return_value=True): + mask_fill[:, ii, jj, :] = m + fill = np.ma.array(fill, mask=mask_fill) + + self._corners = fill return self._corners @@ -688,23 +700,24 @@ def corners_esmf(self): fill = np.zeros([2] + [element + 1 for element in self.shape], dtype=self.value.dtype) range_row = range(self.shape[0]) range_col = range(self.shape[1]) + _corners = self.corners for ii, jj in 
itertools.product(range_row, range_col): ref = fill[:, ii:ii+2, jj:jj+2] - ref[:, 0, 0] = self.corners[:, ii, jj, 0] - ref[:, 0, 1] = self.corners[:, ii, jj, 1] - ref[:, 1, 1] = self.corners[:, ii, jj, 2] - ref[:, 1, 0] = self.corners[:, ii, jj, 3] + ref[:, 0, 0] = _corners[:, ii, jj, 0] + ref[:, 0, 1] = _corners[:, ii, jj, 1] + ref[:, 1, 1] = _corners[:, ii, jj, 2] + ref[:, 1, 0] = _corners[:, ii, jj, 3] return fill @property def extent(self): if self.row is None: - try: + if self.corners is not None: minx = self.corners[1].min() miny = self.corners[0].min() maxx = self.corners[1].max() maxy = self.corners[0].max() - except CornersUnavailable: + else: minx = self.value[1,:,:].min() miny = self.value[0,:,:].min() maxx = self.value[1,:,:].max() @@ -720,7 +733,7 @@ def extent(self): miny = self.row.bounds.min() maxx = self.col.bounds.max() maxy = self.row.bounds.max() - return(minx,miny,maxx,maxy) + return minx, miny, maxx, maxy @property def extent_polygon(self): @@ -866,34 +879,55 @@ def _get_value_(self): class SpatialGeometryDimension(base.AbstractUidDimension): _axis = 'GEOM' _ndims = 2 - _attrs_slice = ('uid','grid','_point','_polygon') - - def __init__(self,*args,**kwds): - self.grid = kwds.pop('grid',None) - self._point = kwds.pop('point',None) - self._polygon = kwds.pop('polygon',None) - - super(SpatialGeometryDimension,self).__init__(*args,**kwds) + _attrs_slice = ('uid', 'grid', '_point', '_polygon') + + def __init__(self, *args, **kwargs): + self.grid = kwargs.pop('grid', None) + self._point = kwargs.pop('point', None) + self._polygon = kwargs.pop('polygon', None) + self._abstraction = kwargs.pop('abstraction', None) + + super(SpatialGeometryDimension, self).__init__(*args, **kwargs) if self.grid is None and self._point is None and self._polygon is None: msg = 'At minimum, a grid, point, or polygon dimension is required.' 
- raise(ValueError(msg)) + raise ValueError(msg) + + @property + def abstraction(self): + return self._abstraction + + @abstraction.setter + def abstraction(self, value): + options = ['point', 'polygon', None] + if value not in options: + raise ValueError('Must be one of: {0}'.format(options)) + # reset polygons if the point abstraction is set. + if value == 'point': + self._polygon = None + self._abstraction = value @property def point(self): if self._point is None and self.grid is not None: - self._point = SpatialGeometryPointDimension(grid=self.grid,uid=self.grid.uid) - return(self._point) + self._point = SpatialGeometryPointDimension(grid=self.grid, uid=self.grid.uid) + return self._point @property def polygon(self): if self._polygon is None: - if self.grid is None: - msg = 'Constructing a polygon dimension requires a grid dimension.' - raise ImproperPolygonBoundsError(msg) - else: - self._polygon = SpatialGeometryPolygonDimension(grid=self.grid,uid=self.grid.uid) - return(self._polygon) + if self.abstraction in ['polygon', None]: + if self.grid is not None: + try: + self._polygon = SpatialGeometryPolygonDimension(grid=self.grid, uid=self.grid.uid) + except ValueError: + none_bounds_row = self.grid.row is None or self.grid.row.bounds is None + none_bounds_col = self.grid.col is None or self.grid.col.bounds is None + if any([none_bounds_row, none_bounds_col]): + pass + else: + raise + return self._polygon @property def shape(self): @@ -905,18 +939,26 @@ def shape(self): def get_highest_order_abstraction(self): """ - Return the highest order abstraction geometry with preference given by: + :returns: Return the highest order abstraction geometry with preference given by: 1. Polygon 2. 
Point + :rtype: :class:`~ocgis.interface.base.dimension.spatial.SpatialGeometryDimension` """ - try: + if self.abstraction == 'point': + ret = self.point + elif self.abstraction == 'polygon': ret = self.polygon - # if the polygon is a NoneType, return the point dimension - if ret is None: + else: + if self.polygon is None: ret = self.point - except ImproperPolygonBoundsError: - ret = self.point + else: + ret = self.polygon + + if ret is None: + msg = 'No abstraction geometry found. Is "abstraction" compatible with the geometries available?' + raise ValueError(msg) + return ret def get_iter(self): @@ -1106,22 +1148,21 @@ def __init__(self, *args, **kwargs): # we can construct from a grid dimension having bounds if self.grid is None: msg = 'A grid dimension is required for constructing a polygon dimension without a value.' - raise ImproperPolygonBoundsError(msg) + raise ValueError(msg) else: - # corners may also be used to construct polygons. if there are none, explain why polygons are not - # available - try: - if self.grid._corners is None: - if self.grid.row is None or self.grid.col is None: - raise ImproperPolygonBoundsError( - 'Polygon dimensions require a grid dimension with row and column dimensions with bounds.') - else: - if self.grid.row.bounds is None or self.grid.col.bounds is None: - raise ImproperPolygonBoundsError( - 'Polygon dimensions require row and column dimension bounds to have delta > 0.') - except CornersUnavailable: - msg = 'Polygon dimensions require grid corners.' - raise(ImproperPolygonBoundsError(msg)) + # corners may also be used to construct polygons. if they are not immediately available, check for + # bounds are on the row and column. 
+ none_bounds_row = self.grid.row is None or self.grid.row.bounds is None + none_bounds_col = self.grid.col is None or self.grid.col.bounds is None + should_raise = True + if any([none_bounds_row, none_bounds_col]): + if self.grid.corners is not None: + should_raise = False + else: + should_raise = False + if should_raise: + msg = 'Row/column bounds or grid corners are required to construct polygons.' + raise ValueError(msg) @property def area(self): diff --git a/src/ocgis/interface/base/field.py b/src/ocgis/interface/base/field.py index a6835f951..7373c7c05 100644 --- a/src/ocgis/interface/base/field.py +++ b/src/ocgis/interface/base/field.py @@ -10,7 +10,6 @@ from ocgis.interface.base.variable import Variable, VariableCollection from ocgis import constants from shapely.geometry.point import Point -from ocgis.exc import ImproperPolygonBoundsError import logging from ocgis.util.logging_ocgis import ocgis_lh diff --git a/src/ocgis/regrid/base.py b/src/ocgis/regrid/base.py index f2adbdf06..a5affecd8 100644 --- a/src/ocgis/regrid/base.py +++ b/src/ocgis/regrid/base.py @@ -1,7 +1,7 @@ from copy import deepcopy import ESMF import numpy as np -from ocgis.exc import CornersUnavailable, RegriddingError, CornersInconsistentError +from ocgis.exc import RegriddingError, CornersInconsistentError from ocgis.interface.base.crs import Spherical from ocgis.interface.base.dimension.spatial import SpatialGridDimension, SpatialDimension from ocgis.interface.base.variable import VariableCollection @@ -109,7 +109,7 @@ def get_esmf_grid_from_sdim(sdim, with_corners=True, value_mask=None): # attempt to access corners if possible if with_corners: - try: + if sdim.grid.corners is not None: corners_esmf = sdim.grid.corners_esmf # adding corners. 
first tell the grid object to allocate corners egrid.add_coords(staggerloc=[ESMF.StaggerLoc.CORNER]) @@ -117,8 +117,6 @@ def get_esmf_grid_from_sdim(sdim, with_corners=True, value_mask=None): grid_corner = egrid.coords[ESMF.StaggerLoc.CORNER] grid_corner[1][:] = corners_esmf[0] grid_corner[0][:] = corners_esmf[1] - except CornersUnavailable: - pass return egrid @@ -206,15 +204,13 @@ def _assert_spherical_crs_(crs): if with_corners: has_corners_sources = [] for source in sources: - try: - source.spatial.grid.corners + if source.spatial.grid.corners is not None: has_corners_sources.append(True) - except CornersUnavailable: + else: has_corners_sources.append(False) - try: - sdim.grid.corners + if sdim.grid.corners is not None: has_corners_destination = True - except CornersUnavailable: + else: has_corners_destination = False if not all(has_corners_sources) or not has_corners_destination: msg = 'Corners are not available on all sources and destination. Consider setting "with_corners" to False.' 
@@ -339,8 +335,8 @@ def iter_regridded_fields(sources, destination, with_corners='choose', value_mas out_sdim.grid.col.bounds = None out_sdim.grid._corners = None # remove any polygons if they exist - if out_sdim._geom is not None: - out_sdim.geom._polygon = None + out_sdim.geom._polygon = None + out_sdim.geom.grid = out_sdim.grid build = False # perform the regrid operation and fill the new variabales diff --git a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py index f2cbd6e80..7e26f4878 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py +++ b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py @@ -12,11 +12,11 @@ from ocgis.interface.base.crs import WGS84, CFWGS84, CFLambertConformal import numpy as np from datetime import datetime as dt -from ocgis.interface.base.dimension.spatial import SpatialGeometryPolygonDimension,SpatialGeometryDimension, \ +from ocgis.interface.base.dimension.spatial import SpatialGeometryPolygonDimension, SpatialGeometryDimension, \ SpatialDimension import fiona from shapely.geometry.geo import shape -from ocgis.exc import EmptySubsetError, ImproperPolygonBoundsError, DimensionNotFound +from ocgis.exc import EmptySubsetError, DimensionNotFound import datetime from unittest.case import SkipTest import ocgis @@ -324,8 +324,7 @@ def test_load_with_projection(self): self.assertAlmostEqual(field.spatial.grid.value.mean(),-26.269666952512416) field.spatial.crs.unwrap(field.spatial) self.assertAlmostEqual(field.spatial.grid.value.mean(),153.73033304748759) - with self.assertRaises(ImproperPolygonBoundsError): - field.spatial.geom.polygon + self.assertIsNone(field.spatial.geom.polygon) self.assertAlmostEqual(field.spatial.geom.point.value[0,100].x,278.52630062012787) self.assertAlmostEqual(field.spatial.geom.point.value[0,100].y,21.4615681252577) diff --git 
a/src/ocgis/test/test_ocgis/test_api/test_subset.py b/src/ocgis/test/test_ocgis/test_api/test_subset.py index 2a290a7cc..eaaaf12db 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_subset.py +++ b/src/ocgis/test/test_ocgis/test_api/test_subset.py @@ -1,10 +1,9 @@ from copy import deepcopy -import os import pickle from ocgis.conv.numpy_ import NumpyConverter -from ocgis.exc import CornersUnavailable +from ocgis.exc import DimensionNotFound from ocgis.interface.base.crs import Spherical, CFWGS84, CFPolarStereographic -from ocgis.interface.base.dimension.spatial import SpatialDimension +from ocgis.interface.base.dimension.spatial import SpatialDimension, SpatialGeometryPointDimension from ocgis.test.base import TestBase import ocgis from ocgis.api.subset import SubsetOperation @@ -13,8 +12,8 @@ from ocgis.test.test_ocgis.test_api.test_parms.test_definition import TestGeom from ocgis.util.itester import itr_products_keywords from ocgis.util.logging_ocgis import ProgressOcgOperations -from ocgis import constants import numpy as np +from ocgis import env class TestSubsetOperation(TestBase): @@ -32,6 +31,14 @@ def get_subset_operation(self): subset = SubsetOperation(ops) return subset + def test_abstraction_not_available(self): + """Test appropriate exception is raised when a selected abstraction is not available.""" + + rd = self.test_data.get_rd('daymet_tmax') + ops = ocgis.OcgOperations(dataset=rd, abstraction='polygon', geom='state_boundaries', select_ugid=[25]) + with self.assertRaises(ValueError): + ops.execute() + def test_init(self): for rb, p in itertools.product([True, False], [None, ProgressOcgOperations()]): sub = SubsetOperation(self.get_operations(), request_base_size_only=rb, progress=p) @@ -109,7 +116,7 @@ def test_regridding_same_field(self): if isinstance(k.regrid_destination, SpatialDimension): self.assertEqual(field.spatial.crs, Spherical()) else: - self.assertEqual(field.spatial.crs, constants.default_coordinate_system) + 
self.assertEqual(field.spatial.crs, env.DEFAULT_COORDSYS) self.assertTrue(d['variable'].value.mean() > 100) self.assertTrue(np.any(field.spatial.get_mask())) self.assertTrue(np.any(d['variable'].value.mask)) @@ -128,8 +135,7 @@ def test_regridding_same_field_bad_bounds_without_corners(self): for coll in ret: for dd in coll.get_iter_melted(): field = dd['field'] - with self.assertRaises(CornersUnavailable): - field.spatial.grid.corners + self.assertIsNone(field.spatial.grid.corners) for to_test in [field.spatial.grid.row.bounds, field.spatial.grid.col.bounds]: self.assertIsNone(to_test) diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_crs.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_crs.py index e90300352..4a860644f 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_crs.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_crs.py @@ -6,7 +6,7 @@ from ocgis.interface.base.dimension.base import VectorDimension from ocgis.interface.base.dimension.spatial import SpatialGridDimension,\ SpatialDimension -from ocgis.exc import SpatialWrappingError, CornersUnavailable +from ocgis.exc import SpatialWrappingError from ocgis.test.base import TestBase import numpy as np from copy import deepcopy @@ -55,6 +55,59 @@ def test_ne(self): self.assertNotEqual('input', crs1) +class TestWrappableCoordinateSystem(TestBase): + create_dir = False + + def test_get_wrap_action(self): + _w = WrappableCoordinateReferenceSystem + possible = [_w._flag_wrapped, _w._flag_unwrapped, _w._flag_unknown, 'foo'] + keywords = dict(state_src=possible, + state_dst=possible) + for k in itr_products_keywords(keywords, as_namedtuple=True): + try: + ret = _w.get_wrap_action(k.state_src, k.state_dst) + except ValueError: + self.assertTrue(k.state_src == 'foo' or k.state_dst == 'foo') + continue + if k.state_dst == _w._flag_unknown: + self.assertIsNone(ret) + elif k.state_src == _w._flag_unwrapped and k.state_dst == _w._flag_wrapped: + 
self.assertEqual(ret, _w._flag_action_wrap) + elif k.state_src == _w._flag_wrapped and k.state_dst == _w._flag_unwrapped: + self.assertEqual(ret, _w._flag_action_unwrap) + else: + self.assertIsNone(ret) + + def test_get_wrapped_state_from_array(self): + + def _run_(arr, actual_wrapped_state): + ret = WrappableCoordinateReferenceSystem._get_wrapped_state_from_array_(arr) + self.assertEqual(ret, actual_wrapped_state) + + arr = np.array([-170]) + _run_(arr, WrappableCoordinateReferenceSystem._flag_wrapped) + + arr = np.array([270]) + _run_(arr, WrappableCoordinateReferenceSystem._flag_unwrapped) + + arr = np.array([30]) + _run_(arr, WrappableCoordinateReferenceSystem._flag_unknown) + + def test_get_wrapped_state_from_geometry(self): + geoms = [Point(-130, 40), + MultiPoint([Point(-130, 40), Point(30, 50)]), + make_poly((30, 40), (-130, -120)), + MultiPolygon([make_poly((30, 40), (-130, -120)), make_poly((30, 40), (130, 160))])] + + for geom in geoms: + ret = WrappableCoordinateReferenceSystem._get_wrapped_state_from_geometry_(geom) + self.assertEqual(ret, WrappableCoordinateReferenceSystem._flag_wrapped) + + pt = Point(270, 50) + ret = WrappableCoordinateReferenceSystem._get_wrapped_state_from_geometry_(pt) + self.assertEqual(ret, WrappableCoordinateReferenceSystem._flag_unwrapped) + + class TestSpherical(TestBase): def test_init(self): @@ -92,7 +145,7 @@ def test_place_prime_meridian_array(self): arr = np.array([123, 180, 200, 180], dtype=float) ret = Spherical._place_prime_meridian_array_(arr) self.assertNumpyAll(ret, np.array([False, True, False, True])) - self.assertNumpyAll(arr, np.array([123., constants.prime_meridian, 200., constants.prime_meridian])) + self.assertNumpyAll(arr, np.array([123., constants.meridian_180th, 200., constants.meridian_180th])) def test_wrap_unwrap_with_mask(self): """Test wrapped and unwrapped geometries with a mask ensuring that masked values are wrapped and unwrapped.""" @@ -170,8 +223,7 @@ def _get_sdim_(value, bounds): sdim.wrap() 
self.assertIsNone(sdim.grid.col.bounds) self.assertIsNone(sdim.grid.row.bounds) - with self.assertRaises(CornersUnavailable): - sdim.grid.corners + self.assertIsNone(sdim.grid.corners) self.assertEqual(sdim.geom.polygon.value[0, 0][0].bounds, (-180.0, 38.0, -176.0, 42.0)) self.assertNumpyAll(np.array(sdim.geom.point.value[0, 0]), np.array([-178., 40.])) @@ -180,8 +232,7 @@ def _get_sdim_(value, bounds): sdim.wrap() self.assertIsNone(sdim.grid.col.bounds) self.assertIsNone(sdim.grid.row.bounds) - with self.assertRaises(CornersUnavailable): - sdim.grid.corners + self.assertIsNone(sdim.grid.corners) self.assertEqual(sdim.geom.polygon.value[0, 0][0].bounds, (178.0, 38.0, 180.0, 42.0)) self.assertEqual(sdim.geom.polygon.value[0, 0][1].bounds, (-180.0, 38.0, -178.0, 42.0)) self.assertNumpyAll(np.array(sdim.geom.point.value[0, 0]), np.array([180., 40.])) @@ -194,8 +245,7 @@ def _get_sdim_(value, bounds): sdim.grid.col.bounds sdim.grid.col.bounds = None sdim.wrap() - with self.assertRaises(CornersUnavailable): - sdim.grid.corners + self.assertIsNone(sdim.grid.corners) # unwrap a wrapped spatial dimension making sure the unwrapped multipolygon bounds are the same as the wrapped # polygon bounds. 
@@ -290,9 +340,10 @@ def test_get_rotated_pole_transformation(self): spatial.grid.uid.mask[5, 6] = True spatial.assert_uniform_mask() - self.assertIsNone(spatial._geom) + self.assertIsNone(spatial._geom._polygon) + self.assertIsNone(spatial._geom._point) spatial.geom - self.assertIsNotNone(spatial._geom) + self.assertIsNotNone(spatial._geom.point) new_spatial = field.spatial.crs.get_rotated_pole_transformation(spatial) original_crs = deepcopy(field.spatial.crs) self.assertIsInstance(new_spatial.crs, CFWGS84) diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_base.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_base.py index ee9aa5b54..318a47bb1 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_base.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_base.py @@ -1,20 +1,25 @@ import unittest import numpy as np +from ocgis import constants from ocgis.exc import EmptySubsetError, ResolutionError from ocgis.interface.base.dimension.base import VectorDimension from copy import deepcopy from cfunits.cfunits import Units -from ocgis.util.helpers import get_interpolated_bounds +from ocgis.test.base import TestBase +from ocgis.util.helpers import get_bounds_from_1d -class TestVectorDimension(unittest.TestCase): - - def assertNumpyAll(self,arr1,arr2): - return(self.assertTrue(np.all(arr1 == arr2))) - - def assertNumpyNotAll(self,arr1,arr2): - return(self.assertFalse(np.all(arr1 == arr2))) - +class TestVectorDimension(TestBase): + create_dir = False + + def test_set_extrapolated_bounds(self): + value = np.array([1, 2, 3, 4], dtype=float) + vd = VectorDimension(value=value) + self.assertIsNone(vd.bounds) + vd.set_extrapolated_bounds() + actual = np.array([[0.5, 1.5], [1.5, 2.5], [2.5, 3.5], [3.5, 4.5]], dtype=float) + self.assertNumpyAll(vd.bounds, actual) + def test_bounds_only_two_dimensional(self): value = [10,20,30,40,50] bounds = [ @@ -66,47 +71,55 @@ 
def test_bad_dtypes(self): VectorDimension(value=181.5,bounds=['a','b']) def test_one_value(self): - values = [5,np.array([5])] + """Test passing a single value.""" + + values = [5, np.array([5])] for value in values: - vdim = VectorDimension(value=value,src_idx=10) - self.assertEqual(vdim.value[0],5) - self.assertEqual(vdim.uid[0],1) - self.assertEqual(len(vdim.uid),1) - self.assertEqual(vdim.shape,(1,)) - self.assertNumpyAll(vdim.bounds,None) - self.assertEqual(vdim[0].value[0],5) - self.assertEqual(vdim[0].uid[0],1) - self.assertEqual(vdim[0]._src_idx[0],10) - self.assertNumpyAll(vdim[0].bounds,None) + vdim = VectorDimension(value=value, src_idx=10) + self.assertEqual(vdim.value[0], 5) + self.assertEqual(vdim.uid[0], 1) + self.assertEqual(len(vdim.uid), 1) + self.assertEqual(vdim.shape, (1,)) + self.assertIsNone(vdim.bounds) + self.assertEqual(vdim[0].value[0], 5) + self.assertEqual(vdim[0].uid[0], 1) + self.assertEqual(vdim[0]._src_idx[0], 10) + self.assertIsNone(vdim[0].bounds) with self.assertRaises(ResolutionError): vdim.resolution def test_with_bounds(self): - vdim = VectorDimension(value=[4,5,6],bounds=[[3,5],[4,6],[5,7]]) - self.assertNumpyAll(vdim.bounds,np.array([[3,5],[4,6],[5,7]])) - self.assertNumpyAll(vdim.uid,np.array([1,2,3])) - self.assertEqual(vdim.resolution,2.0) + """Test passing bounds to the constructor.""" + + vdim = VectorDimension(value=[4, 5, 6], bounds=[[3, 5], [4, 6], [5, 7]]) + self.assertNumpyAll(vdim.bounds, np.array([[3, 5], [4, 6], [5, 7]])) + self.assertNumpyAll(vdim.uid, np.array([1, 2, 3], dtype=constants.np_int)) + self.assertEqual(vdim.resolution, 2.0) def test_boolean_slice(self): - vdim = VectorDimension(value=[4,5,6],bounds=[[3,5],[4,6],[5,7]]) - vdim_slc = vdim[np.array([True,False,True])] + """Test slicing with boolean values.""" + + vdim = VectorDimension(value=[4, 5, 6], bounds=[[3, 5], [4, 6], [5, 7]]) + vdim_slc = vdim[np.array([True, False, True])] self.assertFalse(len(vdim_slc) > 2) - 
self.assertNumpyAll(vdim_slc.value,[4,6]) - self.assertNumpyAll(vdim_slc.bounds,[[3,5],[5,7]]) + self.assertNumpyAll(vdim_slc.value, np.array([4, 6])) + self.assertNumpyAll(vdim_slc.bounds, np.array([[3, 5], [5, 7]])) def test_set_reference(self): - vdim = VectorDimension(value=[4,5,6]) + """Test setting values on the internal value array using indexing.""" + + vdim = VectorDimension(value=[4, 5, 6]) vdim_slc = vdim[1] - self.assertEqual(vdim_slc.uid[0],2) + self.assertEqual(vdim_slc.uid[0], 2) vdim_slc2 = vdim[:] - self.assertNumpyAll(vdim_slc2.value,vdim.value) + self.assertNumpyAll(vdim_slc2.value, vdim.value) vdim._value[1] = 500 - self.assertNumpyAll(vdim.value,[4,500,6]) + self.assertNumpyAll(vdim.value, np.array([4, 500, 6])) with self.assertRaises(TypeError): - vdim.bounds[1,:] - self.assertNumpyAll(vdim.value,vdim_slc2.value) + vdim.bounds[1, :] + self.assertNumpyAll(vdim.value, vdim_slc2.value) vdim_slc2._value[2] = 1000 - self.assertNumpyAll(vdim.value,vdim_slc2.value) + self.assertNumpyAll(vdim.value, vdim_slc2.value) def test_slice_source_idx_only(self): vdim = VectorDimension(src_idx=[4,5,6],data='foo') @@ -138,14 +151,16 @@ def test_units_with_bounds(self): for i in [True,False]: value = [5.,10.,15.] 
vdim = VectorDimension(value=value,units='celsius', - bounds=get_interpolated_bounds(np.array(value)), + bounds=get_bounds_from_1d(np.array(value)), interpolate_bounds=i) vdim.cfunits_conform(Units('kelvin')) self.assertNumpyAll(vdim.bounds,np.array([[275.65,280.65],[280.65,285.65],[285.65,290.65]])) def test_load_from_source(self): - vdim = VectorDimension(src_idx=[0,1,2,3],data='foo') - self.assertNumpyAll(vdim.uid,np.array([1,2,3,4])) + """Test loading from a fake data source.""" + + vdim = VectorDimension(src_idx=[0, 1, 2, 3], data='foo') + self.assertNumpyAll(vdim.uid, np.array([1, 2, 3, 4], dtype=constants.np_int)) with self.assertRaises(NotImplementedError): vdim.value with self.assertRaises(NotImplementedError): diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py index 8d8e84229..97a45a7e7 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py @@ -7,14 +7,13 @@ from ocgis.interface.base.dimension.spatial import SpatialDimension,\ SpatialGeometryDimension, SpatialGeometryPolygonDimension,\ SpatialGridDimension, SpatialGeometryPointDimension, SingleElementRetriever -from ocgis.util.helpers import iter_array, make_poly, get_interpolated_bounds,\ +from ocgis.util.helpers import iter_array, make_poly, get_bounds_from_1d,\ get_date_list, write_geom_dict import fiona from fiona.crs import from_epsg from shapely.geometry import shape, mapping, Polygon from shapely.geometry.point import Point -from ocgis.exc import EmptySubsetError, ImproperPolygonBoundsError, SpatialWrappingError, MultipleElementsFound, \ - CornersUnavailable +from ocgis.exc import EmptySubsetError, SpatialWrappingError, MultipleElementsFound from ocgis.test.base import TestBase from ocgis.interface.base.crs import CoordinateReferenceSystem, WGS84, 
CFWGS84, CFRotatedPole from ocgis.interface.base.dimension.base import VectorDimension @@ -25,7 +24,7 @@ from ocgis.util.spatial.wrap import Wrapper -class TestSpatialBase(TestBase): +class AbstractTestSpatialDimension(TestBase): def assertGeometriesAlmostEquals(self, a, b): @@ -111,7 +110,7 @@ def write_sdim(self): sink.write(row) -class TestSingleElementRetriever(TestSpatialBase): +class TestSingleElementRetriever(AbstractTestSpatialDimension): def test_init(self): sdim = self.get_sdim() @@ -127,7 +126,7 @@ def test_init(self): self.assertIsNone(single.crs) -class TestSpatialDimension(TestSpatialBase): +class TestSpatialDimension(AbstractTestSpatialDimension): def get_records(self): sc = ShpCabinet() @@ -144,6 +143,7 @@ def get_spatial_dimension_from_records(self): def test_init(self): sdim = self.get_sdim(bounds=True) + self.assertIsNone(sdim.abstraction) self.assertNumpyAll(sdim.grid.value, self.grid_value_regular) def _almost_equals_(a, b): @@ -156,6 +156,23 @@ def _almost_equals_(a, b): self.assertTrue(to_test.all()) self.assertFalse(sdim.geom.polygon.value.mask.any()) + def test_abstraction(self): + sdim = self.get_sdim() + self.assertIsNone(sdim.abstraction) + self.assertEqual(sdim.abstraction, sdim._abstraction) + self.assertIsInstance(sdim.abstraction_geometry, SpatialGeometryPointDimension) + + sdim = self.get_sdim(bounds=True) + self.assertIsInstance(sdim.geom.polygon, SpatialGeometryPolygonDimension) + sdim.abstraction = 'point' + self.assertEqual(sdim.geom.abstraction, 'point') + self.assertEqual(sdim.abstraction, sdim._abstraction) + self.assertIsInstance(sdim.abstraction_geometry, SpatialGeometryPointDimension) + + def test_abstraction_geometry(self): + sdim = self.get_sdim(bounds=True) + self.assertIsInstance(sdim.abstraction_geometry, SpatialGeometryPolygonDimension) + def test_init_combinations(self): """ - points only @@ -220,21 +237,21 @@ def iter_geom(): continue raise if k['polygon'] is not None or k['grid'] is not None: - try: + if 
geom.polygon is not None: try: self.assertGeometriesAlmostEquals(geom.polygon.value, self.polygon_value) except AssertionError: # coordinates may be ordered differently self.assertGeometriesAlmostEquals(geom.polygon.value, self.polygon_value_alternate_ordering) self.assertNumpyAll(geom.polygon.uid, self.uid_value) - except ImproperPolygonBoundsError: + else: try: if k['polygon'] is None and k['grid'].corners is None: if k['grid'].row is None or k['grid'].col is None: continue except CornersUnavailable: continue - if geom.grid.bounds is None: + if geom.grid.corners is None: if geom.grid.row.bounds is None or geom.grid.col.bounds is None: continue raise @@ -271,9 +288,6 @@ def geom_iterator(): if sdim.geom.polygon is None and sdim.grid is None: continue raise - except ImproperPolygonBoundsError: - self.assertIsNone(sdim.grid) - continue try: self.assertNumpyAll(sdim.grid.value, self.grid_value_regular) @@ -284,20 +298,33 @@ def geom_iterator(): raise try: - self.assertNumpyAll(sdim.grid.corners, self.grid_corners_regular) + if sdim.grid.corners is not None: + self.assertNumpyAll(sdim.grid.corners, self.grid_corners_regular) + else: + if k['row'] is None or k['col'] is None: + pass + else: + if k['row'].bounds is None or k['col'].bounds is None: + pass + else: + raise except AttributeError: if k['grid'] is None: pass else: raise - except CornersUnavailable: - if k['row'] is None or k['col'] is None: - pass - else: - if k['row'].bounds is None or k['col'].bounds is None: - pass - else: - raise + + def test_abstraction_behavior(self): + """Test abstraction limits what elements are loaded and returned.""" + + row = VectorDimension(value=[2, 4]) + col = VectorDimension(value=[4, 6]) + for element in [row, col]: + element.set_extrapolated_bounds() + grid = SpatialGridDimension(row=row, col=col) + + sdim = SpatialDimension(grid=grid, abstraction='point') + self.assertIsNone(sdim.geom.polygon) def test_set_mask(self): @@ -363,7 +390,8 @@ def test_set_mask(self): if 
k.with_grid or k.with_geom: try: self.assertNumpyAll(sdim.get_mask(), actual) - except ImproperPolygonBoundsError: + except AttributeError: + # there is actually nothing on this sdim, so the mask may not be retrieved if not k.with_grid and not k.with_point and not k.with_polygon: continue else: @@ -481,9 +509,9 @@ def test_get_intersects_select_nearest(self): self.assertEqual(ret.shape, (1, 1)) try: self.assertTrue(ret.geom.polygon.value[0,0].centroid.almost_equals(pt)) - ## polygons will not be present if the abstraction is point or there are no bounds on the created - ## spatial dimension object - except ImproperPolygonBoundsError: + # ## polygons will not be present if the abstraction is point or there are no bounds on the created + # ## spatial dimension object + except AttributeError: if a == 'point' or b is False: self.assertTrue(ret.geom.point.value[0, 0].almost_equals(pt)) else: @@ -497,37 +525,6 @@ def test_get_intersects_select_nearest(self): else: self.assertEqual(ret.shape, (1, 1)) - def test_get_interpolated_bounds(self): - - sdim = self.get_sdim(bounds=False) - test_sdim = self.get_sdim(bounds=True) - - row_bounds = get_interpolated_bounds(sdim.grid.row.value) - col_bounds = get_interpolated_bounds(sdim.grid.col.value) - - self.assertNumpyAll(row_bounds,test_sdim.grid.row.bounds) - self.assertNumpyAll(col_bounds,test_sdim.grid.col.bounds) - - across_180 = np.array([-180,-90,0,90,180],dtype=float) - bounds_180 = get_interpolated_bounds(across_180) - self.assertEqual(bounds_180.tostring(),'\x00\x00\x00\x00\x00 l\xc0\x00\x00\x00\x00\x00\xe0`\xc0\x00\x00\x00\x00\x00\xe0`\xc0\x00\x00\x00\x00\x00\x80F\xc0\x00\x00\x00\x00\x00\x80F\xc0\x00\x00\x00\x00\x00\x80F@\x00\x00\x00\x00\x00\x80F@\x00\x00\x00\x00\x00\xe0`@\x00\x00\x00\x00\x00\xe0`@\x00\x00\x00\x00\x00 l@') - - dates = get_date_list(datetime.datetime(2000,1,31),datetime.datetime(2002,12,31),1) - with self.assertRaises(NotImplementedError): - get_interpolated_bounds(np.array(dates)) - - with 
self.assertRaises(ValueError): - get_interpolated_bounds(np.array([0],dtype=float)) - - just_two = get_interpolated_bounds(np.array([50,75],dtype=float)) - self.assertEqual(just_two.tostring(),'\x00\x00\x00\x00\x00\xc0B@\x00\x00\x00\x00\x00@O@\x00\x00\x00\x00\x00@O@\x00\x00\x00\x00\x00\xe0U@') - - just_two_reversed = get_interpolated_bounds(np.array([75,50],dtype=float)) - self.assertEqual(just_two_reversed.tostring(),'\x00\x00\x00\x00\x00\xe0U@\x00\x00\x00\x00\x00@O@\x00\x00\x00\x00\x00@O@\x00\x00\x00\x00\x00\xc0B@') - - zero_origin = get_interpolated_bounds(np.array([0,50,100],dtype=float)) - self.assertEqual(zero_origin.tostring(),'\x00\x00\x00\x00\x00\x009\xc0\x00\x00\x00\x00\x00\x009@\x00\x00\x00\x00\x00\x009@\x00\x00\x00\x00\x00\xc0R@\x00\x00\x00\x00\x00\xc0R@\x00\x00\x00\x00\x00@_@') - def test_get_clip(self): sdim = self.get_sdim(bounds=True) poly = make_poly((37.75,38.25),(-100.25,-99.75)) @@ -561,8 +558,9 @@ def test_get_geom_iter(self): self.assertEqual(ttt,[1, 0, [-100.0, 39.0],5]) sdim = self.get_sdim(bounds=False) - self.assertEqual(sdim.abstraction,'polygon') - with self.assertRaises(ImproperPolygonBoundsError): + self.assertIsNone(sdim.abstraction) + # this abstraction is not available + with self.assertRaises(ValueError): list(sdim.get_geom_iter(target='polygon')) def test_get_intersects_point_abstraction(self): @@ -599,8 +597,7 @@ def test_get_intersects_polygon_small(self): to_test = ret.geom.point.value.compressed()[0] self.assertTrue(to_test.almost_equals(Point(-100,38))) if b is False: - with self.assertRaises(ImproperPolygonBoundsError): - ret.geom.polygon + self.assertIsNone(ret.geom.polygon) else: to_test = ret.geom.polygon.value.compressed()[0].bounds self.assertEqual((-100.5,37.5,-99.5,38.5),to_test) @@ -720,8 +717,7 @@ def test_update_crs_geom_combinations(self): if not k.with_polygon: sdim.geom._polygon = None - with self.assertRaises(ImproperPolygonBoundsError): - sdim.geom.polygon + self.assertIsNone(sdim.geom.polygon) 
sdim.update_crs(to_crs) @@ -769,21 +765,20 @@ def test_update_crs_grid_combinations(self): if k.with_grid: self.assertAlmostEqual(sdim.grid.value.data.mean(), -267630.25728117273) - try: + if sdim.grid.corners is None: + self.assertFalse(k.with_corners) + else: self.assertAlmostEqual(sdim.grid.corners.data.mean(), -267565.33741344721) - except CornersUnavailable: - if sdim.grid.row is not None: - raise - self.assertIsNone(sdim._geom) + self.assertIsNone(sdim._geom._point) + self.assertIsNone(sdim._geom._polygon) self.assertIsNone(sdim.grid.row) self.assertIsNone(sdim.grid.col) try: self.assertEqual(sdim.geom.polygon.value[2, 2].bounds, (130734.585229303, -832179.0855220362, 220974.77455120225, -719113.1357226598)) - except ImproperPolygonBoundsError: - if not k.with_corners: - pass + except AttributeError: + self.assertFalse(k.with_corners) self.assertEqual(sdim.geom.point.value[2, 2].bounds, (175552.29305101855, -775779.6191590576, 175552.29305101855, -775779.6191590576)) @@ -791,11 +786,10 @@ def test_update_crs_grid_combinations(self): if k.with_grid: self.assertNumpyAllClose(sdim.grid.value, original_grid_value) - try: + if sdim.grid.corners is None: + self.assertFalse(k.with_corners) + else: self.assertNumpyAllClose(sdim.grid.corners, original_grid_corners) - except CornersUnavailable: - if not k.with_corners: - pass def test_update_crs_general_error(self): """Test general OGR errors are appropriately raised if it is not a rotated pole transformation.""" @@ -870,8 +864,7 @@ def test_geom_point(self): def test_geom_polygon_no_bounds(self): sdim = self.get_sdim(bounds=False) - with self.assertRaises(ImproperPolygonBoundsError): - sdim.geom.polygon.value + self.assertIsNone(sdim.geom.polygon) def test_geom_polygon_bounds(self): sdim = self.get_sdim(bounds=True) @@ -905,30 +898,33 @@ def test_geoms_only(self): self.assertEqual(sdim.shape,(1,51)) def test_slicing(self): + """Test variations of slicing.""" + sdim = self.get_sdim(bounds=True) - 
self.assertEqual(sdim.shape,(3,4)) - self.assertEqual(sdim._geom,None) - self.assertEqual(sdim.geom.point.shape,(3,4)) - self.assertEqual(sdim.geom.polygon.shape,(3,4)) - self.assertEqual(sdim.grid.shape,(3,4)) + self.assertIsNone(sdim._geom._point) + self.assertIsNone(sdim._geom._polygon) + self.assertEqual(sdim.shape, (3, 4)) + self.assertEqual(sdim.geom.point.shape, (3, 4)) + self.assertEqual(sdim.geom.polygon.shape, (3, 4)) + self.assertEqual(sdim.grid.shape, (3, 4)) with self.assertRaises(IndexError): sdim[0] - sdim_slc = sdim[0,1] - self.assertEqual(sdim_slc.shape,(1,1)) - self.assertEqual(sdim_slc.uid,np.array([[2]],dtype=np.int32)) - self.assertNumpyAll(sdim_slc.grid.value,np.ma.array([[[40.]],[[-99.]]],mask=False)) - self.assertNotEqual(sdim_slc,None) - to_test = sdim_slc.geom.point.value[0,0].y,sdim_slc.geom.point.value[0,0].x - self.assertEqual((40.0,-99.0),(to_test)) - to_test = sdim_slc.geom.polygon.value[0,0].centroid.y,sdim_slc.geom.polygon.value[0,0].centroid.x - self.assertEqual((40.0,-99.0),(to_test)) - - refs = [sdim_slc.geom.point.value,sdim_slc.geom.polygon.value] + sdim_slc = sdim[0, 1] + self.assertEqual(sdim_slc.shape, (1, 1)) + self.assertEqual(sdim_slc.uid, np.array([[2]], dtype=np.int32)) + self.assertNumpyAll(sdim_slc.grid.value, np.ma.array([[[40.]], [[-99.]]], mask=False)) + self.assertNotEqual(sdim_slc, None) + to_test = sdim_slc.geom.point.value[0, 0].y, sdim_slc.geom.point.value[0, 0].x + self.assertEqual((40.0, -99.0), to_test) + to_test = sdim_slc.geom.polygon.value[0, 0].centroid.y, sdim_slc.geom.polygon.value[0, 0].centroid.x + self.assertEqual((40.0, -99.0), to_test) + + refs = [sdim_slc.geom.point.value, sdim_slc.geom.polygon.value] for ref in refs: - self.assertIsInstance(ref,np.ma.MaskedArray) - - sdim_all = sdim[:,:] - self.assertNumpyAll(sdim_all.grid.value,sdim.grid.value) + self.assertIsInstance(ref, np.ma.MaskedArray) + + sdim_all = sdim[:, :] + self.assertNumpyAll(sdim_all.grid.value, sdim.grid.value) def 
test_slicing_1d_none(self): sdim = self.get_sdim(bounds=True) @@ -1056,7 +1052,81 @@ def test_wrap_unwrap_non_wgs84(self): getattr(sdim, method)() -class TestSpatialGeometryPointDimension(TestSpatialBase): +class TestSpatialGeometryDimension(TestBase): + + def get(self, **kwargs): + with_bounds = kwargs.pop('with_bounds', True) + + row = VectorDimension(value=[2., 4.]) + col = VectorDimension(value=[4., 6.]) + if with_bounds: + for element in [row, col]: + element.set_extrapolated_bounds() + grid = SpatialGridDimension(row=row, col=col) + kwargs['grid'] = grid + gdim = SpatialGeometryDimension(**kwargs) + + return gdim + + def test_init(self): + with self.assertRaises(ValueError): + SpatialGeometryDimension() + + gdim = self.get() + self.assertIsNone(gdim.abstraction) + self.assertIsInstance(gdim.point, SpatialGeometryPointDimension) + self.assertIsInstance(gdim.polygon, SpatialGeometryPolygonDimension) + + gdim = self.get(abstraction='point') + self.assertEqual(gdim.abstraction, 'point') + self.assertIsNone(gdim.polygon) + self.assertIsInstance(gdim.point, SpatialGeometryPointDimension) + + gdim = self.get() + gdim2 = SpatialGeometryDimension(point=gdim.point) + self.assertIsNone(gdim2.abstraction) + + def test_abstraction(self): + gdim = self.get() + with self.assertRaises(ValueError): + gdim.abstraction = 'foo' + self.assertIsInstance(gdim.polygon, SpatialGeometryPolygonDimension) + gdim.abstraction = 'point' + self.assertIsNone(gdim.polygon) + + def test_polygon(self): + gdim = self.get() + self.assertIsNone(gdim.abstraction) + self.assertIsInstance(gdim.polygon, SpatialGeometryPolygonDimension) + + gdim = self.get() + gdim.abstraction = 'polygon' + self.assertIsInstance(gdim.polygon, SpatialGeometryPolygonDimension) + + gdim = self.get(with_bounds=False) + self.assertIsNone(gdim.grid.row.bounds) + self.assertIsNone(gdim.polygon) + + def test_get_highest_order_abstraction(self): + gdim = self.get() + self.assertIsNone(gdim.abstraction) + 
self.assertIsInstance(gdim.get_highest_order_abstraction(), SpatialGeometryPolygonDimension) + + gdim = self.get(abstraction='point') + self.assertIsInstance(gdim.get_highest_order_abstraction(), SpatialGeometryPointDimension) + + gdim = self.get() + gdim.point + gdim.grid = None + self.assertIsNone(gdim.polygon) + self.assertIsInstance(gdim.get_highest_order_abstraction(), SpatialGeometryPointDimension) + + gdim = self.get() + gdim2 = SpatialGeometryDimension(point=gdim.point, abstraction='polygon') + with self.assertRaises(ValueError): + gdim2.get_highest_order_abstraction() + +class TestSpatialGeometryPointDimension(AbstractTestSpatialDimension): def test_get_intersects_masked(self): sdim = self.get_sdim(crs=WGS84()) @@ -1076,12 +1146,27 @@ def test_get_intersects_masked(self): self.assertIsNotNone(sdim.grid) -class TestSpatialGeometryPolygonDimension(TestSpatialBase): +class TestSpatialGeometryPolygonDimension(AbstractTestSpatialDimension): + + def test_init(self): + with self.assertRaises(ValueError): + SpatialGeometryPolygonDimension() + + row = VectorDimension(value=[2, 3]) + col = VectorDimension(value=[4, 5]) + grid = SpatialGridDimension(row=row, col=col) + self.assertIsNone(grid.corners) + with self.assertRaises(ValueError): + SpatialGeometryPolygonDimension(grid=grid) + + value = grid.value + grid = SpatialGridDimension(value=value) + with self.assertRaises(ValueError): + SpatialGeometryPolygonDimension(grid=grid) def test_get_value(self): # the ordering of vertices when creating from corners is slightly different - keywords = dict(with_grid_row_col_bounds=[True, False], with_grid_mask=[True, False]) for k in itr_products_keywords(keywords, as_namedtuple=True): @@ -1102,7 +1187,7 @@ def test_get_value(self): self.assertGeometriesAlmostEquals(poly, actual) -class TestSpatialGridDimension(TestSpatialBase): +class TestSpatialGridDimension(AbstractTestSpatialDimension): def assertGridCorners(self, grid): """ @@ -1191,11 +1276,15 @@ def 
iter_grid_combinations_for_corners(self): yield sdim.grid + def test_init(self): + with self.assertRaises(ValueError): + SpatialGridDimension() + def test_corners(self): for grid in self.iter_grid_combinations_for_corners(): try: self.assertGridCorners(grid) - except CornersUnavailable: + except AssertionError: if grid.row is None or grid.row.bounds is None: continue else: diff --git a/src/ocgis/test/test_ocgis/test_regrid/test_base.py b/src/ocgis/test/test_ocgis/test_regrid/test_base.py index 212f868c9..c3209f85c 100644 --- a/src/ocgis/test/test_ocgis/test_regrid/test_base.py +++ b/src/ocgis/test/test_ocgis/test_regrid/test_base.py @@ -2,7 +2,7 @@ import ESMF from shapely.geometry import Polygon, MultiPolygon import ocgis -from ocgis.exc import CornersUnavailable, RegriddingError, ImproperPolygonBoundsError, CornersInconsistentError +from ocgis.exc import RegriddingError, CornersInconsistentError from ocgis.interface.base.crs import CoordinateReferenceSystem, WGS84, Spherical from ocgis.interface.base.dimension.spatial import SpatialGridDimension, SpatialDimension from ocgis.interface.base.field import Field @@ -159,8 +159,7 @@ def test_check_fields_for_regridding(self): ref.spatial.grid.row = None ref.spatial.grid.col = None ref.spatial.grid._corners = None - with self.assertRaises(CornersUnavailable): - ref.spatial.grid.corners + self.assertIsNone(ref.spatial.grid.corners) for with_corners in [True, False]: if with_corners: with self.assertRaises(CornersInconsistentError): @@ -174,8 +173,7 @@ def test_check_fields_for_regridding(self): ref.spatial.grid.row = None ref.spatial.grid.col = None ref.spatial.grid._corners = None - with self.assertRaises(CornersUnavailable): - ref.spatial.grid.corners + self.assertIsNone(ref.spatial.grid.corners) for with_corners in [True, False]: if with_corners: with self.assertRaises(CornersInconsistentError): @@ -298,10 +296,8 @@ def test_iter_regridded_field_with_corners(self): for regridded in iter_regridded_fields(sources, 
destination_field, with_corners=False): self.assertIsNone(regridded.spatial.grid.row.bounds) self.assertIsNone(regridded.spatial.grid.col.bounds) - with self.assertRaises(CornersUnavailable): - regridded.spatial.grid.corners - with self.assertRaises(ImproperPolygonBoundsError): - regridded.spatial.geom.polygon + self.assertIsNone(regridded.spatial.grid.corners) + self.assertIsNone(regridded.spatial.geom.polygon) # check that the destination grid is not modified self.assertIsNotNone(destination_field.spatial.grid.row.bounds) @@ -314,8 +310,7 @@ def test_iter_regridded_field_with_corners(self): dest.grid.col.bounds dest.grid.col.bounds = None dest.grid._corners = None - with self.assertRaises(CornersUnavailable): - dest.grid.corners + self.assertIsNone(dest.grid.corners) with self.assertRaises(CornersInconsistentError): list(iter_regridded_fields(sources, dest, with_corners=True)) # if this is now false, then there should be no problem as only centroids are used @@ -443,8 +438,7 @@ def test_get_sdim_from_esmf_grid(self): if k.has_corners: self.assertNumpyAll(sdim.grid.corners, nsdim.grid.corners) else: - with self.assertRaises(CornersUnavailable): - self.assertIsNone(nsdim.grid.corners) + self.assertIsNone(nsdim.grid.corners) def test_get_esmf_grid_from_sdim_with_mask(self): """Test with masked data.""" diff --git a/src/ocgis/test/test_ocgis/test_util/test_environment.py b/src/ocgis/test/test_ocgis/test_util/test_environment.py index 2256c3306..84c57e5c1 100644 --- a/src/ocgis/test/test_ocgis/test_util/test_environment.py +++ b/src/ocgis/test/test_ocgis/test_util/test_environment.py @@ -43,6 +43,9 @@ def get_is_available(self,module_name): def test_conf_path(self): env.CONF_PATH + def test_default_coordsys(self): + env.DEFAULT_COORDSYS + def test_import_attributes(self): ## with both modules installed, these are expected to be true self.assertEqual(env.USE_CFUNITS,self.get_is_available('cfunits')) diff --git a/src/ocgis/test/test_ocgis/test_util/test_helpers.py 
b/src/ocgis/test/test_ocgis/test_util/test_helpers.py index 734863b73..2345b8ac6 100644 --- a/src/ocgis/test/test_ocgis/test_util/test_helpers.py +++ b/src/ocgis/test/test_ocgis/test_util/test_helpers.py @@ -2,15 +2,202 @@ import os import numpy as np from shapely.geometry import Point +from ocgis.exc import SingleElementError, ShapeError +from ocgis.test.test_ocgis.test_interface.test_base.test_dimension.test_spatial import AbstractTestSpatialDimension from ocgis.util.helpers import format_bool, iter_array, validate_time_subset,\ get_formatted_slice, get_is_date_between, get_trimmed_array_by_mask,\ - get_added_slice, get_iter, get_ordered_dicts_from_records_array, get_sorted_uris_by_time_dimension + get_added_slice, get_iter, get_ordered_dicts_from_records_array, get_sorted_uris_by_time_dimension, \ + get_bounds_from_1d, get_date_list, get_bounds_vector_from_centroids, get_extrapolated_corners_esmf, get_is_increasing, \ + get_extrapolated_corners_esmf_vector import itertools from ocgis.test.base import TestBase -from datetime import datetime as dt +from datetime import datetime as dt, datetime -class Test(TestBase): +class Test1(AbstractTestSpatialDimension): + + def test_get_bounds_from_1d(self): + sdim = self.get_sdim(bounds=False) + test_sdim = self.get_sdim(bounds=True) + + row_bounds = get_bounds_from_1d(sdim.grid.row.value) + col_bounds = get_bounds_from_1d(sdim.grid.col.value) + + self.assertNumpyAll(row_bounds, test_sdim.grid.row.bounds) + self.assertNumpyAll(col_bounds, test_sdim.grid.col.bounds) + + across_180 = np.array([-180, -90, 0, 90, 180], dtype=float) + bounds_180 = get_bounds_from_1d(across_180) + self.assertEqual(bounds_180.tostring(), '\x00\x00\x00\x00\x00 l\xc0\x00\x00\x00\x00\x00\xe0`\xc0\x00\x00\x00\x00\x00\xe0`\xc0\x00\x00\x00\x00\x00\x80F\xc0\x00\x00\x00\x00\x00\x80F\xc0\x00\x00\x00\x00\x00\x80F@\x00\x00\x00\x00\x00\x80F@\x00\x00\x00\x00\x00\xe0`@\x00\x00\x00\x00\x00\xe0`@\x00\x00\x00\x00\x00 l@') + + dates = get_date_list(datetime(2000, 
1, 31), datetime(2002, 12, 31), 1) + with self.assertRaises(NotImplementedError): + get_bounds_from_1d(np.array(dates)) + + with self.assertRaises(ValueError): + get_bounds_from_1d(np.array([0], dtype=float)) + + just_two = get_bounds_from_1d(np.array([50, 75], dtype=float)) + self.assertEqual(just_two.tostring(), '\x00\x00\x00\x00\x00\xc0B@\x00\x00\x00\x00\x00@O@\x00\x00\x00\x00\x00@O@\x00\x00\x00\x00\x00\xe0U@') + + just_two_reversed = get_bounds_from_1d(np.array([75, 50], dtype=float)) + self.assertEqual(just_two_reversed.tostring(), '\x00\x00\x00\x00\x00\xe0U@\x00\x00\x00\x00\x00@O@\x00\x00\x00\x00\x00@O@\x00\x00\x00\x00\x00\xc0B@') + + zero_origin = get_bounds_from_1d(np.array([0, 50, 100], dtype=float)) + self.assertEqual(zero_origin.tostring(), '\x00\x00\x00\x00\x00\x009\xc0\x00\x00\x00\x00\x00\x009@\x00\x00\x00\x00\x00\x009@\x00\x00\x00\x00\x00\xc0R@\x00\x00\x00\x00\x00\xc0R@\x00\x00\x00\x00\x00@_@') + + def test_get_is_increasing(self): + ret = get_is_increasing(np.array([1, 2, 3])) + self.assertTrue(ret) + + ret = get_is_increasing(np.array([3, 2, 1])) + self.assertFalse(ret) + + with self.assertRaises(SingleElementError): + get_is_increasing(np.array([1])) + + with self.assertRaises(ShapeError): + get_is_increasing(np.zeros((2, 2))) + + def test_get_extrapolated_corners_esmf(self): + dtype = np.float32 + + row_increasing = np.array([[1, 1.5, 2], + [2, 2.5, 3], + [3, 3.5, 4]], dtype=dtype) + corners = get_extrapolated_corners_esmf(row_increasing) + actual = np.array([[0.25, 0.75, 1.25, 1.75], + [1.25, 1.75, 2.25, 2.75], + [2.25, 2.75, 3.25, 3.75], + [3.25, 3.75, 4.25, 4.75]], dtype=dtype) + self.assertNumpyAll(corners, actual) + + row_decreasing = np.flipud(row_increasing) + corners = get_extrapolated_corners_esmf(row_decreasing) + actual = np.array([[3.25, 3.75, 4.25, 4.75], + [2.25, 2.75, 3.25, 3.75], + [1.25, 1.75, 2.25, 2.75], + [0.25, 0.75, 1.25, 1.75]], dtype=dtype) + self.assertNumpyAll(corners, actual) + + col_decreasing = 
np.fliplr(row_increasing) + corners = get_extrapolated_corners_esmf(col_decreasing) + actual = np.array([[1.75, 1.25, 0.75, 0.25], + [2.75, 2.25, 1.75, 1.25], + [3.75, 3.25, 2.75, 2.25], + [4.75, 4.25, 3.75, 3.25]], dtype=dtype) + self.assertNumpyAll(corners, actual) + + row_monotonic_increasing = np.array([[1, 1, 1], + [2, 2, 2], + [3, 3, 3]], dtype=dtype) + corners = get_extrapolated_corners_esmf(row_monotonic_increasing) + actual = np.array([[0.5, 0.5, 0.5, 0.5], + [1.5, 1.5, 1.5, 1.5], + [2.5, 2.5, 2.5, 2.5], + [3.5, 3.5, 3.5, 3.5]], dtype=dtype) + self.assertNumpyAll(corners, actual) + + row_monotonic_decreasing = np.flipud(row_monotonic_increasing) + corners = get_extrapolated_corners_esmf(row_monotonic_decreasing) + actual = np.array([[3.5, 3.5, 3.5, 3.5], + [2.5, 2.5, 2.5, 2.5], + [1.5, 1.5, 1.5, 1.5], + [0.5, 0.5, 0.5, 0.5]], dtype=dtype) + self.assertNumpyAll(corners, actual) + + row_negative = row_increasing*-1 + corners = get_extrapolated_corners_esmf(row_negative) + actual = np.array([[-0.25, -0.75, -1.25, -1.75], + [-1.25, -1.75, -2.25, -2.75], + [-2.25, -2.75, -3.25, -3.75], + [-3.25, -3.75, -4.25, -4.75]], dtype=dtype) + self.assertNumpyAll(corners, actual) + + two_by_two = np.array([[1, 1], + [2, 2]], dtype=dtype) + corners = get_extrapolated_corners_esmf(two_by_two) + actual = np.array([[0.5, 0.5, 0.5], + [1.5, 1.5, 1.5], + [2.5, 2.5, 2.5]], dtype=dtype) + self.assertNumpyAll(corners, actual) + + one_by_four = np.array([[1, 2, 3, 4]], dtype=dtype) + corners = get_extrapolated_corners_esmf(one_by_four) + actual = np.array([[0.5, 1.5, 2.5, 3.5, 4.5], + [0.5, 1.5, 2.5, 3.5, 4.5]], dtype=dtype) + self.assertNumpyAll(corners, actual) + + four_by_one = np.array([[1, 2, 3, 4]], dtype=dtype).reshape(-1, 1) + corners = get_extrapolated_corners_esmf(four_by_one) + actual = np.array([[0.5, 0.5], + [1.5, 1.5], + [2.5, 2.5], + [3.5, 3.5], + [4.5, 4.5]], dtype=dtype) + self.assertNumpyAll(corners, actual) + + four_by_one_reversed = np.flipud(four_by_one) + 
corners = get_extrapolated_corners_esmf(four_by_one_reversed) + actual = np.array([[4.5, 4.5], + [3.5, 3.5], + [2.5, 2.5], + [1.5, 1.5], + [0.5, 0.5]], dtype=dtype) + self.assertNumpyAll(corners, actual) + + with self.assertRaises(SingleElementError): + get_extrapolated_corners_esmf(np.array([[1]])) + + with self.assertRaises(SingleElementError): + get_extrapolated_corners_esmf(np.array([1])) + + def test_get_extrapolated_corners_esmf_vector(self): + vec = np.array([1, 2, 3], dtype=np.float32) + corners = get_extrapolated_corners_esmf_vector(vec) + actual = np.array([[0.5, 1.5, 2.5, 3.5], [0.5, 1.5, 2.5, 3.5]], dtype=np.float32) + self.assertNumpyAll(corners, actual) + + vec = np.array([3, 2, 1], dtype=float) + corners = get_extrapolated_corners_esmf_vector(vec) + actual = np.array([[3.5, 2.5, 1.5, 0.5], [3.5, 2.5, 1.5, 0.5]]) + self.assertNumpyAll(corners, actual) + + with self.assertRaises(ShapeError): + get_extrapolated_corners_esmf_vector(np.zeros((2, 2))) + + def test_get_bounds_vector_from_centroids(self): + # must have length greater than one to determine resolution. 
+ centroids = np.array([1]) + with self.assertRaises(ValueError): + get_bounds_vector_from_centroids(centroids) + + centroids = np.array([1, 2], dtype=float) + ret = get_bounds_vector_from_centroids(centroids) + self.assertNumpyAll(ret, np.array([0.5, 1.5, 2.5])) + + centroids = np.array([2, 1], dtype=np.float32) + ret = get_bounds_vector_from_centroids(centroids) + self.assertNumpyAll(ret, np.array([2.5, 1.5, 0.5], dtype=np.float32)) + + centroids = np.array([-2, -1], dtype=float) + ret = get_bounds_vector_from_centroids(centroids) + self.assertNumpyAll(ret, np.array([-2.5, -1.5, -0.5])) + + centroids = np.array([-1, -2], dtype=float) + ret = get_bounds_vector_from_centroids(centroids) + self.assertNumpyAll(ret, np.array([-0.5, -1.5, -2.5])) + + centroids = np.array([-1, 2], dtype=float) + ret = get_bounds_vector_from_centroids(centroids) + self.assertNumpyAll(ret, np.array([-2.5, 0.5, 3.5])) + + centroids = np.array([2, -1], dtype=float) + ret = get_bounds_vector_from_centroids(centroids) + self.assertNumpyAll(ret, np.array([3.5, 0.5, -2.5])) + + +class Test2(TestBase): def test_get_sorted_uris_by_time_dimension(self): rd_2001 = self.test_data.get_rd('cancm4_tasmax_2001') diff --git a/src/ocgis/test/test_ocgis/test_util/test_spatial/test_spatial_subset.py b/src/ocgis/test/test_ocgis/test_util/test_spatial/test_spatial_subset.py index e46854e20..28ada52c9 100644 --- a/src/ocgis/test/test_ocgis/test_util/test_spatial/test_spatial_subset.py +++ b/src/ocgis/test/test_ocgis/test_util/test_spatial/test_spatial_subset.py @@ -15,7 +15,7 @@ from ocgis.util.helpers import make_poly from ocgis.util.itester import itr_products_keywords from ocgis.util.spatial.spatial_subset import SpatialSubsetOperation -from ocgis import constants +from ocgis import constants, env class TestSpatialSubset(TestBase): @@ -180,16 +180,16 @@ def test_get_spatial_subset(self): for subset_sdim in self.get_subset_sdim(): for operation in ['intersects', 'clip', 'foo']: + use_subset_sdim = 
deepcopy(subset_sdim) + use_ss = deepcopy(ss) + # ctr += 1 # print ctr - # if ctr != 223: + # if ctr != 73: # continue # else: # import ipdb;ipdb.set_trace() - use_subset_sdim = deepcopy(subset_sdim) - use_ss = deepcopy(ss) - ctr += 1 try: ret = use_ss.get_spatial_subset(operation, use_subset_sdim, use_spatial_index=True, select_nearest=False, buffer_value=None, buffer_crs=None) @@ -242,10 +242,10 @@ def test_get_spatial_subset_output_crs(self): # test with an input rotated pole coordinate system rd = self.rd_rotated_pole - ss = SpatialSubsetOperation(rd, output_crs=constants.default_coordinate_system) + ss = SpatialSubsetOperation(rd, output_crs=env.DEFAULT_COORDSYS) subset_sdim = SpatialDimension.from_records([self.germany]) ret = ss.get_spatial_subset('intersects', subset_sdim) - self.assertEqual(ret.spatial.crs, constants.default_coordinate_system) + self.assertEqual(ret.spatial.crs, env.DEFAULT_COORDSYS) def test_get_spatial_subset_rotated_pole(self): """Test input has rotated pole with now output CRS.""" diff --git a/src/ocgis/test/test_simple/test_simple.py b/src/ocgis/test/test_simple/test_simple.py index a77a70c2b..518761163 100644 --- a/src/ocgis/test/test_simple/test_simple.py +++ b/src/ocgis/test/test_simple/test_simple.py @@ -16,7 +16,7 @@ from ocgis.test.base import TestBase from shapely.geometry.point import Point import ocgis -from ocgis.exc import ExtentError, DefinitionValidationError, ImproperPolygonBoundsError +from ocgis.exc import ExtentError, DefinitionValidationError from shapely.geometry.polygon import Polygon import csv import fiona @@ -392,7 +392,9 @@ def test_point_subset(self): ret = ops.execute() ref = ret[1]['foo'] self.assertEqual(ref.spatial.grid.shape,(1,1)) - self.assertTrue(ref.spatial.geom.polygon.value[0,0].intersects(ops.geom[0].geom.point.value[0, 0])) + # this is a point abstraction. polygons are not available. 
+ self.assertIsNone(ref.spatial.geom.polygon) + # self.assertTrue(ref.spatial.geom.polygon.value[0,0].intersects(ops.geom[0].geom.point.value[0, 0])) def test_slicing(self): ops = self.get_ops(kwds={'slice':[None,None,0,[0,2],[0,2]]}) @@ -1263,8 +1265,7 @@ class TestSimpleNoSpatialBounds(TestSimpleBase): def test_interpolate_bounds(self): ret = self.get_ops(kwds={'interpolate_spatial_bounds':False}).execute() - with self.assertRaises(ImproperPolygonBoundsError): - ret[1]['foo'].spatial.geom.polygon + self.assertIsNone(ret[1]['foo'].spatial.geom.polygon) ret = self.get_ops(kwds={'interpolate_spatial_bounds':True}).execute() polygons = ret[1]['foo'].spatial.geom.polygon.value diff --git a/src/ocgis/util/environment.py b/src/ocgis/util/environment.py index fed21b7fe..df7b42623 100644 --- a/src/ocgis/util/environment.py +++ b/src/ocgis/util/environment.py @@ -21,6 +21,9 @@ def __init__(self): self.USE_SPATIAL_INDEX = EnvParmImport('USE_SPATIAL_INDEX',None,'rtree') self.USE_CFUNITS = EnvParmImport('USE_CFUNITS',None,'cfunits') self.CONF_PATH = EnvParm('CONF_PATH', os.path.expanduser('~/.config/ocgis.conf')) + + from ocgis.interface.base.crs import CFWGS84 + self.DEFAULT_COORDSYS = EnvParm('DEFAULT_COORDSYS', CFWGS84()) self.ops = None self._optimize_store = {} diff --git a/src/ocgis/util/helpers.py b/src/ocgis/util/helpers.py index 1bb1eb40c..c370b2391 100644 --- a/src/ocgis/util/helpers.py +++ b/src/ocgis/util/helpers.py @@ -15,6 +15,7 @@ import fiona from shapely.geometry.geo import mapping from fiona.crs import from_epsg +from ocgis.exc import SingleElementError, ShapeError from ocgis.util.logging_ocgis import ocgis_lh @@ -162,57 +163,181 @@ def get_trimmed_array_by_mask(arr,return_adjustments=False): return(ret) -def get_interpolated_bounds(centroids): - ''' - :param centroids: Vector representing center coordinates from which - to interpolate bounds. - :type centroids: np.ndarray +def get_is_increasing(vec): + """ + :param vec: A vector array. 
+ :type vec: :class:`numpy.ndarray` + :returns: ``True`` if the array is increasing from index 0 to -1. ``False`` otherwise. + :rtype: bool + :raises: SingleElementError, ShapeError + """ + + if vec.shape == (1,): + raise SingleElementError('Increasing can only be determined with a minimum of two elements.') + if len(vec.shape) > 1: + msg = 'Only vectors allowed.' + raise ShapeError(msg) + + if vec[0] < vec[-1]: + ret = True + else: + ret = False + + return ret + + +def get_extrapolated_corners_esmf_vector(vec): + """ + :param vec: A vector. + :type vec: :class:`numpy.ndarray` + :returns: A two-dimensional corners array with dimension ``(2, vec.shape[0]+1)``. + :rtype: :class:`numpy.ndarray` + :raises: ShapeError + """ + + if len(vec.shape) > 1: + msg = 'A vector is required.' + raise ShapeError(msg) + + corners = np.zeros((2, vec.shape[0]+1), dtype=vec.dtype) + corners[:] = get_bounds_vector_from_centroids(vec) + + return corners + + +def get_extrapolated_corners_esmf(arr): + """ + :param arr: Array of centroids. + :type arr: :class:`numpy.ndarray` + :returns: A two-dimensional array of extrapolated corners with dimension ``(arr.shape[0]+1, arr.shape[1]+1)``. + :rtype: :class:`numpy.ndarray` + """ + + # if this is only a single element, we cannot make corners + if all([element == 1 for element in arr.shape]): + msg = 'At least two elements required to extrapolate corners.' + raise SingleElementError(msg) + + # if one of the dimensions has only a single element, the fill approach is different + if any([element == 1 for element in arr.shape]): + ret = get_extrapolated_corners_esmf_vector(arr.reshape(-1)) + if arr.shape[1] == 1: + ret = ret.swapaxes(0, 1) + return ret + + # the corners array has one additional row and column + corners = np.zeros((arr.shape[0]+1, arr.shape[1]+1), dtype=arr.dtype) + + # fill the interior of the array first with a 2x2 moving window. then do edges. 
+ for ii in range(arr.shape[0]-1): + for jj in range(arr.shape[1]-1): + window_values = arr[ii:ii+2, jj:jj+2] + corners[ii+1, jj+1] = np.mean(window_values) + + # flag to determine if rows are increasing in value + row_increasing = get_is_increasing(arr[:, 0]) + # flag to determine if columns are increasing in value + col_increasing = get_is_increasing(arr[0, :]) + + # the absolute difference of row and column elements + row_diff = np.mean(np.abs(np.diff(arr[:, 0]))) + col_diff = np.mean(np.abs(np.diff(arr[0, :]))) + + # fill the rows accounting for increasing flag + for ii in range(1, corners.shape[0]-1): + if col_increasing: + corners[ii, 0] = corners[ii, 1] - col_diff + corners[ii, -1] = corners[ii, -2] + col_diff + else: + corners[ii, 0] = corners[ii, 1] + col_diff + corners[ii, -1] = corners[ii, -2] - col_diff + + # fill the columns accounting for increasing flag + for jj in range(1, corners.shape[1]-1): + if row_increasing: + corners[0, jj] = corners[1, jj] - row_diff + corners[-1, jj] = corners[-2, jj] + row_diff + else: + corners[0, jj] = corners[1, jj] + row_diff + corners[-1, jj] = corners[-2, jj] - row_diff + + # fill the extreme corners accounting for increasing flag + for row_idx in [0, -1]: + if col_increasing: + corners[row_idx, 0] = corners[row_idx, 1] - col_diff + corners[row_idx, -1] = corners[row_idx, -2] + col_diff + else: + corners[row_idx, 0] = corners[row_idx, 1] + col_diff + corners[row_idx, -1] = corners[row_idx, -2] - col_diff + + return corners + + +def get_bounds_vector_from_centroids(centroids): + """ + :param centroids: Vector representing center coordinates from which to interpolate bounds. + :type centroids: :class:`numpy.ndarray` + :returns: Vector representing upper and lower bounds for centroids with edges extrapolated. 
+ :rtype: :class:`numpy.ndarray` with shape ``centroids.shape[0]+1`` :raises: NotImplementedError, ValueError - - >>> import numpy as np - >>> centroids = np.array([1,2,3]) - >>> get_interpolated_bounds(centroids) - np.array([[0, 1],[1, 2],[2, 3]]) - ''' - + """ + if len(centroids) < 2: - raise(ValueError('Centroid arrays must have length >= 2.')) - - ## will hold the mean midpoints between coordinate elements - mids = np.zeros(centroids.shape[0]-1,dtype=centroids.dtype) - ## this is essentially a two-element span moving average kernel + raise ValueError('Centroid arrays must have length >= 2.') + + # will hold the mean midpoints between coordinate elements + mids = np.zeros(centroids.shape[0] - 1, dtype=centroids.dtype) + # this is essentially a two-element span moving average kernel for ii in range(mids.shape[0]): try: - mids[ii] = np.mean(centroids[ii:ii+2]) - ## if the data type is datetime.datetime raise a more verbose error - ## message + mids[ii] = np.mean(centroids[ii:ii + 2]) + # if the data type is datetime.datetime raise a more verbose error message except TypeError: - if isinstance(centroids[ii],datetime.datetime): - raise(NotImplementedError('Bounds interpolation is not implemented for datetime.datetime objects.')) + if isinstance(centroids[ii], datetime.datetime): + raise NotImplementedError('Bounds interpolation is not implemented for datetime.datetime objects.') else: raise - ## account for edge effects by averaging the difference of the - ## midpoints. if there is only a single value, use the different of the - ## original values instead. + # account for edge effects by averaging the difference of the midpoints. if there is only a single value, use the + # different of the original values instead. 
if len(mids) == 1: diff = np.diff(centroids) else: diff = np.mean(np.diff(mids)) - ## appends for the edges shifting the nearest coordinate by the mean - ## difference - mids = np.append([mids[0]-diff],mids) - mids = np.append(mids,[mids[-1]+diff]) + # appends for the edges shifting the nearest coordinate by the mean difference + mids = np.append([mids[0] - diff], mids) + mids = np.append(mids, [mids[-1] + diff]) + + return mids + - ## loop to fill the bounds array - bounds = np.zeros((centroids.shape[0],2),dtype=centroids.dtype) +def get_bounds_from_1d(centroids): + """ + :param centroids: Vector representing center coordinates from which to interpolate bounds. + :type centroids: :class:`numpy.ndarray` + :returns: A *n*-by-2 array with *n* equal to the shape of ``centroids``. + + >>> import numpy as np + >>> centroids = np.array([1,2,3]) + >>> get_bounds_from_1d(centroids) + np.array([[0, 1],[1, 2],[2, 3]]) + + :rtype: :class:`numpy.ndarray` + :raises: NotImplementedError, ValueError + """ + + mids = get_bounds_vector_from_centroids(centroids) + + # loop to fill the bounds array + bounds = np.zeros((centroids.shape[0], 2), dtype=centroids.dtype) for ii in range(mids.shape[0]): try: - bounds[ii,0] = mids[ii] - bounds[ii,1] = mids[ii+1] + bounds[ii, 0] = mids[ii] + bounds[ii, 1] = mids[ii + 1] except IndexError: break - - return(bounds) + + return bounds + def get_is_date_between(lower,upper,month=None,year=None): if month is not None: @@ -349,6 +474,7 @@ def iter_arg(arg): for element in itr: yield(element) + def get_date_list(start,stop,days): ret = [] delta = datetime.timedelta(days=days) @@ -358,11 +484,13 @@ def get_date_list(start,stop,days): check += delta return(ret) + def bbox_poly(minx,miny,maxx, maxy): rtup = (miny,maxy) ctup = (minx,maxx) return(make_poly(rtup,ctup)) + def validate_time_subset(time_range,time_region): ''' Ensure `time_range` and `time_region` overlap. 
If one of the values is `None`, the From 8aabef8b0c6616d1c0640627fac9d95b053a12ae Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Wed, 5 Nov 2014 11:26:45 -0700 Subject: [PATCH 11/71] Removed two defunct exceptions. --- src/ocgis/exc.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/src/ocgis/exc.py b/src/ocgis/exc.py index 04be223ef..a583209f1 100644 --- a/src/ocgis/exc.py +++ b/src/ocgis/exc.py @@ -162,10 +162,6 @@ class SpatialWrappingError(OcgException): pass -class _ImproperPolygonBoundsError(OcgException): - pass - - class MaskedDataError(SubsetException): def __init__(self): self.message = 'Geometric intersection returned all masked values.' @@ -282,9 +278,3 @@ def __str__(self): message = 'Validation failed on the keyword parameter "{0}" with the message: {1}'.format(self.keyword, self.message) return message - - -class _CornersUnavailable(OcgException): - """Raised when grid corners may not be constructed.""" - - pass \ No newline at end of file From e73fb335c2f3d464b8238753cd6fe5906be75dc9 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Wed, 5 Nov 2014 13:51:46 -0700 Subject: [PATCH 12/71] doc changes --- fabfile/fabfile.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/fabfile/fabfile.py b/fabfile/fabfile.py index 6515d0ec6..06a7e0f5c 100644 --- a/fabfile/fabfile.py +++ b/fabfile/fabfile.py @@ -65,6 +65,10 @@ def list_storage(): @task def put_file(local_path, remote_path): + """ + Put a file on the remote server: local_path,remote_path + """ + put(local_path=local_path, remote_path=remote_path) From d245bd729efa084ea8a1c82b39e64e414e66a027 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Fri, 7 Nov 2014 11:00:05 -0700 Subject: [PATCH 13/71] Added a get_time_series method to TestBase. 
--- src/ocgis/test/base.py | 18 ++++++++++++++++++ src/ocgis/test/test_base.py | 13 +++++++++++-- 2 files changed, 29 insertions(+), 2 deletions(-) diff --git a/src/ocgis/test/base.py b/src/ocgis/test/base.py index 4b2e477e6..a03b63eca 100644 --- a/src/ocgis/test/base.py +++ b/src/ocgis/test/base.py @@ -1,6 +1,7 @@ import unittest import abc import tempfile +import datetime from ocgis import env import shutil from copy import deepcopy, copy @@ -244,6 +245,23 @@ def get_temporary_output_directory(self): return tempfile.mkdtemp(prefix=self._prefix_path_test) + def get_time_series(self, start, end): + """ + :param start: The start date. + :type start: :class:`datetime.datetime` + :param end: The end date. + :type end: :class:`datetime.datetime` + :returns: A list of dates separated by a day. + :rtype: list of :class:`datetime.datetime` + """ + + delta = datetime.timedelta(days=1) + ret = [] + while start <= end: + ret.append(start) + start += delta + return ret + @staticmethod def get_tst_data(): """ diff --git a/src/ocgis/test/test_base.py b/src/ocgis/test/test_base.py index a8a216258..27e9c5fb2 100644 --- a/src/ocgis/test/test_base.py +++ b/src/ocgis/test/test_base.py @@ -1,3 +1,4 @@ +import datetime from ocgis.test.base import TestBase, TestData import ocgis from unittest.case import SkipTest @@ -27,8 +28,16 @@ def skip(*args): return ret -class Test(TestBase): - +class TestTestBase(TestBase): + + def test_get_time_series(self): + start = datetime.datetime(1900, 1, 1) + end = datetime.datetime(1902, 12, 31) + ret = self.get_time_series(start, end) + self.assertEqual(ret[0], start) + self.assertEqual(ret[-1], end) + self.assertEqual(ret[1]-ret[0], datetime.timedelta(days=1)) + def test_assertNumpyAll_bad_mask(self): arr = np.ma.array([1,2,3],mask=[True,False,True]) arr2 = np.ma.array([1,2,3],mask=[False,True,False]) From d88fd08d8cd47af7979d94f0f88bcc2bed9252aa Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Fri, 7 Nov 2014 11:41:01 -0700 Subject: [PATCH 14/71] 
minor --- .../test_base/test_dimension/test_temporal.py | 204 +++++++++--------- 1 file changed, 104 insertions(+), 100 deletions(-) diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_temporal.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_temporal.py index a75fed33a..4263468de 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_temporal.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_temporal.py @@ -30,29 +30,12 @@ def get_temporal_dimension(self,add_bounds=True,start=None,stop=None,days=1): td = TemporalDimension(value=dates,bounds=bounds) return(td) - def test_get_grouping_other(self): - tdim = self.get_temporal_dimension() - grouping = [[12, 1, 2], [3, 4, 5], [6, 7, 8], [9, 10, 11], 'year'] - new_bounds, date_parts, repr_dt, dgroups = tdim._get_grouping_other_(grouping) - - actual_repr_dt = np.loads('\x80\x02cnumpy.core.multiarray\n_reconstruct\nq\x01cnumpy\nndarray\nq\x02K\x00\x85U\x01b\x87Rq\x03(K\x01K\x0c\x85cnumpy\ndtype\nq\x04U\x02O8K\x00K\x01\x87Rq\x05(K\x03U\x01|NNNJ\xff\xff\xff\xffJ\xff\xff\xff\xffK?tb\x89]q\x06(cdatetime\ndatetime\nq\x07U\n\x07k\x01\x10\x00\x00\x00\x00\x00\x00\x85Rq\x08h\x07U\n\x07k\x04\x10\x00\x00\x00\x00\x00\x00\x85Rq\th\x07U\n\x07k\x07\x10\x00\x00\x00\x00\x00\x00\x85Rq\nh\x07U\n\x07k\n\x10\x00\x00\x00\x00\x00\x00\x85Rq\x0bh\x07U\n\x07l\x01\x10\x00\x00\x00\x00\x00\x00\x85Rq\x0ch\x07U\n\x07l\x04\x10\x00\x00\x00\x00\x00\x00\x85Rq\rh\x07U\n\x07l\x07\x10\x00\x00\x00\x00\x00\x00\x85Rq\x0eh\x07U\n\x07l\n\x10\x00\x00\x00\x00\x00\x00\x85Rq\x0fh\x07U\n\x07m\x01\x10\x00\x00\x00\x00\x00\x00\x85Rq\x10h\x07U\n\x07m\x04\x10\x00\x00\x00\x00\x00\x00\x85Rq\x11h\x07U\n\x07m\x07\x10\x00\x00\x00\x00\x00\x00\x85Rq\x12h\x07U\n\x07m\n\x10\x00\x00\x00\x00\x00\x00\x85Rq\x13etb.') - self.assertNumpyAll(repr_dt, actual_repr_dt) - - actual_new_bounds = 
np.loads('\x80\x02cnumpy.core.multiarray\n_reconstruct\nq\x01cnumpy\nndarray\nq\x02K\x00\x85U\x01b\x87Rq\x03(K\x01K\x0cK\x02\x86cnumpy\ndtype\nq\x04U\x02O8K\x00K\x01\x87Rq\x05(K\x03U\x01|NNNJ\xff\xff\xff\xffJ\xff\xff\xff\xffK?tb\x89]q\x06(cdatetime\ndatetime\nq\x07U\n\x07k\x01\x01\x00\x00\x00\x00\x00\x00\x85Rq\x08h\x07U\n\x07l\x01\x01\x00\x00\x00\x00\x00\x00\x85Rq\th\x07U\n\x07k\x03\x01\x00\x00\x00\x00\x00\x00\x85Rq\nh\x07U\n\x07k\x06\x01\x00\x00\x00\x00\x00\x00\x85Rq\x0bh\x07U\n\x07k\x06\x01\x00\x00\x00\x00\x00\x00\x85Rq\x0ch\x07U\n\x07k\t\x01\x00\x00\x00\x00\x00\x00\x85Rq\rh\x07U\n\x07k\t\x01\x00\x00\x00\x00\x00\x00\x85Rq\x0eh\x07U\n\x07k\x0c\x01\x00\x00\x00\x00\x00\x00\x85Rq\x0fh\x07U\n\x07l\x01\x01\x00\x00\x00\x00\x00\x00\x85Rq\x10h\x07U\n\x07m\x01\x01\x00\x00\x00\x00\x00\x00\x85Rq\x11h\x07U\n\x07l\x03\x01\x00\x00\x00\x00\x00\x00\x85Rq\x12h\x07U\n\x07l\x06\x01\x00\x00\x00\x00\x00\x00\x85Rq\x13h\x07U\n\x07l\x06\x01\x00\x00\x00\x00\x00\x00\x85Rq\x14h\x07U\n\x07l\t\x01\x00\x00\x00\x00\x00\x00\x85Rq\x15h\x07U\n\x07l\t\x01\x00\x00\x00\x00\x00\x00\x85Rq\x16h\x07U\n\x07l\x0c\x01\x00\x00\x00\x00\x00\x00\x85Rq\x17h\x07U\n\x07m\x01\x01\x00\x00\x00\x00\x00\x00\x85Rq\x18h\x07U\n\x07n\x01\x01\x00\x00\x00\x00\x00\x00\x85Rq\x19h\x07U\n\x07m\x03\x01\x00\x00\x00\x00\x00\x00\x85Rq\x1ah\x07U\n\x07m\x06\x01\x00\x00\x00\x00\x00\x00\x85Rq\x1bh\x07U\n\x07m\x06\x01\x00\x00\x00\x00\x00\x00\x85Rq\x1ch\x07U\n\x07m\t\x01\x00\x00\x00\x00\x00\x00\x85Rq\x1dh\x07U\n\x07m\t\x01\x00\x00\x00\x00\x00\x00\x85Rq\x1eh\x07U\n\x07m\x0c\x01\x00\x00\x00\x00\x00\x00\x85Rq\x1fetb.') - self.assertNumpyAll(new_bounds, actual_new_bounds) - - actual_dgroups = 
np.loads('\x80\x02cnumpy.core.multiarray\n_reconstruct\nq\x01cnumpy\nndarray\nq\x02K\x00\x85U\x01b\x87Rq\x03(K\x01MG\x04\x85cnumpy\ndtype\nq\x04U\x02b1K\x00K\x01\x87Rq\x05(K\x03U\x01|NNNJ\xff\xff\xff\xffJ\xff\xff\xff\xffK\x00tb\x89TG\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0
0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0
0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00tb.') - self.assertNumpyAll(actual_dgroups, dgroups[4]) - - actual_date_parts = np.loads('\x80\x02cnumpy.core.multiarray\n_reconstruct\nq\x01cnumpy\nndarray\nq\x02K\x00\x85U\x01b\x87Rq\x03(K\x01K\x0c\x85cnumpy\ndtype\nq\x04U\x03V16K\x00K\x01\x87Rq\x05(K\x03U\x01|NU\x06monthsq\x06U\x04yearq\x07\x86q\x08}q\t(h\x06h\x04U\x02O8K\x00K\x01\x87Rq\n(K\x03U\x01|NNNJ\xff\xff\xff\xffJ\xff\xff\xff\xffK?tbK\x00\x86h\x07h\x04U\x02i8K\x00K\x01\x87Rq\x0b(K\x03U\x01h\x07U\n\x07\xdc\x07\x1a\x00\x00\x00\x00\x00\x00\x85Rq?h\x07U\n\x07\xdc\x07\x1b\x00\x00\x00\x00\x00\x00\x85Rq@h\x07U\n\x07\xdc\x07\x1c\x00\x00\x00\x00\x00\x00\x85RqAh\x07U\n\x07\xdc\x07\x1d\x00\x00\x00\x00\x00\x00\x85RqBh\x07U\n\x07\xdc\x07\x1e\x00\x00\x00\x00\x00\x00\x85RqCh\x07U\n\x07\xdc\x07\x1f\x00\x00\x00\x00\x00\x00\x85RqDh\x07U\n\x07\xdc\x08\x01\x00\x00\x00\x00\x00\x00\x85RqEh\x07U\n\x07\xdc\x08\x02\x00\x00\x00\x00\x00\x00\x85RqFh\x07U\n\x07\xdc\x08\x03\x00\x00\x00\x00\x00\x00\x85RqGh\x07U\n\x07\xdc\x08\x04\x00\x00\x00\x00\x00\x00\x85RqHh\x07U\n\x07\xdc\x08\x05\x00\x00\x00\x00\x00\x00\x85RqIh\x07U\n\x07\xdc\x08\x06\x00\x00\x00\x00\x00\x00\x85RqJh\x07U\n\x07\xdc\x08\x07\x00\x00\x00\x00\x00\x00\x85RqKh\x07U\n\x07\xdc\x08\x08\x00\x00\x00\x00\x00\x00\x85RqLh\x07U\n\x07\xdc\x08\t\x00\x00\x00\x00\x00\x00\x85RqMh\x07U\n\x07\xdc\x08\n\x00\x00\x00\x00\x00\x00\x85RqNh\x07U\n\x07\xdc\x08\x0b\x00\x00\x00\x00\x00\x
00\x85RqOh\x07U\n\x07\xdc\x08\x0c\x00\x00\x00\x00\x00\x00\x85RqPh\x07U\n\x07\xdc\x08\r\x00\x00\x00\x00\x00\x00\x85RqQh\x07U\n\x07\xdc\x08\x0e\x00\x00\x00\x00\x00\x00\x85RqRh\x07U\n\x07\xdc\x08\x0f\x00\x00\x00\x00\x00\x00\x85RqSh\x07U\n\x07\xdc\x08\x10\x00\x00\x00\x00\x00\x00\x85RqTh\x07U\n\x07\xdc\x08\x11\x00\x00\x00\x00\x00\x00\x85RqUh\x07U\n\x07\xdc\x08\x12\x00\x00\x00\x00\x00\x00\x85RqVh\x07U\n\x07\xdc\x08\x13\x00\x00\x00\x00\x00\x00\x85RqWh\x07U\n\x07\xdc\x08\x14\x00\x00\x00\x00\x00\x00\x85RqXh\x07U\n\x07\xdc\x08\x15\x00\x00\x00\x00\x00\x00\x85RqYh\x07U\n\x07\xdc\x08\x16\x00\x00\x00\x00\x00\x00\x85RqZh\x07U\n\x07\xdc\x08\x17\x00\x00\x00\x00\x00\x00\x85Rq[h\x07U\n\x07\xdc\x08\x18\x00\x00\x00\x00\x00\x00\x85Rq\\h\x07U\n\x07\xdc\x08\x19\x00\x00\x00\x00\x00\x00\x85Rq]h\x07U\n\x07\xdc\x08\x1a\x00\x00\x00\x00\x00\x00\x85Rq^h\x07U\n\x07\xdc\x08\x1b\x00\x00\x00\x00\x00\x00\x85Rq_h\x07U\n\x07\xdc\x08\x1c\x00\x00\x00\x00\x00\x00\x85Rq`h\x07U\n\x07\xdc\x08\x1d\x00\x00\x00\x00\x00\x00\x85Rqah\x07U\n\x07\xdc\x08\x1e\x00\x00\x00\x00\x00\x00\x85Rqbh\x07U\n\x07\xdc\x08\x1f\x00\x00\x00\x00\x00\x00\x85Rqcetb.') - sub0 = td.value[tg.dgroups[0]] - self.assertNumpyAll(sub0,actual) - - # '[datetime.datetime(2013, 6, 1, 0, 0) datetime.datetime(2013, 6, 2, 0, 0)\n datetime.datetime(2013, 6, 3, 0, 0) datetime.datetime(2013, 6, 4, 0, 0)\n datetime.datetime(2013, 6, 5, 0, 0) datetime.datetime(2013, 6, 6, 0, 0)\n datetime.datetime(2013, 6, 7, 0, 0) datetime.datetime(2013, 6, 8, 0, 0)\n datetime.datetime(2013, 6, 9, 0, 0) datetime.datetime(2013, 6, 10, 0, 0)\n datetime.datetime(2013, 6, 11, 0, 0) datetime.datetime(2013, 6, 12, 0, 0)\n datetime.datetime(2013, 6, 13, 0, 0) datetime.datetime(2013, 6, 14, 0, 0)\n datetime.datetime(2013, 6, 15, 0, 0) datetime.datetime(2013, 6, 16, 0, 0)\n datetime.datetime(2013, 6, 17, 0, 0) datetime.datetime(2013, 6, 18, 0, 0)\n datetime.datetime(2013, 6, 19, 0, 0) datetime.datetime(2013, 6, 20, 0, 0)\n datetime.datetime(2013, 6, 21, 0, 0) 
datetime.datetime(2013, 6, 22, 0, 0)\n datetime.datetime(2013, 6, 23, 0, 0) datetime.datetime(2013, 6, 24, 0, 0)\n datetime.datetime(2013, 6, 25, 0, 0) datetime.datetime(2013, 6, 26, 0, 0)\n datetime.datetime(2013, 6, 27, 0, 0) datetime.datetime(2013, 6, 28, 0, 0)\n datetime.datetime(2013, 6, 29, 0, 0) datetime.datetime(2013, 6, 30, 0, 0)\n datetime.datetime(2013, 7, 1, 0, 0) datetime.datetime(2013, 7, 2, 0, 0)\n datetime.datetime(2013, 7, 3, 0, 0) datetime.datetime(2013, 7, 4, 0, 0)\n datetime.datetime(2013, 7, 5, 0, 0) datetime.datetime(2013, 7, 6, 0, 0)\n datetime.datetime(2013, 7, 7, 0, 0) datetime.datetime(2013, 7, 8, 0, 0)\n datetime.datetime(2013, 7, 9, 0, 0) datetime.datetime(2013, 7, 10, 0, 0)\n datetime.datetime(2013, 7, 11, 0, 0) datetime.datetime(2013, 7, 12, 0, 0)\n datetime.datetime(2013, 7, 13, 0, 0) datetime.datetime(2013, 7, 14, 0, 0)\n datetime.datetime(2013, 7, 15, 0, 0) datetime.datetime(2013, 7, 16, 0, 0)\n datetime.datetime(2013, 7, 17, 0, 0) datetime.datetime(2013, 7, 18, 0, 0)\n datetime.datetime(2013, 7, 19, 0, 0) datetime.datetime(2013, 7, 20, 0, 0)\n datetime.datetime(2013, 7, 21, 0, 0) datetime.datetime(2013, 7, 22, 0, 0)\n datetime.datetime(2013, 7, 23, 0, 0) datetime.datetime(2013, 7, 24, 0, 0)\n datetime.datetime(2013, 7, 25, 0, 0) datetime.datetime(2013, 7, 26, 0, 0)\n datetime.datetime(2013, 7, 27, 0, 0) datetime.datetime(2013, 7, 28, 0, 0)\n datetime.datetime(2013, 7, 29, 0, 0) datetime.datetime(2013, 7, 30, 0, 0)\n datetime.datetime(2013, 7, 31, 0, 0) datetime.datetime(2013, 8, 1, 0, 0)\n datetime.datetime(2013, 8, 2, 0, 0) datetime.datetime(2013, 8, 3, 0, 0)\n datetime.datetime(2013, 8, 4, 0, 0) datetime.datetime(2013, 8, 5, 0, 0)\n datetime.datetime(2013, 8, 6, 0, 0) datetime.datetime(2013, 8, 7, 0, 0)\n datetime.datetime(2013, 8, 8, 0, 0) datetime.datetime(2013, 8, 9, 0, 0)\n datetime.datetime(2013, 8, 10, 0, 0) datetime.datetime(2013, 8, 11, 0, 0)\n datetime.datetime(2013, 8, 12, 0, 0) datetime.datetime(2013, 8, 13, 0, 0)\n 
datetime.datetime(2013, 8, 14, 0, 0) datetime.datetime(2013, 8, 15, 0, 0)\n datetime.datetime(2013, 8, 16, 0, 0) datetime.datetime(2013, 8, 17, 0, 0)\n datetime.datetime(2013, 8, 18, 0, 0) datetime.datetime(2013, 8, 19, 0, 0)\n datetime.datetime(2013, 8, 20, 0, 0) datetime.datetime(2013, 8, 21, 0, 0)\n datetime.datetime(2013, 8, 22, 0, 0) datetime.datetime(2013, 8, 23, 0, 0)\n datetime.datetime(2013, 8, 24, 0, 0) datetime.datetime(2013, 8, 25, 0, 0)\n datetime.datetime(2013, 8, 26, 0, 0) datetime.datetime(2013, 8, 27, 0, 0)\n datetime.datetime(2013, 8, 28, 0, 0) datetime.datetime(2013, 8, 29, 0, 0)\n datetime.datetime(2013, 8, 30, 0, 0) datetime.datetime(2013, 8, 31, 0, 0)]' - actual = np.loads('\x80\x02cnumpy.core.multiarray\n_reconstruct\nq\x01cnumpy\nndarray\nq\x02K\x00\x85U\x01b\x87Rq\x03(K\x01K\\\x85cnumpy\ndtype\nq\x04U\x02O8K\x00K\x01\x87Rq\x05(K\x03U\x01|NNNJ\xff\xff\xff\xffJ\xff\xff\xff\xffK?tb\x89]q\x06(cdatetime\ndatetime\nq\x07U\n\x07\xdd\x06\x01\x00\x00\x00\x00\x00\x00\x85Rq\x08h\x07U\n\x07\xdd\x06\x02\x00\x00\x00\x00\x00\x00\x85Rq\th\x07U\n\x07\xdd\x06\x03\x00\x00\x00\x00\x00\x00\x85Rq\nh\x07U\n\x07\xdd\x06\x04\x00\x00\x00\x00\x00\x00\x85Rq\x0bh\x07U\n\x07\xdd\x06\x05\x00\x00\x00\x00\x00\x00\x85Rq\x0ch\x07U\n\x07\xdd\x06\x06\x00\x00\x00\x00\x00\x00\x85Rq\rh\x07U\n\x07\xdd\x06\x07\x00\x00\x00\x00\x00\x00\x85Rq\x0eh\x07U\n\x07\xdd\x06\x08\x00\x00\x00\x00\x00\x00\x85Rq\x0fh\x07U\n\x07\xdd\x06\t\x00\x00\x00\x00\x00\x00\x85Rq\x10h\x07U\n\x07\xdd\x06\n\x00\x00\x00\x00\x00\x00\x85Rq\x11h\x07U\n\x07\xdd\x06\x0b\x00\x00\x00\x00\x00\x00\x85Rq\x12h\x07U\n\x07\xdd\x06\x0c\x00\x00\x00\x00\x00\x00\x85Rq\x13h\x07U\n\x07\xdd\x06\r\x00\x00\x00\x00\x00\x00\x85Rq\x14h\x07U\n\x07\xdd\x06\x0e\x00\x00\x00\x00\x00\x00\x85Rq\x15h\x07U\n\x07\xdd\x06\x0f\x00\x00\x00\x00\x00\x00\x85Rq\x16h\x07U\n\x07\xdd\x06\x10\x00\x00\x00\x00\x00\x00\x85Rq\x17h\x07U\n\x07\xdd\x06\x11\x00\x00\x00\x00\x00\x00\x85Rq\x18h\x07U\n\x07\xdd\x06\x12\x00\x00\x00\x00\x00\x00\x85Rq\x19h\x07U\n\x07\xdd\x06
\x13\x00\x00\x00\x00\x00\x00\x85Rq\x1ah\x07U\n\x07\xdd\x06\x14\x00\x00\x00\x00\x00\x00\x85Rq\x1bh\x07U\n\x07\xdd\x06\x15\x00\x00\x00\x00\x00\x00\x85Rq\x1ch\x07U\n\x07\xdd\x06\x16\x00\x00\x00\x00\x00\x00\x85Rq\x1dh\x07U\n\x07\xdd\x06\x17\x00\x00\x00\x00\x00\x00\x85Rq\x1eh\x07U\n\x07\xdd\x06\x18\x00\x00\x00\x00\x00\x00\x85Rq\x1fh\x07U\n\x07\xdd\x06\x19\x00\x00\x00\x00\x00\x00\x85Rq h\x07U\n\x07\xdd\x06\x1a\x00\x00\x00\x00\x00\x00\x85Rq!h\x07U\n\x07\xdd\x06\x1b\x00\x00\x00\x00\x00\x00\x85Rq"h\x07U\n\x07\xdd\x06\x1c\x00\x00\x00\x00\x00\x00\x85Rq#h\x07U\n\x07\xdd\x06\x1d\x00\x00\x00\x00\x00\x00\x85Rq$h\x07U\n\x07\xdd\x06\x1e\x00\x00\x00\x00\x00\x00\x85Rq%h\x07U\n\x07\xdd\x07\x01\x00\x00\x00\x00\x00\x00\x85Rq&h\x07U\n\x07\xdd\x07\x02\x00\x00\x00\x00\x00\x00\x85Rq\'h\x07U\n\x07\xdd\x07\x03\x00\x00\x00\x00\x00\x00\x85Rq(h\x07U\n\x07\xdd\x07\x04\x00\x00\x00\x00\x00\x00\x85Rq)h\x07U\n\x07\xdd\x07\x05\x00\x00\x00\x00\x00\x00\x85Rq*h\x07U\n\x07\xdd\x07\x06\x00\x00\x00\x00\x00\x00\x85Rq+h\x07U\n\x07\xdd\x07\x07\x00\x00\x00\x00\x00\x00\x85Rq,h\x07U\n\x07\xdd\x07\x08\x00\x00\x00\x00\x00\x00\x85Rq-h\x07U\n\x07\xdd\x07\t\x00\x00\x00\x00\x00\x00\x85Rq.h\x07U\n\x07\xdd\x07\n\x00\x00\x00\x00\x00\x00\x85Rq/h\x07U\n\x07\xdd\x07\x0b\x00\x00\x00\x00\x00\x00\x85Rq0h\x07U\n\x07\xdd\x07\x0c\x00\x00\x00\x00\x00\x00\x85Rq1h\x07U\n\x07\xdd\x07\r\x00\x00\x00\x00\x00\x00\x85Rq2h\x07U\n\x07\xdd\x07\x0e\x00\x00\x00\x00\x00\x00\x85Rq3h\x07U\n\x07\xdd\x07\x0f\x00\x00\x00\x00\x00\x00\x85Rq4h\x07U\n\x07\xdd\x07\x10\x00\x00\x00\x00\x00\x00\x85Rq5h\x07U\n\x07\xdd\x07\x11\x00\x00\x00\x00\x00\x00\x85Rq6h\x07U\n\x07\xdd\x07\x12\x00\x00\x00\x00\x00\x00\x85Rq7h\x07U\n\x07\xdd\x07\x13\x00\x00\x00\x00\x00\x00\x85Rq8h\x07U\n\x07\xdd\x07\x14\x00\x00\x00\x00\x00\x00\x85Rq9h\x07U\n\x07\xdd\x07\x15\x00\x00\x00\x00\x00\x00\x85Rq:h\x07U\n\x07\xdd\x07\x16\x00\x00\x00\x00\x00\x00\x85Rq;h\x07U\n\x07\xdd\x07\x17\x00\x00\x00\x00\x00\x00\x85Rqh\x07U\n\x07\xdd\x07\x1a\x00\x00\x00\x00\x00\x00\x85Rq?h\x07U\n\x07\xdd\x07\x1b\x00
\x00\x00\x00\x00\x00\x85Rq@h\x07U\n\x07\xdd\x07\x1c\x00\x00\x00\x00\x00\x00\x85RqAh\x07U\n\x07\xdd\x07\x1d\x00\x00\x00\x00\x00\x00\x85RqBh\x07U\n\x07\xdd\x07\x1e\x00\x00\x00\x00\x00\x00\x85RqCh\x07U\n\x07\xdd\x07\x1f\x00\x00\x00\x00\x00\x00\x85RqDh\x07U\n\x07\xdd\x08\x01\x00\x00\x00\x00\x00\x00\x85RqEh\x07U\n\x07\xdd\x08\x02\x00\x00\x00\x00\x00\x00\x85RqFh\x07U\n\x07\xdd\x08\x03\x00\x00\x00\x00\x00\x00\x85RqGh\x07U\n\x07\xdd\x08\x04\x00\x00\x00\x00\x00\x00\x85RqHh\x07U\n\x07\xdd\x08\x05\x00\x00\x00\x00\x00\x00\x85RqIh\x07U\n\x07\xdd\x08\x06\x00\x00\x00\x00\x00\x00\x85RqJh\x07U\n\x07\xdd\x08\x07\x00\x00\x00\x00\x00\x00\x85RqKh\x07U\n\x07\xdd\x08\x08\x00\x00\x00\x00\x00\x00\x85RqLh\x07U\n\x07\xdd\x08\t\x00\x00\x00\x00\x00\x00\x85RqMh\x07U\n\x07\xdd\x08\n\x00\x00\x00\x00\x00\x00\x85RqNh\x07U\n\x07\xdd\x08\x0b\x00\x00\x00\x00\x00\x00\x85RqOh\x07U\n\x07\xdd\x08\x0c\x00\x00\x00\x00\x00\x00\x85RqPh\x07U\n\x07\xdd\x08\r\x00\x00\x00\x00\x00\x00\x85RqQh\x07U\n\x07\xdd\x08\x0e\x00\x00\x00\x00\x00\x00\x85RqRh\x07U\n\x07\xdd\x08\x0f\x00\x00\x00\x00\x00\x00\x85RqSh\x07U\n\x07\xdd\x08\x10\x00\x00\x00\x00\x00\x00\x85RqTh\x07U\n\x07\xdd\x08\x11\x00\x00\x00\x00\x00\x00\x85RqUh\x07U\n\x07\xdd\x08\x12\x00\x00\x00\x00\x00\x00\x85RqVh\x07U\n\x07\xdd\x08\x13\x00\x00\x00\x00\x00\x00\x85RqWh\x07U\n\x07\xdd\x08\x14\x00\x00\x00\x00\x00\x00\x85RqXh\x07U\n\x07\xdd\x08\x15\x00\x00\x00\x00\x00\x00\x85RqYh\x07U\n\x07\xdd\x08\x16\x00\x00\x00\x00\x00\x00\x85RqZh\x07U\n\x07\xdd\x08\x17\x00\x00\x00\x00\x00\x00\x85Rq[h\x07U\n\x07\xdd\x08\x18\x00\x00\x00\x00\x00\x00\x85Rq\\h\x07U\n\x07\xdd\x08\x19\x00\x00\x00\x00\x00\x00\x85Rq]h\x07U\n\x07\xdd\x08\x1a\x00\x00\x00\x00\x00\x00\x85Rq^h\x07U\n\x07\xdd\x08\x1b\x00\x00\x00\x00\x00\x00\x85Rq_h\x07U\n\x07\xdd\x08\x1c\x00\x00\x00\x00\x00\x00\x85Rq`h\x07U\n\x07\xdd\x08\x1d\x00\x00\x00\x00\x00\x00\x85Rqah\x07U\n\x07\xdd\x08\x1e\x00\x00\x00\x00\x00\x00\x85Rqbh\x07U\n\x07\xdd\x08\x1f\x00\x00\x00\x00\x00\x00\x85Rqcetb.') - sub1 = td.value[tg.dgroups[1]] - 
self.assertNumpyAll(sub1,actual) - - ## test crossing year boundary - for calc_grouping in [[[12,1,2],'year'],['year',[12,1,2]]]: - tg = td.get_grouping(calc_grouping) + def test_get_grouping_seasonal_unique_flag(self): + """Test the unique flag for seasonal groups.""" - # '[datetime.datetime(2012, 1, 16, 0, 0) datetime.datetime(2013, 1, 16, 0, 0)]' - self.assertNumpyAll(tg.value,np.loads('\x80\x02cnumpy.core.multiarray\n_reconstruct\nq\x01cnumpy\nndarray\nq\x02K\x00\x85U\x01b\x87Rq\x03(K\x01K\x02\x85cnumpy\ndtype\nq\x04U\x02O8K\x00K\x01\x87Rq\x05(K\x03U\x01|NNNJ\xff\xff\xff\xffJ\xff\xff\xff\xffK?tb\x89]q\x06(cdatetime\ndatetime\nq\x07U\n\x07\xdc\x01\x10\x00\x00\x00\x00\x00\x00\x85Rq\x08h\x07U\n\x07\xdd\x01\x10\x00\x00\x00\x00\x00\x00\x85Rq\tetb.')) - - # '[[datetime.datetime(2012, 1, 1, 0, 0) datetime.datetime(2012, 12, 31, 0, 0)]\n [datetime.datetime(2013, 1, 1, 0, 0) datetime.datetime(2013, 12, 31, 0, 0)]]' - self.assertNumpyAll(tg.bounds,np.loads('\x80\x02cnumpy.core.multiarray\n_reconstruct\nq\x01cnumpy\nndarray\nq\x02K\x00\x85U\x01b\x87Rq\x03(K\x01K\x02K\x02\x86cnumpy\ndtype\nq\x04U\x02O8K\x00K\x01\x87Rq\x05(K\x03U\x01|NNNJ\xff\xff\xff\xffJ\xff\xff\xff\xffK?tb\x89]q\x06(cdatetime\ndatetime\nq\x07U\n\x07\xdc\x01\x01\x00\x00\x00\x00\x00\x00\x85Rq\x08h\x07U\n\x07\xdc\x0c\x1f\x00\x00\x00\x00\x00\x00\x85Rq\th\x07U\n\x07\xdd\x01\x01\x00\x00\x00\x00\x00\x00\x85Rq\nh\x07U\n\x07\xdd\x0c\x1f\x00\x00\x00\x00\x00\x00\x85Rq\x0betb.')) - - # '[datetime.datetime(2013, 1, 1, 0, 0) datetime.datetime(2013, 1, 2, 0, 0)\n datetime.datetime(2013, 1, 3, 0, 0) datetime.datetime(2013, 1, 4, 0, 0)\n datetime.datetime(2013, 1, 5, 0, 0) datetime.datetime(2013, 1, 6, 0, 0)\n datetime.datetime(2013, 1, 7, 0, 0) datetime.datetime(2013, 1, 8, 0, 0)\n datetime.datetime(2013, 1, 9, 0, 0) datetime.datetime(2013, 1, 10, 0, 0)\n datetime.datetime(2013, 1, 11, 0, 0) datetime.datetime(2013, 1, 12, 0, 0)\n datetime.datetime(2013, 1, 13, 0, 0) datetime.datetime(2013, 1, 14, 0, 0)\n 
datetime.datetime(2013, 1, 15, 0, 0) datetime.datetime(2013, 1, 16, 0, 0)\n datetime.datetime(2013, 1, 17, 0, 0) datetime.datetime(2013, 1, 18, 0, 0)\n datetime.datetime(2013, 1, 19, 0, 0) datetime.datetime(2013, 1, 20, 0, 0)\n datetime.datetime(2013, 1, 21, 0, 0) datetime.datetime(2013, 1, 22, 0, 0)\n datetime.datetime(2013, 1, 23, 0, 0) datetime.datetime(2013, 1, 24, 0, 0)\n datetime.datetime(2013, 1, 25, 0, 0) datetime.datetime(2013, 1, 26, 0, 0)\n datetime.datetime(2013, 1, 27, 0, 0) datetime.datetime(2013, 1, 28, 0, 0)\n datetime.datetime(2013, 1, 29, 0, 0) datetime.datetime(2013, 1, 30, 0, 0)\n datetime.datetime(2013, 1, 31, 0, 0) datetime.datetime(2013, 2, 1, 0, 0)\n datetime.datetime(2013, 2, 2, 0, 0) datetime.datetime(2013, 2, 3, 0, 0)\n datetime.datetime(2013, 2, 4, 0, 0) datetime.datetime(2013, 2, 5, 0, 0)\n datetime.datetime(2013, 2, 6, 0, 0) datetime.datetime(2013, 2, 7, 0, 0)\n datetime.datetime(2013, 2, 8, 0, 0) datetime.datetime(2013, 2, 9, 0, 0)\n datetime.datetime(2013, 2, 10, 0, 0) datetime.datetime(2013, 2, 11, 0, 0)\n datetime.datetime(2013, 2, 12, 0, 0) datetime.datetime(2013, 2, 13, 0, 0)\n datetime.datetime(2013, 2, 14, 0, 0) datetime.datetime(2013, 2, 15, 0, 0)\n datetime.datetime(2013, 2, 16, 0, 0) datetime.datetime(2013, 2, 17, 0, 0)\n datetime.datetime(2013, 2, 18, 0, 0) datetime.datetime(2013, 2, 19, 0, 0)\n datetime.datetime(2013, 2, 20, 0, 0) datetime.datetime(2013, 2, 21, 0, 0)\n datetime.datetime(2013, 2, 22, 0, 0) datetime.datetime(2013, 2, 23, 0, 0)\n datetime.datetime(2013, 2, 24, 0, 0) datetime.datetime(2013, 2, 25, 0, 0)\n datetime.datetime(2013, 2, 26, 0, 0) datetime.datetime(2013, 2, 27, 0, 0)\n datetime.datetime(2013, 2, 28, 0, 0) datetime.datetime(2013, 12, 1, 0, 0)\n datetime.datetime(2013, 12, 2, 0, 0) datetime.datetime(2013, 12, 3, 0, 0)\n datetime.datetime(2013, 12, 4, 0, 0) datetime.datetime(2013, 12, 5, 0, 0)\n datetime.datetime(2013, 12, 6, 0, 0) datetime.datetime(2013, 12, 7, 0, 0)\n datetime.datetime(2013, 12, 8, 
0, 0) datetime.datetime(2013, 12, 9, 0, 0)\n datetime.datetime(2013, 12, 10, 0, 0)\n datetime.datetime(2013, 12, 11, 0, 0)\n datetime.datetime(2013, 12, 12, 0, 0)\n datetime.datetime(2013, 12, 13, 0, 0)\n datetime.datetime(2013, 12, 14, 0, 0)\n datetime.datetime(2013, 12, 15, 0, 0)\n datetime.datetime(2013, 12, 16, 0, 0)\n datetime.datetime(2013, 12, 17, 0, 0)\n datetime.datetime(2013, 12, 18, 0, 0)\n datetime.datetime(2013, 12, 19, 0, 0)\n datetime.datetime(2013, 12, 20, 0, 0)\n datetime.datetime(2013, 12, 21, 0, 0)\n datetime.datetime(2013, 12, 22, 0, 0)\n datetime.datetime(2013, 12, 23, 0, 0)\n datetime.datetime(2013, 12, 24, 0, 0)\n datetime.datetime(2013, 12, 25, 0, 0)\n datetime.datetime(2013, 12, 26, 0, 0)\n datetime.datetime(2013, 12, 27, 0, 0)\n datetime.datetime(2013, 12, 28, 0, 0)\n datetime.datetime(2013, 12, 29, 0, 0)\n datetime.datetime(2013, 12, 30, 0, 0)\n datetime.datetime(2013, 12, 31, 0, 0)]' - self.assertNumpyAll(td.value[tg.dgroups[1]],np.loads('\x80\x02cnumpy.core.multiarray\n_reconstruct\nq\x01cnumpy\nndarray\nq\x02K\x00\x85U\x01b\x87Rq\x03(K\x01KZ\x85cnumpy\ndtype\nq\x04U\x02O8K\x00K\x01\x87Rq\x05(K\x03U\x01|NNNJ\xff\xff\xff\xffJ\xff\xff\xff\xffK?tb\x89]q\x06(cdatetime\ndatetime\nq\x07U\n\x07\xdd\x01\x01\x00\x00\x00\x00\x00\x00\x85Rq\x08h\x07U\n\x07\xdd\x01\x02\x00\x00\x00\x00\x00\x00\x85Rq\th\x07U\n\x07\xdd\x01\x03\x00\x00\x00\x00\x00\x00\x85Rq\nh\x07U\n\x07\xdd\x01\x04\x00\x00\x00\x00\x00\x00\x85Rq\x0bh\x07U\n\x07\xdd\x01\x05\x00\x00\x00\x00\x00\x00\x85Rq\x0ch\x07U\n\x07\xdd\x01\x06\x00\x00\x00\x00\x00\x00\x85Rq\rh\x07U\n\x07\xdd\x01\x07\x00\x00\x00\x00\x00\x00\x85Rq\x0eh\x07U\n\x07\xdd\x01\x08\x00\x00\x00\x00\x00\x00\x85Rq\x0fh\x07U\n\x07\xdd\x01\t\x00\x00\x00\x00\x00\x00\x85Rq\x10h\x07U\n\x07\xdd\x01\n\x00\x00\x00\x00\x00\x00\x85Rq\x11h\x07U\n\x07\xdd\x01\x0b\x00\x00\x00\x00\x00\x00\x85Rq\x12h\x07U\n\x07\xdd\x01\x0c\x00\x00\x00\x00\x00\x00\x85Rq\x13h\x07U\n\x07\xdd\x01\r\x00\x00\x00\x00\x00\x00\x85Rq\x14h\x07U\n\x07\xdd\x01\x0e\x00\x00\x0
0\x00\x00\x00\x85Rq\x15h\x07U\n\x07\xdd\x01\x0f\x00\x00\x00\x00\x00\x00\x85Rq\x16h\x07U\n\x07\xdd\x01\x10\x00\x00\x00\x00\x00\x00\x85Rq\x17h\x07U\n\x07\xdd\x01\x11\x00\x00\x00\x00\x00\x00\x85Rq\x18h\x07U\n\x07\xdd\x01\x12\x00\x00\x00\x00\x00\x00\x85Rq\x19h\x07U\n\x07\xdd\x01\x13\x00\x00\x00\x00\x00\x00\x85Rq\x1ah\x07U\n\x07\xdd\x01\x14\x00\x00\x00\x00\x00\x00\x85Rq\x1bh\x07U\n\x07\xdd\x01\x15\x00\x00\x00\x00\x00\x00\x85Rq\x1ch\x07U\n\x07\xdd\x01\x16\x00\x00\x00\x00\x00\x00\x85Rq\x1dh\x07U\n\x07\xdd\x01\x17\x00\x00\x00\x00\x00\x00\x85Rq\x1eh\x07U\n\x07\xdd\x01\x18\x00\x00\x00\x00\x00\x00\x85Rq\x1fh\x07U\n\x07\xdd\x01\x19\x00\x00\x00\x00\x00\x00\x85Rq h\x07U\n\x07\xdd\x01\x1a\x00\x00\x00\x00\x00\x00\x85Rq!h\x07U\n\x07\xdd\x01\x1b\x00\x00\x00\x00\x00\x00\x85Rq"h\x07U\n\x07\xdd\x01\x1c\x00\x00\x00\x00\x00\x00\x85Rq#h\x07U\n\x07\xdd\x01\x1d\x00\x00\x00\x00\x00\x00\x85Rq$h\x07U\n\x07\xdd\x01\x1e\x00\x00\x00\x00\x00\x00\x85Rq%h\x07U\n\x07\xdd\x01\x1f\x00\x00\x00\x00\x00\x00\x85Rq&h\x07U\n\x07\xdd\x02\x01\x00\x00\x00\x00\x00\x00\x85Rq\'h\x07U\n\x07\xdd\x02\x02\x00\x00\x00\x00\x00\x00\x85Rq(h\x07U\n\x07\xdd\x02\x03\x00\x00\x00\x00\x00\x00\x85Rq)h\x07U\n\x07\xdd\x02\x04\x00\x00\x00\x00\x00\x00\x85Rq*h\x07U\n\x07\xdd\x02\x05\x00\x00\x00\x00\x00\x00\x85Rq+h\x07U\n\x07\xdd\x02\x06\x00\x00\x00\x00\x00\x00\x85Rq,h\x07U\n\x07\xdd\x02\x07\x00\x00\x00\x00\x00\x00\x85Rq-h\x07U\n\x07\xdd\x02\x08\x00\x00\x00\x00\x00\x00\x85Rq.h\x07U\n\x07\xdd\x02\t\x00\x00\x00\x00\x00\x00\x85Rq/h\x07U\n\x07\xdd\x02\n\x00\x00\x00\x00\x00\x00\x85Rq0h\x07U\n\x07\xdd\x02\x0b\x00\x00\x00\x00\x00\x00\x85Rq1h\x07U\n\x07\xdd\x02\x0c\x00\x00\x00\x00\x00\x00\x85Rq2h\x07U\n\x07\xdd\x02\r\x00\x00\x00\x00\x00\x00\x85Rq3h\x07U\n\x07\xdd\x02\x0e\x00\x00\x00\x00\x00\x00\x85Rq4h\x07U\n\x07\xdd\x02\x0f\x00\x00\x00\x00\x00\x00\x85Rq5h\x07U\n\x07\xdd\x02\x10\x00\x00\x00\x00\x00\x00\x85Rq6h\x07U\n\x07\xdd\x02\x11\x00\x00\x00\x00\x00\x00\x85Rq7h\x07U\n\x07\xdd\x02\x12\x00\x00\x00\x00\x00\x00\x85Rq8h\x07U\n\x07\xdd\x02\x13\x0
0\x00\x00\x00\x00\x00\x85Rq9h\x07U\n\x07\xdd\x02\x14\x00\x00\x00\x00\x00\x00\x85Rq:h\x07U\n\x07\xdd\x02\x15\x00\x00\x00\x00\x00\x00\x85Rq;h\x07U\n\x07\xdd\x02\x16\x00\x00\x00\x00\x00\x00\x85Rqh\x07U\n\x07\xdd\x02\x19\x00\x00\x00\x00\x00\x00\x85Rq?h\x07U\n\x07\xdd\x02\x1a\x00\x00\x00\x00\x00\x00\x85Rq@h\x07U\n\x07\xdd\x02\x1b\x00\x00\x00\x00\x00\x00\x85RqAh\x07U\n\x07\xdd\x02\x1c\x00\x00\x00\x00\x00\x00\x85RqBh\x07U\n\x07\xdd\x0c\x01\x00\x00\x00\x00\x00\x00\x85RqCh\x07U\n\x07\xdd\x0c\x02\x00\x00\x00\x00\x00\x00\x85RqDh\x07U\n\x07\xdd\x0c\x03\x00\x00\x00\x00\x00\x00\x85RqEh\x07U\n\x07\xdd\x0c\x04\x00\x00\x00\x00\x00\x00\x85RqFh\x07U\n\x07\xdd\x0c\x05\x00\x00\x00\x00\x00\x00\x85RqGh\x07U\n\x07\xdd\x0c\x06\x00\x00\x00\x00\x00\x00\x85RqHh\x07U\n\x07\xdd\x0c\x07\x00\x00\x00\x00\x00\x00\x85RqIh\x07U\n\x07\xdd\x0c\x08\x00\x00\x00\x00\x00\x00\x85RqJh\x07U\n\x07\xdd\x0c\t\x00\x00\x00\x00\x00\x00\x85RqKh\x07U\n\x07\xdd\x0c\n\x00\x00\x00\x00\x00\x00\x85RqLh\x07U\n\x07\xdd\x0c\x0b\x00\x00\x00\x00\x00\x00\x85RqMh\x07U\n\x07\xdd\x0c\x0c\x00\x00\x00\x00\x00\x00\x85RqNh\x07U\n\x07\xdd\x0c\r\x00\x00\x00\x00\x00\x00\x85RqOh\x07U\n\x07\xdd\x0c\x0e\x00\x00\x00\x00\x00\x00\x85RqPh\x07U\n\x07\xdd\x0c\x0f\x00\x00\x00\x00\x00\x00\x85RqQh\x07U\n\x07\xdd\x0c\x10\x00\x00\x00\x00\x00\x00\x85RqRh\x07U\n\x07\xdd\x0c\x11\x00\x00\x00\x00\x00\x00\x85RqSh\x07U\n\x07\xdd\x0c\x12\x00\x00\x00\x00\x00\x00\x85RqTh\x07U\n\x07\xdd\x0c\x13\x00\x00\x00\x00\x00\x00\x85RqUh\x07U\n\x07\xdd\x0c\x14\x00\x00\x00\x00\x00\x00\x85RqVh\x07U\n\x07\xdd\x0c\x15\x00\x00\x00\x00\x00\x00\x85RqWh\x07U\n\x07\xdd\x0c\x16\x00\x00\x00\x00\x00\x00\x85RqXh\x07U\n\x07\xdd\x0c\x17\x00\x00\x00\x00\x00\x00\x85RqYh\x07U\n\x07\xdd\x0c\x18\x00\x00\x00\x00\x00\x00\x85RqZh\x07U\n\x07\xdd\x0c\x19\x00\x00\x00\x00\x00\x00\x85Rq[h\x07U\n\x07\xdd\x0c\x1a\x00\x00\x00\x00\x00\x00\x85Rq\\h\x07U\n\x07\xdd\x0c\x1b\x00\x00\x00\x00\x00\x00\x85Rq]h\x07U\n\x07\xdd\x0c\x1c\x00\x00\x00\x00\x00\x00\x85Rq^h\x07U\n\x07\xdd\x0c\x1d\x00\x00\x00\x00\x00\x00\x85R
q_h\x07U\n\x07\xdd\x0c\x1e\x00\x00\x00\x00\x00\x00\x85Rq`h\x07U\n\x07\xdd\x0c\x1f\x00\x00\x00\x00\x00\x00\x85Rqaetb.')) - - def test_seasonal_get_grouping_unique_flag(self): ## test with year flag dates = get_date_list(dt(2012,1,1),dt(2013,12,31),1) td = TemporalDimension(value=dates) @@ -168,27 +127,29 @@ def test_seasonal_get_grouping_unique_flag(self): sub2,idx2 = td.get_time_region(time_region,return_indices=True) base_select = np.zeros(td.shape[0],dtype=bool) dgroups = deque() - + for software,manual in itertools.izip(tg.dgroups,dgroups): self.assertNumpyAll(software,manual) self.assertEqual(len(tg.dgroups),2) self.assertEqual(tg.value.tolist(),[datetime.datetime(2012, 7, 17, 0, 0), datetime.datetime(2013, 7, 17, 0, 0)]) self.assertEqual(tg.bounds.tolist(),[[datetime.datetime(2012, 6, 1, 0, 0), datetime.datetime(2012, 8, 31, 0, 0)], [datetime.datetime(2013, 6, 1, 0, 0), datetime.datetime(2013, 8, 31, 0, 0)]]) - + dgroup1 = base_select.copy() dgroup1[idx1] = True dgroup2 = base_select.copy() dgroup2[idx2] = True - + dgroups.append(dgroup1) dgroups.append(dgroup2) - + tg = td.get_grouping([[6,7,8],'year']) for ii in range(len(tg.dgroups)): self.assertNumpyAll(tg.dgroups[ii],dgroups[ii]) self.assertEqual(len(tg.dgroups),len(dgroups)) - - def test_seasonal_get_grouping_unique_flag_winter_season(self): + + def test_get_grouping_seasonal_unique_flag_winter_season(self): + """Test with a single winter season using the unique flag.""" + dt1 = datetime.datetime(1900,01,01) dt2 = datetime.datetime(1902,12,31) dates = get_date_list(dt1,dt2,days=1) @@ -197,28 +158,52 @@ def test_seasonal_get_grouping_unique_flag_winter_season(self): tg = td.get_grouping(group) self.assertEqual(tg.value.shape[0],2) self.assertEqual(tg.bounds.tolist(),[[datetime.datetime(1901, 1, 1, 0, 0), datetime.datetime(1901, 2, 28, 0, 0)], [datetime.datetime(1902, 1, 1, 0, 0), datetime.datetime(1902, 2, 28, 0, 0)]]) - - def test_empty_season_with_year_missing_month(self): - dt1 = 
datetime.datetime(1900,01,01) - dt2 = datetime.datetime(1903,1,31) - dates = get_date_list(dt1,dt2,days=1) + + def test_get_grouping_seasonal_year_flag(self): + ## test with year flag + dates = get_date_list(dt(2012,1,1),dt(2013,12,31),1) td = TemporalDimension(value=dates) - group = [[12,1,2],'unique'] - tg = td.get_grouping(group) - ## there should be a month missing from the last season (february) and it should not be - ## considered complete + calc_grouping = [[6,7,8],'year'] + tg = td.get_grouping(calc_grouping) self.assertEqual(tg.value.shape[0],2) - + + # '[datetime.datetime(2012, 7, 16, 0, 0) datetime.datetime(2013, 7, 16, 0, 0)]' + actual = np.loads('\x80\x02cnumpy.core.multiarray\n_reconstruct\nq\x01cnumpy\nndarray\nq\x02K\x00\x85U\x01b\x87Rq\x03(K\x01K\x02\x85cnumpy\ndtype\nq\x04U\x02O8K\x00K\x01\x87Rq\x05(K\x03U\x01|NNNJ\xff\xff\xff\xffJ\xff\xff\xff\xffK?tb\x89]q\x06(cdatetime\ndatetime\nq\x07U\n\x07\xdc\x07\x10\x00\x00\x00\x00\x00\x00\x85Rq\x08h\x07U\n\x07\xdd\x07\x10\x00\x00\x00\x00\x00\x00\x85Rq\tetb.') + self.assertNumpyAll(tg.value,actual) + + # '[datetime.datetime(2012, 6, 1, 0, 0) datetime.datetime(2012, 6, 2, 0, 0)\n datetime.datetime(2012, 6, 3, 0, 0) datetime.datetime(2012, 6, 4, 0, 0)\n datetime.datetime(2012, 6, 5, 0, 0) datetime.datetime(2012, 6, 6, 0, 0)\n datetime.datetime(2012, 6, 7, 0, 0) datetime.datetime(2012, 6, 8, 0, 0)\n datetime.datetime(2012, 6, 9, 0, 0) datetime.datetime(2012, 6, 10, 0, 0)\n datetime.datetime(2012, 6, 11, 0, 0) datetime.datetime(2012, 6, 12, 0, 0)\n datetime.datetime(2012, 6, 13, 0, 0) datetime.datetime(2012, 6, 14, 0, 0)\n datetime.datetime(2012, 6, 15, 0, 0) datetime.datetime(2012, 6, 16, 0, 0)\n datetime.datetime(2012, 6, 17, 0, 0) datetime.datetime(2012, 6, 18, 0, 0)\n datetime.datetime(2012, 6, 19, 0, 0) datetime.datetime(2012, 6, 20, 0, 0)\n datetime.datetime(2012, 6, 21, 0, 0) datetime.datetime(2012, 6, 22, 0, 0)\n datetime.datetime(2012, 6, 23, 0, 0) datetime.datetime(2012, 6, 24, 0, 0)\n 
datetime.datetime(2012, 6, 25, 0, 0) datetime.datetime(2012, 6, 26, 0, 0)\n datetime.datetime(2012, 6, 27, 0, 0) datetime.datetime(2012, 6, 28, 0, 0)\n datetime.datetime(2012, 6, 29, 0, 0) datetime.datetime(2012, 6, 30, 0, 0)\n datetime.datetime(2012, 7, 1, 0, 0) datetime.datetime(2012, 7, 2, 0, 0)\n datetime.datetime(2012, 7, 3, 0, 0) datetime.datetime(2012, 7, 4, 0, 0)\n datetime.datetime(2012, 7, 5, 0, 0) datetime.datetime(2012, 7, 6, 0, 0)\n datetime.datetime(2012, 7, 7, 0, 0) datetime.datetime(2012, 7, 8, 0, 0)\n datetime.datetime(2012, 7, 9, 0, 0) datetime.datetime(2012, 7, 10, 0, 0)\n datetime.datetime(2012, 7, 11, 0, 0) datetime.datetime(2012, 7, 12, 0, 0)\n datetime.datetime(2012, 7, 13, 0, 0) datetime.datetime(2012, 7, 14, 0, 0)\n datetime.datetime(2012, 7, 15, 0, 0) datetime.datetime(2012, 7, 16, 0, 0)\n datetime.datetime(2012, 7, 17, 0, 0) datetime.datetime(2012, 7, 18, 0, 0)\n datetime.datetime(2012, 7, 19, 0, 0) datetime.datetime(2012, 7, 20, 0, 0)\n datetime.datetime(2012, 7, 21, 0, 0) datetime.datetime(2012, 7, 22, 0, 0)\n datetime.datetime(2012, 7, 23, 0, 0) datetime.datetime(2012, 7, 24, 0, 0)\n datetime.datetime(2012, 7, 25, 0, 0) datetime.datetime(2012, 7, 26, 0, 0)\n datetime.datetime(2012, 7, 27, 0, 0) datetime.datetime(2012, 7, 28, 0, 0)\n datetime.datetime(2012, 7, 29, 0, 0) datetime.datetime(2012, 7, 30, 0, 0)\n datetime.datetime(2012, 7, 31, 0, 0) datetime.datetime(2012, 8, 1, 0, 0)\n datetime.datetime(2012, 8, 2, 0, 0) datetime.datetime(2012, 8, 3, 0, 0)\n datetime.datetime(2012, 8, 4, 0, 0) datetime.datetime(2012, 8, 5, 0, 0)\n datetime.datetime(2012, 8, 6, 0, 0) datetime.datetime(2012, 8, 7, 0, 0)\n datetime.datetime(2012, 8, 8, 0, 0) datetime.datetime(2012, 8, 9, 0, 0)\n datetime.datetime(2012, 8, 10, 0, 0) datetime.datetime(2012, 8, 11, 0, 0)\n datetime.datetime(2012, 8, 12, 0, 0) datetime.datetime(2012, 8, 13, 0, 0)\n datetime.datetime(2012, 8, 14, 0, 0) datetime.datetime(2012, 8, 15, 0, 0)\n datetime.datetime(2012, 8, 16, 0, 0) 
datetime.datetime(2012, 8, 17, 0, 0)\n datetime.datetime(2012, 8, 18, 0, 0) datetime.datetime(2012, 8, 19, 0, 0)\n datetime.datetime(2012, 8, 20, 0, 0) datetime.datetime(2012, 8, 21, 0, 0)\n datetime.datetime(2012, 8, 22, 0, 0) datetime.datetime(2012, 8, 23, 0, 0)\n datetime.datetime(2012, 8, 24, 0, 0) datetime.datetime(2012, 8, 25, 0, 0)\n datetime.datetime(2012, 8, 26, 0, 0) datetime.datetime(2012, 8, 27, 0, 0)\n datetime.datetime(2012, 8, 28, 0, 0) datetime.datetime(2012, 8, 29, 0, 0)\n datetime.datetime(2012, 8, 30, 0, 0) datetime.datetime(2012, 8, 31, 0, 0)]' + actual = np.loads('\x80\x02cnumpy.core.multiarray\n_reconstruct\nq\x01cnumpy\nndarray\nq\x02K\x00\x85U\x01b\x87Rq\x03(K\x01K\\\x85cnumpy\ndtype\nq\x04U\x02O8K\x00K\x01\x87Rq\x05(K\x03U\x01|NNNJ\xff\xff\xff\xffJ\xff\xff\xff\xffK?tb\x89]q\x06(cdatetime\ndatetime\nq\x07U\n\x07\xdc\x06\x01\x00\x00\x00\x00\x00\x00\x85Rq\x08h\x07U\n\x07\xdc\x06\x02\x00\x00\x00\x00\x00\x00\x85Rq\th\x07U\n\x07\xdc\x06\x03\x00\x00\x00\x00\x00\x00\x85Rq\nh\x07U\n\x07\xdc\x06\x04\x00\x00\x00\x00\x00\x00\x85Rq\x0bh\x07U\n\x07\xdc\x06\x05\x00\x00\x00\x00\x00\x00\x85Rq\x0ch\x07U\n\x07\xdc\x06\x06\x00\x00\x00\x00\x00\x00\x85Rq\rh\x07U\n\x07\xdc\x06\x07\x00\x00\x00\x00\x00\x00\x85Rq\x0eh\x07U\n\x07\xdc\x06\x08\x00\x00\x00\x00\x00\x00\x85Rq\x0fh\x07U\n\x07\xdc\x06\t\x00\x00\x00\x00\x00\x00\x85Rq\x10h\x07U\n\x07\xdc\x06\n\x00\x00\x00\x00\x00\x00\x85Rq\x11h\x07U\n\x07\xdc\x06\x0b\x00\x00\x00\x00\x00\x00\x85Rq\x12h\x07U\n\x07\xdc\x06\x0c\x00\x00\x00\x00\x00\x00\x85Rq\x13h\x07U\n\x07\xdc\x06\r\x00\x00\x00\x00\x00\x00\x85Rq\x14h\x07U\n\x07\xdc\x06\x0e\x00\x00\x00\x00\x00\x00\x85Rq\x15h\x07U\n\x07\xdc\x06\x0f\x00\x00\x00\x00\x00\x00\x85Rq\x16h\x07U\n\x07\xdc\x06\x10\x00\x00\x00\x00\x00\x00\x85Rq\x17h\x07U\n\x07\xdc\x06\x11\x00\x00\x00\x00\x00\x00\x85Rq\x18h\x07U\n\x07\xdc\x06\x12\x00\x00\x00\x00\x00\x00\x85Rq\x19h\x07U\n\x07\xdc\x06\x13\x00\x00\x00\x00\x00\x00\x85Rq\x1ah\x07U\n\x07\xdc\x06\x14\x00\x00\x00\x00\x00\x00\x85Rq\x1bh\x07U\n\x07\xdc\
x06\x15\x00\x00\x00\x00\x00\x00\x85Rq\x1ch\x07U\n\x07\xdc\x06\x16\x00\x00\x00\x00\x00\x00\x85Rq\x1dh\x07U\n\x07\xdc\x06\x17\x00\x00\x00\x00\x00\x00\x85Rq\x1eh\x07U\n\x07\xdc\x06\x18\x00\x00\x00\x00\x00\x00\x85Rq\x1fh\x07U\n\x07\xdc\x06\x19\x00\x00\x00\x00\x00\x00\x85Rq h\x07U\n\x07\xdc\x06\x1a\x00\x00\x00\x00\x00\x00\x85Rq!h\x07U\n\x07\xdc\x06\x1b\x00\x00\x00\x00\x00\x00\x85Rq"h\x07U\n\x07\xdc\x06\x1c\x00\x00\x00\x00\x00\x00\x85Rq#h\x07U\n\x07\xdc\x06\x1d\x00\x00\x00\x00\x00\x00\x85Rq$h\x07U\n\x07\xdc\x06\x1e\x00\x00\x00\x00\x00\x00\x85Rq%h\x07U\n\x07\xdc\x07\x01\x00\x00\x00\x00\x00\x00\x85Rq&h\x07U\n\x07\xdc\x07\x02\x00\x00\x00\x00\x00\x00\x85Rq\'h\x07U\n\x07\xdc\x07\x03\x00\x00\x00\x00\x00\x00\x85Rq(h\x07U\n\x07\xdc\x07\x04\x00\x00\x00\x00\x00\x00\x85Rq)h\x07U\n\x07\xdc\x07\x05\x00\x00\x00\x00\x00\x00\x85Rq*h\x07U\n\x07\xdc\x07\x06\x00\x00\x00\x00\x00\x00\x85Rq+h\x07U\n\x07\xdc\x07\x07\x00\x00\x00\x00\x00\x00\x85Rq,h\x07U\n\x07\xdc\x07\x08\x00\x00\x00\x00\x00\x00\x85Rq-h\x07U\n\x07\xdc\x07\t\x00\x00\x00\x00\x00\x00\x85Rq.h\x07U\n\x07\xdc\x07\n\x00\x00\x00\x00\x00\x00\x85Rq/h\x07U\n\x07\xdc\x07\x0b\x00\x00\x00\x00\x00\x00\x85Rq0h\x07U\n\x07\xdc\x07\x0c\x00\x00\x00\x00\x00\x00\x85Rq1h\x07U\n\x07\xdc\x07\r\x00\x00\x00\x00\x00\x00\x85Rq2h\x07U\n\x07\xdc\x07\x0e\x00\x00\x00\x00\x00\x00\x85Rq3h\x07U\n\x07\xdc\x07\x0f\x00\x00\x00\x00\x00\x00\x85Rq4h\x07U\n\x07\xdc\x07\x10\x00\x00\x00\x00\x00\x00\x85Rq5h\x07U\n\x07\xdc\x07\x11\x00\x00\x00\x00\x00\x00\x85Rq6h\x07U\n\x07\xdc\x07\x12\x00\x00\x00\x00\x00\x00\x85Rq7h\x07U\n\x07\xdc\x07\x13\x00\x00\x00\x00\x00\x00\x85Rq8h\x07U\n\x07\xdc\x07\x14\x00\x00\x00\x00\x00\x00\x85Rq9h\x07U\n\x07\xdc\x07\x15\x00\x00\x00\x00\x00\x00\x85Rq:h\x07U\n\x07\xdc\x07\x16\x00\x00\x00\x00\x00\x00\x85Rq;h\x07U\n\x07\xdc\x07\x17\x00\x00\x00\x00\x00\x00\x85Rqh\x07U\n\x07\xdc\x07\x1a\x00\x00\x00\x00\x00\x00\x85Rq?h\x07U\n\x07\xdc\x07\x1b\x00\x00\x00\x00\x00\x00\x85Rq@h\x07U\n\x07\xdc\x07\x1c\x00\x00\x00\x00\x00\x00\x85RqAh\x07U\n\x07\xdc\x07\x1d\x00\x0
0\x00\x00\x00\x00\x85RqBh\x07U\n\x07\xdc\x07\x1e\x00\x00\x00\x00\x00\x00\x85RqCh\x07U\n\x07\xdc\x07\x1f\x00\x00\x00\x00\x00\x00\x85RqDh\x07U\n\x07\xdc\x08\x01\x00\x00\x00\x00\x00\x00\x85RqEh\x07U\n\x07\xdc\x08\x02\x00\x00\x00\x00\x00\x00\x85RqFh\x07U\n\x07\xdc\x08\x03\x00\x00\x00\x00\x00\x00\x85RqGh\x07U\n\x07\xdc\x08\x04\x00\x00\x00\x00\x00\x00\x85RqHh\x07U\n\x07\xdc\x08\x05\x00\x00\x00\x00\x00\x00\x85RqIh\x07U\n\x07\xdc\x08\x06\x00\x00\x00\x00\x00\x00\x85RqJh\x07U\n\x07\xdc\x08\x07\x00\x00\x00\x00\x00\x00\x85RqKh\x07U\n\x07\xdc\x08\x08\x00\x00\x00\x00\x00\x00\x85RqLh\x07U\n\x07\xdc\x08\t\x00\x00\x00\x00\x00\x00\x85RqMh\x07U\n\x07\xdc\x08\n\x00\x00\x00\x00\x00\x00\x85RqNh\x07U\n\x07\xdc\x08\x0b\x00\x00\x00\x00\x00\x00\x85RqOh\x07U\n\x07\xdc\x08\x0c\x00\x00\x00\x00\x00\x00\x85RqPh\x07U\n\x07\xdc\x08\r\x00\x00\x00\x00\x00\x00\x85RqQh\x07U\n\x07\xdc\x08\x0e\x00\x00\x00\x00\x00\x00\x85RqRh\x07U\n\x07\xdc\x08\x0f\x00\x00\x00\x00\x00\x00\x85RqSh\x07U\n\x07\xdc\x08\x10\x00\x00\x00\x00\x00\x00\x85RqTh\x07U\n\x07\xdc\x08\x11\x00\x00\x00\x00\x00\x00\x85RqUh\x07U\n\x07\xdc\x08\x12\x00\x00\x00\x00\x00\x00\x85RqVh\x07U\n\x07\xdc\x08\x13\x00\x00\x00\x00\x00\x00\x85RqWh\x07U\n\x07\xdc\x08\x14\x00\x00\x00\x00\x00\x00\x85RqXh\x07U\n\x07\xdc\x08\x15\x00\x00\x00\x00\x00\x00\x85RqYh\x07U\n\x07\xdc\x08\x16\x00\x00\x00\x00\x00\x00\x85RqZh\x07U\n\x07\xdc\x08\x17\x00\x00\x00\x00\x00\x00\x85Rq[h\x07U\n\x07\xdc\x08\x18\x00\x00\x00\x00\x00\x00\x85Rq\\h\x07U\n\x07\xdc\x08\x19\x00\x00\x00\x00\x00\x00\x85Rq]h\x07U\n\x07\xdc\x08\x1a\x00\x00\x00\x00\x00\x00\x85Rq^h\x07U\n\x07\xdc\x08\x1b\x00\x00\x00\x00\x00\x00\x85Rq_h\x07U\n\x07\xdc\x08\x1c\x00\x00\x00\x00\x00\x00\x85Rq`h\x07U\n\x07\xdc\x08\x1d\x00\x00\x00\x00\x00\x00\x85Rqah\x07U\n\x07\xdc\x08\x1e\x00\x00\x00\x00\x00\x00\x85Rqbh\x07U\n\x07\xdc\x08\x1f\x00\x00\x00\x00\x00\x00\x85Rqcetb.') + sub0 = td.value[tg.dgroups[0]] + self.assertNumpyAll(sub0,actual) + + # '[datetime.datetime(2013, 6, 1, 0, 0) datetime.datetime(2013, 6, 2, 0, 0)\n 
datetime.datetime(2013, 6, 3, 0, 0) datetime.datetime(2013, 6, 4, 0, 0)\n datetime.datetime(2013, 6, 5, 0, 0) datetime.datetime(2013, 6, 6, 0, 0)\n datetime.datetime(2013, 6, 7, 0, 0) datetime.datetime(2013, 6, 8, 0, 0)\n datetime.datetime(2013, 6, 9, 0, 0) datetime.datetime(2013, 6, 10, 0, 0)\n datetime.datetime(2013, 6, 11, 0, 0) datetime.datetime(2013, 6, 12, 0, 0)\n datetime.datetime(2013, 6, 13, 0, 0) datetime.datetime(2013, 6, 14, 0, 0)\n datetime.datetime(2013, 6, 15, 0, 0) datetime.datetime(2013, 6, 16, 0, 0)\n datetime.datetime(2013, 6, 17, 0, 0) datetime.datetime(2013, 6, 18, 0, 0)\n datetime.datetime(2013, 6, 19, 0, 0) datetime.datetime(2013, 6, 20, 0, 0)\n datetime.datetime(2013, 6, 21, 0, 0) datetime.datetime(2013, 6, 22, 0, 0)\n datetime.datetime(2013, 6, 23, 0, 0) datetime.datetime(2013, 6, 24, 0, 0)\n datetime.datetime(2013, 6, 25, 0, 0) datetime.datetime(2013, 6, 26, 0, 0)\n datetime.datetime(2013, 6, 27, 0, 0) datetime.datetime(2013, 6, 28, 0, 0)\n datetime.datetime(2013, 6, 29, 0, 0) datetime.datetime(2013, 6, 30, 0, 0)\n datetime.datetime(2013, 7, 1, 0, 0) datetime.datetime(2013, 7, 2, 0, 0)\n datetime.datetime(2013, 7, 3, 0, 0) datetime.datetime(2013, 7, 4, 0, 0)\n datetime.datetime(2013, 7, 5, 0, 0) datetime.datetime(2013, 7, 6, 0, 0)\n datetime.datetime(2013, 7, 7, 0, 0) datetime.datetime(2013, 7, 8, 0, 0)\n datetime.datetime(2013, 7, 9, 0, 0) datetime.datetime(2013, 7, 10, 0, 0)\n datetime.datetime(2013, 7, 11, 0, 0) datetime.datetime(2013, 7, 12, 0, 0)\n datetime.datetime(2013, 7, 13, 0, 0) datetime.datetime(2013, 7, 14, 0, 0)\n datetime.datetime(2013, 7, 15, 0, 0) datetime.datetime(2013, 7, 16, 0, 0)\n datetime.datetime(2013, 7, 17, 0, 0) datetime.datetime(2013, 7, 18, 0, 0)\n datetime.datetime(2013, 7, 19, 0, 0) datetime.datetime(2013, 7, 20, 0, 0)\n datetime.datetime(2013, 7, 21, 0, 0) datetime.datetime(2013, 7, 22, 0, 0)\n datetime.datetime(2013, 7, 23, 0, 0) datetime.datetime(2013, 7, 24, 0, 0)\n datetime.datetime(2013, 7, 25, 0, 0) 
datetime.datetime(2013, 7, 26, 0, 0)\n datetime.datetime(2013, 7, 27, 0, 0) datetime.datetime(2013, 7, 28, 0, 0)\n datetime.datetime(2013, 7, 29, 0, 0) datetime.datetime(2013, 7, 30, 0, 0)\n datetime.datetime(2013, 7, 31, 0, 0) datetime.datetime(2013, 8, 1, 0, 0)\n datetime.datetime(2013, 8, 2, 0, 0) datetime.datetime(2013, 8, 3, 0, 0)\n datetime.datetime(2013, 8, 4, 0, 0) datetime.datetime(2013, 8, 5, 0, 0)\n datetime.datetime(2013, 8, 6, 0, 0) datetime.datetime(2013, 8, 7, 0, 0)\n datetime.datetime(2013, 8, 8, 0, 0) datetime.datetime(2013, 8, 9, 0, 0)\n datetime.datetime(2013, 8, 10, 0, 0) datetime.datetime(2013, 8, 11, 0, 0)\n datetime.datetime(2013, 8, 12, 0, 0) datetime.datetime(2013, 8, 13, 0, 0)\n datetime.datetime(2013, 8, 14, 0, 0) datetime.datetime(2013, 8, 15, 0, 0)\n datetime.datetime(2013, 8, 16, 0, 0) datetime.datetime(2013, 8, 17, 0, 0)\n datetime.datetime(2013, 8, 18, 0, 0) datetime.datetime(2013, 8, 19, 0, 0)\n datetime.datetime(2013, 8, 20, 0, 0) datetime.datetime(2013, 8, 21, 0, 0)\n datetime.datetime(2013, 8, 22, 0, 0) datetime.datetime(2013, 8, 23, 0, 0)\n datetime.datetime(2013, 8, 24, 0, 0) datetime.datetime(2013, 8, 25, 0, 0)\n datetime.datetime(2013, 8, 26, 0, 0) datetime.datetime(2013, 8, 27, 0, 0)\n datetime.datetime(2013, 8, 28, 0, 0) datetime.datetime(2013, 8, 29, 0, 0)\n datetime.datetime(2013, 8, 30, 0, 0) datetime.datetime(2013, 8, 31, 0, 0)]' + actual = 
np.loads('\x80\x02cnumpy.core.multiarray\n_reconstruct\nq\x01cnumpy\nndarray\nq\x02K\x00\x85U\x01b\x87Rq\x03(K\x01K\\\x85cnumpy\ndtype\nq\x04U\x02O8K\x00K\x01\x87Rq\x05(K\x03U\x01|NNNJ\xff\xff\xff\xffJ\xff\xff\xff\xffK?tb\x89]q\x06(cdatetime\ndatetime\nq\x07U\n\x07\xdd\x06\x01\x00\x00\x00\x00\x00\x00\x85Rq\x08h\x07U\n\x07\xdd\x06\x02\x00\x00\x00\x00\x00\x00\x85Rq\th\x07U\n\x07\xdd\x06\x03\x00\x00\x00\x00\x00\x00\x85Rq\nh\x07U\n\x07\xdd\x06\x04\x00\x00\x00\x00\x00\x00\x85Rq\x0bh\x07U\n\x07\xdd\x06\x05\x00\x00\x00\x00\x00\x00\x85Rq\x0ch\x07U\n\x07\xdd\x06\x06\x00\x00\x00\x00\x00\x00\x85Rq\rh\x07U\n\x07\xdd\x06\x07\x00\x00\x00\x00\x00\x00\x85Rq\x0eh\x07U\n\x07\xdd\x06\x08\x00\x00\x00\x00\x00\x00\x85Rq\x0fh\x07U\n\x07\xdd\x06\t\x00\x00\x00\x00\x00\x00\x85Rq\x10h\x07U\n\x07\xdd\x06\n\x00\x00\x00\x00\x00\x00\x85Rq\x11h\x07U\n\x07\xdd\x06\x0b\x00\x00\x00\x00\x00\x00\x85Rq\x12h\x07U\n\x07\xdd\x06\x0c\x00\x00\x00\x00\x00\x00\x85Rq\x13h\x07U\n\x07\xdd\x06\r\x00\x00\x00\x00\x00\x00\x85Rq\x14h\x07U\n\x07\xdd\x06\x0e\x00\x00\x00\x00\x00\x00\x85Rq\x15h\x07U\n\x07\xdd\x06\x0f\x00\x00\x00\x00\x00\x00\x85Rq\x16h\x07U\n\x07\xdd\x06\x10\x00\x00\x00\x00\x00\x00\x85Rq\x17h\x07U\n\x07\xdd\x06\x11\x00\x00\x00\x00\x00\x00\x85Rq\x18h\x07U\n\x07\xdd\x06\x12\x00\x00\x00\x00\x00\x00\x85Rq\x19h\x07U\n\x07\xdd\x06\x13\x00\x00\x00\x00\x00\x00\x85Rq\x1ah\x07U\n\x07\xdd\x06\x14\x00\x00\x00\x00\x00\x00\x85Rq\x1bh\x07U\n\x07\xdd\x06\x15\x00\x00\x00\x00\x00\x00\x85Rq\x1ch\x07U\n\x07\xdd\x06\x16\x00\x00\x00\x00\x00\x00\x85Rq\x1dh\x07U\n\x07\xdd\x06\x17\x00\x00\x00\x00\x00\x00\x85Rq\x1eh\x07U\n\x07\xdd\x06\x18\x00\x00\x00\x00\x00\x00\x85Rq\x1fh\x07U\n\x07\xdd\x06\x19\x00\x00\x00\x00\x00\x00\x85Rq 
h\x07U\n\x07\xdd\x06\x1a\x00\x00\x00\x00\x00\x00\x85Rq!h\x07U\n\x07\xdd\x06\x1b\x00\x00\x00\x00\x00\x00\x85Rq"h\x07U\n\x07\xdd\x06\x1c\x00\x00\x00\x00\x00\x00\x85Rq#h\x07U\n\x07\xdd\x06\x1d\x00\x00\x00\x00\x00\x00\x85Rq$h\x07U\n\x07\xdd\x06\x1e\x00\x00\x00\x00\x00\x00\x85Rq%h\x07U\n\x07\xdd\x07\x01\x00\x00\x00\x00\x00\x00\x85Rq&h\x07U\n\x07\xdd\x07\x02\x00\x00\x00\x00\x00\x00\x85Rq\'h\x07U\n\x07\xdd\x07\x03\x00\x00\x00\x00\x00\x00\x85Rq(h\x07U\n\x07\xdd\x07\x04\x00\x00\x00\x00\x00\x00\x85Rq)h\x07U\n\x07\xdd\x07\x05\x00\x00\x00\x00\x00\x00\x85Rq*h\x07U\n\x07\xdd\x07\x06\x00\x00\x00\x00\x00\x00\x85Rq+h\x07U\n\x07\xdd\x07\x07\x00\x00\x00\x00\x00\x00\x85Rq,h\x07U\n\x07\xdd\x07\x08\x00\x00\x00\x00\x00\x00\x85Rq-h\x07U\n\x07\xdd\x07\t\x00\x00\x00\x00\x00\x00\x85Rq.h\x07U\n\x07\xdd\x07\n\x00\x00\x00\x00\x00\x00\x85Rq/h\x07U\n\x07\xdd\x07\x0b\x00\x00\x00\x00\x00\x00\x85Rq0h\x07U\n\x07\xdd\x07\x0c\x00\x00\x00\x00\x00\x00\x85Rq1h\x07U\n\x07\xdd\x07\r\x00\x00\x00\x00\x00\x00\x85Rq2h\x07U\n\x07\xdd\x07\x0e\x00\x00\x00\x00\x00\x00\x85Rq3h\x07U\n\x07\xdd\x07\x0f\x00\x00\x00\x00\x00\x00\x85Rq4h\x07U\n\x07\xdd\x07\x10\x00\x00\x00\x00\x00\x00\x85Rq5h\x07U\n\x07\xdd\x07\x11\x00\x00\x00\x00\x00\x00\x85Rq6h\x07U\n\x07\xdd\x07\x12\x00\x00\x00\x00\x00\x00\x85Rq7h\x07U\n\x07\xdd\x07\x13\x00\x00\x00\x00\x00\x00\x85Rq8h\x07U\n\x07\xdd\x07\x14\x00\x00\x00\x00\x00\x00\x85Rq9h\x07U\n\x07\xdd\x07\x15\x00\x00\x00\x00\x00\x00\x85Rq:h\x07U\n\x07\xdd\x07\x16\x00\x00\x00\x00\x00\x00\x85Rq;h\x07U\n\x07\xdd\x07\x17\x00\x00\x00\x00\x00\x00\x85Rqh\x07U\n\x07\xdd\x07\x1a\x00\x00\x00\x00\x00\x00\x85Rq?h\x07U\n\x07\xdd\x07\x1b\x00\x00\x00\x00\x00\x00\x85Rq@h\x07U\n\x07\xdd\x07\x1c\x00\x00\x00\x00\x00\x00\x85RqAh\x07U\n\x07\xdd\x07\x1d\x00\x00\x00\x00\x00\x00\x85RqBh\x07U\n\x07\xdd\x07\x1e\x00\x00\x00\x00\x00\x00\x85RqCh\x07U\n\x07\xdd\x07\x1f\x00\x00\x00\x00\x00\x00\x85RqDh\x07U\n\x07\xdd\x08\x01\x00\x00\x00\x00\x00\x00\x85RqEh\x07U\n\x07\xdd\x08\x02\x00\x00\x00\x00\x00\x00\x85RqFh\x07U\n\x07\xdd\x08\x03\x
00\x00\x00\x00\x00\x00\x85RqGh\x07U\n\x07\xdd\x08\x04\x00\x00\x00\x00\x00\x00\x85RqHh\x07U\n\x07\xdd\x08\x05\x00\x00\x00\x00\x00\x00\x85RqIh\x07U\n\x07\xdd\x08\x06\x00\x00\x00\x00\x00\x00\x85RqJh\x07U\n\x07\xdd\x08\x07\x00\x00\x00\x00\x00\x00\x85RqKh\x07U\n\x07\xdd\x08\x08\x00\x00\x00\x00\x00\x00\x85RqLh\x07U\n\x07\xdd\x08\t\x00\x00\x00\x00\x00\x00\x85RqMh\x07U\n\x07\xdd\x08\n\x00\x00\x00\x00\x00\x00\x85RqNh\x07U\n\x07\xdd\x08\x0b\x00\x00\x00\x00\x00\x00\x85RqOh\x07U\n\x07\xdd\x08\x0c\x00\x00\x00\x00\x00\x00\x85RqPh\x07U\n\x07\xdd\x08\r\x00\x00\x00\x00\x00\x00\x85RqQh\x07U\n\x07\xdd\x08\x0e\x00\x00\x00\x00\x00\x00\x85RqRh\x07U\n\x07\xdd\x08\x0f\x00\x00\x00\x00\x00\x00\x85RqSh\x07U\n\x07\xdd\x08\x10\x00\x00\x00\x00\x00\x00\x85RqTh\x07U\n\x07\xdd\x08\x11\x00\x00\x00\x00\x00\x00\x85RqUh\x07U\n\x07\xdd\x08\x12\x00\x00\x00\x00\x00\x00\x85RqVh\x07U\n\x07\xdd\x08\x13\x00\x00\x00\x00\x00\x00\x85RqWh\x07U\n\x07\xdd\x08\x14\x00\x00\x00\x00\x00\x00\x85RqXh\x07U\n\x07\xdd\x08\x15\x00\x00\x00\x00\x00\x00\x85RqYh\x07U\n\x07\xdd\x08\x16\x00\x00\x00\x00\x00\x00\x85RqZh\x07U\n\x07\xdd\x08\x17\x00\x00\x00\x00\x00\x00\x85Rq[h\x07U\n\x07\xdd\x08\x18\x00\x00\x00\x00\x00\x00\x85Rq\\h\x07U\n\x07\xdd\x08\x19\x00\x00\x00\x00\x00\x00\x85Rq]h\x07U\n\x07\xdd\x08\x1a\x00\x00\x00\x00\x00\x00\x85Rq^h\x07U\n\x07\xdd\x08\x1b\x00\x00\x00\x00\x00\x00\x85Rq_h\x07U\n\x07\xdd\x08\x1c\x00\x00\x00\x00\x00\x00\x85Rq`h\x07U\n\x07\xdd\x08\x1d\x00\x00\x00\x00\x00\x00\x85Rqah\x07U\n\x07\xdd\x08\x1e\x00\x00\x00\x00\x00\x00\x85Rqbh\x07U\n\x07\xdd\x08\x1f\x00\x00\x00\x00\x00\x00\x85Rqcetb.') + sub1 = td.value[tg.dgroups[1]] + self.assertNumpyAll(sub1,actual) + + ## test crossing year boundary + for calc_grouping in [[[12,1,2],'year'],['year',[12,1,2]]]: + tg = td.get_grouping(calc_grouping) + + # '[datetime.datetime(2012, 1, 16, 0, 0) datetime.datetime(2013, 1, 16, 0, 0)]' + 
self.assertNumpyAll(tg.value,np.loads('\x80\x02cnumpy.core.multiarray\n_reconstruct\nq\x01cnumpy\nndarray\nq\x02K\x00\x85U\x01b\x87Rq\x03(K\x01K\x02\x85cnumpy\ndtype\nq\x04U\x02O8K\x00K\x01\x87Rq\x05(K\x03U\x01|NNNJ\xff\xff\xff\xffJ\xff\xff\xff\xffK?tb\x89]q\x06(cdatetime\ndatetime\nq\x07U\n\x07\xdc\x01\x10\x00\x00\x00\x00\x00\x00\x85Rq\x08h\x07U\n\x07\xdd\x01\x10\x00\x00\x00\x00\x00\x00\x85Rq\tetb.')) + + # '[[datetime.datetime(2012, 1, 1, 0, 0) datetime.datetime(2012, 12, 31, 0, 0)]\n [datetime.datetime(2013, 1, 1, 0, 0) datetime.datetime(2013, 12, 31, 0, 0)]]' + self.assertNumpyAll(tg.bounds,np.loads('\x80\x02cnumpy.core.multiarray\n_reconstruct\nq\x01cnumpy\nndarray\nq\x02K\x00\x85U\x01b\x87Rq\x03(K\x01K\x02K\x02\x86cnumpy\ndtype\nq\x04U\x02O8K\x00K\x01\x87Rq\x05(K\x03U\x01|NNNJ\xff\xff\xff\xffJ\xff\xff\xff\xffK?tb\x89]q\x06(cdatetime\ndatetime\nq\x07U\n\x07\xdc\x01\x01\x00\x00\x00\x00\x00\x00\x85Rq\x08h\x07U\n\x07\xdc\x0c\x1f\x00\x00\x00\x00\x00\x00\x85Rq\th\x07U\n\x07\xdd\x01\x01\x00\x00\x00\x00\x00\x00\x85Rq\nh\x07U\n\x07\xdd\x0c\x1f\x00\x00\x00\x00\x00\x00\x85Rq\x0betb.')) + + # '[datetime.datetime(2013, 1, 1, 0, 0) datetime.datetime(2013, 1, 2, 0, 0)\n datetime.datetime(2013, 1, 3, 0, 0) datetime.datetime(2013, 1, 4, 0, 0)\n datetime.datetime(2013, 1, 5, 0, 0) datetime.datetime(2013, 1, 6, 0, 0)\n datetime.datetime(2013, 1, 7, 0, 0) datetime.datetime(2013, 1, 8, 0, 0)\n datetime.datetime(2013, 1, 9, 0, 0) datetime.datetime(2013, 1, 10, 0, 0)\n datetime.datetime(2013, 1, 11, 0, 0) datetime.datetime(2013, 1, 12, 0, 0)\n datetime.datetime(2013, 1, 13, 0, 0) datetime.datetime(2013, 1, 14, 0, 0)\n datetime.datetime(2013, 1, 15, 0, 0) datetime.datetime(2013, 1, 16, 0, 0)\n datetime.datetime(2013, 1, 17, 0, 0) datetime.datetime(2013, 1, 18, 0, 0)\n datetime.datetime(2013, 1, 19, 0, 0) datetime.datetime(2013, 1, 20, 0, 0)\n datetime.datetime(2013, 1, 21, 0, 0) datetime.datetime(2013, 1, 22, 0, 0)\n datetime.datetime(2013, 1, 23, 0, 0) datetime.datetime(2013, 1, 
24, 0, 0)\n datetime.datetime(2013, 1, 25, 0, 0) datetime.datetime(2013, 1, 26, 0, 0)\n datetime.datetime(2013, 1, 27, 0, 0) datetime.datetime(2013, 1, 28, 0, 0)\n datetime.datetime(2013, 1, 29, 0, 0) datetime.datetime(2013, 1, 30, 0, 0)\n datetime.datetime(2013, 1, 31, 0, 0) datetime.datetime(2013, 2, 1, 0, 0)\n datetime.datetime(2013, 2, 2, 0, 0) datetime.datetime(2013, 2, 3, 0, 0)\n datetime.datetime(2013, 2, 4, 0, 0) datetime.datetime(2013, 2, 5, 0, 0)\n datetime.datetime(2013, 2, 6, 0, 0) datetime.datetime(2013, 2, 7, 0, 0)\n datetime.datetime(2013, 2, 8, 0, 0) datetime.datetime(2013, 2, 9, 0, 0)\n datetime.datetime(2013, 2, 10, 0, 0) datetime.datetime(2013, 2, 11, 0, 0)\n datetime.datetime(2013, 2, 12, 0, 0) datetime.datetime(2013, 2, 13, 0, 0)\n datetime.datetime(2013, 2, 14, 0, 0) datetime.datetime(2013, 2, 15, 0, 0)\n datetime.datetime(2013, 2, 16, 0, 0) datetime.datetime(2013, 2, 17, 0, 0)\n datetime.datetime(2013, 2, 18, 0, 0) datetime.datetime(2013, 2, 19, 0, 0)\n datetime.datetime(2013, 2, 20, 0, 0) datetime.datetime(2013, 2, 21, 0, 0)\n datetime.datetime(2013, 2, 22, 0, 0) datetime.datetime(2013, 2, 23, 0, 0)\n datetime.datetime(2013, 2, 24, 0, 0) datetime.datetime(2013, 2, 25, 0, 0)\n datetime.datetime(2013, 2, 26, 0, 0) datetime.datetime(2013, 2, 27, 0, 0)\n datetime.datetime(2013, 2, 28, 0, 0) datetime.datetime(2013, 12, 1, 0, 0)\n datetime.datetime(2013, 12, 2, 0, 0) datetime.datetime(2013, 12, 3, 0, 0)\n datetime.datetime(2013, 12, 4, 0, 0) datetime.datetime(2013, 12, 5, 0, 0)\n datetime.datetime(2013, 12, 6, 0, 0) datetime.datetime(2013, 12, 7, 0, 0)\n datetime.datetime(2013, 12, 8, 0, 0) datetime.datetime(2013, 12, 9, 0, 0)\n datetime.datetime(2013, 12, 10, 0, 0)\n datetime.datetime(2013, 12, 11, 0, 0)\n datetime.datetime(2013, 12, 12, 0, 0)\n datetime.datetime(2013, 12, 13, 0, 0)\n datetime.datetime(2013, 12, 14, 0, 0)\n datetime.datetime(2013, 12, 15, 0, 0)\n datetime.datetime(2013, 12, 16, 0, 0)\n datetime.datetime(2013, 12, 17, 0, 0)\n 
datetime.datetime(2013, 12, 18, 0, 0)\n datetime.datetime(2013, 12, 19, 0, 0)\n datetime.datetime(2013, 12, 20, 0, 0)\n datetime.datetime(2013, 12, 21, 0, 0)\n datetime.datetime(2013, 12, 22, 0, 0)\n datetime.datetime(2013, 12, 23, 0, 0)\n datetime.datetime(2013, 12, 24, 0, 0)\n datetime.datetime(2013, 12, 25, 0, 0)\n datetime.datetime(2013, 12, 26, 0, 0)\n datetime.datetime(2013, 12, 27, 0, 0)\n datetime.datetime(2013, 12, 28, 0, 0)\n datetime.datetime(2013, 12, 29, 0, 0)\n datetime.datetime(2013, 12, 30, 0, 0)\n datetime.datetime(2013, 12, 31, 0, 0)]' + self.assertNumpyAll(td.value[tg.dgroups[1]],np.loads('\x80\x02cnumpy.core.multiarray\n_reconstruct\nq\x01cnumpy\nndarray\nq\x02K\x00\x85U\x01b\x87Rq\x03(K\x01KZ\x85cnumpy\ndtype\nq\x04U\x02O8K\x00K\x01\x87Rq\x05(K\x03U\x01|NNNJ\xff\xff\xff\xffJ\xff\xff\xff\xffK?tb\x89]q\x06(cdatetime\ndatetime\nq\x07U\n\x07\xdd\x01\x01\x00\x00\x00\x00\x00\x00\x85Rq\x08h\x07U\n\x07\xdd\x01\x02\x00\x00\x00\x00\x00\x00\x85Rq\th\x07U\n\x07\xdd\x01\x03\x00\x00\x00\x00\x00\x00\x85Rq\nh\x07U\n\x07\xdd\x01\x04\x00\x00\x00\x00\x00\x00\x85Rq\x0bh\x07U\n\x07\xdd\x01\x05\x00\x00\x00\x00\x00\x00\x85Rq\x0ch\x07U\n\x07\xdd\x01\x06\x00\x00\x00\x00\x00\x00\x85Rq\rh\x07U\n\x07\xdd\x01\x07\x00\x00\x00\x00\x00\x00\x85Rq\x0eh\x07U\n\x07\xdd\x01\x08\x00\x00\x00\x00\x00\x00\x85Rq\x0fh\x07U\n\x07\xdd\x01\t\x00\x00\x00\x00\x00\x00\x85Rq\x10h\x07U\n\x07\xdd\x01\n\x00\x00\x00\x00\x00\x00\x85Rq\x11h\x07U\n\x07\xdd\x01\x0b\x00\x00\x00\x00\x00\x00\x85Rq\x12h\x07U\n\x07\xdd\x01\x0c\x00\x00\x00\x00\x00\x00\x85Rq\x13h\x07U\n\x07\xdd\x01\r\x00\x00\x00\x00\x00\x00\x85Rq\x14h\x07U\n\x07\xdd\x01\x0e\x00\x00\x00\x00\x00\x00\x85Rq\x15h\x07U\n\x07\xdd\x01\x0f\x00\x00\x00\x00\x00\x00\x85Rq\x16h\x07U\n\x07\xdd\x01\x10\x00\x00\x00\x00\x00\x00\x85Rq\x17h\x07U\n\x07\xdd\x01\x11\x00\x00\x00\x00\x00\x00\x85Rq\x18h\x07U\n\x07\xdd\x01\x12\x00\x00\x00\x00\x00\x00\x85Rq\x19h\x07U\n\x07\xdd\x01\x13\x00\x00\x00\x00\x00\x00\x85Rq\x1ah\x07U\n\x07\xdd\x01\x14\x00\x00\x00\x00\x00\x00\x85
Rq\x1bh\x07U\n\x07\xdd\x01\x15\x00\x00\x00\x00\x00\x00\x85Rq\x1ch\x07U\n\x07\xdd\x01\x16\x00\x00\x00\x00\x00\x00\x85Rq\x1dh\x07U\n\x07\xdd\x01\x17\x00\x00\x00\x00\x00\x00\x85Rq\x1eh\x07U\n\x07\xdd\x01\x18\x00\x00\x00\x00\x00\x00\x85Rq\x1fh\x07U\n\x07\xdd\x01\x19\x00\x00\x00\x00\x00\x00\x85Rq h\x07U\n\x07\xdd\x01\x1a\x00\x00\x00\x00\x00\x00\x85Rq!h\x07U\n\x07\xdd\x01\x1b\x00\x00\x00\x00\x00\x00\x85Rq"h\x07U\n\x07\xdd\x01\x1c\x00\x00\x00\x00\x00\x00\x85Rq#h\x07U\n\x07\xdd\x01\x1d\x00\x00\x00\x00\x00\x00\x85Rq$h\x07U\n\x07\xdd\x01\x1e\x00\x00\x00\x00\x00\x00\x85Rq%h\x07U\n\x07\xdd\x01\x1f\x00\x00\x00\x00\x00\x00\x85Rq&h\x07U\n\x07\xdd\x02\x01\x00\x00\x00\x00\x00\x00\x85Rq\'h\x07U\n\x07\xdd\x02\x02\x00\x00\x00\x00\x00\x00\x85Rq(h\x07U\n\x07\xdd\x02\x03\x00\x00\x00\x00\x00\x00\x85Rq)h\x07U\n\x07\xdd\x02\x04\x00\x00\x00\x00\x00\x00\x85Rq*h\x07U\n\x07\xdd\x02\x05\x00\x00\x00\x00\x00\x00\x85Rq+h\x07U\n\x07\xdd\x02\x06\x00\x00\x00\x00\x00\x00\x85Rq,h\x07U\n\x07\xdd\x02\x07\x00\x00\x00\x00\x00\x00\x85Rq-h\x07U\n\x07\xdd\x02\x08\x00\x00\x00\x00\x00\x00\x85Rq.h\x07U\n\x07\xdd\x02\t\x00\x00\x00\x00\x00\x00\x85Rq/h\x07U\n\x07\xdd\x02\n\x00\x00\x00\x00\x00\x00\x85Rq0h\x07U\n\x07\xdd\x02\x0b\x00\x00\x00\x00\x00\x00\x85Rq1h\x07U\n\x07\xdd\x02\x0c\x00\x00\x00\x00\x00\x00\x85Rq2h\x07U\n\x07\xdd\x02\r\x00\x00\x00\x00\x00\x00\x85Rq3h\x07U\n\x07\xdd\x02\x0e\x00\x00\x00\x00\x00\x00\x85Rq4h\x07U\n\x07\xdd\x02\x0f\x00\x00\x00\x00\x00\x00\x85Rq5h\x07U\n\x07\xdd\x02\x10\x00\x00\x00\x00\x00\x00\x85Rq6h\x07U\n\x07\xdd\x02\x11\x00\x00\x00\x00\x00\x00\x85Rq7h\x07U\n\x07\xdd\x02\x12\x00\x00\x00\x00\x00\x00\x85Rq8h\x07U\n\x07\xdd\x02\x13\x00\x00\x00\x00\x00\x00\x85Rq9h\x07U\n\x07\xdd\x02\x14\x00\x00\x00\x00\x00\x00\x85Rq:h\x07U\n\x07\xdd\x02\x15\x00\x00\x00\x00\x00\x00\x85Rq;h\x07U\n\x07\xdd\x02\x16\x00\x00\x00\x00\x00\x00\x85Rqh\x07U\n\x07\xdd\x02\x19\x00\x00\x00\x00\x00\x00\x85Rq?h\x07U\n\x07\xdd\x02\x1a\x00\x00\x00\x00\x00\x00\x85Rq@h\x07U\n\x07\xdd\x02\x1b\x00\x00\x00\x00\x00\x00\x85RqAh\x07U\n
\x07\xdd\x02\x1c\x00\x00\x00\x00\x00\x00\x85RqBh\x07U\n\x07\xdd\x0c\x01\x00\x00\x00\x00\x00\x00\x85RqCh\x07U\n\x07\xdd\x0c\x02\x00\x00\x00\x00\x00\x00\x85RqDh\x07U\n\x07\xdd\x0c\x03\x00\x00\x00\x00\x00\x00\x85RqEh\x07U\n\x07\xdd\x0c\x04\x00\x00\x00\x00\x00\x00\x85RqFh\x07U\n\x07\xdd\x0c\x05\x00\x00\x00\x00\x00\x00\x85RqGh\x07U\n\x07\xdd\x0c\x06\x00\x00\x00\x00\x00\x00\x85RqHh\x07U\n\x07\xdd\x0c\x07\x00\x00\x00\x00\x00\x00\x85RqIh\x07U\n\x07\xdd\x0c\x08\x00\x00\x00\x00\x00\x00\x85RqJh\x07U\n\x07\xdd\x0c\t\x00\x00\x00\x00\x00\x00\x85RqKh\x07U\n\x07\xdd\x0c\n\x00\x00\x00\x00\x00\x00\x85RqLh\x07U\n\x07\xdd\x0c\x0b\x00\x00\x00\x00\x00\x00\x85RqMh\x07U\n\x07\xdd\x0c\x0c\x00\x00\x00\x00\x00\x00\x85RqNh\x07U\n\x07\xdd\x0c\r\x00\x00\x00\x00\x00\x00\x85RqOh\x07U\n\x07\xdd\x0c\x0e\x00\x00\x00\x00\x00\x00\x85RqPh\x07U\n\x07\xdd\x0c\x0f\x00\x00\x00\x00\x00\x00\x85RqQh\x07U\n\x07\xdd\x0c\x10\x00\x00\x00\x00\x00\x00\x85RqRh\x07U\n\x07\xdd\x0c\x11\x00\x00\x00\x00\x00\x00\x85RqSh\x07U\n\x07\xdd\x0c\x12\x00\x00\x00\x00\x00\x00\x85RqTh\x07U\n\x07\xdd\x0c\x13\x00\x00\x00\x00\x00\x00\x85RqUh\x07U\n\x07\xdd\x0c\x14\x00\x00\x00\x00\x00\x00\x85RqVh\x07U\n\x07\xdd\x0c\x15\x00\x00\x00\x00\x00\x00\x85RqWh\x07U\n\x07\xdd\x0c\x16\x00\x00\x00\x00\x00\x00\x85RqXh\x07U\n\x07\xdd\x0c\x17\x00\x00\x00\x00\x00\x00\x85RqYh\x07U\n\x07\xdd\x0c\x18\x00\x00\x00\x00\x00\x00\x85RqZh\x07U\n\x07\xdd\x0c\x19\x00\x00\x00\x00\x00\x00\x85Rq[h\x07U\n\x07\xdd\x0c\x1a\x00\x00\x00\x00\x00\x00\x85Rq\\h\x07U\n\x07\xdd\x0c\x1b\x00\x00\x00\x00\x00\x00\x85Rq]h\x07U\n\x07\xdd\x0c\x1c\x00\x00\x00\x00\x00\x00\x85Rq^h\x07U\n\x07\xdd\x0c\x1d\x00\x00\x00\x00\x00\x00\x85Rq_h\x07U\n\x07\xdd\x0c\x1e\x00\x00\x00\x00\x00\x00\x85Rq`h\x07U\n\x07\xdd\x0c\x1f\x00\x00\x00\x00\x00\x00\x85Rqaetb.')) + def test_get_boolean_groups_from_time_regions(self): dates = get_date_list(dt(2012,1,1),dt(2013,12,31),1) seasons = [[3,4,5],[6,7,8],[9,10,11],[12,1,2]] td = TemporalDimension(value=dates) time_regions = 
get_time_regions(seasons,dates,raise_if_incomplete=False) - + dgroups = list(iter_boolean_groups_from_time_regions(time_regions,td)) ## the last winter season is not complete as it does not have enough years self.assertEqual(len(dgroups),7) - + to_test = [] for dgroup in dgroups: sub = td[dgroup] @@ -226,16 +211,16 @@ def test_get_boolean_groups_from_time_regions(self): to_test.append([sub.extent, sub.shape[0], sub[sub.shape[0]/2].value[0]]) correct = [[(datetime.datetime(2012, 3, 1, 0, 0), datetime.datetime(2012, 5, 31, 0, 0)), 92, datetime.datetime(2012, 4, 16, 0, 0)], [(datetime.datetime(2012, 6, 1, 0, 0), datetime.datetime(2012, 8, 31, 0, 0)), 92, datetime.datetime(2012, 7, 17, 0, 0)], [(datetime.datetime(2012, 9, 1, 0, 0), datetime.datetime(2012, 11, 30, 0, 0)), 91, datetime.datetime(2012, 10, 16, 0, 0)], [(datetime.datetime(2012, 12, 1, 0, 0), datetime.datetime(2013, 2, 28, 0, 0)), 90, datetime.datetime(2013, 1, 15, 0, 0)], [(datetime.datetime(2013, 3, 1, 0, 0), datetime.datetime(2013, 5, 31, 0, 0)), 92, datetime.datetime(2013, 4, 16, 0, 0)], [(datetime.datetime(2013, 6, 1, 0, 0), datetime.datetime(2013, 8, 31, 0, 0)), 92, datetime.datetime(2013, 7, 17, 0, 0)], [(datetime.datetime(2013, 9, 1, 0, 0), datetime.datetime(2013, 11, 30, 0, 0)), 91, datetime.datetime(2013, 10, 16, 0, 0)]] self.assertEqual(to_test,correct) - + def test_seasonal_get_time_regions(self): dates = get_date_list(dt(2012,1,1),dt(2013,12,31),1) - + ## two simple seasons calc_grouping = [[6,7,8],[9,10,11]] time_regions = get_time_regions(calc_grouping,dates) correct = [[{'month': [6, 7, 8], 'year': [2012]}], [{'month': [9, 10, 11], 'year': [2012]}], [{'month': [6, 7, 8], 'year': [2013]}], [{'month': [9, 10, 11], 'year': [2013]}]] self.assertEqual(time_regions,correct) - + ## add an interannual season at the back calc_grouping = [[6,7,8],[9,10,11],[12,1,2]] with self.assertRaises(IncompleteSeasonError): @@ -243,7 +228,7 @@ def test_seasonal_get_time_regions(self): time_regions = 
get_time_regions(calc_grouping,dates,raise_if_incomplete=False) correct = [[{'month': [6, 7, 8], 'year': [2012]}], [{'month': [9, 10, 11], 'year': [2012]}], [{'month': [12], 'year': [2012]}, {'month': [2, 1], 'year': [2013]}], [{'month': [6, 7, 8], 'year': [2013]}], [{'month': [9, 10, 11], 'year': [2013]}]] self.assertEqual(time_regions,correct) - + ## put the interannual season in the middle calc_grouping = [[9,10,11],[12,1,2],[6,7,8]] with self.assertRaises(IncompleteSeasonError): @@ -251,19 +236,38 @@ def test_seasonal_get_time_regions(self): time_regions = get_time_regions(calc_grouping,dates,raise_if_incomplete=False) correct = [[{'month': [9, 10, 11], 'year': [2012]}], [{'month': [12], 'year': [2012]}, {'month': [2, 1], 'year': [2013]}], [{'month': [6, 7, 8], 'year': [2012]}], [{'month': [9, 10, 11], 'year': [2013]}], [{'month': [6, 7, 8], 'year': [2013]}]] self.assertEqual(time_regions,correct) - + ## odd seasons, but covering the whole year calc_grouping = [[1,2,3],[4,5,6],[7,8,9],[10,11,12]] time_regions = get_time_regions(calc_grouping,dates) correct = [[{'month': [1, 2, 3], 'year': [2012]}], [{'month': [4, 5, 6], 'year': [2012]}], [{'month': [7, 8, 9], 'year': [2012]}], [{'month': [10, 11, 12], 'year': [2012]}], [{'month': [1, 2, 3], 'year': [2013]}], [{'month': [4, 5, 6], 'year': [2013]}], [{'month': [7, 8, 9], 'year': [2013]}], [{'month': [10, 11, 12], 'year': [2013]}]] self.assertEqual(time_regions,correct) - + ## standard seasons calc_grouping = [[3,4,5],[6,7,8],[9,10,11],[12,1,2]] time_regions = get_time_regions(calc_grouping,dates,raise_if_incomplete=False) correct = [[{'month': [3, 4, 5], 'year': [2012]}], [{'month': [6, 7, 8], 'year': [2012]}], [{'month': [9, 10, 11], 'year': [2012]}], [{'month': [12], 'year': [2012]}, {'month': [2, 1], 'year': [2013]}], [{'month': [3, 4, 5], 'year': [2013]}], [{'month': [6, 7, 8], 'year': [2013]}], [{'month': [9, 10, 11], 'year': [2013]}]] self.assertEqual(time_regions,correct) + def 
test_time_range_subset(self): + dt1 = datetime.datetime(1950,01,01,12) + dt2 = datetime.datetime(1950,12,31,12) + dates = np.array(get_date_list(dt1,dt2,1)) + r1 = datetime.datetime(1950,01,01) + r2 = datetime.datetime(1950,12,31) + td = TemporalDimension(value=dates) + ret = td.get_between(r1,r2) + self.assertEqual(ret.value[-1],datetime.datetime(1950,12,30,12,0)) + delta = datetime.timedelta(hours=12) + lower = dates - delta + upper = dates + delta + bounds = np.empty((lower.shape[0],2),dtype=object) + bounds[:,0] = lower + bounds[:,1] = upper + td = TemporalDimension(value=dates,bounds=bounds) + ret = td.get_between(r1,r2) + self.assertEqual(ret.value[-1],datetime.datetime(1950,12,31,12,0)) + def test_get_sorted_seasons(self): calc_grouping = [[9, 10, 11], [12, 1, 2], [6, 7, 8]] methods = ['max', 'min'] From 1d48b48e8800c1f7ece34a1b52a425cc3b35cb18 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Fri, 7 Nov 2014 14:43:02 -0700 Subject: [PATCH 15/71] unique season flag returning wrong slice #337 Fixed iter_boolean_groups_from_time_regions to return the correctly sliced object. The month from a wrapped season is now correctly appended. --- .../interface/base/dimension/temporal.py | 87 ++++++++++-------- .../test_ocgis/test_api/test_operations.py | 16 ++-- .../test_base/test_dimension/test_temporal.py | 88 ++++++++++++++----- 3 files changed, 126 insertions(+), 65 deletions(-) diff --git a/src/ocgis/interface/base/dimension/temporal.py b/src/ocgis/interface/base/dimension/temporal.py index a66560b11..a2e54c651 100644 --- a/src/ocgis/interface/base/dimension/temporal.py +++ b/src/ocgis/interface/base/dimension/temporal.py @@ -33,29 +33,38 @@ def get_grouping(self,grouping): return(tgd) - def _get_grouping_seasonal_unique_(self,grouping): - ''' - Returns a :class:`TemporalGroupDimension` arguments for unique seasons. 
- ''' - ## remove the unique keyword from the list + def _get_grouping_seasonal_unique_(self, grouping): + """ + :param list grouping: A seasonal list containing the unique flag. + + >>> grouping = [[12, 1, 2], [3, 4, 5], [6, 7, 8], [9, 10, 11], 'unique'] + + :returns: A tuple of elements necessary to create a :class:`ocgis.interface.base.dimension.temporal.TemporalGroupDimension` + object. + :rtype: tuple + """ + + # remove the unique keyword from the list grouping = list(deepcopy(grouping)) grouping.remove('unique') grouping = get_sorted_seasons(grouping) - ## turn the seasons into time regions - time_regions = get_time_regions(grouping,self._get_datetime_value_(),raise_if_incomplete=False) - ## holds the boolean selection arrays + # turn the seasons into time regions + time_regions = get_time_regions(grouping, self._get_datetime_value_(), raise_if_incomplete=False) + # holds the boolean selection arrays dgroups = deque() - new_bounds = np.array([],dtype=object).reshape(-1,2) - repr_dt = np.array([],dtype=object) - ## return temporal dimensions and convert to groups - for dgroup,sub in iter_boolean_groups_from_time_regions(time_regions,self,yield_subset=True,raise_if_incomplete=False): + new_bounds = np.array([], dtype=object).reshape(-1, 2) + repr_dt = np.array([], dtype=object) + # return temporal dimensions and convert to groups + for dgroup, sub in iter_boolean_groups_from_time_regions(time_regions, self, yield_subset=True, + raise_if_incomplete=False): dgroups.append(dgroup) sub_value_datetime = sub._get_datetime_value_() - new_bounds = np.vstack((new_bounds,[min(sub_value_datetime),max(sub_value_datetime)])) - repr_dt = np.append(repr_dt,sub_value_datetime[int(sub.shape[0]/2)]) - ## no date parts yet... + new_bounds = np.vstack((new_bounds, [min(sub_value_datetime), max(sub_value_datetime)])) + repr_dt = np.append(repr_dt, sub_value_datetime[int(sub.shape[0] / 2)]) + # no date parts yet... 
date_parts = None - return(new_bounds,date_parts,repr_dt,dgroups) + + return new_bounds, date_parts, repr_dt, dgroups def _get_grouping_all_(self): ''' @@ -368,48 +377,54 @@ def __init__(self,*args,**kwds): TemporalDimension.__init__(self,*args,**kwds) -def iter_boolean_groups_from_time_regions(time_regions,temporal_dimension,yield_subset=False, +def iter_boolean_groups_from_time_regions(time_regions, temporal_dimension, yield_subset=False, raise_if_incomplete=True): - ''' + """ :param time_regions: Sequence of nested time region dictionaries. - + >>> [[{'month':[1,2],'year':[2024]},...],...] - - :param temporal_dimension: :class:`TemporalDimension` - :yields: boolean ndarray vector with yld.shape == temporal_dimension.shape - ''' + + :param temporal_dimension: A temporal dimension object. + :type temporal_dimension: :class:`ocgis.interface.base.dimension.temporal.TemporalDimension` + :param bool yield_subset: If ``True``, yield a tuple with the subset of ``temporal_dimension``. + :param bool raise_if_incomplete: If ``True``, raise an exception if the season is incomplete. + :returns: boolean ndarray vector with yld.shape == temporal_dimension.shape + :raises: IncompleteSeasonError + """ + for sub_time_regions in time_regions: - ## incomplete seasons are searched for in the nested loop. this indicates - ## if a time region group should be considered a season. + # incomplete seasons are searched for in the nested loop. this indicates if a time region group should be + # considered a season. 
is_complete = True - idx_append = np.array([],dtype=int) + idx_append = np.array([], dtype=int) for time_region in sub_time_regions: - sub,idx = temporal_dimension.get_time_region(time_region,return_indices=True) + sub, idx = temporal_dimension.get_time_region(time_region, return_indices=True) ## insert a check to ensure there are months present for each time region months = set([d.month for d in sub._get_datetime_value_()]) try: - assert(months == set(time_region['month'])) + assert (months == set(time_region['month'])) except AssertionError: if raise_if_incomplete: for m in time_region['month']: if m not in months: - raise(IncompleteSeasonError(time_region,month=m)) + raise IncompleteSeasonError(time_region, month=m) else: is_complete = False - idx_append = np.append(idx_append,idx) - ## if the season is complete append, otherwise pass to next iteration. + idx_append = np.append(idx_append, idx) + + # if the season is complete append, otherwise pass to next iteration. if is_complete: - dgroup = np.zeros(temporal_dimension.shape[0],dtype=bool) + dgroup = np.zeros(temporal_dimension.shape[0], dtype=bool) dgroup[idx_append] = True else: continue - + if yield_subset: - yld = (dgroup,sub) + yld = (dgroup, temporal_dimension[dgroup]) else: yld = dgroup - - yield(yld) + + yield yld def get_is_interannual(sequence): ''' diff --git a/src/ocgis/test/test_ocgis/test_api/test_operations.py b/src/ocgis/test/test_ocgis/test_api/test_operations.py index 109e5bd4e..422814bda 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_operations.py +++ b/src/ocgis/test/test_ocgis/test_api/test_operations.py @@ -337,16 +337,18 @@ def test_calc_grouping_seasonal_with_year(self): self.assertEqual(ret[25]['tas'].shape,(1,10,1,5,4)) def test_calc_grouping_seasonal_with_unique(self): - calc_grouping = [[12,1,2],'unique'] - calc = [{'func':'mean','name':'mean'}] + """Test calc_grouping argument using a seasonal unique flag.""" + + calc_grouping = [[12, 1, 2], 'unique'] + calc = [{'func': 
'mean', 'name': 'mean'}] rd = self.test_data.get_rd('cancm4_tas') - ops = ocgis.OcgOperations(dataset=rd,calc_grouping=calc_grouping,geom='state_boundaries', - select_ugid=[27],output_format='nc',calc=calc) + ops = ocgis.OcgOperations(dataset=rd, calc_grouping=calc_grouping, geom='state_boundaries', select_ugid=[27], + output_format='nc', calc=calc) ret = ops.execute() - rd2 = ocgis.RequestDataset(uri=ret,variable='mean') + rd2 = ocgis.RequestDataset(uri=ret, variable='mean') field = rd2.get() - self.assertNotEqual(field.temporal.bounds,None) - self.assertEqual(field.temporal.bounds_datetime.tolist(),[[datetime.datetime(2002, 1, 1, 12, 0), datetime.datetime(2002, 2, 28, 12, 0)], [datetime.datetime(2003, 1, 1, 12, 0), datetime.datetime(2003, 2, 28, 12, 0)], [datetime.datetime(2004, 1, 1, 12, 0), datetime.datetime(2004, 2, 28, 12, 0)], [datetime.datetime(2005, 1, 1, 12, 0), datetime.datetime(2005, 2, 28, 12, 0)], [datetime.datetime(2006, 1, 1, 12, 0), datetime.datetime(2006, 2, 28, 12, 0)], [datetime.datetime(2007, 1, 1, 12, 0), datetime.datetime(2007, 2, 28, 12, 0)], [datetime.datetime(2008, 1, 1, 12, 0), datetime.datetime(2008, 2, 28, 12, 0)], [datetime.datetime(2009, 1, 1, 12, 0), datetime.datetime(2009, 2, 28, 12, 0)], [datetime.datetime(2010, 1, 1, 12, 0), datetime.datetime(2010, 2, 28, 12, 0)]]) + self.assertNotEqual(field.temporal.bounds, None) + self.assertEqual(field.temporal.bounds_datetime.tolist(), [[datetime.datetime(2001, 12, 1, 12, 0), datetime.datetime(2002, 2, 28, 12, 0)], [datetime.datetime(2002, 12, 1, 12, 0), datetime.datetime(2003, 2, 28, 12, 0)], [datetime.datetime(2003, 12, 1, 12, 0), datetime.datetime(2004, 2, 28, 12, 0)], [datetime.datetime(2004, 12, 1, 12, 0), datetime.datetime(2005, 2, 28, 12, 0)], [datetime.datetime(2005, 12, 1, 12, 0), datetime.datetime(2006, 2, 28, 12, 0)], [datetime.datetime(2006, 12, 1, 12, 0), datetime.datetime(2007, 2, 28, 12, 0)], [datetime.datetime(2007, 12, 1, 12, 0), datetime.datetime(2008, 2, 28, 12, 0)], 
[datetime.datetime(2008, 12, 1, 12, 0), datetime.datetime(2009, 2, 28, 12, 0)], [datetime.datetime(2009, 12, 1, 12, 0), datetime.datetime(2010, 2, 28, 12, 0)]]) self.assertEqual(field.shape,(1, 9, 1, 3, 3)) def test_dataset(self): diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_temporal.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_temporal.py index 4263468de..f5cd105dd 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_temporal.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_temporal.py @@ -12,6 +12,27 @@ from ocgis.exc import IncompleteSeasonError +class Test(TestBase): + + def test_iter_boolean_groups_from_time_regions(self): + time_regions = [[{'month': [12], 'year': [1900]}, {'month': [2, 1], 'year': [1901]}]] + yield_subset = True + raise_if_incomplete = False + + start = datetime.datetime(1900, 1, 1) + end = datetime.datetime(1902, 12, 31) + value = self.get_time_series(start, end) + temporal_dimension = TemporalDimension(value=value) + + itr = iter_boolean_groups_from_time_regions(time_regions, temporal_dimension, yield_subset=yield_subset, + raise_if_incomplete=raise_if_incomplete) + itr = list(itr) + self.assertEqual(len(itr), 1) + for dgroup, sub in itr: + self.assertEqual(sub.value[0].year, 1900) + self.assertEqual(sub.value[0].month, 12) + + class TestTemporalDimension(TestBase): def get_temporal_dimension(self,add_bounds=True,start=None,stop=None,days=1): @@ -115,24 +136,26 @@ def test_get_grouping_season_empty_with_year_missing_month(self): def test_get_grouping_seasonal_unique_flag(self): """Test the unique flag for seasonal groups.""" - ## test with year flag - dates = get_date_list(dt(2012,1,1),dt(2013,12,31),1) + dates = get_date_list(dt(2012, 1, 1), dt(2013, 12, 31), 1) td = TemporalDimension(value=dates) - calc_grouping = [[6,7,8],'unique'] + calc_grouping = [[6, 7, 8], 'unique'] tg = td.get_grouping(calc_grouping) 
- time_region = {'year':[2012],'month':[6,7,8]} - sub1,idx1 = td.get_time_region(time_region,return_indices=True) - time_region = {'year':[2013],'month':[6,7,8]} - sub2,idx2 = td.get_time_region(time_region,return_indices=True) - base_select = np.zeros(td.shape[0],dtype=bool) + time_region = {'year': [2012], 'month': [6, 7, 8]} + sub1, idx1 = td.get_time_region(time_region, return_indices=True) + time_region = {'year': [2013], 'month': [6, 7, 8]} + sub2, idx2 = td.get_time_region(time_region, return_indices=True) + base_select = np.zeros(td.shape[0], dtype=bool) dgroups = deque() - for software,manual in itertools.izip(tg.dgroups,dgroups): - self.assertNumpyAll(software,manual) - self.assertEqual(len(tg.dgroups),2) - self.assertEqual(tg.value.tolist(),[datetime.datetime(2012, 7, 17, 0, 0), datetime.datetime(2013, 7, 17, 0, 0)]) - self.assertEqual(tg.bounds.tolist(),[[datetime.datetime(2012, 6, 1, 0, 0), datetime.datetime(2012, 8, 31, 0, 0)], [datetime.datetime(2013, 6, 1, 0, 0), datetime.datetime(2013, 8, 31, 0, 0)]]) + for software, manual in itertools.izip(tg.dgroups, dgroups): + self.assertNumpyAll(software, manual) + self.assertEqual(len(tg.dgroups), 2) + self.assertEqual(tg.value.tolist(), + [datetime.datetime(2012, 7, 17, 0, 0), datetime.datetime(2013, 7, 17, 0, 0)]) + self.assertEqual(tg.bounds.tolist(), + [[datetime.datetime(2012, 6, 1, 0, 0), datetime.datetime(2012, 8, 31, 0, 0)], + [datetime.datetime(2013, 6, 1, 0, 0), datetime.datetime(2013, 8, 31, 0, 0)]]) dgroup1 = base_select.copy() dgroup1[idx1] = True @@ -142,22 +165,43 @@ def test_get_grouping_seasonal_unique_flag(self): dgroups.append(dgroup1) dgroups.append(dgroup2) - tg = td.get_grouping([[6,7,8],'year']) + tg = td.get_grouping([[6, 7, 8], 'year']) for ii in range(len(tg.dgroups)): - self.assertNumpyAll(tg.dgroups[ii],dgroups[ii]) - self.assertEqual(len(tg.dgroups),len(dgroups)) + self.assertNumpyAll(tg.dgroups[ii], dgroups[ii]) + self.assertEqual(len(tg.dgroups), len(dgroups)) def 
test_get_grouping_seasonal_unique_flag_winter_season(self): """Test with a single winter season using the unique flag.""" - dt1 = datetime.datetime(1900,01,01) - dt2 = datetime.datetime(1902,12,31) - dates = get_date_list(dt1,dt2,days=1) + dt1 = datetime.datetime(1900, 01, 01) + dt2 = datetime.datetime(1902, 12, 31) + dates = get_date_list(dt1, dt2, days=1) td = TemporalDimension(value=dates) - group = [[12,1,2],'unique'] + group = [[12, 1, 2], 'unique'] tg = td.get_grouping(group) - self.assertEqual(tg.value.shape[0],2) - self.assertEqual(tg.bounds.tolist(),[[datetime.datetime(1901, 1, 1, 0, 0), datetime.datetime(1901, 2, 28, 0, 0)], [datetime.datetime(1902, 1, 1, 0, 0), datetime.datetime(1902, 2, 28, 0, 0)]]) + self.assertEqual(tg.value.shape[0], 2) + self.assertEqual(tg.bounds.tolist(), [[datetime.datetime(1900, 12, 1, 0, 0), datetime.datetime(1901, 2, 28, 0, 0)], [datetime.datetime(1901, 12, 1, 0, 0), datetime.datetime(1902, 2, 28, 0, 0)]]) + + def test_get_grouping_seasonal_unique_flag_all_seasons(self): + """Test unique flag with all seasons.""" + + start = datetime.datetime(1900, 1, 1) + end = datetime.datetime(1902, 12, 31) + ret = self.get_time_series(start, end) + calc_grouping = [[12, 1, 2], [3, 4, 5], [6, 7, 8], [9, 10, 11], 'unique'] + td = TemporalDimension(value=ret) + group = td.get_grouping(calc_grouping) + + for idx in range(group.shape[0]): + bounds_lower = group.bounds[idx, 0] + bounds_upper = group.bounds[idx, 1] + + sub = td[group.dgroups[idx]] + self.assertEqual(sub.value.min(), bounds_lower) + self.assertEqual(sub.value.max(), bounds_upper) + + self.assertEqual(group.value.tolist(), [datetime.datetime(1900, 4, 16, 0, 0), datetime.datetime(1900, 7, 17, 0, 0), datetime.datetime(1900, 10, 16, 0, 0), datetime.datetime(1901, 1, 15, 0, 0), datetime.datetime(1901, 4, 16, 0, 0), datetime.datetime(1901, 7, 17, 0, 0), datetime.datetime(1901, 10, 16, 0, 0), datetime.datetime(1902, 1, 15, 0, 0), datetime.datetime(1902, 4, 16, 0, 0), 
datetime.datetime(1902, 7, 17, 0, 0), datetime.datetime(1902, 10, 16, 0, 0)]) + self.assertEqual(group.bounds.tolist(), [[datetime.datetime(1900, 3, 1, 0, 0), datetime.datetime(1900, 5, 31, 0, 0)], [datetime.datetime(1900, 6, 1, 0, 0), datetime.datetime(1900, 8, 31, 0, 0)], [datetime.datetime(1900, 9, 1, 0, 0), datetime.datetime(1900, 11, 30, 0, 0)], [datetime.datetime(1900, 12, 1, 0, 0), datetime.datetime(1901, 2, 28, 0, 0)], [datetime.datetime(1901, 3, 1, 0, 0), datetime.datetime(1901, 5, 31, 0, 0)], [datetime.datetime(1901, 6, 1, 0, 0), datetime.datetime(1901, 8, 31, 0, 0)], [datetime.datetime(1901, 9, 1, 0, 0), datetime.datetime(1901, 11, 30, 0, 0)], [datetime.datetime(1901, 12, 1, 0, 0), datetime.datetime(1902, 2, 28, 0, 0)], [datetime.datetime(1902, 3, 1, 0, 0), datetime.datetime(1902, 5, 31, 0, 0)], [datetime.datetime(1902, 6, 1, 0, 0), datetime.datetime(1902, 8, 31, 0, 0)], [datetime.datetime(1902, 9, 1, 0, 0), datetime.datetime(1902, 11, 30, 0, 0)]]) def test_get_grouping_seasonal_year_flag(self): ## test with year flag From 9c9e9a8fe64bfd91c9804a847769db928a600bb6 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Fri, 7 Nov 2014 15:04:07 -0700 Subject: [PATCH 16/71] Improved documentation for 'unique' seasonal flag and multi-geometry subsetting to netCDF. --- doc/api.rst | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/doc/api.rst b/doc/api.rst index a8c0f8a29..dcf29395f 100644 --- a/doc/api.rst +++ b/doc/api.rst @@ -81,6 +81,8 @@ Value Description geom ~~~~ +.. warning:: Subsetting with multiple geometries to netCDF will result in :ref:`agg_selection` being set to ``True``. Indexing multiple geometries using netCDF-CF convention is currently not possible. + If a geometry(s) is provided, it is used to subset `every` :class:`ocgis.RequestDataset` object. Again, supplying a value of `None` (the default) results in the return of the entire spatial domain. 
Any shapefiles used for subsetting must include a unique integer attribute called `UGID` and have a WGS84 latitude/longitude geographic coordinate system. There are a number of ways to parameterize the `geom` keyword argument: @@ -162,7 +164,7 @@ calc_grouping There are three forms for this argument: -1. **Date Part Grouping**: Any combination of 'day', 'month', and 'year'. +1. **Date Part Grouping**: Any combination of ``'day'``, ``'month'``, and ``'year'``. >>> calc_grouping = ['day'] >>> calc_grouping = ['month','year'] @@ -180,23 +182,23 @@ Any temporal aggregation applied to a dataset should be consistent with the inpu Month integers map as expected (1=January, 2=February, etc.). The example below constructs a single season composed of March, April, and May. Note the nested lists. ->>> calc_grouping = [[3,4,5]] +>>> calc_grouping = [[3, 4, 5]] The next example consumes all the months in a year. ->>> calc_grouping = [[12,1,2],[3,4,5],[6,7,8],[9,10,11]] +>>> calc_grouping = [[12, 1, 2], [3, 4, 5], [6, 7, 8], [9, 10, 11]] -Unique, time sequential seasons are possible with the `'unique'` flag: +Unique, time sequential seasons are possible with the ``'unique'`` flag: ->>> calc_grouping = [[12,1,2],'unique'] +>>> calc_grouping = [[12, 1, 2], 'unique'] -For example, this returns a calculation based on values with date coordinates in: +A *unique* season has at least one value associated with each month in the season. If a month is missing, the season will be dropped. The season specification above returns a calculation based on values with date coordinates in: * Dec 1900, Jan 1901, Feb 1901 * Dec 1901, Jan 1902, Feb 1902 It is also possible to group the seasons by year. 
->>> calc_grouping = [[12,1,2],[3,4,5],[6,7,8],[9,10,11],'year'] +>>> calc_grouping = [[12, 1, 2], [3, 4, 5], [6, 7, 8], [9, 10, 11], 'year'] For example, this returns a calculation based on values with date coordinates in: * 1900: Dec, Jan, Feb @@ -259,6 +261,8 @@ Value Description `geojson` A GeoJSON representation of the data. ====================== =============================================================================================== +.. _agg_selection: + agg_selection ~~~~~~~~~~~~~ From a604a4e1a2b6f16468bfb2ba379ae9a83716f483 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Fri, 7 Nov 2014 15:26:33 -0700 Subject: [PATCH 17/71] minor --- doc/api.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/api.rst b/doc/api.rst index dcf29395f..883863ead 100644 --- a/doc/api.rst +++ b/doc/api.rst @@ -178,7 +178,7 @@ Any temporal aggregation applied to a dataset should be consistent with the inpu >>> calc_grouping = 'all' -3. **Seasonal Groups**: A sequence of integer sequences. Element sequences must be mutually exclusive (i.e. no repeated integers). Representatative times for the climatology are chosen as the center month in a sequence (i.e. January in the sequence [12,1,2]). +3. **Seasonal Groups**: A sequence of integer sequences. Element sequences must be mutually exclusive (i.e. no repeated integers). Representative times for the climatology are chosen as the center month in a sequence (i.e. January in the sequence [12,1,2]). Month integers map as expected (1=January, 2=February, etc.). The example below constructs a single season composed of March, April, and May. Note the nested lists. From 2913f3b162d60b5310d0263f30b5ac6fc1f16b23 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Wed, 12 Nov 2014 17:08:43 -0700 Subject: [PATCH 18/71] 1. Added nose_runner.py containing a nose plugin for creating a simple log of test results. Intended to get results connected into OCGIS results. 2. 
Added new fabric task run_nesii_aws_tests to execute test suite on remote AWS server. 3. Changes to configuration template. --- fabfile/fabfile.py | 123 ++++++++++++++++++++++++++++++++++++++++- fabfile/nose_runner.py | 73 ++++++++++++++++++++++++ ocgis.conf.TEMPLATE | 67 ++++++++++++++++------ 3 files changed, 242 insertions(+), 21 deletions(-) create mode 100644 fabfile/nose_runner.py diff --git a/fabfile/fabfile.py b/fabfile/fabfile.py index 06a7e0f5c..1e2a954ee 100644 --- a/fabfile/fabfile.py +++ b/fabfile/fabfile.py @@ -1,10 +1,13 @@ from ConfigParser import SafeConfigParser +import datetime from fabric.contrib.project import rsync_project from fabric.state import env from fabric.decorators import task -from fabric.operations import sudo, run, put -from fabric.context_managers import cd +from fabric.operations import sudo, run, put, get +from fabric.context_managers import cd, shell_env, settings import os +import time +from fabric.tasks import Task from helpers import set_rwx_permissions, set_rx_permisions, fcmd, parser import packages @@ -53,7 +56,15 @@ def ebs_mkfs(): def ebs_mount(): """Mount an EBS volume.""" - cmd = ['mount', parser.get('aws', 'ebs_mount_name'), parser.get('server', 'dir_data')] + cmd = ['mount', parser.get('aws-testing', 'ebs_mount_name'), parser.get('server', 'dir_data')] + fcmd(sudo, cmd) + + +@task +def ebs_umount(): + """Unmount an EBS volume.""" + + cmd = ['umount', parser.get('server', 'dir_data')] fcmd(sudo, cmd) @@ -125,6 +136,112 @@ def run_tests(target='all', branch='next', failed='false'): fcmd(run, cmd) +class RunNesiiAwsTests(Task): + """ + Run tests on remote server and return the path to a local log file of tests results. + """ + name = 'run_nesii_aws_tests' + + def run(self, path_local_log, branch='next', sched='true'): + """ + :param str path_local_log: Path to the local log file copied from the remote server. + :param str branch: Target git branch to test. + :param str sched: If ``'false'``, run tests only once. 
Otherwise, run tests at 23:00 hours daily. + """ + + import schedule + from logbook import Logger + + self.log = Logger('nesii-testing') + + self.path_local_log = path_local_log + self.branch = branch + + if sched == 'true': + self.log.info('begin continous loop') + schedule.every().day.at("6:00").do(self._run_tests_, should_email=True) + while True: + schedule.run_pending() + time.sleep(1) + else: + self.log.info('running tests once') + self._run_tests_(should_email=True) + + def _run_tests_(self, should_email=False): + aws_src = os.getenv('OCGIS_SIMPLEAWS_SRC') + aws_conf = os.getenv('OCGIS_CONF_PATH') + aws_testing_section = 'aws-testing' + + ebs_volumesize = int(parser.get(aws_testing_section, 'ebs_volumesize')) + ebs_snapshot = parser.get(aws_testing_section, 'ebs_snapshot') + ebs_mount_name = parser.get(aws_testing_section, 'ebs_mount_name') + ebs_placement = parser.get(aws_testing_section, 'ebs_placement') + test_results_path = parser.get(aws_testing_section, 'test_results_path') + test_instance_name = parser.get(aws_testing_section, 'test_instance_name') + test_instance_type = parser.get(aws_testing_section, 'test_instance_type') + test_image_id = parser.get(aws_testing_section, 'test_image_id') + dest_email = parser.get(aws_testing_section, 'dest_email') + dir_clone = parser.get('server', 'dir_clone') + + import sys + sys.path.append(aws_src) + import aws + + am = aws.AwsManager(aws_conf) + + self.log.info('launching instance') + instance = am.launch_new_instance(test_instance_name, image_id=test_image_id, instance_type=test_instance_type, + placement=ebs_placement) + + with settings(host_string=instance.ip_address, disable_known_hosts=True, connection_attempts=10): + try: + self.log.info('creating volume') + volume = am.conn.create_volume(ebs_volumesize, ebs_placement, snapshot=ebs_snapshot) + am.wait_for_status(volume, 'available') + try: + self.log.info('attaching volume') + am.conn.attach_volume(volume.id, instance.id, ebs_mount_name, dry_run=False) + 
am.wait_for_status(volume, 'in-use') + + ebs_mount() + + path = os.path.join(dir_clone, parser.get('git', 'name')) + test_target = os.path.join(path, 'src', 'ocgis', 'test') + # test_target = os.path.join(path, 'src', 'ocgis', 'test', 'test_simple') + nose_runner = os.path.join(path, 'fabfile', 'nose_runner.py') + path_src = os.path.join(path, 'src') + with cd(path): + fcmd(run, ['git', 'pull']) + fcmd(run, ['git', 'checkout', self.branch]) + fcmd(run, ['git', 'pull']) + with cd(path_src): + with shell_env(OCGIS_TEST_TARGET=test_target): + fcmd(run, ['python', nose_runner]) + get(test_results_path, local_path=self.path_local_log) + + ebs_umount() + + finally: + self.log.info('detaching volume') + volume.detach() + am.wait_for_status(volume, 'available') + self.log.info('deleting volume') + volume.delete() + finally: + self.log.info('terminating instance') + instance.terminate() + with open(self.path_local_log, 'r') as f: + content = f.read() + + if should_email: + self.log.info('sending email') + am.send_email(dest_email, dest_email, 'OCGIS_AWS', content) + + self.log.info('success') + +r = RunNesiiAwsTests() + + @task def install_dependencies(): # packages.NumpyInstaller('1.8.2').execute() diff --git a/fabfile/nose_runner.py b/fabfile/nose_runner.py new file mode 100644 index 000000000..26b8b09b8 --- /dev/null +++ b/fabfile/nose_runner.py @@ -0,0 +1,73 @@ +import datetime +from email.mime.text import MIMEText +import os +import smtplib +import tempfile +import nose +from nose.plugins.plugintest import run + +PATH_LOG = '/tmp/foo.txt' + +class SimpleStream(object): + + def __init__(self, path): + self.path = path + self.write('', mode='w') + self.writeln('OpenClimateGIS Test Results') + self.writeln('Started {0} UTC'.format(datetime.datetime.utcnow())) + self.writeln() + + def flush(self): + pass + + def write(self, msg, mode='a'): + print msg + with open(self.path, mode) as f: + f.write(msg) + + def writeln(self, msg=None, mode='a'): + if msg is None: + msg = '\n' 
+ else: + msg = '{0}\n'.format(msg) + print msg + self.write(msg, mode=mode) + + +class NESIITestRunner(nose.plugins.Plugin): + name = 'nesii-remote-tests' + _days = {0: 'Sunday', 1: 'Monday', 2: 'Tuesday', 3: 'Wednesday', 4: 'Thursday', 5: 'Friday', 6: 'Saturday'} + + def __init__(self, *args, **kwargs): + self._path_log = kwargs.pop('path_log') + super(NESIITestRunner, self).__init__(*args, **kwargs) + self._ss = None + + def finalize(self, result): + # skipped = len(result.skipped) + errors = len(result.errors) + failures = len(result.failures) + # total = result.testsRun + + self._ss.writeln() + total_bad = errors + failures + self._ss.writeln('Test_Failures:{0}'.format(total_bad)) + self._ss.writeln('Day_of_Week:{0}'.format(self._days[datetime.datetime.now().weekday()])) + + if total_bad > 0: + color = 'yellow' + elif total_bad == 0: + color = 'green' + else: + raise NotImplementedError(total_bad) + + self._ss.writeln('Test_results:{0}'.format(color)) + + def setOutputStream(self, stream): + self._ss = SimpleStream(self._path_log) + return self._ss + + +if __name__ == '__main__': + nose.main(addplugins=[NESIITestRunner(path_log='/tmp/nesii_test_results.log')], + argv=[__file__, '-vs', os.getenv('OCGIS_TEST_TARGET'), '--with-nesii-remote-tests']) \ No newline at end of file diff --git a/ocgis.conf.TEMPLATE b/ocgis.conf.TEMPLATE index 743b2779c..521e66e54 100644 --- a/ocgis.conf.TEMPLATE +++ b/ocgis.conf.TEMPLATE @@ -1,28 +1,59 @@ [fabric] -hosts = -key_filename = -user = +# host string +hosts = 51.289.53.004 +# full path to ssh key file +key_filename = ~/.ssh/keyfile.pem +# remote username +user = ubuntu [server] -venv_name = -dir_clone = -dir_src = -dir_data = +# name of target virtual environment +venv_name = ocgis +# path to git clone directory +dir_clone = /home/ubuntu/git +# path prefix to source code directory +dir_src = /home/ubuntu/src +# path to mount point for data volume +dir_data = /home/ubuntu/data +# path prefix for software installation 
+dir_install = /usr/local +# absolute path to home directory +dir_home = /home/ubuntu +# number of processors to use for make command j = 2 [git] +# github clone url url = https://github.com/NCPP/ocgis.git +# default branch for cloning branch = master +# name of the github repository name = ocgis -[aws] -instance_name = -instance_id = -aws_access_key_id = -aws_secret_access_key = -key_name = -region = -image_id = -security_group = -instance_type = -ebs_mount_name = \ No newline at end of file +# see simple-aws configuration file for details +[simple-aws] +aws_access_key_id = +aws_secret_access_key = +key_name = keyfile +region = us-west-2 +security_group = ocgis + +[aws-testing] +# mount name of the ebs volume +ebs_mount_name = /dev/xvdg +# snapshot identifier for the test data volume +ebs_snapshot = snap-ba020831 +# size of the volume in gigabytes +ebs_volumesize = 100 +# the placement / availability zone of the ebs test volume +ebs_placement = us-west-2a +# path to where the log file is written on the remote server +test_results_path = /tmp/nesii_test_results.log +# name of the instance to create when testing +test_instance_name = ocgis-nesii-test +# size of the instance to create when testing +test_instance_type = t2.medium +# image identifier for the test instance +test_image_id = ami-e7460ed7 +# destination email address for mailing results +dest_email = foo@mail.com \ No newline at end of file From 07c43a446007c1d2e3dcc451f0e910319674d1b4 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Thu, 20 Nov 2014 11:56:16 -0700 Subject: [PATCH 19/71] aggregate not working with rotated pole and no subset geometry #339 Fixed issue with aggregate=True not triggering the forward transformation from the rotated pole coordinate system. This was only an issue when geom=None. 
    def _validate_(self):
        """Validate the operations state as a whole, raising
        :class:`DefinitionValidationError` (via ``_raise_``) for unsupported
        parameter combinations: netCDF output constraints, coordinate system
        mismatches, rotated-pole restrictions, snippet limitations, slicing
        with geometries, file-only rules, and per-calculation validation.
        """
        ocgis_lh(logger='operations', msg='validating operations')

        def _raise_(msg, obj=OutputFormat):
            # Log and raise a validation error attributed to parameter `obj`.
            e = DefinitionValidationError(obj, msg)
            ocgis_lh(exc=e, logger='operations')

        # no regridding with a spatial operation of clip
        if self.regrid_destination is not None:
            if self.spatial_operation == 'clip':
                msg = 'Regridding not allowed with spatial "clip" operation.'
                raise DefinitionValidationError(SpatialOperation, msg)

        # there are a bunch of constraints on the netCDF format
        if self.output_format == 'nc':
            # we can only write one requestdataset to netCDF
            if len(self.dataset) > 1 and self.calc is None:
                msg = ('Data packages (i.e. more than one RequestDataset) may not be written to netCDF. '
                       'There are currently {dcount} RequestDatasets. Note, this is different than a '
                       'multifile dataset.'.format(dcount=len(self.dataset)))
                _raise_(msg, OutputFormat)
            # we can write multivariate functions to netCDF however
            else:
                if self.calc is not None and len(self.dataset) > 1:
                    # count the occurrences of these classes in the calculation list.
                    klasses_to_check = [AbstractMultivariateFunction, MultivariateEvalFunction]
                    multivariate_checks = []
                    for klass in klasses_to_check:
                        for calc in self.calc:
                            multivariate_checks.append(issubclass(calc['ref'], klass))
                    if sum(multivariate_checks) != 1:
                        msg = ('Data packages (i.e. more than one RequestDataset) may not be written to netCDF. '
                               'There are currently {dcount} RequestDatasets. Note, this is different than a '
                               'multifile dataset.'.format(dcount=len(self.dataset)))
                        _raise_(msg, OutputFormat)
                    else:
                        # there is a multivariate calculation and this requires multiple request datasets
                        pass

            # clipped data which creates an arbitrary geometry may not be written to netCDF
            if self.spatial_operation != 'intersects':
                msg = 'Only "intersects" spatial operation allowed for netCDF output. Arbitrary geometries may not currently be written.'
                _raise_(msg, OutputFormat)
            # data may not be aggregated either
            if self.aggregate:
                msg = 'Data may not be aggregated for netCDF output. The aggregate parameter must be False.'
                _raise_(msg, OutputFormat)
            # either the input data CRS or WGS84 is required for data output
            if self.output_crs is not None and not isinstance(self.output_crs, CFWGS84):
                msg = 'CFWGS84 is the only acceptable overloaded output CRS at this time for netCDF output.'
                _raise_(msg, OutputFormat)
            # calculations on raw values are not relevant as no aggregation can occur anyway.
            if self.calc is not None:
                if self.calc_raw:
                    msg = 'Calculations must be performed on original values (i.e. calc_raw=False) for netCDF output.'
                    _raise_(msg)
                # no keyed output functions to netCDF
                if OcgCalculationEngine._check_calculation_members_(self.calc, AbstractKeyedOutputFunction):
                    msg = 'Keyed function output may not be written to netCDF.'
                    _raise_(msg)

        # collect projections for the dataset sets. None is returned if one is not parsable. the WGS84 default is
        # actually done in the RequestDataset object.
        projections = []
        for rd in self.dataset.itervalues():
            if not any([_ == rd.crs for _ in projections]):
                projections.append(rd.crs)

        # if there is no output CRS and projections differ, raise an exception. however, it is okay to have data with
        # different projections in the numpy output.
        if len(projections) > 1 and self.output_format != 'numpy':  # @UndefinedVariable
            if self.output_crs is None:
                _raise_('Dataset coordinate reference systems must be equivalent if no output CRS is chosen.',
                        obj=OutputCRS)

        # clip and/or aggregation operations may not be written back to CFRotatedPole at this time. hence, the output
        # crs must be set to CFWGS84.
        if CFRotatedPole in map(type, projections):
            if self.output_crs is not None and not isinstance(self.output_crs, WGS84):
                msg = ('{0} data may only be written to the same coordinate system (i.e. "output_crs=None") '
                       'or {1}.').format(CFRotatedPole.__name__, CFWGS84.__name__)
                _raise_(msg, obj=OutputCRS)
            if self.aggregate or self.spatial_operation == 'clip':
                msg = (
                    '{0} data if clipped or spatially averaged must be written to ' '{1}. The "output_crs" is being updated to {2}.').format(
                    CFRotatedPole.__name__, CFWGS84.__name__, CFWGS84.__name__)
                ocgis_lh(level=logging.WARN, msg=msg, logger='operations')
                # silently coerce the output CRS rather than raising
                self._get_object_('output_crs')._value = CFWGS84()

        # only WGS84 may be written to GeoJSON
        if self.output_format == 'geojson':
            if any([element != WGS84() for element in projections if element is not None]):
                _raise_('Only data with a WGS84 projection may be written to GeoJSON.')
            if self.output_crs is not None:
                if self.output_crs != WGS84():
                    _raise_('Only data with a WGS84 projection may be written to GeoJSON.')

        # snippet only relevant for subsetting not operations with a calculation or time region
        if self.snippet:
            if self.calc is not None:
                _raise_(
                    'Snippets are not implemented for calculations. Apply a limiting time range for faster responses.',
                    obj=Snippet)
            for rd in self.dataset.itervalues():
                if rd.time_region is not None:
                    _raise_('Snippets are not implemented for time regions.', obj=Snippet)

        # no slicing with a geometry - can easily lead to extent errors
        if self.slice is not None:
            assert (self.geom is None)

        # file only operations only valid for netCDF and calculations.
        if self.file_only:
            if self.output_format != 'nc':
                _raise_('Only netCDF may be written with file_only as True.', obj=FileOnly)
            if self.calc is None:
                _raise_('File only outputs are only relevant for computations.', obj=FileOnly)

        # validate any calculations against the operations object. if the calculation is a string eval function do not
        # validate.
        if self.calc is not None:
            if self._get_object_('calc')._is_eval_function:
                if self.calc_grouping is not None:
                    msg = 'Calculation groups are not applicable for string function expressions.'
                    _raise_(msg, obj=CalcGrouping)
            else:
                for c in self.calc:
                    c['ref'].validate(self)
original_rotated_pole_crs = copy(field.spatial.crs) field.spatial.update_crs(CFWGS84()) @@ -611,16 +610,15 @@ def _process_geometries_(self, itr, field, headers, value_keys, alias): except ValueError: # attempt without buffering the subset geometry for the target field. sfield.spatial = original_sfield_sdim - sfield = self._get_regridded_field_with_subset_( - sfield, - subset_sdim_for_regridding=subset_sdim_for_regridding, - with_buffer=False) + sfield = self._get_regridded_field_with_subset_(sfield, + subset_sdim_for_regridding=subset_sdim_for_regridding, + with_buffer=False) - ## if empty returns are allowed, there be an empty field + # if empty returns are allowed, there be an empty field if sfield is not None: ## aggregate if requested if self.ops.aggregate: - ocgis_lh('executing spatial average',self._subset_log,alias=alias,ugid=subset_ugid) + ocgis_lh('executing spatial average', self._subset_log, alias=alias, ugid=subset_ugid) sfield = sfield.get_spatially_aggregated(new_spatial_uid=subset_ugid) # wrap the returned data. 
@@ -639,7 +637,7 @@ def _process_geometries_(self, itr, field, headers, value_keys, alias): if env.OPTIMIZE_FOR_CALC is False and self.ops.file_only is False: self._check_masking_(alias, sfield, subset_ugid) - ## transform back to rotated pole if necessary + # transform back to rotated pole if necessary if original_rotated_pole_crs is not None: if not isinstance(self.ops.output_crs, CFWGS84): sfield.spatial.update_crs(original_rotated_pole_crs) diff --git a/src/ocgis/test/test_ocgis/test_api/test_operations.py b/src/ocgis/test/test_ocgis/test_api/test_operations.py index 422814bda..46bc95120 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_operations.py +++ b/src/ocgis/test/test_ocgis/test_api/test_operations.py @@ -1,25 +1,26 @@ -import unittest +import csv +from datetime import datetime as dt +import itertools +import datetime + +from numpy import dtype +import numpy as np + from ocgis.api.parms.definition import RegridOptions +from ocgis.interface.base.crs import CFWGS84 from ocgis.test.base import TestBase from ocgis.exc import DefinitionValidationError, DimensionNotFound, RequestValidationError from ocgis.api.parms import definition from ocgis import env, constants -import os -from datetime import datetime as dt from ocgis.api.operations import OcgOperations -from ocgis.test.test_simple.test_simple import nc_scope from ocgis.util.helpers import make_poly -import itertools import ocgis from ocgis.api.request.base import RequestDataset, RequestDatasetCollection from ocgis.util.shp_cabinet import ShpCabinetIterator -import datetime -from numpy import dtype -import numpy as np class TestOcgOperations(TestBase): - + def setUp(self): TestBase.setUp(self) @@ -41,116 +42,13 @@ def test_init(self): self.assertEqual(ops.regrid_destination, None) self.assertDictEqual(ops.regrid_options, RegridOptions.default) - def test_regrid_destination(self): - """Test regridding not allowed with clip operation.""" - - rd = self.test_data.get_rd('cancm4_tas') - with 
self.assertRaises(DefinitionValidationError): - OcgOperations(dataset=rd, regrid_destination=rd, spatial_operation='clip') - - def test_conform_units_to(self): - rd1 = self.test_data.get_rd('cancm4_tas') - rd2 = self.test_data.get_rd('cancm4_tas') - rd2.alias = 'foo' - ops = OcgOperations(dataset=[rd1, rd2], conform_units_to='celsius') - for ds in ops.dataset.itervalues(): - self.assertEqual(ds.conform_units_to, 'celsius') - - ## test that the conform argument is updated - ops.conform_units_to = 'fahrenheit' - for ds in ops.dataset.itervalues(): - self.assertEqual(ds.conform_units_to, 'fahrenheit') - - def test_conform_units_to_bad_units(self): - rd = self.test_data.get_rd('cancm4_tas') - with self.assertRaises(RequestValidationError): - OcgOperations(dataset=rd, conform_units_to='crap') - - def test_no_calc_grouping_with_string_expression(self): - calc = 'es=tas*3' - calc_grouping = ['month'] - rd = self.test_data.get_rd('cancm4_tas') - with self.assertRaises(DefinitionValidationError): - OcgOperations(dataset=rd,calc=calc,calc_grouping=calc_grouping) - - def test_time_range(self): - rd = self.test_data.get_rd('cancm4_tas') - rd2 = self.test_data.get_rd('cancm4_tas') - rd.alias = 'foo' - tr = [datetime.datetime(2002,1,1),datetime.datetime(2002,3,1)] - ops = ocgis.OcgOperations(dataset=[rd,rd2],time_range=tr) - for r in [rd,rd2]: - self.assertEqual(r.time_range,None) - for r in ops.dataset.itervalues(): - self.assertEqual(r.time_range,tuple(tr)) - - tr = [datetime.datetime(2002,1,1),datetime.datetime(2003,3,1)] - ops.time_range = tr - for r in ops.dataset.itervalues(): - self.assertEqual(r.time_range,tuple(tr)) - - def test_time_region(self): - rd = self.test_data.get_rd('cancm4_tas') - rd2 = self.test_data.get_rd('cancm4_tas') - rd.alias = 'foo' - tr = {'month':[6],'year':[2005]} - ops = ocgis.OcgOperations(dataset=[rd,rd2],time_region=tr) - for r in [rd,rd2]: - self.assertEqual(r.time_region,None) - for r in ops.dataset.itervalues(): - 
self.assertEqual(r.time_region,tr) - - tr = {'month':[6],'year':[2006]} - ops.time_region = tr - for r in ops.dataset.itervalues(): - self.assertEqual(r.time_region,tr) - - def test_level_range(self): + def test_str(self): rd = self.test_data.get_rd('cancm4_tas') - rd2 = self.test_data.get_rd('cancm4_tas') - rd.alias = 'foo' - lr = [1,2] - ops = ocgis.OcgOperations(dataset=[rd,rd2],level_range=lr) - for r in [rd,rd2]: - self.assertEqual(r.level_range,None) - for r in ops.dataset.itervalues(): - self.assertEqual(r.level_range,tuple(lr)) - - lr = [2,3] - ops.level_range = lr - for r in ops.dataset.itervalues(): - self.assertEqual(r.level_range,tuple(lr)) + ops = OcgOperations(dataset=rd) + ret = str(ops) + self.assertTrue(str(ret).startswith('OcgOperations')) + self.assertGreater(len(ret), 1000) - def test_nc_package_validation_raised_first(self): - rd = self.test_data.get_rd('cancm4_tas') - rd2 = self.test_data.get_rd('rotated_pole_ichec',kwds={'alias':'tas2'}) - try: - ocgis.OcgOperations(dataset=[rd,rd2],output_format='nc') - except DefinitionValidationError as e: - self.assertIn('Data packages (i.e. 
more than one RequestDataset) may not be written to netCDF.', - e.message) - pass - - def test_with_callback(self): - - app = [] - def callback(perc,msg,app=app): - app.append((perc,msg)) -# print(perc,msg) - - rd = self.test_data.get_rd('cancm4_tas') - rd2 = self.test_data.get_rd('cancm4_tasmax_2011') - dataset = [rd,rd2] - for ds in dataset: - ds.time_region = {'month':[6]} - ops = ocgis.OcgOperations(dataset=dataset,geom='state_boundaries',select_ugid=[16,17], - calc_grouping=['month'],calc=[{'func':'mean','name':'mean'},{'func':'median','name':'median'}], - callback=callback) - ops.execute() - - self.assertTrue(len(app) > 15) - self.assertEqual(app[-1][0],100.0) - def test_get_base_request_size(self): rd = self.test_data.get_rd('cancm4_tas') ops = OcgOperations(dataset=rd) @@ -160,12 +58,6 @@ def test_get_base_request_size(self): with self.assertRaises(DefinitionValidationError): OcgOperations(dataset=rd, regrid_destination=rd).get_base_request_size() - def test_get_base_request_size_with_geom(self): - rd = self.test_data.get_rd('cancm4_tas') - ops = OcgOperations(dataset=rd,geom='state_boundaries',select_ugid=[23]) - size = ops.get_base_request_size() - self.assertEqual(size,{'variables': {'tas': {'level': {'kb': 0.0, 'shape': None, 'dtype': None}, 'temporal': {'kb': 28.515625, 'shape': (3650,), 'dtype': dtype('float64')}, 'value': {'kb': 171.09375, 'shape': (1, 3650, 1, 4, 3), 'dtype': dtype('float32')}, 'realization': {'kb': 0.0, 'shape': None, 'dtype': None}, 'col': {'kb': 0.0234375, 'shape': (3,), 'dtype': dtype('float64')}, 'row': {'kb': 0.03125, 'shape': (4,), 'dtype': dtype('float64')}}}, 'total': 199.6640625}) - def test_get_base_request_size_multifile(self): rd1 = self.test_data.get_rd('cancm4_tas') rd2 = self.test_data.get_rd('narccap_pr_wrfg_ncep') @@ -173,7 +65,7 @@ def test_get_base_request_size_multifile(self): ops = OcgOperations(dataset=rds) size = ops.get_base_request_size() self.assertEqual({'variables': {'pr': {'level': {'kb': 0.0, 'shape': 
None, 'dtype': None}, 'temporal': {'kb': 228.25, 'shape': (29216,), 'dtype': dtype('float64')}, 'value': {'kb': 1666909.75, 'shape': (1, 29216, 1, 109, 134), 'dtype': dtype('float32')}, 'realization': {'kb': 0.0, 'shape': None, 'dtype': None}, 'col': {'kb': 1.046875, 'shape': (134,), 'dtype': dtype('float64')}, 'row': {'kb': 0.8515625, 'shape': (109,), 'dtype': dtype('float64')}}, 'tas': {'level': {'kb': 0.0, 'shape': None, 'dtype': None}, 'temporal': {'kb': 28.515625, 'shape': (3650,), 'dtype': dtype('float64')}, 'value': {'kb': 116800.0, 'shape': (1, 3650, 1, 64, 128), 'dtype': dtype('float32')}, 'realization': {'kb': 0.0, 'shape': None, 'dtype': None}, 'col': {'kb': 1.0, 'shape': (128,), 'dtype': dtype('float64')}, 'row': {'kb': 0.5, 'shape': (64,), 'dtype': dtype('float64')}}}, 'total': 1783969.9140625},size) - + def test_get_base_request_size_multifile_with_geom(self): rd1 = self.test_data.get_rd('cancm4_tas') rd2 = self.test_data.get_rd('narccap_pr_wrfg_ncep') @@ -194,7 +86,7 @@ def test_get_base_request_size_test_data(self): ops = OcgOperations(dataset=rd) ret = ops.get_base_request_size() self.assertTrue(ret['total'] > 1) - + def test_get_base_request_size_with_calculation(self): rd = self.test_data.get_rd('cancm4_tas') ops = OcgOperations(dataset=rd,calc=[{'func':'mean','name':'mean'}], @@ -202,103 +94,69 @@ def test_get_base_request_size_with_calculation(self): size = ops.get_base_request_size() self.assertEqual(size['variables']['tas']['temporal']['shape'][0],3650) - def test_str(self): + def test_get_base_request_size_with_geom(self): rd = self.test_data.get_rd('cancm4_tas') - ops = OcgOperations(dataset=rd) - ret = str(ops) - self.assertTrue(str(ret).startswith('OcgOperations')) - self.assertGreater(len(ret), 1000) + ops = OcgOperations(dataset=rd,geom='state_boundaries',select_ugid=[23]) + size = ops.get_base_request_size() + self.assertEqual(size,{'variables': {'tas': {'level': {'kb': 0.0, 'shape': None, 'dtype': None}, 'temporal': {'kb': 28.515625, 
'shape': (3650,), 'dtype': dtype('float64')}, 'value': {'kb': 171.09375, 'shape': (1, 3650, 1, 4, 3), 'dtype': dtype('float32')}, 'realization': {'kb': 0.0, 'shape': None, 'dtype': None}, 'col': {'kb': 0.0234375, 'shape': (3,), 'dtype': dtype('float64')}, 'row': {'kb': 0.03125, 'shape': (4,), 'dtype': dtype('float64')}}}, 'total': 199.6640625}) def test_get_meta(self): ops = OcgOperations(dataset=self.datasets) meta = ops.get_meta() self.assertTrue(len(meta) > 100) self.assertTrue('\n' in meta) - + ops = OcgOperations(dataset=self.datasets,calc=[{'func':'mean','name':'my_mean'}], calc_grouping=['month']) meta = ops.get_meta() self.assertTrue(len(meta) > 100) self.assertTrue('\n' in meta) - def test_null_parms(self): - ops = OcgOperations(dataset=self.datasets_no_range) - self.assertEqual(ops.geom,None) - self.assertEqual(len(ops.dataset),3) - for ds in ops.dataset.itervalues(): - self.assertEqual(ds.time_range,None) - self.assertEqual(ds.level_range,None) - ops.__repr__() - - def test_aggregate(self): - A = definition.Aggregate - - a = A(True) - self.assertEqual(a.value,True) - - a = A(False) - self.assertEqual(a.value,False) - - a = A('True') - self.assertEqual(a.value,True) - - def test_geom_string(self): - ops = OcgOperations(dataset=self.datasets,geom='state_boundaries') - self.assertEqual(len(list(ops.geom)),51) - ops.geom = None - self.assertEqual(ops.geom,None) - ops.geom = 'mi_watersheds' - self.assertEqual(len(list(ops.geom)),60) - ops.geom = [-120,40,-110,50] - self.assertEqual(ops.geom[0].single.geom.bounds,(-120.0,40.0,-110.0,50.0)) - - def test_geom(self): - geom = make_poly((37.762,38.222),(-102.281,-101.754)) - g = definition.Geom(geom) - self.assertEqual(type(g.value),tuple) - self.assertEqual(g.value[0].single.geom.bounds,(-102.281, 37.762, -101.754, 38.222)) - - g = definition.Geom(None) - self.assertEqual(g.value,None) - self.assertEqual(str(g),'geom=None') - - g = definition.Geom('mi_watersheds') - self.assertEqual(str(g),'geom="mi_watersheds"') 
- - geoms = ShpCabinetIterator('mi_watersheds') - g = definition.Geom(geoms) - self.assertEqual(len(list(g.value)),60) - self.assertEqual(g._shp_key,'mi_watersheds') - - def test_geom_having_changed_select_ugid(self): - ops = OcgOperations(dataset=self.test_data.get_rd('cancm4_tas'), - geom='state_boundaries') - self.assertEqual(len(list(ops.geom)),51) - ops.select_ugid = [16,17] - self.assertEqual(len(list(ops.geom)),2) - - def test_headers(self): - headers = ['did','value'] - for htype in [list,tuple]: - hvalue = htype(headers) - hh = definition.Headers(hvalue) - self.assertEqual(hh.value,tuple(constants.required_headers+['value'])) - - headers = ['foo'] + def test_keyword_abstraction(self): + K = definition.Abstraction + + k = K() + self.assertEqual(k.value,None) + self.assertEqual(str(k),'abstraction="None"') + + k = K('point') + self.assertEqual(k.value,'point') + with self.assertRaises(DefinitionValidationError): - hh = definition.Headers(headers) - - headers = [] - hh = definition.Headers(headers) - self.assertEqual(hh.value,tuple(constants.required_headers)) - - def test_calc_grouping_none_date_parts(self): + K('pt') + + def test_keyword_aggregate(self): + rd = self.test_data.get_rd('rotated_pole_cnrm_cerfacs') + + ofield = rd.get()[:, 0:10, :, 0:10, 0:10] + ovalue = ofield.variables['pr'].value + manual_mean = ovalue[0, 4, 0, :, :].mean() + + slc = [None, [0, 10], None, [0, 10], [0, 10]] + for output_format in ['numpy', 'csv']: + ops = OcgOperations(dataset=rd, output_format=output_format, aggregate=True, slice=slc) + # spatial operations on rotated pole require the output crs be wgs84 + self.assertEqual(ops.output_crs, CFWGS84()) + ret = ops.execute() + if output_format == 'numpy': + field = ret[1]['pr'] + self.assertEqual(field.shape, (1, 10, 1, 1, 1)) + value = ret.gvu(1, 'pr') + self.assertAlmostEqual(manual_mean, value[0, 4, 0, 0, 0]) + else: + with open(ret, 'r') as f: + reader = csv.DictReader(f) + rows = list(reader) + self.assertEqual(len(rows), 
10) + self.assertAlmostEqual(float(rows[4]['VALUE']), manual_mean) + + def test_keyword_calc_grouping_none_date_parts(self): _cg = [ - None, - ['day','month'], - 'day' - ] - + None, + ['day','month'], + 'day' + ] + for cg in _cg: if cg is not None: eq = tuple(cg) @@ -309,7 +167,7 @@ def test_calc_grouping_none_date_parts(self): self.assertEqual(obj.value,eq) except AssertionError: self.assertEqual(obj.value,('day',)) - + ## only month, year, and day combinations are currently supported rd = self.test_data.get_rd('cancm4_tas') calcs = [None,[{'func':'mean','name':'mean'}]] @@ -326,17 +184,8 @@ def test_calc_grouping_none_date_parts(self): reraise = False if reraise: raise - - def test_calc_grouping_seasonal_with_year(self): - calc_grouping = [[1,2,3],'year'] - calc = [{'func':'mean','name':'mean'}] - rd = self.test_data.get_rd('cancm4_tas') - ops = OcgOperations(dataset=rd,calc=calc,calc_grouping=calc_grouping, - geom='state_boundaries',select_ugid=[25]) - ret = ops.execute() - self.assertEqual(ret[25]['tas'].shape,(1,10,1,5,4)) - - def test_calc_grouping_seasonal_with_unique(self): + + def test_keyword_calc_grouping_seasonal_with_unique(self): """Test calc_grouping argument using a seasonal unique flag.""" calc_grouping = [[12, 1, 2], 'unique'] @@ -350,44 +199,161 @@ def test_calc_grouping_seasonal_with_unique(self): self.assertNotEqual(field.temporal.bounds, None) self.assertEqual(field.temporal.bounds_datetime.tolist(), [[datetime.datetime(2001, 12, 1, 12, 0), datetime.datetime(2002, 2, 28, 12, 0)], [datetime.datetime(2002, 12, 1, 12, 0), datetime.datetime(2003, 2, 28, 12, 0)], [datetime.datetime(2003, 12, 1, 12, 0), datetime.datetime(2004, 2, 28, 12, 0)], [datetime.datetime(2004, 12, 1, 12, 0), datetime.datetime(2005, 2, 28, 12, 0)], [datetime.datetime(2005, 12, 1, 12, 0), datetime.datetime(2006, 2, 28, 12, 0)], [datetime.datetime(2006, 12, 1, 12, 0), datetime.datetime(2007, 2, 28, 12, 0)], [datetime.datetime(2007, 12, 1, 12, 0), datetime.datetime(2008, 2, 28, 
12, 0)], [datetime.datetime(2008, 12, 1, 12, 0), datetime.datetime(2009, 2, 28, 12, 0)], [datetime.datetime(2009, 12, 1, 12, 0), datetime.datetime(2010, 2, 28, 12, 0)]]) self.assertEqual(field.shape,(1, 9, 1, 3, 3)) - - def test_dataset(self): + + def test_keyword_calc_grouping_seasonal_with_year(self): + calc_grouping = [[1,2,3],'year'] + calc = [{'func':'mean','name':'mean'}] + rd = self.test_data.get_rd('cancm4_tas') + ops = OcgOperations(dataset=rd,calc=calc,calc_grouping=calc_grouping, + geom='state_boundaries',select_ugid=[25]) + ret = ops.execute() + self.assertEqual(ret[25]['tas'].shape,(1,10,1,5,4)) + + def test_keyword_calc_grouping_with_string_expression(self): + """Test that no calculation grouping is allowed with a string expression.""" + + calc = 'es=tas*3' + calc_grouping = ['month'] + rd = self.test_data.get_rd('cancm4_tas') + with self.assertRaises(DefinitionValidationError): + OcgOperations(dataset=rd, calc=calc, calc_grouping=calc_grouping) + + def test_keyword_callback(self): + + app = [] + def callback(perc,msg,app=app): + app.append((perc,msg)) + # print(perc,msg) + + rd = self.test_data.get_rd('cancm4_tas') + rd2 = self.test_data.get_rd('cancm4_tasmax_2011') + dataset = [rd,rd2] + for ds in dataset: + ds.time_region = {'month':[6]} + ops = ocgis.OcgOperations(dataset=dataset,geom='state_boundaries',select_ugid=[16,17], + calc_grouping=['month'],calc=[{'func':'mean','name':'mean'},{'func':'median','name':'median'}], + callback=callback) + ops.execute() + + self.assertTrue(len(app) > 15) + self.assertEqual(app[-1][0],100.0) + + def test_keyword_conform_units_to(self): + rd1 = self.test_data.get_rd('cancm4_tas') + rd2 = self.test_data.get_rd('cancm4_tas') + rd2.alias = 'foo' + ops = OcgOperations(dataset=[rd1, rd2], conform_units_to='celsius') + for ds in ops.dataset.itervalues(): + self.assertEqual(ds.conform_units_to, 'celsius') + + ## test that the conform argument is updated + ops.conform_units_to = 'fahrenheit' + for ds in 
ops.dataset.itervalues(): + self.assertEqual(ds.conform_units_to, 'fahrenheit') + + def test_keyword_conform_units_to_bad_units(self): + rd = self.test_data.get_rd('cancm4_tas') + with self.assertRaises(RequestValidationError): + OcgOperations(dataset=rd, conform_units_to='crap') + + def test_keyword_dataset(self): env.DIR_DATA = ocgis.env.DIR_TEST_DATA reference_rd = self.test_data.get_rd('cancm4_tas') rd = RequestDataset(reference_rd.uri,reference_rd.variable) ds = definition.Dataset(rd) self.assertEqual(ds.value,RequestDatasetCollection([rd])) - + dsa = {'uri':reference_rd.uri,'variable':reference_rd.variable} ds = definition.Dataset(dsa) - + reference_rd2 = self.test_data.get_rd('narccap_crcm') dsb = [dsa,{'uri':reference_rd2.uri,'variable':reference_rd2.variable,'alias':'knight'}] ds = definition.Dataset(dsb) - - def test_abstraction(self): - K = definition.Abstraction - - k = K() - self.assertEqual(k.value,None) - self.assertEqual(str(k),'abstraction="None"') - - k = K('point') - self.assertEqual(k.value,'point') - + + def test_keyword_geom(self): + geom = make_poly((37.762,38.222),(-102.281,-101.754)) + g = definition.Geom(geom) + self.assertEqual(type(g.value),tuple) + self.assertEqual(g.value[0].single.geom.bounds,(-102.281, 37.762, -101.754, 38.222)) + + g = definition.Geom(None) + self.assertEqual(g.value,None) + self.assertEqual(str(g),'geom=None') + + g = definition.Geom('mi_watersheds') + self.assertEqual(str(g),'geom="mi_watersheds"') + + geoms = ShpCabinetIterator('mi_watersheds') + g = definition.Geom(geoms) + self.assertEqual(len(list(g.value)),60) + self.assertEqual(g._shp_key,'mi_watersheds') + + def test_keyword_geom_having_changed_select_ugid(self): + ops = OcgOperations(dataset=self.test_data.get_rd('cancm4_tas'), + geom='state_boundaries') + self.assertEqual(len(list(ops.geom)),51) + ops.select_ugid = [16,17] + self.assertEqual(len(list(ops.geom)),2) + + def test_keyword_geom_string(self): + ops = 
OcgOperations(dataset=self.datasets,geom='state_boundaries') + self.assertEqual(len(list(ops.geom)),51) + ops.geom = None + self.assertEqual(ops.geom,None) + ops.geom = 'mi_watersheds' + self.assertEqual(len(list(ops.geom)),60) + ops.geom = [-120,40,-110,50] + self.assertEqual(ops.geom[0].single.geom.bounds,(-120.0,40.0,-110.0,50.0)) + + def test_keyword_headers(self): + headers = ['did','value'] + for htype in [list,tuple]: + hvalue = htype(headers) + hh = definition.Headers(hvalue) + self.assertEqual(hh.value,tuple(constants.required_headers+['value'])) + + headers = ['foo'] with self.assertRaises(DefinitionValidationError): - K('pt') - - def test_spatial_operation(self): - values = (None,'clip','intersects') - ast = ('intersects','clip','intersects') - - klass = definition.SpatialOperation - for v,a in zip(values,ast): - obj = klass(v) - self.assertEqual(obj.value,a) + hh = definition.Headers(headers) + + headers = [] + hh = definition.Headers(headers) + self.assertEqual(hh.value,tuple(constants.required_headers)) - def test_regridding_to_nc(self): + def test_keyword_level_range(self): + rd = self.test_data.get_rd('cancm4_tas') + rd2 = self.test_data.get_rd('cancm4_tas') + rd.alias = 'foo' + lr = [1,2] + ops = ocgis.OcgOperations(dataset=[rd,rd2],level_range=lr) + for r in [rd,rd2]: + self.assertEqual(r.level_range,None) + for r in ops.dataset.itervalues(): + self.assertEqual(r.level_range,tuple(lr)) + + lr = [2,3] + ops.level_range = lr + for r in ops.dataset.itervalues(): + self.assertEqual(r.level_range,tuple(lr)) + + def test_keyword_output_format_nc_package_validation_raised_first(self): + rd = self.test_data.get_rd('cancm4_tas') + rd2 = self.test_data.get_rd('rotated_pole_ichec',kwds={'alias':'tas2'}) + try: + ocgis.OcgOperations(dataset=[rd,rd2],output_format='nc') + except DefinitionValidationError as e: + self.assertIn('Data packages (i.e. 
more than one RequestDataset) may not be written to netCDF.', + e.message) + pass + + def test_keyword_regrid_destination(self): + """Test regridding not allowed with clip operation.""" + + rd = self.test_data.get_rd('cancm4_tas') + with self.assertRaises(DefinitionValidationError): + OcgOperations(dataset=rd, regrid_destination=rd, spatial_operation='clip') + + def test_keyword_regrid_destination_to_nc(self): """Write regridded data to netCDF.""" rd1 = self.test_data.get_rd('cancm4_tas') @@ -401,7 +367,7 @@ def test_regridding_to_nc(self): self.assertIsNotNone(field.spatial.grid.corners) self.assertTrue(np.any(field.variables.first().value.mask)) - def test_regridding_to_shp_vector_wrap(self): + def test_keyword_regrid_destination_to_shp_vector_wrap(self): """Test writing to shapefile with different vector wrap options.""" rd1 = self.test_data.get_rd('cancm4_tas') @@ -420,7 +386,52 @@ def test_regridding_to_shp_vector_wrap(self): else: self.assertGreater(geom.bounds[0], 0) + def test_keyword_spatial_operation(self): + values = (None,'clip','intersects') + ast = ('intersects','clip','intersects') + + klass = definition.SpatialOperation + for v,a in zip(values,ast): + obj = klass(v) + self.assertEqual(obj.value,a) + + def test_keyword_time_range(self): + rd = self.test_data.get_rd('cancm4_tas') + rd2 = self.test_data.get_rd('cancm4_tas') + rd.alias = 'foo' + tr = [datetime.datetime(2002,1,1),datetime.datetime(2002,3,1)] + ops = ocgis.OcgOperations(dataset=[rd,rd2],time_range=tr) + for r in [rd,rd2]: + self.assertEqual(r.time_range,None) + for r in ops.dataset.itervalues(): + self.assertEqual(r.time_range,tuple(tr)) + + tr = [datetime.datetime(2002,1,1),datetime.datetime(2003,3,1)] + ops.time_range = tr + for r in ops.dataset.itervalues(): + self.assertEqual(r.time_range,tuple(tr)) + + def test_keyword_time_range_and_time_region_null_parms(self): + ops = OcgOperations(dataset=self.datasets_no_range) + self.assertEqual(ops.geom,None) + 
self.assertEqual(len(ops.dataset),3) + for ds in ops.dataset.itervalues(): + self.assertEqual(ds.time_range,None) + self.assertEqual(ds.level_range,None) + ops.__repr__() -if __name__ == "__main__": - #import sys;sys.argv = ['', 'Test.testName'] - unittest.main() \ No newline at end of file + def test_keyword_time_region(self): + rd = self.test_data.get_rd('cancm4_tas') + rd2 = self.test_data.get_rd('cancm4_tas') + rd.alias = 'foo' + tr = {'month':[6],'year':[2005]} + ops = ocgis.OcgOperations(dataset=[rd,rd2],time_region=tr) + for r in [rd,rd2]: + self.assertEqual(r.time_region,None) + for r in ops.dataset.itervalues(): + self.assertEqual(r.time_region,tr) + + tr = {'month':[6],'year':[2006]} + ops.time_region = tr + for r in ops.dataset.itervalues(): + self.assertEqual(r.time_region,tr) diff --git a/src/ocgis/test/test_ocgis/test_api/test_parms/test_definition.py b/src/ocgis/test/test_ocgis/test_api/test_parms/test_definition.py index c959d28fa..7e2129059 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_parms/test_definition.py +++ b/src/ocgis/test/test_ocgis/test_api/test_parms/test_definition.py @@ -298,6 +298,21 @@ def test_bad_key(self): Calc(calc) +class TestCalcGrouping(TestBase): + + def test_init(self): + A = Aggregate + + a = A(True) + self.assertEqual(a.value,True) + + a = A(False) + self.assertEqual(a.value,False) + + a = A('True') + self.assertEqual(a.value,True) + + class TestConformUnitsTo(TestBase): create_dir = False From 490803106252baab3df34f0d2d2e86c75a81e5be Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Sun, 23 Nov 2014 18:03:47 -0700 Subject: [PATCH 20/71] wrapped state improperly attributed #341 Changed wrapped states to 'wrapped', 'unwrapped', and 'unknown' allowing explicity comparisons for selectiong of wrapping operations. 
--- src/ocgis/api/subset.py | 34 +++-- src/ocgis/constants.py | 3 +- src/ocgis/interface/base/crs.py | 137 +++++++++++------- src/ocgis/interface/base/dimension/spatial.py | 35 +++-- src/ocgis/test/base.py | 1 + .../test_ocgis/test_api/test_operations.py | 13 +- .../test_interface/test_base/test_crs.py | 132 +++++++++++------ .../test_base/test_dimension/test_spatial.py | 42 ++---- .../test_spatial/test_spatial_subset.py | 14 +- .../test_real_data/test_multiple_datasets.py | 4 + src/ocgis/test/test_simple/test_simple.py | 6 +- src/ocgis/util/spatial/spatial_subset.py | 13 +- 12 files changed, 266 insertions(+), 168 deletions(-) diff --git a/src/ocgis/api/subset.py b/src/ocgis/api/subset.py index 132e55db8..c362d9c00 100644 --- a/src/ocgis/api/subset.py +++ b/src/ocgis/api/subset.py @@ -5,7 +5,7 @@ from ocgis.util.logging_ocgis import ocgis_lh, ProgressOcgOperations import logging from ocgis.api.collection import SpatialCollection -from ocgis.interface.base.crs import CFWGS84, CFRotatedPole, Spherical, WGS84 +from ocgis.interface.base.crs import CFWGS84, CFRotatedPole, Spherical, WGS84, WrappableCoordinateReferenceSystem from ocgis.calc.base import AbstractMultivariateFunction, AbstractKeyedOutputFunction from ocgis.util.helpers import get_default_or_apply from copy import deepcopy, copy @@ -348,12 +348,13 @@ def _get_spatially_subsetted_field_(self, alias, field, subset_sdim, subset_ugid self._geom_unique_store.append(subset_geom) # unwrap the data if it is geographic and 360 - if field.spatial.is_unwrapped and not subset_sdim.is_unwrapped: - ocgis_lh('unwrapping selection geometry', self._subset_log, alias=alias, ugid=subset_ugid, - level=logging.DEBUG) - subset_sdim.unwrap() - # update the geometry reference as the spatial dimension was unwrapped and modified in place - subset_geom = subset_sdim.single.geom + if field.spatial.wrapped_state == WrappableCoordinateReferenceSystem._flag_unwrapped: + if subset_sdim.wrapped_state == 
WrappableCoordinateReferenceSystem._flag_wrapped: + ocgis_lh('unwrapping selection geometry', self._subset_log, alias=alias, ugid=subset_ugid, + level=logging.DEBUG) + subset_sdim.unwrap() + # update the geometry reference as the spatial dimension was unwrapped and modified in place + subset_geom = subset_sdim.single.geom # perform the spatial operation try: @@ -375,8 +376,9 @@ def _get_spatially_subsetted_field_(self, alias, field, subset_sdim, subset_ugid ocgis_lh(exc=ExtentError(message=msg), alias=alias, logger=self._subset_log) # if the subset geometry is unwrapped and the vector wrap option is true, wrap the subset geometry. - if subset_sdim.is_unwrapped and self.ops.vector_wrap: - subset_sdim.wrap() + if self.ops.vector_wrap: + if subset_sdim.wrapped_state == WrappableCoordinateReferenceSystem._flag_unwrapped: + subset_sdim.wrap() return sfield @@ -512,10 +514,14 @@ def _get_regridded_field_with_subset_(self, sfield, subset_sdim_for_regridding=N destination_sdim.crs = Spherical() # check that wrapping is equivalent - if destination_sdim.is_unwrapped and not sfield.spatial.is_unwrapped: - sfield.spatial.unwrap() - elif sfield.spatial.is_unwrapped and not destination_sdim.is_unwrapped: - sfield.spatial.wrap() + if destination_sdim.wrapped_state == WrappableCoordinateReferenceSystem._flag_unwrapped: + if sfield.spatial.wrapped_state == WrappableCoordinateReferenceSystem._flag_wrapped: + sfield.spatial = deepcopy(sfield.spatial) + sfield.spatial.unwrap() + if destination_sdim.wrapped_state == WrappableCoordinateReferenceSystem._flag_wrapped: + if sfield.spatial.wrapped_state == WrappableCoordinateReferenceSystem._flag_unwrapped: + sfield.spatial = deepcopy(sfield.spatial) + sfield.spatial.wrap() # remove the mask from the destination field. new_mask = np.zeros(destination_sdim.shape, dtype=bool) @@ -623,7 +629,7 @@ def _process_geometries_(self, itr, field, headers, value_keys, alias): # wrap the returned data. 
if not env.OPTIMIZE_FOR_CALC: - if sfield is not None and sfield.spatial.is_unwrapped: + if sfield is not None and sfield.spatial.wrapped_state == WrappableCoordinateReferenceSystem._flag_unwrapped: if self.ops.output_format != 'nc' and self.ops.vector_wrap: ocgis_lh('wrapping output geometries', self._subset_log, alias=alias, ugid=subset_ugid, level=logging.DEBUG) diff --git a/src/ocgis/constants.py b/src/ocgis/constants.py index 17cb0e1e8..80683388a 100644 --- a/src/ocgis/constants.py +++ b/src/ocgis/constants.py @@ -46,7 +46,8 @@ enabled_numpy_ufuncs = ['exp','log','abs'] #: The value for the 180th meridian to use when wrapping. -meridian_180th = 179.9999999999999 +meridian_180th = 180. +# meridian_180th = 179.9999999999999 test_run_long_tests = False diff --git a/src/ocgis/interface/base/crs.py b/src/ocgis/interface/base/crs.py index f48fa95cf..ea97a0e22 100644 --- a/src/ocgis/interface/base/crs.py +++ b/src/ocgis/interface/base/crs.py @@ -133,70 +133,95 @@ def get_wrap_action(cls, state_src, state_dst): return ret @classmethod - def get_is_360(cls, spatial): + def get_wrapped_state(cls, sdim): """ - :param spatial: - :type spatial: :class:`~ocgis.interface.base.dimension.spatial.SpatialDimension` + :param sdim: The spatial dimension used to determine the wrapped state. This function only checks grid centroids + and geometry exteriors. Bounds/corners on the grid are excluded. + :type sdim: :class:`ocgis.interface.base.dimension.spatial.SpatialDimension` """ - if not isinstance(spatial.crs, WrappableCoordinateReferenceSystem): - msg = 'Wrapped state may only be determined for geographic (i.e. spherical) coordinate systems.' 
- raise SpatialWrappingError(msg) - - try: - if spatial.grid.col.bounds is None: - check = spatial.grid.col.value - else: - check = spatial.grid.col.bounds - except AttributeError as e: - # column dimension is likely missing - try: - if spatial.grid.col is None: - if spatial.grid.corners is not None: - check = spatial.grid.corners[1] - else: - check = spatial.grid.value[1, :, :] - else: - ocgis_lh(exc=e) - except AttributeError: - # there may be no grid, access the geometries directly - if spatial.geom.polygon is not None: - geoms_to_check = spatial.geom.polygon.value - else: - geoms_to_check = spatial.geom.point.value - geoms_to_check = geoms_to_check.compressed() - - # if this is switched to true, there are geometries with coordinate values less than 0 - for geom in geoms_to_check: - if type(geom) in [MultiPolygon, MultiPoint]: - it = geom - else: - it = [geom] - for sub_geom in it: - try: - coords = np.array(sub_geom.exterior.coords) - if np.any(coords > 180.): - return True - ## might be checking a point - except AttributeError: - coords = np.array(sub_geom) - if np.any(coords > 180.): - return True - return False - - if np.any(check > 180.): - ret = True + if sdim.grid is not None: + ret = cls._get_wrapped_state_from_array_(sdim.grid.value[1].data) else: - ret = False - + stops = (cls._flag_wrapped, cls._flag_unwrapped) + ret = cls._flag_unknown + if sdim.geom.polygon is not None: + geoms = sdim.geom.polygon.value.data.flat + else: + geoms = sdim.geom.point.value.data.flat + for geom in geoms: + flag = cls._get_wrapped_state_from_geometry_(geom) + if flag in stops: + ret = flag + break return ret + #todo: remove commented code + # @classmethod + # def get_is_360(cls, spatial): + # """ + # :param spatial: + # :type spatial: :class:`~ocgis.interface.base.dimension.spatial.SpatialDimension` + # """ + # + # if not isinstance(spatial.crs, WrappableCoordinateReferenceSystem): + # msg = 'Wrapped state may only be determined for geographic (i.e. 
spherical) coordinate systems.' + # raise SpatialWrappingError(msg) + # + # try: + # # if spatial.grid.col.bounds is None: + # check = spatial.grid.col.value + # # else: + # # check = spatial.grid.col.bounds + # except AttributeError as e: + # # column dimension is likely missing + # try: + # if spatial.grid.col is None: + # # if spatial.grid.corners is not None: + # # check = spatial.grid.corners[1] + # # else: + # check = spatial.grid.value[1, :, :] + # else: + # ocgis_lh(exc=e) + # except AttributeError: + # # there may be no grid, access the geometries directly + # if spatial.geom.polygon is not None: + # geoms_to_check = spatial.geom.polygon.value + # else: + # geoms_to_check = spatial.geom.point.value + # geoms_to_check = geoms_to_check.compressed() + # + # # if this is switched to true, there are geometries with coordinate values less than 0 + # for geom in geoms_to_check: + # if type(geom) in [MultiPolygon, MultiPoint]: + # it = geom + # else: + # it = [geom] + # for sub_geom in it: + # try: + # coords = np.array(sub_geom.exterior.coords) + # if np.all(coords[:, 0] >= 0.): + # return True + # ## might be checking a point + # except AttributeError: + # coords = np.array(sub_geom) + # if np.all(coords[0] >= 0.): + # return True + # return False + # + # if np.all(check >= 0.): + # ret = True + # else: + # ret = False + # + # return ret + def unwrap(self, spatial): """ :type spatial: :class:`ocgis.interface.base.dimension.spatial.SpatialDimension` """ - if not self.get_is_360(spatial): + if self.get_wrapped_state(spatial) == self._flag_wrapped: # unwrap the geometries unwrap = Wrapper().unwrap to_wrap = self._get_to_wrap_(spatial) @@ -224,7 +249,7 @@ def unwrap(self, spatial): spatial.grid.corners[1][select] += 360 else: - ocgis_lh(exc=SpatialWrappingError('Data already has a 0 to 360 coordinate system.')) + ocgis_lh(exc=SpatialWrappingError('Data does not need to be unwrapped.')) def wrap(self,spatial): """ @@ -235,7 +260,7 @@ def wrap(self,spatial): :type 
spatial: :class:`ocgis.interface.base.dimension.spatial.SpatialDimension` """ - if self.get_is_360(spatial): + if self.get_wrapped_state(spatial) == self._flag_unwrapped: # wrap the geometries if they are available wrap = Wrapper().wrap to_wrap = self._get_to_wrap_(spatial) @@ -291,7 +316,7 @@ def wrap(self,spatial): select = ref[1] > 180 ref[1][select] -= 360 else: - ocgis_lh(exc=SpatialWrappingError('Data does not have a 0 to 360 coordinate system.')) + ocgis_lh(exc=SpatialWrappingError('Data does not need to be wrapped.')) @staticmethod def _get_to_wrap_(spatial): diff --git a/src/ocgis/interface/base/dimension/spatial.py b/src/ocgis/interface/base/dimension/spatial.py index 77c7b516b..df96d2d09 100644 --- a/src/ocgis/interface/base/dimension/spatial.py +++ b/src/ocgis/interface/base/dimension/spatial.py @@ -140,17 +140,18 @@ def grid(self, value): assert(isinstance(value, SpatialGridDimension)) self._grid = value - @property - def is_unwrapped(self): - """ - Return ``True`` if the coordinates of the spatial data have a 0 to 360 longitudinal domain.""" - - try: - ret = self.crs.get_is_360(self) - # None and coordinate systems w/out spherical coordinate systems have no wrapping checks - except AttributeError: - ret = False - return ret + #todo: remove commented code + # @property + # def is_unwrapped(self): + # """ + # Return ``True`` if the coordinates of the spatial data have a 0 to 360 longitudinal domain.""" + # + # try: + # ret = self.crs.get_is_360(self) + # # None and coordinate systems w/out spherical coordinate systems have no wrapping checks + # except AttributeError: + # ret = False + # return ret @property def shape(self): @@ -175,6 +176,14 @@ def weights(self): ret = self.geom.polygon.weights return ret + @property + def wrapped_state(self): + try: + ret = self.crs.get_wrapped_state(self) + except AttributeError: + ret = None + return ret + def assert_uniform_mask(self): """ Check that the mask for the major spatial components are equivalent. 
This will only test loaded elements. @@ -505,7 +514,9 @@ def update_crs(self, to_crs): # update grid values value_row = self.grid.value.data[0].reshape(-1) value_col = self.grid.value.data[1].reshape(-1) - self._update_crs_with_geometry_collection_(to_sr, value_row, value_col)# update corners + self._update_crs_with_geometry_collection_(to_sr, value_row, value_col) + self.grid.value.data[0] = value_row.reshape(*self.grid.shape) + self.grid.value.data[1] = value_col.reshape(*self.grid.shape) if self.grid.corners is not None: # update the corners diff --git a/src/ocgis/test/base.py b/src/ocgis/test/base.py index a03b63eca..e7a471320 100644 --- a/src/ocgis/test/base.py +++ b/src/ocgis/test/base.py @@ -296,6 +296,7 @@ def get_tst_data(): test_data.update(['nc', 'misc', 'subset_test'], 'Prcp', 'sresa2.ncar_pcm1.3.monthly.Prcp.RAW.1950-2099.nc', key='subset_test_Prcp') test_data.update(['nc', 'misc', 'subset_test'], 'Tavg', 'Tavg_bccr_bcm2_0.1.sresa2.nc', key='subset_test_Tavg') test_data.update(['nc', 'misc', 'subset_test'], 'Tavg', 'sresa2.bccr_bcm2_0.1.monthly.Tavg.RAW.1950-2099.nc', key='subset_test_Tavg_sresa2') + test_data.update(['nc', 'misc', 'subset_test'], 'slp', 'slp.1955.nc', key='subset_test_slp') test_data.update(['nc', 'narccap'], 'pr', 'pr_CRCM_ccsm_1981010103.nc', key='narccap_crcm') test_data.update(['nc', 'narccap'], 'pr', 'pr_CRCM_ccsm_1981010103.nc', key='narccap_polar_stereographic') test_data.update(['nc', 'narccap'], 'pr', 'pr_HRM3_gfdl_1981010103.nc', key='narccap_hrm3') diff --git a/src/ocgis/test/test_ocgis/test_api/test_operations.py b/src/ocgis/test/test_ocgis/test_api/test_operations.py index 46bc95120..3744e2fd1 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_operations.py +++ b/src/ocgis/test/test_ocgis/test_api/test_operations.py @@ -5,6 +5,7 @@ from numpy import dtype import numpy as np +from ocgis.util.inspect import Inspect from ocgis.api.parms.definition import RegridOptions from ocgis.interface.base.crs import CFWGS84 @@ -13,7 
+14,7 @@ from ocgis.api.parms import definition from ocgis import env, constants from ocgis.api.operations import OcgOperations -from ocgis.util.helpers import make_poly +from ocgis.util.helpers import make_poly, write_geom_dict, bbox_poly import ocgis from ocgis.api.request.base import RequestDataset, RequestDatasetCollection from ocgis.util.shp_cabinet import ShpCabinetIterator @@ -395,6 +396,16 @@ def test_keyword_spatial_operation(self): obj = klass(v) self.assertEqual(obj.value,a) + def test_keyword_spatial_operations_bounding_box(self): + geom = [-80, 22.5, 50, 70.0] + rd = self.test_data.get_rd('subset_test_slp') + ops = OcgOperations(dataset=rd, geom=geom) + ret = ops.execute() + field = ret[1]['slp'] + self.assertEqual(field.shape, (1, 365, 1, 18, 143)) + slp = field.variables.first() + self.assertEqual(slp.value.mask.sum(), 611010) + def test_keyword_time_range(self): rd = self.test_data.get_rd('cancm4_tas') rd2 = self.test_data.get_rd('cancm4_tas') diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_crs.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_crs.py index 4a860644f..546684ba9 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_crs.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_crs.py @@ -78,6 +78,68 @@ def test_get_wrap_action(self): else: self.assertIsNone(ret) + def test_get_wrapped_state(self): + #todo: test only geometries + + refv = WrappableCoordinateReferenceSystem + refm = refv.get_wrapped_state + + ## test grid ## + + row = VectorDimension(value=[50, 60]) + + col = VectorDimension(value=[0, 90, 180]) + grid = SpatialGridDimension(row=row, col=col) + sdim = SpatialDimension(grid=grid) + self.assertEqual(refm(sdim), refv._flag_unknown) + + col = VectorDimension(value=[-170, 0, 30]) + grid = SpatialGridDimension(row=row, col=col) + sdim = SpatialDimension(grid=grid) + self.assertEqual(refm(sdim), refv._flag_wrapped) + + col = VectorDimension(value=[0, 90, 180, 270]) + grid = 
SpatialGridDimension(row=row, col=col) + sdim = SpatialDimension(grid=grid) + self.assertEqual(refm(sdim), refv._flag_unwrapped) + + ## test geom ## + + for with_polygon in [True, False]: + row = VectorDimension(value=[50, 60]) + col = VectorDimension(value=[155, 165, 175]) + if with_polygon: + row.set_extrapolated_bounds() + col.set_extrapolated_bounds() + grid = SpatialGridDimension(row=row, col=col) + sdim = SpatialDimension(grid=grid) + sdim.grid = None + self.assertEqual(refm(sdim), refv._flag_unknown) + + row = VectorDimension(value=[50, 60]) + col = VectorDimension(value=[160, 170, 180]) + if with_polygon: + row.set_extrapolated_bounds() + col.set_extrapolated_bounds() + grid = SpatialGridDimension(row=row, col=col) + sdim = SpatialDimension(grid=grid) + sdim.grid = None + if with_polygon: + actual = refv._flag_unwrapped + else: + actual = refv._flag_unknown + self.assertEqual(refm(sdim), actual) + + row = VectorDimension(value=[50, 60]) + col = VectorDimension(value=[-160, -150, -140]) + if with_polygon: + row.set_extrapolated_bounds() + col.set_extrapolated_bounds() + grid = SpatialGridDimension(row=row, col=col) + sdim = SpatialDimension(grid=grid) + sdim.grid = None + self.assertEqual(refm(sdim), refv._flag_wrapped) + def test_get_wrapped_state_from_array(self): def _run_(arr, actual_wrapped_state): @@ -93,6 +155,27 @@ def _run_(arr, actual_wrapped_state): arr = np.array([30]) _run_(arr, WrappableCoordinateReferenceSystem._flag_unknown) + arr = np.array([-180, 0, 30]) + _run_(arr, WrappableCoordinateReferenceSystem._flag_wrapped) + + arr = np.array([0]) + _run_(arr, WrappableCoordinateReferenceSystem._flag_unknown) + + arr = np.array([0, 30, 50]) + _run_(arr, WrappableCoordinateReferenceSystem._flag_unknown) + + arr = np.array([0, 30, 50, 181]) + _run_(arr, WrappableCoordinateReferenceSystem._flag_unwrapped) + + arr = np.array([0, 30, 50, 180]) + _run_(arr, WrappableCoordinateReferenceSystem._flag_unknown) + + arr = np.array([-180]) + _run_(arr, 
WrappableCoordinateReferenceSystem._flag_wrapped) + + arr = np.array([-180, 0, 50]) + _run_(arr, WrappableCoordinateReferenceSystem._flag_wrapped) + def test_get_wrapped_state_from_geometry(self): geoms = [Point(-130, 40), MultiPoint([Point(-130, 40), Point(30, 50)]), @@ -119,28 +202,6 @@ def test_init(self): self.assertDictEqual(crs.value, {'a': 6370998.1, 'no_defs': True, 'b': 6370998.1, 'proj': 'longlat', 'towgs84': '0,0,0,0,0,0,0'}) - def test_get_is_360_geometries(self): - bounds = (5.869442939758301, 47.28110122680663, 15.038049697875975, 54.91740036010742) - poly = make_poly((bounds[1], bounds[3]), (bounds[0], bounds[2])) - record_poly = {'geom': poly, 'properties': {'UGID': 1}} - record_point = {'geom': poly.centroid, 'properties': {'UGID': 1}} - for record in [record_poly, record_point]: - sdim = SpatialDimension.from_records([record]) - self.assertFalse(Spherical.get_is_360(sdim)) - - def test_get_is_360_grid(self): - # perform test with small grid falling between 0 and 180. - row = VectorDimension(value=[0, 40]) - col = VectorDimension(value=[0, 170]) - grid = SpatialGridDimension(row=row, col=col) - sdim = SpatialDimension(grid=grid) - # no crs for the spatial dimension, hence wrapping will fail. 
- with self.assertRaises(SpatialWrappingError): - self.assertIsNone(sdim.crs) - Spherical.get_is_360(sdim) - sdim.crs = Spherical() - self.assertFalse(Spherical.get_is_360(sdim)) - def test_place_prime_meridian_array(self): arr = np.array([123, 180, 200, 180], dtype=float) ret = Spherical._place_prime_meridian_array_(arr) @@ -169,11 +230,9 @@ def test_wrap_normal(self): grid = SpatialGridDimension(row=row, col=col) self.assertEqual(grid.resolution, 3.0) sdim = SpatialDimension(grid=grid, crs=Spherical()) + self.assertEqual(sdim.wrapped_state, WrappableCoordinateReferenceSystem._flag_unknown) with self.assertRaises(SpatialWrappingError): sdim.crs.wrap(sdim) - sdim.crs.unwrap(sdim) - self.assertNotEqual(sdim.grid, None) - self.assertNumpyAll(sdim.grid.value, np.ma.array(data=[[[40.0]], [[0.0]]], mask=[[[False]], [[False]]], )) def test_wrap_360(self): """Test wrapping.""" @@ -214,9 +273,7 @@ def _get_sdim_(value, bounds): # bounds values at the prime meridian of 180. orig, sdim = _get_sdim_(178, [176, 180.]) - # data does not have a verified 360 coordinate system - with self.assertRaises(SpatialWrappingError): - sdim.wrap() + self.assertEqual(sdim.wrapped_state, WrappableCoordinateReferenceSystem._flag_unknown) # bounds values on the other side of the prime meridian orig, sdim = _get_sdim_(182, [180, 184]) @@ -229,13 +286,9 @@ def _get_sdim_(value, bounds): # centroid directly on prime meridian orig, sdim = _get_sdim_(180, [178, 182]) - sdim.wrap() - self.assertIsNone(sdim.grid.col.bounds) - self.assertIsNone(sdim.grid.row.bounds) - self.assertIsNone(sdim.grid.corners) - self.assertEqual(sdim.geom.polygon.value[0, 0][0].bounds, (178.0, 38.0, 180.0, 42.0)) - self.assertEqual(sdim.geom.polygon.value[0, 0][1].bounds, (-180.0, 38.0, -178.0, 42.0)) - self.assertNumpyAll(np.array(sdim.geom.point.value[0, 0]), np.array([180., 40.])) + self.assertEqual(sdim.wrapped_state, WrappableCoordinateReferenceSystem._flag_unknown) + with self.assertRaises(SpatialWrappingError): + 
sdim.wrap() # no row/column bounds but with corners orig, sdim = _get_sdim_([182, 186], [[180, 184], [184, 188]]) @@ -250,18 +303,15 @@ def _get_sdim_(value, bounds): # unwrap a wrapped spatial dimension making sure the unwrapped multipolygon bounds are the same as the wrapped # polygon bounds. row = VectorDimension(value=40, bounds=[38, 42]) - col = VectorDimension(value=180, bounds=[179, 181]) + col = VectorDimension(value=185, bounds=[184, 186]) grid = SpatialGridDimension(row=row, col=col) sdim = SpatialDimension(grid=grid, crs=Spherical()) orig_sdim = deepcopy(sdim) + self.assertEqual(orig_sdim.wrapped_state, WrappableCoordinateReferenceSystem._flag_unwrapped) sdim.crs.wrap(sdim) - self.assertIsInstance(sdim.geom.polygon.value[0, 0], MultiPolygon) + self.assertEqual(sdim.wrapped_state, WrappableCoordinateReferenceSystem._flag_wrapped) sdim.crs.unwrap(sdim) self.assertEqual(orig_sdim.geom.polygon.value[0, 0].bounds, sdim.geom.polygon.value[0, 0].bounds) - - # for target in ['point', 'polygon']: - # path = get_temp_path(name=target, suffix='.shp', wd=self.current_dir_output) - # sdim.write_fiona(path, target) class TestWGS84(TestBase): diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py index 97a45a7e7..c4eca2a98 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py @@ -8,14 +8,15 @@ SpatialGeometryDimension, SpatialGeometryPolygonDimension,\ SpatialGridDimension, SpatialGeometryPointDimension, SingleElementRetriever from ocgis.util.helpers import iter_array, make_poly, get_bounds_from_1d,\ - get_date_list, write_geom_dict + get_date_list, write_geom_dict, bbox_poly import fiona from fiona.crs import from_epsg from shapely.geometry import shape, mapping, Polygon from shapely.geometry.point import Point from 
ocgis.exc import EmptySubsetError, SpatialWrappingError, MultipleElementsFound from ocgis.test.base import TestBase -from ocgis.interface.base.crs import CoordinateReferenceSystem, WGS84, CFWGS84, CFRotatedPole +from ocgis.interface.base.crs import CoordinateReferenceSystem, WGS84, CFWGS84, CFRotatedPole, \ + WrappableCoordinateReferenceSystem from ocgis.interface.base.dimension.base import VectorDimension import datetime from importlib import import_module @@ -397,30 +398,6 @@ def test_set_mask(self): else: raise - def test_is_unwrapped(self): - """Test if a dataset's longitudinal domain extends from 0 to 360 or -180 to 180.""" - - # the state boundaries file is not unwrapped - sdim = self.get_spatial_dimension_from_records() - self.assertFalse(sdim.is_unwrapped) - - # choose a record and unwrap it - idx = sdim.properties['STATE_NAME'] == 'Nebraska' - sub = sdim[:, idx] - wrapper = Wrapper() - unwrapped = wrapper.unwrap(sub.abstraction_geometry.value[0, 0]) - sub.abstraction_geometry.value[0, 0] = unwrapped - self.assertTrue(sub.is_unwrapped) - - def test_is_unwrapped_wrong_crs(self): - """Test exception is appropriately raised with the wrong CRS.""" - - sdim = self.get_spatial_dimension_from_records() - sdim.crs = CoordinateReferenceSystem(epsg=2346) - self.assertFalse(sdim.is_unwrapped) - sdim.crs = None - self.assertFalse(sdim.is_unwrapped) - def test_overloaded_crs(self): """Test CFWGS84 coordinate system is always used if the input CRS is equivalent.""" @@ -992,7 +969,7 @@ def assertUnwrapped(arr): with self.assertRaises(SpatialWrappingError): sdim.unwrap() sdim.crs = WGS84() - self.assertFalse(sdim.is_unwrapped) + self.assertEqual(sdim.wrapped_state, WrappableCoordinateReferenceSystem._flag_wrapped) sdim.unwrap() assertUnwrapped(sdim.grid.value) @@ -1010,13 +987,13 @@ def assertUnwrapped(arr): self.assertNumpyAll(np.array(sdim.geom.polygon.value[2, 2].bounds), np.array(bounds_from_corner)) self.assertEqual(sdim.geom.polygon.value[2, 2].bounds, (261.5, 37.5, 
262.5, 38.5)) self.assertNumpyAll(np.array(sdim.geom.point.value[2, 2]), np.array([ 262., 38.])) - self.assertTrue(sdim.is_unwrapped) + self.assertEqual(sdim.wrapped_state, WrappableCoordinateReferenceSystem._flag_unwrapped) def test_wrap(self): """Test wrapping a SpatialDimension""" def assertWrapped(arr): - select = arr > 180 + select = arr >= constants.meridian_180th self.assertFalse(select.any()) sdim = self.get_sdim(crs=WGS84()) @@ -1051,6 +1028,13 @@ def test_wrap_unwrap_non_wgs84(self): with self.assertRaises(SpatialWrappingError): getattr(sdim, method)() + def test_wrapped_state(self): + sdim = self.get_sdim() + self.assertIsNone(sdim.wrapped_state) + + sdim = self.get_sdim(crs=CFWGS84()) + self.assertEqual(sdim.wrapped_state, WrappableCoordinateReferenceSystem._flag_wrapped) + class TestSpatialGeometryDimension(TestBase): diff --git a/src/ocgis/test/test_ocgis/test_util/test_spatial/test_spatial_subset.py b/src/ocgis/test/test_ocgis/test_util/test_spatial/test_spatial_subset.py index 28ada52c9..f14070396 100644 --- a/src/ocgis/test/test_ocgis/test_util/test_spatial/test_spatial_subset.py +++ b/src/ocgis/test/test_ocgis/test_util/test_spatial/test_spatial_subset.py @@ -6,7 +6,7 @@ from ocgis import CoordinateReferenceSystem, RequestDataset import ocgis from ocgis.exc import EmptySubsetError -from ocgis.interface.base.crs import CFWGS84, CFRotatedPole +from ocgis.interface.base.crs import CFWGS84, CFRotatedPole, WrappableCoordinateReferenceSystem from ocgis.interface.base.dimension.base import VectorDimension from ocgis.interface.base.dimension.spatial import SpatialDimension from ocgis.interface.base.field import Field @@ -18,11 +18,11 @@ from ocgis import constants, env -class TestSpatialSubset(TestBase): +class TestSpatialSubsetOperation(TestBase): def __init__(self, *args, **kwargs): self._target = None - super(TestSpatialSubset, self).__init__(*args, **kwargs) + super(TestSpatialSubsetOperation, self).__init__(*args, **kwargs) def __iter__(self): 
keywords = dict(target=self.target, @@ -262,16 +262,16 @@ def test_get_spatial_subset_wrap(self): subset_sdim = SpatialDimension.from_records([self.nebraska]) rd = self.test_data.get_rd('cancm4_tas') - self.assertTrue(rd.get().spatial.is_unwrapped) + self.assertEqual(rd.get().spatial.wrapped_state, WrappableCoordinateReferenceSystem._flag_unwrapped) ss = SpatialSubsetOperation(rd, wrap=True) ret = ss.get_spatial_subset('intersects', subset_sdim) - self.assertFalse(ret.spatial.is_unwrapped) + self.assertEqual(ret.spatial.wrapped_state, WrappableCoordinateReferenceSystem._flag_wrapped) self.assertAlmostEqual(ret.spatial.grid.value.data[1].mean(), -99.84375) # test with wrap false ss = SpatialSubsetOperation(rd, wrap=False) ret = ss.get_spatial_subset('intersects', subset_sdim) - self.assertTrue(ret.spatial.is_unwrapped) + self.assertEqual(ret.spatial.wrapped_state, WrappableCoordinateReferenceSystem._flag_unwrapped) self.assertAlmostEqual(ret.spatial.grid.value.data[1].mean(), 260.15625) def test_prepare_target(self): @@ -306,7 +306,7 @@ def test_prepare_subset_sdim(self): field = self.test_data.get_rd('cancm4_tas').get() ss = SpatialSubsetOperation(field) prepared = ss._prepare_subset_sdim_(nebraska) - self.assertTrue(prepared.is_unwrapped) + self.assertEqual(prepared.wrapped_state, WrappableCoordinateReferenceSystem._flag_unwrapped) def test_sdim(self): for ss, k in self: diff --git a/src/ocgis/test/test_real_data/test_multiple_datasets.py b/src/ocgis/test/test_real_data/test_multiple_datasets.py index 489f2df27..a9526ba2c 100644 --- a/src/ocgis/test/test_real_data/test_multiple_datasets.py +++ b/src/ocgis/test/test_real_data/test_multiple_datasets.py @@ -82,6 +82,10 @@ def test_vector_wrap(self): def test_aggregate_clip(self): kwds = {'aggregate':True,'spatial_operation':'clip'} + ops = self.get_ops(kwds=kwds) + # for k, v in ops.dataset.iteritems(): + # v.get()[0, 0, 0, :, :].spatial.write_fiona('/tmp/{0}.shp'.format(k)) + # import ipdb;ipdb.set_trace() ref = 
self.get_ref(kwds) for field in ref.values(): for variable in field.variables.values(): diff --git a/src/ocgis/test/test_simple/test_simple.py b/src/ocgis/test/test_simple/test_simple.py index 518761163..b0e3af7a6 100644 --- a/src/ocgis/test/test_simple/test_simple.py +++ b/src/ocgis/test/test_simple/test_simple.py @@ -24,7 +24,7 @@ from ocgis.interface.base import crs from shapely.geometry.geo import mapping from shapely import wkt -from ocgis.interface.base.crs import CoordinateReferenceSystem, WGS84, CFWGS84 +from ocgis.interface.base.crs import CoordinateReferenceSystem, WGS84, CFWGS84, WrappableCoordinateReferenceSystem from ocgis.api.request.base import RequestDataset, RequestDatasetCollection from copy import deepcopy from contextlib import contextmanager @@ -1460,10 +1460,10 @@ def test_vector_wrap_in_operations(self): rd = RequestDataset(**self.get_dataset()) field = rd.get() - self.assertTrue(field.spatial.is_unwrapped) + self.assertEqual(field.spatial.wrapped_state, WrappableCoordinateReferenceSystem._flag_unwrapped) ops = OcgOperations(dataset=rd, vector_wrap=True) ret = ops.execute() - self.assertFalse(ret[1]['foo'].spatial.is_unwrapped) + self.assertEqual(ret[1]['foo'].spatial.wrapped_state, WrappableCoordinateReferenceSystem._flag_wrapped) def test_wrap(self): diff --git a/src/ocgis/util/spatial/spatial_subset.py b/src/ocgis/util/spatial/spatial_subset.py index 9eb1fca74..9562df46f 100644 --- a/src/ocgis/util/spatial/spatial_subset.py +++ b/src/ocgis/util/spatial/spatial_subset.py @@ -1,5 +1,5 @@ from copy import deepcopy, copy -from ocgis.interface.base.crs import CFRotatedPole, CFWGS84 +from ocgis.interface.base.crs import CFRotatedPole, CFWGS84, WrappableCoordinateReferenceSystem from ocgis.interface.base.dimension.spatial import SpatialDimension from ocgis import RequestDataset @@ -193,7 +193,7 @@ def _get_should_wrap_(self, target): # the output needs to be wrapped and the input data is unwrapped. 
output from get_spatial_subset is always # wrapped according to the input spatial dimension - if self.wrap and sdim.is_unwrapped: + if self.wrap and sdim.wrapped_state == WrappableCoordinateReferenceSystem._flag_unwrapped: ret = True else: ret = False @@ -212,8 +212,13 @@ def _prepare_subset_sdim_(self, subset_sdim): prepared = deepcopy(subset_sdim) prepared.update_crs(self.sdim.crs) - if self.sdim.is_unwrapped and not prepared.is_unwrapped: - prepared.unwrap() + if self.sdim.wrapped_state == WrappableCoordinateReferenceSystem._flag_unwrapped: + if prepared.wrapped_state == WrappableCoordinateReferenceSystem._flag_wrapped: + prepared.unwrap() + elif self.sdim.wrapped_state == WrappableCoordinateReferenceSystem._flag_wrapped: + if prepared.wrapped_state == WrappableCoordinateReferenceSystem._flag_unwrapped: + prepared.wrap() + return prepared def _prepare_target_(self): From d930bf567239d322a9cfcda37bc8bbcd17ee1bc3 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Sun, 23 Nov 2014 18:57:18 -0700 Subject: [PATCH 21/71] fixes for numpy 1.9.1 #342 Fixed indexing errors related to the new NumPy version. Floating point errors are occurring, but 1.8.2 is still the default NumPy version. --- src/ocgis/calc/base.py | 4 ++-- src/ocgis/interface/base/field.py | 2 +- src/ocgis/test/test_ocgis/test_calc/test_library/test_math.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/ocgis/calc/base.py b/src/ocgis/calc/base.py index 9be05b093..7b2eaa02b 100644 --- a/src/ocgis/calc/base.py +++ b/src/ocgis/calc/base.py @@ -246,11 +246,11 @@ def _add_to_collection_(self, units=None, value=None, parent_variables=None, ali fill_value = np.ma.array([], dtype=dtype).fill_value # the value parameters should come in as a dictionary with two keys - try: + if isinstance(value, dict): fill = value['fill'] sample_size = value['sample_size'] # some computations will just pass the array without the sample size if _get_temporal_agg_fill_ is bypassed. 
- except ValueError: + else: fill = value sample_size = None diff --git a/src/ocgis/interface/base/field.py b/src/ocgis/interface/base/field.py index 7373c7c05..8f38867fd 100644 --- a/src/ocgis/interface/base/field.py +++ b/src/ocgis/interface/base/field.py @@ -166,7 +166,7 @@ def _get_dimension_iterator_1d_(target): for ii in range(ref_idx.shape[0]): for vk in value_keys: try: - to_yld[vk] = ref_idx[vk][ii] + to_yld[vk] = ref_idx.data[vk][ii] ## attempt to access the data directly. masked determination ## is done above. except ValueError: diff --git a/src/ocgis/test/test_ocgis/test_calc/test_library/test_math.py b/src/ocgis/test/test_ocgis/test_calc/test_library/test_math.py index 5f750b690..4d4698f37 100644 --- a/src/ocgis/test/test_ocgis/test_calc/test_library/test_math.py +++ b/src/ocgis/test/test_ocgis/test_calc/test_library/test_math.py @@ -131,7 +131,7 @@ def test_Treshold(self): ret = dv.execute() self.assertEqual(ret['threshold'].value.shape,(2,2,2,3,4)) self.assertNumpyAllClose(ret['threshold'].value[1,1,1,0,:], - np.ma.array([13,16,15,12],mask=False,fill_value=1e20)) + np.ma.array([13,16,15,12],mask=False)) class TestSum(AbstractTestField): From 7785d4f7b00dd66d67091860d2440971adaea9af Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Tue, 25 Nov 2014 17:36:09 -0700 Subject: [PATCH 22/71] change default netcdf data model to NETCDF4 #343 The default data model was changed to NETCDF4 in constants. --- src/ocgis/constants.py | 2 +- src/ocgis/test/test_simple/test_simple.py | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/src/ocgis/constants.py b/src/ocgis/constants.py index 80683388a..d9c95d0cf 100644 --- a/src/ocgis/constants.py +++ b/src/ocgis/constants.py @@ -4,7 +4,7 @@ ocgis_bounds = 'bounds' #: Default netCDF4 output file type -netCDF_default_data_model = 'NETCDF4_CLASSIC' +netCDF_default_data_model = 'NETCDF4' #: Standard headers for subset operations. 
raw_headers = ['did','vid','ugid','tid','lid','gid','variable','alias','time','year','month','day','level','value'] diff --git a/src/ocgis/test/test_simple/test_simple.py b/src/ocgis/test/test_simple/test_simple.py index b0e3af7a6..b8249af35 100644 --- a/src/ocgis/test/test_simple/test_simple.py +++ b/src/ocgis/test/test_simple/test_simple.py @@ -750,6 +750,8 @@ def test_nc_conversion(self): ret = self.get_ret(ops) self.assertNcEqual(rd['uri'], ret, ignore_attributes={'global': ['history']}) + with nc_scope(ret) as ds: + self.assertEqual(ds.file_format, constants.netCDF_default_data_model) def test_nc_conversion_calc(self): calc_grouping = ['month'] From c8c3eb41161135d3f4144e0d94b3e73cb32219c3 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Mon, 1 Dec 2014 10:52:14 -0700 Subject: [PATCH 23/71] unicode escape character in metadata strings #344 When a unicode string is like u'hi \u2013 was a good year', 2013 is not an acceptable unicode character and may not be encoded. The exception is now caught with a warning raised that the attribute is not printable. 
--- src/ocgis/api/request/driver/nc.py | 12 ++++++--- src/ocgis/interface/metadata.py | 9 ++++++- .../test_interface/test_metadata.py | 26 ++++++++++++------- 3 files changed, 33 insertions(+), 14 deletions(-) diff --git a/src/ocgis/api/request/driver/nc.py b/src/ocgis/api/request/driver/nc.py index c64acce03..977f5750e 100644 --- a/src/ocgis/api/request/driver/nc.py +++ b/src/ocgis/api/request/driver/nc.py @@ -1,6 +1,7 @@ from copy import deepcopy import logging import netCDF4 as nc +from warnings import warn import numpy as np @@ -325,10 +326,13 @@ def get_dimension_map(variable, metadata): ocgis_lh(msg, logger='nc.driver', level=logging.WARNING, check_duplicate=True) bounds_var = None - try: - assert(isinstance(bounds_var, basestring)) - except AssertionError: - assert(bounds_var is None) + # bounds variables sometime appear oddly, if it is not none and not a string, display what the value is, raise a + # warning and continue setting the bounds variable to None. + if not isinstance(bounds_var, basestring): + if bounds_var is not None: + msg = 'Bounds variable is not a string and is not None. The value is "{0}". Setting bounds to None.'.format(bounds_var) + warn(msg) + bounds_var = None value.update({'bounds': bounds_var}) diff --git a/src/ocgis/interface/metadata.py b/src/ocgis/interface/metadata.py index b1f8ab9b2..8ced526f1 100644 --- a/src/ocgis/interface/metadata.py +++ b/src/ocgis/interface/metadata.py @@ -1,5 +1,6 @@ from abc import ABCMeta, abstractmethod from collections import OrderedDict +from warnings import warn import numpy as np @@ -100,6 +101,12 @@ def _get_lines_(self): lines.append('// global attributes:') template = ' :{0} = {1} ;' for key, value in self['dataset'].iteritems(): - lines.append(template.format(key, value)) + try: + lines.append(template.format(key, value)) + except UnicodeEncodeError: + # for a unicode string, if "\u" is in the string and an inappropriate unicode character is used, then + # template formatting will break. 
+ msg = 'Unable to encode attribute "{0}". Skipping printing of attribute value.'.format(key) + warn(msg) return lines \ No newline at end of file diff --git a/src/ocgis/test/test_ocgis/test_interface/test_metadata.py b/src/ocgis/test/test_ocgis/test_interface/test_metadata.py index 8f1a9d223..3efd59e56 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_metadata.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_metadata.py @@ -1,18 +1,26 @@ -import unittest -import netCDF4 as nc +import os +from ocgis.test.test_simple.test_simple import nc_scope from ocgis.test.base import TestBase from ocgis.interface.metadata import NcMetadata class TestNcMetadata(TestBase): - def setUp(self): - uri = self.test_data.get_rd('cancm4_tasmax_2001').uri - self.rootgrp = nc.Dataset(uri) - - def tearDown(self): - self.rootgrp.close() + @property + def rd(self): + return self.test_data.get_rd('cancm4_tasmax_2001') def test_init(self): - ncm = NcMetadata(self.rootgrp) + with nc_scope(self.rd.uri, 'r') as ds: + ncm = NcMetadata(ds) self.assertEqual(set(ncm.keys()), set(['dataset', 'variables', 'dimensions', 'file_format'])) + + def test_get_lines(self): + # test with a unicode string + path = os.path.join(self.current_dir_output, 'foo.nc') + with nc_scope(path, 'w') as ds: + ds.foo = u'a bad \u2013 unicode character' + md = NcMetadata(rootgrp=ds) + ds.sync() + lines = md._get_lines_() + self.assertEqual(lines[4], '// global attributes:') From 7fa6f5bcecc3ab663ed8252599a8e81192c5c362 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Mon, 1 Dec 2014 11:53:27 -0700 Subject: [PATCH 24/71] multifile variables Raise a more explicit exception in the case of a variable not being in a URI of a multifile dataset. 
--- src/ocgis/api/request/driver/nc.py | 22 +++++++++++++++++-- .../test_request/test_driver/test_nc.py | 12 ++++++++++ 2 files changed, 32 insertions(+), 2 deletions(-) diff --git a/src/ocgis/api/request/driver/nc.py b/src/ocgis/api/request/driver/nc.py index 977f5750e..c42bdc8e0 100644 --- a/src/ocgis/api/request/driver/nc.py +++ b/src/ocgis/api/request/driver/nc.py @@ -15,7 +15,7 @@ from ocgis.interface.nc.dimension import NcVectorDimension from ocgis.interface.nc.field import NcField from ocgis.interface.nc.temporal import NcTemporalDimension -from ocgis.util.helpers import assert_raise, itersubclasses +from ocgis.util.helpers import assert_raise, itersubclasses, get_iter from ocgis.util.logging_ocgis import ocgis_lh @@ -243,7 +243,25 @@ def open(self): try: ret = nc.Dataset(self.rd.uri, 'r') except TypeError: - ret = nc.MFDataset(self.rd.uri) + try: + ret = nc.MFDataset(self.rd.uri) + except KeyError as e: + # it is possible the variable is not in one of the data URIs. check for this to raise a cleaner error. 
+ for uri in get_iter(self.rd.uri): + ds = nc.Dataset(uri, 'r') + try: + for variable in get_iter(self.rd.variable): + try: + ds.variables[variable] + except KeyError: + msg = 'The variable "{0}" was not found in URI "{1}".'.format(variable, uri) + raise KeyError(msg) + finally: + ds.close() + + # if all variables were found, raise the other error + raise e + return ret diff --git a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py index 7e26f4878..2ea01690d 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py +++ b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py @@ -370,6 +370,18 @@ def test_load_climatology_bounds(self): field = rd.get() self.assertNotEqual(field.temporal.bounds,None) + def test_open(self): + # test a multifile dataset where the variable does not appear in all datasets + uri1 = self.test_data.get_uri('cancm4_tas') + uri2 = self.test_data.get_uri('cancm4_tasmax_2001') + uri = [uri1, uri2] + rd = RequestDataset(uri=uri, variable='tas') + driver = DriverNetcdf(rd) + with self.assertRaises(KeyError): + driver.open() + with self.assertRaises(KeyError): + rd.source_metadata + class Test(TestBase): From 2c55ec73a8dc9cc260bd668ffafd66e4e9251114 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Tue, 2 Dec 2014 10:40:41 -0700 Subject: [PATCH 25/71] added init test --- .../test/test_ocgis/test_calc/test_base.py | 24 +++++++++++++++++-- 1 file changed, 22 insertions(+), 2 deletions(-) diff --git a/src/ocgis/test/test_ocgis/test_calc/test_base.py b/src/ocgis/test/test_ocgis/test_calc/test_base.py index b63fd7e6f..69540574e 100644 --- a/src/ocgis/test/test_ocgis/test_calc/test_base.py +++ b/src/ocgis/test/test_ocgis/test_calc/test_base.py @@ -1,7 +1,8 @@ +from ocgis.test.base import TestBase from ocgis.test.test_ocgis.test_interface.test_base.test_field import AbstractTestField from ocgis.calc.base import 
AbstractUnivariateFunction,\ - AbstractUnivariateSetFunction, AbstractFunction -from ocgis import constants + AbstractUnivariateSetFunction, AbstractFunction, AbstractMultivariateFunction +from ocgis import constants, OcgOperations from cfunits.cfunits import Units from ocgis.exc import UnitsValidationError import numpy as np @@ -62,6 +63,25 @@ def test_execute_meta_attrs(self): self.assertDictEqual(meta_attrs, {'something_new': 'is about to happen'}) +class FakeAbstractMultivariateFunction(AbstractMultivariateFunction): + description = '' + dtype = int + key = 'fmv' + long_name = 'long' + standard_name = 'short' + required_variables = ['tas', 'pr'] + + def calculate(self, *args, **kwargs): + pass + + +class TestAbstractMultivariateFunction(TestBase): + + def test_init(self): + self.assertEqual(AbstractMultivariateFunction.__bases__, (AbstractFunction,)) + + FakeAbstractMultivariateFunction() + class TestAbstractUnivariateFunction(AbstractTestField): From 4084a06464594d8bf5cace8b2cfde23e5305da56 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Tue, 2 Dec 2014 12:33:28 -0700 Subject: [PATCH 26/71] fixed multivariate calculation validation Non-string parameter values were causing an error with validation. 
--- src/ocgis/calc/base.py | 18 ++++++++---- .../test/test_ocgis/test_calc/test_base.py | 28 +++++++++++++++++-- 2 files changed, 38 insertions(+), 8 deletions(-) diff --git a/src/ocgis/calc/base.py b/src/ocgis/calc/base.py index 7b2eaa02b..ae8918f9a 100644 --- a/src/ocgis/calc/base.py +++ b/src/ocgis/calc/base.py @@ -638,15 +638,23 @@ def validate(cls, ops): 'Multivariate functions do not calculate sample size at this time.') ocgis_lh(exc=exc, logger='calc.base') - # ensure the required variables are presents - aliases = [d.alias for d in ops.dataset.itervalues()] + # ensure the required variables are present should_raise = False for c in ops.calc: if c['func'] == cls.key: - if not len(set(c['kwds'].keys()).intersection(set(cls.required_variables))) >= 2: - should_raise = True - if not len(set(c['kwds'].values()).intersection(set(aliases))) >= 2: + kwds = c['kwds'] + + # check the required variables are keyword arguments + if not len(set(kwds.keys()).intersection(set(cls.required_variables))) >= 2: should_raise = True + break + + # ensure the mapped aliases exist + for xx in cls.required_variables: + to_check = kwds[xx] + if to_check not in ops.dataset: + should_raise = True + break if should_raise: from ocgis.api.parms.definition import Calc diff --git a/src/ocgis/test/test_ocgis/test_calc/test_base.py b/src/ocgis/test/test_ocgis/test_calc/test_base.py index 69540574e..3019d7f37 100644 --- a/src/ocgis/test/test_ocgis/test_calc/test_base.py +++ b/src/ocgis/test/test_ocgis/test_calc/test_base.py @@ -1,10 +1,11 @@ +from copy import deepcopy from ocgis.test.base import TestBase from ocgis.test.test_ocgis.test_interface.test_base.test_field import AbstractTestField from ocgis.calc.base import AbstractUnivariateFunction,\ AbstractUnivariateSetFunction, AbstractFunction, AbstractMultivariateFunction -from ocgis import constants, OcgOperations +from ocgis import constants, OcgOperations, FunctionRegistry from cfunits.cfunits import Units -from ocgis.exc import 
UnitsValidationError +from ocgis.exc import UnitsValidationError, DefinitionValidationError import numpy as np @@ -82,7 +83,28 @@ def test_init(self): FakeAbstractMultivariateFunction() - + def test_validate(self): + FunctionRegistry.append(FakeAbstractMultivariateFunction) + rd1 = self.test_data.get_rd('cancm4_tas') + rd1.alias = 'tas2' + rd2 = deepcopy(rd1) + rd2.alias = 'pr2' + + # test non-string keyword arguments will not raise an exception + calc = [{'func': 'fmv', 'name': 'fmv', 'kwds': {'tas': 'tas2', 'pr': 'pr2', 'random': {}}}] + OcgOperations(dataset=[rd1, rd2], calc=calc) + + # test with an alias map missing + calc = [{'func': 'fmv', 'name': 'fmv', 'kwds': {'pr': 'pr2', 'random': {}}}] + with self.assertRaises(DefinitionValidationError): + OcgOperations(dataset=[rd1, rd2], calc=calc) + + # test with the wrong alias mapped + calc = [{'func': 'fmv', 'name': 'fmv', 'kwds': {'tas': 'tas2', 'pr': 'pr3', 'random': {}}}] + with self.assertRaises(DefinitionValidationError): + OcgOperations(dataset=[rd1, rd2], calc=calc) + + class TestAbstractUnivariateFunction(AbstractTestField): def test_validate_units(self): From 21f0557106f59ae659838119a8036a2783797a41 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Sat, 6 Dec 2014 12:52:30 -0700 Subject: [PATCH 27/71] allow dataset argument to take field objects #338 The dataset argument to operations now takes field objects in addition to request datasets. A number of changes were made to the codebase to handle this new condition. The includes changing how netCDF files are written. There is now a method on dimensions to write to an open netCDF dataset objects. The temporal dimension was also streamlined to include the numeric conversions that were previously in a netCDF-specific temporal dimension object. 
--- doc/api.rst | 5 +- src/ocgis/__init__.py | 1 + src/ocgis/api/collection.py | 16 +- src/ocgis/api/interpreter.py | 6 +- src/ocgis/api/operations.py | 57 +- src/ocgis/api/parms/base.py | 10 +- src/ocgis/api/parms/definition.py | 58 +- src/ocgis/api/request/base.py | 128 ++- src/ocgis/api/request/driver/nc.py | 316 ++++--- src/ocgis/api/request/nc.py.OLD | 648 ------------- src/ocgis/api/subset.py | 69 +- src/ocgis/calc/base.py | 51 +- src/ocgis/calc/engine.py | 2 +- src/ocgis/calc/eval_function.py | 3 +- src/ocgis/constants.py | 26 +- src/ocgis/contrib/library_icclim.py | 74 +- src/ocgis/conv/base.py | 146 +-- src/ocgis/conv/csv_.py | 10 +- src/ocgis/conv/esmpy.py | 53 ++ src/ocgis/conv/fiona_.py | 16 +- src/ocgis/conv/meta.py | 10 + src/ocgis/conv/nc.py | 376 ++++---- src/ocgis/exc.py | 25 +- src/ocgis/interface/base/attributes.py | 32 + src/ocgis/interface/base/crs.py | 126 ++- src/ocgis/interface/base/dimension/base.py | 286 +++--- src/ocgis/interface/base/dimension/spatial.py | 315 ++++--- .../interface/base/dimension/temporal.py | 850 ++++++++++++------ src/ocgis/interface/base/field.py | 418 ++++++--- src/ocgis/interface/base/variable.py | 102 +-- src/ocgis/interface/nc/dimension.py | 4 +- src/ocgis/interface/nc/field.py | 4 +- src/ocgis/interface/nc/spatial.py | 112 +++ src/ocgis/interface/nc/temporal.py | 287 +----- src/ocgis/regrid/base.py | 63 +- src/ocgis/test/base.py | 195 +++- src/ocgis/test/test_base.py | 64 +- .../test_ocgis/test_api/test_collection.py | 85 +- .../test_ocgis/test_api/test_operations.py | 79 +- .../test_api/test_parms/test_definition.py | 320 +++++-- .../test_api/test_request/test_base.py | 153 +++- .../test_request/test_driver/test_nc.py | 208 ++++- .../test/test_ocgis/test_api/test_subset.py | 159 +++- .../test/test_ocgis/test_calc/test_base.py | 58 +- .../test_ocgis/test_calc/test_calc_general.py | 46 +- .../test_dynamic_kernel_percentile.py | 3 +- .../test_calc/test_library/test_math.py | 1 - 
.../test_calc/test_library/test_statistics.py | 3 +- .../test_contrib/test_library_icclim.py | 30 +- .../test/test_ocgis/test_conv/test_base.py | 5 +- .../test/test_ocgis/test_conv/test_esmpy.py | 97 ++ .../test/test_ocgis/test_conv/test_meta.py | 7 + .../test/test_ocgis/test_conv/test_nc.py | 65 +- .../test_base/test_attributes.py | 42 + .../test_interface/test_base/test_crs.py | 116 ++- .../test_base/test_dimension/test_base.py | 417 ++++++--- .../test_base/test_dimension/test_spatial.py | 251 ++++-- .../test_base/test_dimension/test_temporal.py | 511 +++++++++-- .../test_interface/test_base/test_field.py | 717 +++++++++------ .../test_interface/test_base/test_variable.py | 100 ++- .../test_interface/test_nc/test_spatial.py | 95 ++ .../test_interface/test_nc/test_temporal.py | 101 +-- .../test/test_ocgis/test_regrid/test_base.py | 149 ++- .../test/test_ocgis/test_util/test_helpers.py | 68 +- .../test/test_ocgis/test_util/test_inspect.py | 3 +- .../test_spatial/test_spatial_subset.py | 1 - .../test/test_real_data/test_combinatorial.py | 75 ++ src/ocgis/test/test_real_data/test_narccap.py | 21 +- .../test_real_data/test_random_datasets.py | 59 +- src/ocgis/test/test_simple/make_test_data.py | 9 +- .../test/test_simple/test_dependencies.py | 12 + .../test_simple/test_optional_dependencies.py | 6 +- src/ocgis/test/test_simple/test_simple.py | 212 ++--- src/ocgis/util/helpers.py | 780 ++++++++-------- src/ocgis/util/shp_cabinet.py | 34 +- 75 files changed, 6254 insertions(+), 3808 deletions(-) delete mode 100644 src/ocgis/api/request/nc.py.OLD create mode 100644 src/ocgis/conv/esmpy.py create mode 100644 src/ocgis/interface/base/attributes.py create mode 100644 src/ocgis/interface/nc/spatial.py create mode 100644 src/ocgis/test/test_ocgis/test_conv/test_esmpy.py create mode 100644 src/ocgis/test/test_ocgis/test_interface/test_base/test_attributes.py create mode 100644 src/ocgis/test/test_ocgis/test_interface/test_nc/test_spatial.py create mode 100644 
src/ocgis/test/test_real_data/test_combinatorial.py diff --git a/doc/api.rst b/doc/api.rst index 883863ead..4683ed9ca 100644 --- a/doc/api.rst +++ b/doc/api.rst @@ -53,7 +53,10 @@ Additional information on arguments are found in their respective sections. dataset ~~~~~~~ -A `dataset` is the target file(s) where data is stored. A `dataset` may be on the local machine or network location accessible by the software. Unsecured OpenDAP datasets may also be accessed. +A ``dataset`` is the target file(s) or object(s) containing data to process. A ``dataset`` may be: + 1. A file on the local machine or network location accessible by the software (use :class:`~ocgis.RequestDataset` or :class:`~ocgis.RequestDatasetCollection`). + 2. A URL to an unsecured OpenDAP dataset (use :class:`~ocgis.RequestDataset` or :class:`~ocgis.RequestDatasetCollection`). + 3. An OpenClimateGIS field object (use :class:`~Field` or :class:`~ocgis.RequestDatasetCollection`). If a :class:`~ocgis.Field` object is used, be aware operations may modify the object inplace. .. 
autoclass:: ocgis.RequestDataset :members: inspect, inspect_as_dct diff --git a/src/ocgis/__init__.py b/src/ocgis/__init__.py index e5413cc84..825ea51ce 100644 --- a/src/ocgis/__init__.py +++ b/src/ocgis/__init__.py @@ -13,6 +13,7 @@ from ocgis.util.inspect import Inspect from ocgis.util.shp_cabinet import ShpCabinet, ShpCabinetIterator from ocgis.util.zipper import format_return +from ocgis.interface.base.dimension.temporal import TemporalDimension __version__ = '1.0.1' diff --git a/src/ocgis/api/collection.py b/src/ocgis/api/collection.py index 086833e43..5af860ae9 100644 --- a/src/ocgis/api/collection.py +++ b/src/ocgis/api/collection.py @@ -103,7 +103,7 @@ def __init__(self, meta=None, key=None, crs=None, headers=None, value_keys=None) # self._uid_ctr_field = 1 # self._ugid = OrderedDict() - + @property def _archetype_field(self): ukey = self.keys()[0] @@ -138,21 +138,21 @@ def add_field(self, ugid, geom, field, properties=None, name=None): self.update({ugid:{}}) assert(name not in self[ugid]) self[ugid].update({name:field}) - - def get_iter_dict(self,use_upper_keys=False,conversion_map=None): + + def get_iter_dict(self, use_upper_keys=False, conversion_map=None): r_headers = self.headers use_conversion = False if conversion_map is None else True - for ugid,field_dict in self.iteritems(): + for ugid, field_dict in self.iteritems(): for field in field_dict.itervalues(): for row in field.get_iter(value_keys=self.value_keys): row['ugid'] = ugid - yld_row = {k:row[k] for k in r_headers} + yld_row = {k: row.get(k) for k in r_headers} if use_conversion: - for k,v in conversion_map.iteritems(): + for k, v in conversion_map.iteritems(): yld_row[k] = v(yld_row[k]) if use_upper_keys: - yld_row = {k.upper():v for k,v in yld_row.iteritems()} - yield(row['geom'],yld_row) + yld_row = {k.upper(): v for k, v in yld_row.iteritems()} + yield row['geom'], yld_row def get_iter_elements(self): for ugid,fields in self.iteritems(): diff --git a/src/ocgis/api/interpreter.py 
b/src/ocgis/api/interpreter.py index ba663a647..dc9f0f46d 100644 --- a/src/ocgis/api/interpreter.py +++ b/src/ocgis/api/interpreter.py @@ -51,7 +51,7 @@ def execute(self): # flag to indicate a directory is made. mostly a precaution to make sure the appropriate directory is is removed. made_output_directory = False - if self.ops.output_format == 'numpy': + if self.ops.output_format in ['numpy', 'esmpy', 'meta']: # no output directory for numpy output outdir = None else: @@ -77,10 +77,10 @@ def execute(self): ## if file logging is enable, perform some logic based on the operational ## parameters. if env.ENABLE_FILE_LOGGING and self.ops.add_auxiliary_files == True: - if self.ops.output_format == 'numpy': + if self.ops.output_format in ['numpy', 'esmpy', 'meta']: to_file = None else: - to_file = os.path.join(outdir,prefix+'.log') + to_file = os.path.join(outdir, prefix+'.log') else: to_file = None diff --git a/src/ocgis/api/operations.py b/src/ocgis/api/operations.py index e50381a75..9c8412467 100644 --- a/src/ocgis/api/operations.py +++ b/src/ocgis/api/operations.py @@ -1,3 +1,4 @@ +from ocgis.conv.base import AbstractConverter from ocgis.api.parms.definition import * from ocgis.api.interpreter import OcgInterpreter from ocgis import env @@ -6,7 +7,6 @@ from ocgis.calc.base import AbstractMultivariateFunction, AbstractKeyedOutputFunction from ocgis.interface.base.crs import CFRotatedPole, WGS84 from ocgis.api.subset import SubsetOperation -import numpy as np from ocgis.calc.engine import OcgCalculationEngine @@ -27,10 +27,10 @@ class OcgOperations(object): The builtins :func:`__getattribute__` and :func:`__setattr__` are overloaded to perform validation and input formatting. - :param dataset: The target dataset(s) for the request. This is the only required parameter. All elements of datasets - will be processed. 
- :type dataset: :class:`~ocgis.RequestDatasetCollection`, :class:`~ocgis.RequestDataset`, or sequence of :class:`~ocgis.RequestDataset` - objects + :param dataset: The target dataset(s) for the request. This is the only required parameter. All elements of + ``dataset`` will be processed. + :type dataset: :class:`~ocgis.RequestDatasetCollection`, :class:`~ocgis.RequestDataset`/:class:`~ocgis.Field`, or + sequence of :class:`~ocgis.RequestDataset`/:class:`~ocgis.Field` objects :param spatial_operation: The geometric operation to be performed. :type spatial_operation: str :param geom: The selection geometry(s) used for the spatial subset. If `None`, selection defaults to entire spatial @@ -72,8 +72,8 @@ class OcgOperations(object): :param headers: A sequence of strings specifying the output headers. Default value of ('did', 'ugid', 'gid') is always applied. :type headers: sequence - :param format_time: If `True` (the default), attempt to coerce time values to datetime stamps. If `False`, pass - values through without a coercion attempt. + :param format_time: If ``True`` (the default), attempt to coerce time values to datetime stamps. If ``False``, pass + values through without a coercion attempt. This only affects :class:`~ocgis.RequestDataset` objects. :type format_time: bool :param calc_sample_size: If `True`, calculate statistical sample sizes for calculations. :type calc_sample_size: bool @@ -233,33 +233,34 @@ def get_base_request_size(self): msg = 'Base request size not supported with a regrid destination.' 
raise DefinitionValidationError(RegridDestination, msg) - def _get_kb_(dtype,elements): - nbytes = np.array([1],dtype=dtype).nbytes - return(float((elements*nbytes)/1024.0)) - + def _get_kb_(dtype, elements): + nbytes = np.array([1], dtype=dtype).nbytes + return float((elements * nbytes) / 1024.0) + def _get_zero_or_kb_(dimension): - ret = {'shape':None,'kb':0.0,'dtype':None} + ret = {'shape': None, 'kb': 0.0, 'dtype': None} if dimension is not None: try: ret['dtype'] = dimension.dtype ret['shape'] = dimension.shape - ret['kb'] = _get_kb_(dimension.dtype,dimension.shape[0]) - ## dtype may not be available, check if it is the realization dimension. - ## this is often not associated with a variable. + ret['kb'] = _get_kb_(dimension.dtype, dimension.shape[0]) + # dtype may not be available, check if it is the realization dimension. this is often not associated + # with a variable. except ValueError: - if dimension._axis != 'R': + if dimension.axis != 'R': raise - return(ret) - + return ret + ops_size = deepcopy(self) - subset = SubsetOperation(ops_size,request_base_size_only=True) + subset = SubsetOperation(ops_size, request_base_size_only=True) ret = dict(variables={}) for coll in subset: for row in coll.get_iter_melted(): - elements = reduce(lambda x,y: x*y,row['field'].shape) - kb = _get_kb_(row['variable'].dtype,elements) + elements = reduce(lambda x, y: x * y, row['field'].shape) + kb = _get_kb_(row['variable'].dtype, elements) ret['variables'][row['variable_alias']] = {} - ret['variables'][row['variable_alias']]['value'] = {'shape':row['field'].shape,'kb':kb,'dtype':row['variable'].dtype} + ret['variables'][row['variable_alias']]['value'] = {'shape': row['field'].shape, 'kb': kb, + 'dtype': row['variable'].dtype} ret['variables'][row['variable_alias']]['realization'] = _get_zero_or_kb_(row['field'].realization) ret['variables'][row['variable_alias']]['temporal'] = _get_zero_or_kb_(row['field'].temporal) ret['variables'][row['variable_alias']]['level'] = 
_get_zero_or_kb_(row['field'].level) @@ -272,7 +273,7 @@ def _get_zero_or_kb_(dimension): for v3 in v2.itervalues(): total += float(v3['kb']) ret['total'] = total - return(ret) + return ret def get_meta(self): meta_converter = MetaConverter(self) @@ -440,10 +441,8 @@ def _raise_(msg, obj=OutputFormat): # snippet only relevant for subsetting not operations with a calculation or time region if self.snippet: if self.calc is not None: - _raise_( - 'Snippets are not implemented for calculations. Apply a limiting time range for faster responses.', - obj=Snippet) - for rd in self.dataset.itervalues(): + _raise_('Snippets are not implemented for calculations. Apply a limiting time range for faster responses.',obj=Snippet) + for rd in self.dataset.iter_request_datasets(): if rd.time_region is not None: _raise_('Snippets are not implemented for time regions.', obj=Snippet) @@ -468,3 +467,7 @@ def _raise_(msg, obj=OutputFormat): else: for c in self.calc: c['ref'].validate(self) + + # validate the converter + converter_klass = AbstractConverter.get_converter(self.output_format) + converter_klass.validate_ops(self) diff --git a/src/ocgis/api/parms/base.py b/src/ocgis/api/parms/base.py index 0ba01d370..c23671af5 100644 --- a/src/ocgis/api/parms/base.py +++ b/src/ocgis/api/parms/base.py @@ -88,14 +88,18 @@ def finalize(self): pass def get_meta(self): - ''':rtype: list of strings''' + """ + :returns: A list of strings without a new line return. 
+ :rtype: list of str + """ + subrows = self._get_meta_() - if isinstance(subrows,basestring): + if isinstance(subrows, basestring): subrows = [subrows] rows = ['* '+str(self)] rows.extend(subrows) rows.append('') - return(rows) + return rows @classmethod def iter_possible(cls): diff --git a/src/ocgis/api/parms/definition.py b/src/ocgis/api/parms/definition.py index 5a6efbb3f..68a46efaf 100644 --- a/src/ocgis/api/parms/definition.py +++ b/src/ocgis/api/parms/definition.py @@ -411,27 +411,43 @@ class Dataset(base.OcgParameter): name = 'dataset' nullable = False default = None - input_types = [RequestDataset,list,tuple,RequestDatasetCollection,dict] - return_type = RequestDatasetCollection - - def __init__(self,arg): - if arg is not None: - if isinstance(arg,RequestDatasetCollection): - init_value = arg + input_types = [RequestDataset, list, tuple, RequestDatasetCollection, dict, Field] + return_type = [RequestDatasetCollection] + _perform_deepcopy = False + + def __init__(self, init_value): + if init_value is not None: + if isinstance(init_value, RequestDatasetCollection): + init_value = deepcopy(init_value) else: - if isinstance(arg,RequestDataset): - itr = [arg] - elif isinstance(arg,dict): - itr = [arg] + if isinstance(init_value, (RequestDataset, dict, Field)): + itr = [init_value] + elif type(init_value) in [list, tuple]: + itr = init_value else: - itr = arg + should_raise = True + try: + import ESMF + except ImportError: + # ESMF is not a required library + ocgis_lh('Could not import ESMF library.', level=logging.WARN, check_duplicate=True) + else: + if isinstance(init_value, ESMF.Field): + from ocgis.regrid.base import get_ocgis_field_from_esmpy_field + field = get_ocgis_field_from_esmpy_field(init_value) + itr = [field] + should_raise = False + if should_raise: + raise DefinitionValidationError(self, 'Type not accepted: {0}'.format(type(init_value))) rdc = RequestDatasetCollection() for rd in itr: + if not isinstance(rd, Field): + rd = deepcopy(rd) 
rdc.update(rd) init_value = rdc else: - init_value = arg - super(Dataset,self).__init__(init_value) + init_value = init_value + super(Dataset, self).__init__(init_value) def parse_string(self,value): lowered = value.strip() @@ -442,12 +458,18 @@ def parse_string(self,value): return(ret) def get_meta(self): - return(self.value._get_meta_rows_()) + try: + ret = self.value._get_meta_rows_() + except AttributeError: + # likely a field object + ret = ['Field object with name: "{0}"'.format(self.value.name)] + return ret - def _get_meta_(self): pass + def _get_meta_(self): + pass - def _parse_string_(self,lowered): - raise(NotImplementedError) + def _parse_string_(self, lowered): + raise NotImplementedError class DirOutput(base.StringParameter): diff --git a/src/ocgis/api/request/base.py b/src/ocgis/api/request/base.py index 32205a63e..f926adb60 100644 --- a/src/ocgis/api/request/base.py +++ b/src/ocgis/api/request/base.py @@ -3,6 +3,7 @@ import logging import os import itertools +from ocgis.interface.base.field import Field from ocgis.api.collection import AbstractCollection from ocgis.api.request.driver.nc import DriverNetcdf from ocgis.exc import RequestValidationError, NoUnitsError @@ -434,8 +435,9 @@ def _validate_time_subset_(self): class RequestDatasetCollection(AbstractCollection): - '''A set of :class:`ocgis.RequestDataset` objects. - + """ + A set of :class:`ocgis.RequestDataset` and/or :class:`~ocgis.Field` objects. + >>> from ocgis import RequestDatasetCollection, RequestDataset >>> uris = ['http://some.opendap.dataset1', 'http://some.opendap.dataset2'] >>> variables = ['tasmax', 'tasmin'] @@ -446,59 +448,115 @@ class RequestDatasetCollection(AbstractCollection): >>> rdc = RequestDatasetCollection() >>> for rd in request_datasets: ... rdc.update(rd) - - :param request_datasets: A sequence of :class:`ocgis.RequestDataset` objects. 
- :type request_datasets: sequence of :class:`ocgis.RequestDataset` objects - ''' - - def __init__(self, request_datasets=None): + + :param target: A sequence of request dataset or field objects. + :type target: sequence[:class:`~ocgis.RequestDataset` and/or :class:`~ocgis.Field` objects, ...] + """ + + def __init__(self, target=None): super(RequestDatasetCollection, self).__init__() - self._did = [] + self._unique_id_store = [] - if request_datasets is not None: - for rd in get_iter(request_datasets, dtype=(dict, RequestDataset)): - self.update(rd) + if target is not None: + for element in get_iter(target, dtype=(dict, RequestDataset, Field)): + self.update(element) def __str__(self): ret = '{klass}(request_datasets=[{request_datasets}])' request_datasets = ', '.join([str(rd) for rd in self.itervalues()]) return ret.format(klass=self.__class__.__name__, request_datasets=request_datasets) - - def update(self,request_dataset): - """Add a :class:`ocgis.RequestDataset` to the collection. + + def iter_request_datasets(self): + """ + :returns: An iterator over only the request dataset objects contained in the collection. Field objects are + excluded. + :rtype: `~ocgis.RequestDataset` + """ + + for value in self.itervalues(): + if isinstance(value, Field): + continue + else: + yield value + + def update(self, target): + """ + Add an object to the collection. - :param request_dataset: The :class:`ocgis.RequestDataset` to add. - :type request_dataset: :class:`ocgis.RequestDataset` + :param target: The object to add. 
+ :type target: :class:`~ocgis.RequestDataset` or :class:`~ocgis.Field` """ + try: - new_key = request_dataset.name + new_key = target.name except AttributeError: - request_dataset = RequestDataset(**request_dataset) - new_key = request_dataset.name - - if request_dataset.did is None: - if len(self._did) == 0: - did = 1 + target = RequestDataset(**target) + new_key = target.name + + unique_id = self._get_unique_id_(target) + + if unique_id is None: + if len(self._unique_id_store) == 0: + unique_id = 1 else: - did = max(self._did) + 1 - self._did.append(did) - request_dataset.did = did + unique_id = max(self._unique_id_store) + 1 + self._unique_id_store.append(unique_id) + self._set_unique_id_(target, unique_id) else: - self._did.append(request_dataset.did) - + self._unique_id_store.append(unique_id) + if new_key in self._storage: - raise(KeyError('Name "{0}" already in collection. Attempted to add dataset with URI "{1}".'\ - .format(request_dataset.name,request_dataset.uri))) + raise KeyError('Name "{0}" already in collection. Names must be unique'.format(target.name)) else: - self._storage.update({request_dataset.name:request_dataset}) - + self._storage.update({target.name: target}) + def _get_meta_rows_(self): + """ + :returns: A list of strings containing metadata on the collection objects. + :rtype: list[str, ...] + """ + rows = ['* dataset='] for value in self.itervalues(): - rows += value._get_meta_rows_() + try: + rows += value._get_meta_rows_() + except AttributeError: + # likely a field object + msg = '{klass}(name={name}, ...)'.format(klass=value.__class__.__name__, name=value.name) + rows.append(msg) rows.append('') - return(rows) + + return rows + + @staticmethod + def _get_unique_id_(target): + """ + :param target: The object to retrieve the unique identifier from. + :type target: :class:`~ocgis.RequestDataset` or :class:`~ocgis.Field` + :returns: The unique identifier of the object if available. ``None`` will be returned if no unique can be found. 
+ :rtype: int or ``None`` + """ + + try: + ret = target.did + except AttributeError: + ret = target.uid + + return ret + + @staticmethod + def _set_unique_id_(target, uid): + """ + :param target: The target object for setting the unique identifier. + :type target: :class:`~ocgis.RequestDataset` or :class:`~ocgis.Field` + :param int target: The unique identifier. + """ + + if isinstance(target, RequestDataset): + target.did = uid + elif isinstance(target, Field): + target.uid = uid def get_tuple(value): diff --git a/src/ocgis/api/request/driver/nc.py b/src/ocgis/api/request/driver/nc.py index c42bdc8e0..376dd946a 100644 --- a/src/ocgis/api/request/driver/nc.py +++ b/src/ocgis/api/request/driver/nc.py @@ -5,17 +5,18 @@ import numpy as np +from ocgis.interface.nc.spatial import NcSpatialGridDimension from ocgis import constants from ocgis.api.request.driver.base import AbstractDriver from ocgis.exc import ProjectionDoesNotMatch, VariableNotFoundError, DimensionNotFound -from ocgis.interface.base.crs import CFWGS84, CFCoordinateReferenceSystem -from ocgis.interface.base.dimension.spatial import SpatialGridDimension, SpatialDimension +from ocgis.interface.base.crs import CFCoordinateReferenceSystem +from ocgis.interface.base.dimension.spatial import SpatialDimension from ocgis.interface.base.variable import VariableCollection, Variable from ocgis.interface.metadata import NcMetadata from ocgis.interface.nc.dimension import NcVectorDimension from ocgis.interface.nc.field import NcField from ocgis.interface.nc.temporal import NcTemporalDimension -from ocgis.util.helpers import assert_raise, itersubclasses, get_iter +from ocgis.util.helpers import itersubclasses, get_iter from ocgis.util.logging_ocgis import ocgis_lh @@ -36,6 +37,31 @@ def raw_metadata(self): self.close(ds) return self._raw_metadata + def open(self): + try: + ret = nc.Dataset(self.rd.uri, 'r') + except TypeError: + try: + ret = nc.MFDataset(self.rd.uri) + except KeyError as e: + # it is possible the 
variable is not in one of the data URIs. check for this to raise a cleaner error. + for uri in get_iter(self.rd.uri): + ds = nc.Dataset(uri, 'r') + try: + for variable in get_iter(self.rd.variable): + try: + ds.variables[variable] + except KeyError: + msg = 'The variable "{0}" was not found in URI "{1}".'.format(variable, uri) + raise KeyError(msg) + finally: + ds.close() + + # if all variables were found, raise the other error + raise e + + return ret + def close(self, obj): obj.close() @@ -76,127 +102,172 @@ def get_dimensioned_variables(self): return ret - def _get_field_(self, format_time=True, interpolate_spatial_bounds=False): + def get_source_metadata(self): + metadata = self.raw_metadata + + try: + var = metadata['variables'][self.rd._variable[0]] + except KeyError: + raise VariableNotFoundError(self.rd.uri, self.rd._variable[0]) + if self.rd.dimension_map is None: + metadata['dim_map'] = get_dimension_map(var['name'], metadata) + else: + for k, v in self.rd.dimension_map.iteritems(): + try: + variable_name = metadata['variables'][v]['name'] + except KeyError: + variable_name = None + self.rd.dimension_map[k] = {'variable': variable_name, + 'dimension': v, + 'pos': var['dimensions'].index(v)} + metadata['dim_map'] = self.rd.dimension_map + + return metadata + + def _get_vector_dimension_(self, k, v, source_metadata): + """ + :param str k: The string name/key of the dimension to load. + :param dict v: A series of keyword parameters to pass to the dimension class. + :param dict source_metadata: The request dataset's metadata as returned from + :attr:`ocgis.api.request.base.RequestDataset.source_metadata`. + :returns: A vector dimension object linked to the source data. If the variable is not one-dimension return the + ``source_metadata`` reference to the variable. 
+ :rtype: :class:`ocgis.interface.base.dimension.base.VectorDimension` + """ + + # this is the string axis representation + axis_value = v['axis'] + # pull the axis information out of the dimension map + ref_axis = source_metadata['dim_map'].get(axis_value) + # if the axis is not represented, fill it with none. this happens when a dataset does not have a vertical + # level or projection axis for example. + if ref_axis is None: + fill = None + else: + ref_variable = source_metadata['variables'].get(ref_axis['variable']) + + # for data with a projection/realization axis there may be no associated variable. + try: + ref_variable['axis'] = ref_axis + except TypeError: + if axis_value == 'R' and ref_variable is None: + ref_variable = {'axis': ref_axis, 'name': ref_axis['dimension'], 'attrs': {}} + + # realization axes may not have a variable associated with them + if k != 'realization'and len(ref_variable['dimensions']) > 1: + return ref_variable + + # extract the data length to use when creating the source index arrays. + length = source_metadata['dimensions'][ref_axis['dimension']]['len'] + src_idx = np.arange(0, length, dtype=constants.np_int) + + # get the target data type for the dimension + try: + dtype = np.dtype(ref_variable['dtype']) + # the realization dimension may not be a associated with a variable + except KeyError: + if k == 'realization' and ref_variable['axis']['variable'] is None: + dtype = None + else: + raise + + # get the name of the dimension + name = ref_variable['axis']['dimension'] + + # assemble parameters for creating the dimension class then initialize the class. + kwds = dict(name_uid=v['name_uid'], src_idx=src_idx, data=self.rd, meta=ref_variable, axis=axis_value, + name_value=ref_variable.get('name'), dtype=dtype, attrs=ref_variable['attrs'].copy(), + name=name, name_bounds=ref_variable['axis'].get('bounds')) + + # there may be additional parameters for each dimension. 
+ if v['adds'] is not None: + try: + kwds.update(v['adds'](ref_variable['attrs'])) + # adds may not be a callable object. assume they are a dictionary. + except TypeError: + kwds.update(v['adds']) + + # check for the name of the bounds dimension in the source metadata. loop through the dimension map, + # look for a bounds variable, and choose the bounds dimension if possible + name_bounds_suffix = self._get_name_bounds_suffix_(source_metadata) + kwds['name_bounds_suffix'] = name_bounds_suffix + + # create instance of the dimension + fill = v['cls'](**kwds) + + return fill + + def _get_field_(self, format_time=True): """ :param bool format_time: - :param bool interpolate_spatial_bounds: :raises ValueError: """ + # reference the request dataset's source metadata + source_metadata = self.rd.source_metadata + def _get_temporal_adds_(ref_attrs): ## calendar should default to standard if it is not present and the ## t_calendar overload is not used. calendar = self.rd.t_calendar or ref_attrs.get('calendar', None) or 'standard' - return ({'units': self.rd.t_units or ref_attrs['units'], - 'calendar': calendar, - 'format_time': format_time}) + return {'units': self.rd.t_units or ref_attrs['units'], 'calendar': calendar, 'format_time': format_time} - ## this dictionary contains additional keyword arguments for the row - ## and column dimensions. 
- adds_row_col = {'interpolate_bounds': interpolate_spatial_bounds} - - ## parameters for the loading loop + # parameters for the loading loop to_load = {'temporal': {'cls': NcTemporalDimension, 'adds': _get_temporal_adds_, 'axis': 'T', 'name_uid': 'tid', - 'name_value': 'time'}, + 'name': 'time'}, 'level': {'cls': NcVectorDimension, 'adds': None, 'axis': 'Z', 'name_uid': 'lid', - 'name_value': 'level'}, - 'row': {'cls': NcVectorDimension, 'adds': adds_row_col, 'axis': 'Y', 'name_uid': 'row_id', - 'name_value': 'row'}, - 'col': {'cls': NcVectorDimension, 'adds': adds_row_col, 'axis': 'X', 'name_uid': 'col_id', - 'name_value': 'col'}, + 'name': 'level'}, + 'row': {'cls': NcVectorDimension, 'adds': None, 'axis': 'Y', 'name_uid': 'yc_id', + 'name': 'yc'}, + 'col': {'cls': NcVectorDimension, 'adds': None, 'axis': 'X', 'name_uid': 'xc_id', + 'name': 'xc'}, 'realization': {'cls': NcVectorDimension, 'adds': None, 'axis': 'R', 'name_uid': 'rlz_id', 'name_value': 'rlz'}} - loaded = {} + loaded = {} + kwds_grid = {} + has_row_column = True for k, v in to_load.iteritems(): - ## this is the string axis representation - axis_value = v['axis'] or v['cls']._axis - ## pull the axis information out of the dimension map - ref_axis = self.rd.source_metadata['dim_map'].get(axis_value) - ref_axis = self.rd.source_metadata['dim_map'].get(axis_value) - ## if the axis is not represented, fill it with none. this happens - ## when a dataset does not have a vertical level or projection axis - ## for example. - if ref_axis is None: - fill = None - else: - ref_variable = self.rd.source_metadata['variables'].get(ref_axis['variable']) - - ## for data with a projection/realization axis there may be no - ## associated variable. - try: - ref_variable['axis'] = ref_axis - except TypeError: - if axis_value == 'R' and ref_variable is None: - ref_variable = {'axis': ref_axis, 'name': ref_axis['dimension'], 'attrs': {}} - - ## extract the data length to use when creating the source index - ## arrays. 
- length = self.rd.source_metadata['dimensions'][ref_axis['dimension']]['len'] - src_idx = np.arange(0, length, dtype=constants.np_int) - - ## get the target data type for the dimension - try: - dtype = np.dtype(ref_variable['dtype']) - ## the realization dimension may not be a associated with a variable - except KeyError: - if k == 'realization' and ref_variable['axis']['variable'] is None: - dtype = None - else: - raise - - ## assemble parameters for creating the dimension class then initialize - ## the class. - kwds = dict(name_uid=v['name_uid'], name_value=v['name_value'], src_idx=src_idx, - data=self.rd, meta=ref_variable, axis=axis_value, name=ref_variable.get('name'), - dtype=dtype) - - ## there may be additional parameters for each dimension. - if v['adds'] is not None: - try: - kwds.update(v['adds'](ref_variable['attrs'])) - ## adds may not be a callable object. assume they are a - ## dictionary. - except TypeError: - kwds.update(v['adds']) - kwds.update({'name': ref_variable.get('name')}) - fill = v['cls'](**kwds) - + fill = self._get_vector_dimension_(k, v, source_metadata) + if k != 'realization' and not isinstance(fill, NcVectorDimension) and fill is not None: + assert k in ('row', 'col') + has_row_column = False + kwds_grid[k] = fill loaded[k] = fill - assert_raise(set(('temporal', 'row', 'col')).issubset(set([k for k, v in loaded.iteritems() if v != None])), - logger='request', - exc=ValueError('Target variable must at least have temporal, row, and column dimensions.')) - - grid = SpatialGridDimension(row=loaded['row'], col=loaded['col']) + loaded_keys = set([k for k, v in loaded.iteritems() if v is not None]) + if has_row_column: + if not {'temporal', 'row', 'col'}.issubset(loaded_keys): + raise ValueError('Target variable must at least have temporal, row, and column dimensions.') + kwds_grid = {'row': loaded['row'], 'col': loaded['col']} + else: + shape_src_idx = [source_metadata['dimensions'][xx]['len'] for xx in kwds_grid['row']['dimensions']] + 
src_idx = {} + src_idx['row'] = np.arange(0, shape_src_idx[0], dtype=constants.np_int) + src_idx['col'] = np.arange(0, shape_src_idx[1], dtype=constants.np_int) + name_row = kwds_grid['row']['name'] + name_col = kwds_grid['col']['name'] + kwds_grid = {'name_row': name_row, 'name_col': name_col, 'data': self.rd, 'src_idx': src_idx} - # crs = None - # if rd.crs is not None: - # crs = rd.crs - # else: - # crs = rd._get_crs_(rd._variable[0]) - # if crs is None: - # ocgis_lh('No "grid_mapping" attribute available assuming WGS84: {0}'.format(rd.uri), - # 'request', logging.WARN) - # crs = CFWGS84() + grid = NcSpatialGridDimension(**kwds_grid) spatial = SpatialDimension(name_uid='gid', grid=grid, crs=self.rd.crs, abstraction=self.rd.s_abstraction) vc = VariableCollection() for vdict in self.rd: - variable_meta = deepcopy(self.rd._source_metadata['variables'][vdict['variable']]) + variable_meta = deepcopy(source_metadata['variables'][vdict['variable']]) variable_units = vdict['units'] or variable_meta['attrs'].get('units') dtype = np.dtype(variable_meta['dtype']) fill_value = variable_meta['fill_value'] variable = Variable(vdict['variable'], vdict['alias'], units=variable_units, meta=variable_meta, data=self.rd, conform_units_to=vdict['conform_units_to'], dtype=dtype, - fill_value=fill_value) + fill_value=fill_value, attrs=variable_meta['attrs'].copy()) vc.add_variable(variable) ret = NcField(variables=vc, spatial=spatial, temporal=loaded['temporal'], level=loaded['level'], - realization=loaded['realization'], meta=deepcopy(self.rd._source_metadata), uid=self.rd.did, - name=self.rd.name) + realization=loaded['realization'], meta=source_metadata.copy(), uid=self.rd.did, + name=self.rd.name, attrs=source_metadata['dataset'].copy()) ## apply any subset parameters after the field is loaded if self.rd.time_range is not None: @@ -217,52 +288,27 @@ def _get_temporal_adds_(ref_attrs): return ret - def get_source_metadata(self): - metadata = self.raw_metadata - - try: - var = 
metadata['variables'][self.rd._variable[0]] - except KeyError: - raise VariableNotFoundError(self.rd.uri, self.rd._variable[0]) - if self.rd.dimension_map is None: - metadata['dim_map'] = get_dimension_map(var['name'], metadata) - else: - for k, v in self.rd.dimension_map.iteritems(): - try: - variable_name = metadata['variables'][v]['name'] - except KeyError: - variable_name = None - self.rd.dimension_map[k] = {'variable': variable_name, - 'dimension': v, - 'pos': var['dimensions'].index(v)} - metadata['dim_map'] = self.rd.dimension_map - - return metadata + @staticmethod + def _get_name_bounds_suffix_(source_metadata): + """ + :param dict source_metadata: Metadata dictionary as returned from :attr:`~ocgis.RequestDataset.source_metadata`. + :returns: The name of the bounds suffix to use when creating dimensions. If no bounds are found in the source + metadata return ``None``. + :rtype: str or None + """ - def open(self): - try: - ret = nc.Dataset(self.rd.uri, 'r') - except TypeError: + name_bounds_suffix = None + for v2 in source_metadata['dim_map'].itervalues(): + # it is possible the dimension itself is none try: - ret = nc.MFDataset(self.rd.uri) - except KeyError as e: - # it is possible the variable is not in one of the data URIs. check for this to raise a cleaner error. 
- for uri in get_iter(self.rd.uri): - ds = nc.Dataset(uri, 'r') - try: - for variable in get_iter(self.rd.variable): - try: - ds.variables[variable] - except KeyError: - msg = 'The variable "{0}" was not found in URI "{1}".'.format(variable, uri) - raise KeyError(msg) - finally: - ds.close() - - # if all variables were found, raise the other error - raise e - - return ret + if v2 is not None and v2['bounds'] is not None: + name_bounds_suffix = source_metadata['variables'][v2['bounds']]['dimensions'][1] + break + except KeyError: + # bounds key is likely just not there + if 'bounds' in v2: + raise + return name_bounds_suffix def get_axis(dimvar, dims, dim): diff --git a/src/ocgis/api/request/nc.py.OLD b/src/ocgis/api/request/nc.py.OLD deleted file mode 100644 index 47ad410f0..000000000 --- a/src/ocgis/api/request/nc.py.OLD +++ /dev/null @@ -1,648 +0,0 @@ -import itertools -from ocgis.exc import DefinitionValidationError, ProjectionDoesNotMatch, \ - DimensionNotFound, NoUnitsError, VariableNotFoundError, RequestValidationError -from copy import deepcopy -import inspect -import os -from ocgis import env, constants -from ocgis.util.helpers import locate, validate_time_subset, itersubclasses, \ - assert_raise, get_iter -import netCDF4 as nc -from ocgis.interface.metadata import NcMetadata -from ocgis.util.logging_ocgis import ocgis_lh -import logging -from ocgis.interface.nc.temporal import NcTemporalDimension -import numpy as np -from ocgis.interface.base.dimension.spatial import SpatialGridDimension, \ - SpatialDimension -from ocgis.interface.base.crs import CFCoordinateReferenceSystem, CFWGS84 -from ocgis.interface.nc.dimension import NcVectorDimension -from ocgis.interface.nc.field import NcField -from ocgis.interface.base.variable import Variable, VariableCollection -from ocgis.util.inspect import Inspect - - -class NcRequestDataset(object): - - def __init__(self, uri=None, variable=None, alias=None, units=None, time_range=None, time_region=None, - 
level_range=None, conform_units_to=None, s_crs=None, t_units=None, t_calendar=None, did=None, - meta=None, s_abstraction=None, dimension_map=None, name=None): - - self._is_init = True - - if uri is None: - raise RequestValidationError('uri', 'Cannot be None') - else: - self._uri = self._get_uri_(uri) - self.variable = variable - - self.alias = alias - self.name = name - self.time_range = time_range - self.time_region = time_region - self.level_range = level_range - self.s_crs = deepcopy(s_crs) - self.t_units = t_units - self.t_calendar = t_calendar - self.dimension_map = deepcopy(dimension_map) - self.did = did - self.meta = meta or {} - - self.__source_metadata__ = None - self.units = units - self.conform_units_to = conform_units_to - - self.s_abstraction = s_abstraction - try: - self.s_abstraction = self.s_abstraction.lower() - assert self.s_abstraction in ('point', 'polygon') - except AttributeError: - if s_abstraction is None: - pass - else: - raise - - self._is_init = False - - self._validate_time_subset_() - - def __iter__(self): - attrs = ['_alias', '_variable', '_units', '_conform_units_to'] - for ii in range(len(self)): - yield {a[1:]: getattr(self, a)[ii] for a in attrs} - - def __len__(self,): - return len(self._variable) - - def _open_(self): - try: - ret = nc.Dataset(self.uri, 'r') - except TypeError: - ret = nc.MFDataset(self.uri) - return ret - - @property - def level_range(self): - return self._level_range.value - - @level_range.setter - def level_range(self, value): - from ocgis.api.parms.definition import LevelRange - - self._level_range = LevelRange(value) - - @property - def time_range(self): - return self._time_range.value - - @time_range.setter - def time_range(self, value): - from ocgis.api.parms.definition import TimeRange - - self._time_range = TimeRange(value) - ## ensure the time range and region overlaps - if not self._is_init: - self._validate_time_subset_() - - @property - def time_region(self): - return self._time_region.value - - 
@time_region.setter - def time_region(self, value): - from ocgis.api.parms.definition import TimeRegion - - self._time_region = TimeRegion(value) - ## ensure the time range and region overlaps - if not self._is_init: - self._validate_time_subset_() - - def _get_units_from_metadata_(self, variable): - return self._source_metadata['variables'][variable]['attrs'].get('units') - - @property - def _source_metadata(self): - if self.__source_metadata__ is None: - ds = self._open_() - try: - metadata = NcMetadata(ds) - try: - var = ds.variables[self._variable[0]] - except KeyError: - raise VariableNotFoundError(self.uri, self._variable[0]) - if self.dimension_map is None: - metadata['dim_map'] = get_dimension_map(ds, var, metadata) - else: - for k, v in self.dimension_map.iteritems(): - try: - variable_name = ds.variables.get(v)._name - except AttributeError: - variable_name = None - self.dimension_map[k] = {'variable': variable_name, - 'dimension': v, - 'pos': var.dimensions.index(v)} - metadata['dim_map'] = self.dimension_map - self.__source_metadata__ = metadata - finally: - ds.close() - return self.__source_metadata__ - - def get(self, **kwargs): - if not get_is_none(self._conform_units_to): - src_units = [] - dst_units = [] - for rdict in self: - if rdict['conform_units_to'] is not None: - variable_units = rdict.get('units') or self._get_units_from_metadata_(rdict['variable']) - if variable_units is None: - raise NoUnitsError(rdict['variable']) - src_units.append(variable_units) - dst_units.append(rdict['conform_units_to']) - validate_unit_equivalence(src_units, dst_units) - return self._get_(**kwargs) - - def _get_(self, format_time=True, interpolate_spatial_bounds=False): - """ - :param bool format_time: - :param bool interpolate_spatial_bounds: - :raises ValueError: - """ - - def _get_temporal_adds_(ref_attrs): - ## calendar should default to standard if it is not present and the - ## t_calendar overload is not used. 
- calendar = self.t_calendar or ref_attrs.get('calendar', None) or 'standard' - - return ({'units': self.t_units or ref_attrs['units'], - 'calendar': calendar, - 'format_time': format_time}) - - ## this dictionary contains additional keyword arguments for the row - ## and column dimensions. - adds_row_col = {'interpolate_bounds': interpolate_spatial_bounds} - - ## parameters for the loading loop - to_load = {'temporal': {'cls': NcTemporalDimension, 'adds': _get_temporal_adds_, 'axis': 'T', 'name_uid': 'tid', - 'name_value': 'time'}, - 'level': {'cls': NcVectorDimension, 'adds': None, 'axis': 'Z', 'name_uid': 'lid', - 'name_value': 'level'}, - 'row': {'cls': NcVectorDimension, 'adds': adds_row_col, 'axis': 'Y', 'name_uid': 'row_id', - 'name_value': 'row'}, - 'col': {'cls': NcVectorDimension, 'adds': adds_row_col, 'axis': 'X', 'name_uid': 'col_id', - 'name_value': 'col'}, - 'realization': {'cls': NcVectorDimension, 'adds': None, 'axis': 'R', 'name_uid': 'rlz_id', - 'name_value': 'rlz'}} - loaded = {} - - for k, v in to_load.iteritems(): - ## this is the string axis representation - axis_value = v['axis'] or v['cls']._axis - ## pull the axis information out of the dimension map - ref_axis = self._source_metadata['dim_map'].get(axis_value) - ref_axis = self._source_metadata['dim_map'].get(axis_value) - ## if the axis is not represented, fill it with none. this happens - ## when a dataset does not have a vertical level or projection axis - ## for example. - if ref_axis is None: - fill = None - else: - ref_variable = self._source_metadata['variables'].get(ref_axis['variable']) - - ## for data with a projection/realization axis there may be no - ## associated variable. - try: - ref_variable['axis'] = ref_axis - except TypeError: - if axis_value == 'R' and ref_variable is None: - ref_variable = {'axis': ref_axis, 'name': ref_axis['dimension'], 'attrs': {}} - - ## extract the data length to use when creating the source index - ## arrays. 
- length = self._source_metadata['dimensions'][ref_axis['dimension']]['len'] - src_idx = np.arange(0, length, dtype=constants.np_int) - - ## get the target data type for the dimension - try: - dtype = np.dtype(ref_variable['dtype']) - ## the realization dimension may not be a associated with a variable - except KeyError: - if k == 'realization' and ref_variable['axis']['variable'] == None: - dtype = None - else: - raise - - ## assemble parameters for creating the dimension class then initialize - ## the class. - kwds = dict(name_uid=v['name_uid'], name_value=v['name_value'], src_idx=src_idx, - data=self, meta=ref_variable, axis=axis_value, name=ref_variable.get('name'), - dtype=dtype) - - ## there may be additional parameters for each dimension. - if v['adds'] is not None: - try: - kwds.update(v['adds'](ref_variable['attrs'])) - ## adds may not be a callable object. assume they are a - ## dictionary. - except TypeError: - kwds.update(v['adds']) - kwds.update({'name': ref_variable.get('name')}) - fill = v['cls'](**kwds) - - loaded[k] = fill - - assert_raise(set(('temporal', 'row', 'col')).issubset(set([k for k, v in loaded.iteritems() if v != None])), - logger='request', - exc=ValueError('Target variable must at least have temporal, row, and column dimensions.')) - - grid = SpatialGridDimension(row=loaded['row'], col=loaded['col']) - crs = None - if self.s_crs is not None: - crs = self.s_crs - else: - crs = self._get_crs_(self._variable[0]) - if crs is None: - ocgis_lh('No "grid_mapping" attribute available assuming WGS84: {0}'.format(self.uri), - 'request', logging.WARN) - crs = CFWGS84() - - # ## rotated pole coordinate systems require transforming the coordinates to - # ## WGS84 before they may be loaded. - # if isinstance(crs,CFRotatedPole): - # msg = 'CFRotatedPole projection found. 
Transforming coordinates to WGS84 and replacing the CRS with CFWGS84' - # ocgis_lh(msg=msg,logger='request.nc',level=logging.WARN) - # grid = get_rotated_pole_spatial_grid_dimension(crs,grid) - # crs = CFWGS84() - - spatial = SpatialDimension(name_uid='gid', grid=grid, crs=crs, abstraction=self.s_abstraction) - - vc = VariableCollection() - for vdict in self: - variable_meta = deepcopy(self._source_metadata['variables'][vdict['variable']]) - variable_units = vdict['units'] or variable_meta['attrs'].get('units') - dtype = np.dtype(variable_meta['dtype']) - fill_value = variable_meta['fill_value'] - variable = Variable(vdict['variable'], vdict['alias'], units=variable_units, meta=variable_meta, - data=self, conform_units_to=vdict['conform_units_to'], dtype=dtype, - fill_value=fill_value) - vc.add_variable(variable) - - ret = NcField(variables=vc, spatial=spatial, temporal=loaded['temporal'], level=loaded['level'], - realization=loaded['realization'], meta=deepcopy(self._source_metadata), uid=self.did, - name=self.name) - - ## apply any subset parameters after the field is loaded - if self.time_range is not None: - ret = ret.get_between('temporal', min(self.time_range), max(self.time_range)) - if self.time_region is not None: - ret = ret.get_time_region(self.time_region) - if self.level_range is not None: - try: - ret = ret.get_between('level', min(self.level_range), max(self.level_range)) - except AttributeError: - ## there may be no level dimension - if ret.level == None: - msg = ("A level subset was requested but the target dataset does not have a level dimension. The " - "dataset's alias is: {0}".format(self.alias)) - raise (ValueError(msg)) - else: - raise - - return ret - - def inspect(self): - '''Print inspection output using :class:`~ocgis.Inspect`. This is a - convenience method.''' - - ip = Inspect(request_dataset=self) - return ip - - def inspect_as_dct(self): - ''' - Return a dictionary representation of the target's metadata. If the variable - is `None`. 
An attempt will be made to find the target dataset's time bounds - raising a warning if none is found or the time variable is lacking units - and/or calendar attributes. - - >>> rd = ocgis.RequestDataset('rhs_day_CanCM4_decadal2010_r2i1p1_20110101-20201231.nc','rhs') - >>> ret = rd.inspect_as_dct() - >>> ret.keys() - ['dataset', 'variables', 'dimensions', 'derived'] - >>> ret['derived'] - OrderedDict([('Start Date', '2011-01-01 12:00:00'), ('End Date', '2020-12-31 12:00:00'), ('Calendar', '365_day'), ('Units', 'days since 1850-1-1'), ('Resolution (Days)', '1'), ('Count', '8192'), ('Has Bounds', 'True'), ('Spatial Reference', 'WGS84'), ('Proj4 String', '+proj=longlat +datum=WGS84 +no_defs '), ('Extent', '(-1.40625, -90.0, 358.59375, 90.0)'), ('Interface Type', 'NcPolygonDimension'), ('Resolution', '2.80091351339')]) - - :rtype: :class:`collections.OrderedDict` - ''' - ip = Inspect(request_dataset=self) - ret = ip._as_dct_() - return ret - - @property - def interface(self): - attrs = ['s_crs', 't_units', 't_calendar', 's_abstraction'] - ret = {attr: getattr(self, attr) for attr in attrs} - return ret - - @property - def alias(self): - return get_first_or_sequence(self._alias) - - @alias.setter - def alias(self, value): - if value is None: - self._alias = deepcopy(self._variable) - else: - self._alias = get_tuple(value) - if len(self._alias) != len(self._variable): - raise RequestValidationError('alias', 'Each variable must have an alias. 
The sequence lengths differ.') - - @property - def name(self): - if self._name is None: - ret = '_'.join(self._alias) - else: - ret = self._name - return ret - - @name.setter - def name(self, value): - self._name = value - - @property - def conform_units_to(self): - return get_first_or_sequence(self._conform_units_to) - - @conform_units_to.setter - def conform_units_to(self, value): - if value is not None: - value = get_tuple(value) - if len(value) != len(self._variable): - raise RequestValidationError('conform_units_to', ('Must match "variable" element-wise. The sequence ' - 'lengths differ.')) - validate_units('conform_units_to', value) - else: - value = tuple([None]*len(self._variable)) - self._conform_units_to = value - - @property - def units(self): - return get_first_or_sequence(self._units) - - @units.setter - def units(self, value): - if value is None: - value = tuple([None]*len(self._variable)) - else: - value = get_tuple(value) - if len(value) != len(self._variable): - raise RequestValidationError('units', ('Must match "variable" element-wise. 
The sequence ' - 'lengths differ.')) - if env.USE_CFUNITS: - validate_units('units', value) - self._units = value - - @property - def uri(self): - return get_first_or_sequence(self._uri) - - @property - def variable(self): - return get_first_or_sequence(self._variable) - - @variable.setter - def variable(self, value): - if value is None: - raise RequestValidationError('variable', 'Cannot be None') - else: - self._variable = get_tuple(value) - - def __eq__(self, other): - if isinstance(other, self.__class__): - return self.__dict__ == other.__dict__ - else: - return False - - def __str__(self): - msg = '{0}({1})' - argspec = inspect.getargspec(self.__class__.__init__) - parms = [] - for name in argspec.args: - if name == 'self': - continue - else: - as_str = '{0}={1}' - value = getattr(self, name) - if isinstance(value, basestring): - fill = '"{0}"'.format(value) - else: - fill = value - as_str = as_str.format(name, fill) - parms.append(as_str) - msg = msg.format(self.__class__.__name__, ','.join(parms)) - return msg - - def _get_crs_(self, variable): - crs = None - for potential in itersubclasses(CFCoordinateReferenceSystem): - try: - crs = potential.load_from_metadata(variable, self._source_metadata) - break - except ProjectionDoesNotMatch: - continue - return crs - - def _get_uri_(self, uri, ignore_errors=False, followlinks=True): - out_uris = [] - if isinstance(uri, basestring): - uris = [uri] - else: - uris = uri - assert (len(uri) >= 1) - for uri in uris: - ret = None - ## check if the path exists locally - if os.path.exists(uri) or '://' in uri: - ret = uri - ## if it does not exist, check the directory locations - else: - if env.DIR_DATA is not None: - if isinstance(env.DIR_DATA, basestring): - dirs = [env.DIR_DATA] - else: - dirs = env.DIR_DATA - for directory in dirs: - for filepath in locate(uri, directory, followlinks=followlinks): - ret = filepath - break - if ret is None: - if not ignore_errors: - raise (ValueError( - 'File not found: "{0}". 
Check env.DIR_DATA or ensure a fully qualified URI is used.'.format( - uri))) - else: - if not os.path.exists(ret) and not ignore_errors: - raise (ValueError( - 'Path does not exist and is likely not a remote URI: "{0}". Set "ignore_errors" to True if this is not the case.'.format( - ret))) - out_uris.append(ret) - return out_uris - - def _get_meta_rows_(self): - if self.time_range is None: - tr = None - else: - tr = '{0} to {1} (inclusive)'.format(self.time_range[0], self.time_range[1]) - if self.level_range is None: - lr = None - else: - lr = '{0} to {1} (inclusive)'.format(self.level_range[0], self.level_range[1]) - - rows = [' URI: {0}'.format(self.uri), - ' Variable: {0}'.format(self.variable), - ' Alias: {0}'.format(self.alias), - ' Time Range: {0}'.format(tr), - ' Time Region/Selection: {0}'.format(self.time_region), - ' Level Range: {0}'.format(lr), - ' Overloaded Parameters:', - ' PROJ4 String: {0}'.format(self.s_crs), - ' Time Units: {0}'.format(self.t_units), - ' Time Calendar: {0}'.format(self.t_calendar)] - return rows - - def _validate_time_subset_(self): - if not validate_time_subset(self.time_range, self.time_region): - raise RequestValidationError("time_range/time_region", '"time_range" and "time_region" must overlap.') - - -def get_axis(dimvar, dims, dim): - try: - axis = getattr(dimvar, 'axis') - except AttributeError: - ocgis_lh('Guessing dimension location with "axis" attribute missing for variable "{0}".'.format(dimvar._name), - logger='nc.dataset', - level=logging.WARN, - check_duplicate=True) - axis = guess_by_location(dims, dim) - return axis - - -def get_dimension_map(ds, var, metadata): - dims = var.dimensions - mp = dict.fromkeys(['T', 'Z', 'X', 'Y']) - - ## try to pull dimensions - for dim in dims: - dimvar = None - try: - dimvar = ds.variables[dim] - except KeyError: - ## search for variable with the matching dimension - for key, value in metadata['variables'].iteritems(): - if len(value['dimensions']) == 1 and value['dimensions'][0] 
== dim: - dimvar = ds.variables[key] - break - ## the dimension variable may not exist - if dimvar is None: - msg = 'Dimension variable not found for axis: "{0}". You may need to use the "dimension_map" parameter.'.format( - dim) - ocgis_lh(logger='request.nc', exc=DimensionNotFound(msg)) - axis = get_axis(dimvar, dims, dim) - ## pull metadata information the variable and dimension names - mp[axis] = {'variable': dimvar._name, 'dimension': dim} - try: - mp[axis].update({'pos': var.dimensions.index(dimvar._name)}) - except ValueError: - ## variable name may differ from the dimension name - mp[axis].update({'pos': var.dimensions.index(dimvar.dimensions[0])}) - - ## look for bounds variables - bounds_names = set(constants.name_bounds) - for key, value in mp.iteritems(): - if value is None: - continue - bounds_var = None - var = ds.variables[value['variable']] - intersection = list(bounds_names.intersection(set(var.ncattrs()))) - try: - bounds_var = ds.variables[getattr(var, intersection[0])]._name - except KeyError: - ## the data has listed a bounds variable, but the variable is not - ## actually present in the dataset. - ocgis_lh( - 'Bounds listed for variable "{0}" but the destination bounds variable "{1}" does not exist.'.format( - var._name, getattr(var, intersection[0])), - logger='nc.dataset', - level=logging.WARNING, - check_duplicate=True) - bounds_var = None - except IndexError: - ## if no bounds variable is found for time, it may be a climatological. - if key == 'T': - try: - bounds_var = getattr(ds.variables[value['variable']], 'climatology') - ocgis_lh('Climatological bounds found for variable: {0}'.format(var._name), - logger='request.nc', - level=logging.INFO) - ## climatology is not found on time axis - except AttributeError: - pass - ## bounds variable not found by other methods - if bounds_var is None: - ocgis_lh( - 'No bounds attribute found for variable "{0}". 
Searching variable dimensions for bounds information.'.format( - var._name), - logger='request.nc', - level=logging.WARN, - check_duplicate=True) - bounds_names_copy = bounds_names.copy() - bounds_names_copy.update([value['dimension']]) - for key2, value2 in metadata['variables'].iteritems(): - intersection = bounds_names_copy.intersection(set(value2['dimensions'])) - if len(intersection) == 2: - bounds_var = ds.variables[key2]._name - value.update({'bounds': bounds_var}) - return mp - - -def guess_by_location(dims, target): - mp = {3: {0: 'T', 1: 'Y', 2: 'X'}, - 4: {0: 'T', 2: 'Y', 3: 'X', 1: 'Z'}} - return mp[len(dims)][dims.index(target)] - - -def get_tuple(value): - if isinstance(value, basestring) or value is None: - ret = (value,) - else: - ret = tuple(value) - return ret - - -def get_first_or_sequence(value): - if len(value) > 1: - ret = value - else: - ret = value[0] - return ret - - -def get_is_none(value): - return all([v is None for v in get_iter(value)]) - - -def validate_units(keyword, sequence): - from cfunits import Units - try: - map(Units, sequence) - except ValueError as e: - raise RequestValidationError(keyword, e.message) - - -def validate_unit_equivalence(src_units, dst_units): - ## import the cfunits package and attempt to construct a units object. 
- ## if this is okay, save the units string - from cfunits import Units - for s, d in itertools.izip(src_units, dst_units): - if not Units(s).equivalent(Units(d)): - raise RequestValidationError('conform_units_to', - 'The units specified in "conform_units_to" ("{0}") are not equivalent to the source units "{1}".'.\ - format(d.format(names=True), s.format(names=True))) diff --git a/src/ocgis/api/subset.py b/src/ocgis/api/subset.py index c362d9c00..63282b2e7 100644 --- a/src/ocgis/api/subset.py +++ b/src/ocgis/api/subset.py @@ -1,17 +1,20 @@ +import logging +from copy import deepcopy, copy + +import numpy as np +from shapely.geometry import Point, MultiPoint + from ocgis.calc.engine import OcgCalculationEngine from ocgis import env, constants -from ocgis.exc import EmptyData, ExtentError, MaskedDataError, EmptySubsetError, VariableInCollectionError +from ocgis.exc import EmptyData, ExtentError, MaskedDataError, EmptySubsetError, VariableInCollectionError, \ + BoundsAlreadyAvailableError from ocgis.interface.base.field import Field from ocgis.util.logging_ocgis import ocgis_lh, ProgressOcgOperations -import logging from ocgis.api.collection import SpatialCollection from ocgis.interface.base.crs import CFWGS84, CFRotatedPole, Spherical, WGS84, WrappableCoordinateReferenceSystem from ocgis.calc.base import AbstractMultivariateFunction, AbstractKeyedOutputFunction from ocgis.util.helpers import get_default_or_apply -from copy import deepcopy, copy -import numpy as np from ocgis.calc.eval_function import MultivariateEvalFunction -from shapely.geometry import Point, MultiPoint from ocgis.interface.base.dimension.spatial import SpatialGeometryPolygonDimension @@ -106,8 +109,20 @@ def _iter_collections_(self): ## process the data collections for rds in itr_rd: - msg = 'Processing URI(s): {0}'.format([rd.uri for rd in rds]) - ocgis_lh(msg=msg,logger=self._subset_log) + + try: + msg = 'Processing URI(s): {0}'.format([rd.uri for rd in rds]) + except AttributeError: + # field 
objects do not have uris associated with them + msg = [] + for rd in rds: + try: + msg.append(rd.uri) + except AttributeError: + # likely a field object + msg.append(rd.name) + msg = 'Processing URI(s) / field names: {0}'.format(msg) + ocgis_lh(msg=msg, logger=self._subset_log) for coll in self._process_subsettables_(rds): ## if there are calculations, do those now and return a new type of collection @@ -181,12 +196,38 @@ def _process_subsettables_(self, rds): ocgis_lh('processing...',self._subset_log,alias=alias,level=logging.DEBUG) ## return the field object try: - ## look for field optimizations + # look for field optimizations if self.ops.optimizations is not None and 'fields' in self.ops.optimizations: field = [self.ops.optimizations['fields'][rd.alias] for rd in rds] + # no field optimizations, extract the target data from the dataset collection else: - field = [rd.get(format_time=self.ops.format_time, - interpolate_spatial_bounds=self.ops.interpolate_spatial_bounds) for rd in rds] + len_rds = len(rds) + field = [None]*len_rds + for ii in range(len_rds): + rds_element = rds[ii] + try: + field_object = rds_element.get(format_time=self.ops.format_time) + except AttributeError: + # likely a field object which does not need to be loaded from source + if not self.ops.format_time: + raise NotImplementedError + field_object = rds_element + + # extrapolate the spatial bounds if requested + if self.ops.interpolate_spatial_bounds: + try: + try: + field_object.spatial.grid.row.set_extrapolated_bounds() + field_object.spatial.grid.col.set_extrapolated_bounds() + except AttributeError: + # row/col is likely none. attempt to extrapolate using the grid values + field_object.spatial.grid.set_extrapolated_corners() + except BoundsAlreadyAvailableError: + msg = 'Bounds/corners already on object. Ignoring "interpolate_spatial_bounds".' 
+ ocgis_lh(msg=msg, logger=self._subset_log, level=logging.WARNING) + + field[ii] = field_object + # update the spatial abstraction to match the operations value. sfield will be none if the operation returns # empty and it is allowed to have empty returns. for f in field: @@ -211,8 +252,8 @@ def _process_subsettables_(self, rds): # this error is related to subsetting by time or level. spatial subsetting occurs below. except EmptySubsetError as e: if self.ops.allow_empty: - ocgis_lh(msg='time or level subset empty but empty returns allowed', - logger=self._subset_log,level=logging.WARN) + ocgis_lh(msg='time or level subset empty but empty returns allowed', logger=self._subset_log, + level=logging.WARN) coll = SpatialCollection(headers=headers) coll.add_field(1, None, None, name='_'.join([rd.name for rd in rds])) try: @@ -220,7 +261,7 @@ def _process_subsettables_(self, rds): finally: return else: - ocgis_lh(exc=ExtentError(message=str(e)),alias=rd.alias,logger=self._subset_log) + ocgis_lh(exc=ExtentError(message=str(e)), alias=str([rd.name for rd in rds]), logger=self._subset_log) ## set iterator based on presence of slice. slice always overrides geometry. if self.ops.slice is not None: @@ -450,7 +491,7 @@ def _get_regridded_field_with_subset_(self, sfield, subset_sdim_for_regridding=N :param bool with_buffer: If ``True``, buffer the geometry used to subset the destination grid. 
""" - # todo: cache spatial operations on regrid destination field + #todo: cache spatial operations on regrid destination field from ocgis.regrid.base import iter_regridded_fields from ocgis.util.spatial.spatial_subset import SpatialSubsetOperation diff --git a/src/ocgis/calc/base.py b/src/ocgis/calc/base.py index ae8918f9a..271ad2c01 100644 --- a/src/ocgis/calc/base.py +++ b/src/ocgis/calc/base.py @@ -1,3 +1,4 @@ +from collections import OrderedDict from copy import deepcopy import numpy as np import abc @@ -238,6 +239,30 @@ def validate_units(self, *args, **kwargs): def _add_to_collection_(self, units=None, value=None, parent_variables=None, alias=None, dtype=None, fill_value=None): + """ + :param str units: The units for the derived variable. + + >>> units = 'kelvin' + + :param value: The value for the derived variable. + :type value: :class:`numpy.ma.core.MaskedArray` or dict + + >>> import numpy as np + >>> value = np.zeros((2, 3, 4, 5, 6)) + >>> value = np.ma.array(value) + + *or* + + >>> sample_size = value.copy() + >>> sample_size[:] = 5 + >>> value = {'fill': value, 'sample_size': sample_size} + + :param parent_variables: A variable collection containing variable data used to derive the current output. + :type parent_variables: :class:`ocgis.interface.base.variable.VariableCollection` + :param str alias: The alias of the derived variable. + :param type dtype: The type of the derived variable. + :param fill_value: The mask fill value of the derived variable. 
+ """ # dtype should come in with each new variable assert (dtype is not None) @@ -256,14 +281,12 @@ def _add_to_collection_(self, units=None, value=None, parent_variables=None, ali alias = alias or self.alias fdef = self.get_function_definition() - meta = {'attrs': {'standard_name': self.standard_name, 'long_name': self.long_name}} - parents = VariableCollection(variables=parent_variables) - # attempt to copy the grid_mapping attribute for the derived variable - try: - meta['attrs']['grid_mapping'] = parents.first().meta['attrs']['grid_mapping'] - except KeyError: - pass + attrs = OrderedDict() + attrs['standard_name'] = self.standard_name + attrs['long_name'] = self.long_name + + parents = VariableCollection(variables=parent_variables) # if the operation is file only, creating a variable with an empty value will raise an exception. pass a dummy # data source because even if the value is trying to be loaded it should not be accessible! @@ -272,23 +295,27 @@ def _add_to_collection_(self, units=None, value=None, parent_variables=None, ali else: data = None - dv = DerivedVariable(name=self.key, alias=alias, units=units, value=fill, fdef=fdef, parents=parents, meta=meta, - data=data, dtype=dtype, fill_value=fill_value) + dv = DerivedVariable(name=self.key, alias=alias, units=units, value=fill, fdef=fdef, parents=parents, data=data, + dtype=dtype, fill_value=fill_value, attrs=attrs) # allow more complex manipulations of metadata self.set_variable_metadata(dv) # overload the metadata attributes with any provided if self.meta_attrs is not None: - dv.meta['attrs'].update(self.meta_attrs) + # dv.meta['attrs'].update(self.meta_attrs) + dv.attrs.update(self.meta_attrs) # add the variable to the variable collection self._set_derived_variable_alias_(dv, parent_variables) self.vc.add_variable(dv) # add the sample size if it is present in the fill dictionary if sample_size is not None: - meta = {'attrs': {'standard_name': 'sample_size', 'long_name': 'Statistical Sample Size'}} + 
# meta = {'attrs': {'standard_name': 'sample_size', 'long_name': 'Statistical Sample Size'}} + attrs = OrderedDict() + attrs['standard_name'] = constants.default_sample_size_standard_name + attrs['long_name'] = constants.default_sample_size_long_name dv = DerivedVariable(name=None, alias='n_' + dv.alias, units=None, value=sample_size, fdef=None, - parents=parents, meta=meta, dtype=constants.np_int, fill_value=fill_value) + parents=parents, dtype=constants.np_int, fill_value=fill_value, attrs=attrs) self.vc.add_variable(dv) @abc.abstractmethod diff --git a/src/ocgis/calc/engine.py b/src/ocgis/calc/engine.py index 4fd1569d5..6b5a21258 100644 --- a/src/ocgis/calc/engine.py +++ b/src/ocgis/calc/engine.py @@ -163,6 +163,6 @@ def execute(self,coll,file_only=False,tgds=None): new_temporal = new_temporal or out_field.temporal new_field = klass(variables=out_vc,temporal=new_temporal,spatial=out_field.spatial, level=out_field.level,realization=out_field.realization,meta=out_field.meta, - uid=out_field.uid,name=out_field.name) + uid=out_field.uid,name=out_field.name,attrs=out_field.attrs) coll[ugid][alias_field] = new_field return(coll) diff --git a/src/ocgis/calc/eval_function.py b/src/ocgis/calc/eval_function.py index 17c221aa5..88b59b41d 100644 --- a/src/ocgis/calc/eval_function.py +++ b/src/ocgis/calc/eval_function.py @@ -58,7 +58,8 @@ def _execute_(self): if self.file_only: fill = self._empty_fill ## evaluate the expression and update the data type. - ## TODO: with numpy 1.8.+ you can do the type modification inplace. this + + #todo: with numpy 1.8.+ you can do the type modification inplace. this ## will make the type conversion operation less memory intensive. else: fill = eval(expr) diff --git a/src/ocgis/constants.py b/src/ocgis/constants.py index d9c95d0cf..bd41930fa 100644 --- a/src/ocgis/constants.py +++ b/src/ocgis/constants.py @@ -6,6 +6,30 @@ #: Default netCDF4 output file type netCDF_default_data_model = 'NETCDF4' +#: Default temporal calendar. 
+default_temporal_calendar = 'standard' + +#: Default temporal units. +default_temporal_units = 'days since 0001-01-01 00:00:00' + +#: Default name for coordinate systems in netCDF file if none is provided. +default_coordinate_system_name = 'coordinate_system' + +#: Default sample size variable standard name. +default_sample_size_standard_name = 'sample_size' + +#: Default sample size variable long name. +default_sample_size_long_name = 'Statistical Sample Size' + +#: Default row coordinate name. +default_name_row_coordinates = 'yc' + +#: Default column coordinate name. +default_name_col_coordinates = 'xc' + +#: Default corners dimension name. +default_name_corners_dimension = 'ncorners' + #: Standard headers for subset operations. raw_headers = ['did','vid','ugid','tid','lid','gid','variable','alias','time','year','month','day','level','value'] #: Standard headers for computation. @@ -13,8 +37,6 @@ #: Standard headers for multivariate calculations. multi_headers = ['did','cid','ugid','tid','lid','gid','calc_key','calc_alias','time','year','month','day','level','value'] -level_headers = ['lid','level'] - #: Required headers for every request. 
required_headers = ['did','ugid','gid'] diff --git a/src/ocgis/contrib/library_icclim.py b/src/ocgis/contrib/library_icclim.py index c89b692d7..803913c92 100644 --- a/src/ocgis/contrib/library_icclim.py +++ b/src/ocgis/contrib/library_icclim.py @@ -1,3 +1,4 @@ +from ocgis.interface.base.attributes import Attributes from ocgis.calc.base import AbstractUnivariateSetFunction,\ AbstractMultivariateFunction from ocgis import constants @@ -48,22 +49,16 @@ } -class NcVariableSimulator(object): - - def __init__(self,meta): - self.meta = meta - - def setncattr(self,key,value): - self.meta['attrs'][key] = value - - -class NcDatasetSimulator(NcVariableSimulator): - - def __getattr__(self,name): - return(self.meta['dataset'][name]) - - def setncattr(self,key,value): - self.meta['dataset'][key] = value +class NcAttributesSimulator(object): + + def __init__(self, attrs): + self.attrs = attrs + + def __getattr__(self, name): + return self.attrs[name] + + def setncattr(self, key, value): + self.attrs[key] = value class AbstractIcclimFunction(object): @@ -76,10 +71,7 @@ class AbstractIcclimFunction(object): _allowed_temporal_groupings = [('month',),('month','year'),('year',)] def set_field_metadata(self): - sim = NcDatasetSimulator(self.field.meta) - - ## we are going to strip the metadata elements and store in a dictionary - ## JSON representation + # we are going to strip the metadata elements and store in a dictionary JSON representation def _get_value_(key,target): try: @@ -95,37 +87,33 @@ def _get_value_(key,target): except KeyError: pass return('',key) - - ## reorganize the output metadata pushing source global attributes to a - ## new attribute. 
the old attributes are serialized to a JSON string - original = deepcopy(sim.meta['dataset']) - sim.meta['dataset'] = OrderedDict() - sim.meta['dataset'][self._global_attribute_source_name] = original - ## copy attributes from the original dataset + + # reorganize the output metadata pushing source global attributes to a new attribute. the old attributes are + # serialized to a JSON string + original = deepcopy(self.field.attrs) + self.field.attrs = OrderedDict() + sim = NcAttributesSimulator(self.field.attrs) + sim.attrs[self._global_attribute_source_name] = original + # copy attributes from the original dataset for key in self._global_attributes_maintain: - value,value_key = _get_value_(key,sim.meta['dataset'][self._global_attribute_source_name]) - sim.meta['dataset'][value_key] = value - ref = sim.meta['dataset'][self._global_attribute_source_name] - sim.meta['dataset'][self._global_attribute_source_name] = self._get_json_string_(ref) + value,value_key = _get_value_(key,sim.attrs[self._global_attribute_source_name]) + sim.attrs[value_key] = value + ref = sim.attrs[self._global_attribute_source_name] + sim.attrs[self._global_attribute_source_name] = self._get_json_string_(ref) - ## update global attributes using ICCLIM functions + # update global attributes using ICCLIM functions indice_name = self.key.split('_')[1] - set_globattr.history(sim, - self.tgd.grouping, - indice_name, - [self.field.temporal.value_datetime.min(), - self.field.temporal.value_datetime.max()]) + set_globattr.history(sim,self.tgd.grouping,indice_name,[self.field.temporal.value_datetime.min(),self.field.temporal.value_datetime.max()]) set_globattr.title(sim,indice_name) set_globattr.references(sim) set_globattr.institution(sim,'Climate impact portal (http://climate4impact.eu)') set_globattr.comment(sim,indice_name) - - def set_variable_metadata(self,variable): - sim = NcVariableSimulator(variable.meta) + + def set_variable_metadata(self, variable): + sim = 
NcAttributesSimulator(variable.attrs) _icclim_function_map[self.key]['meta'](sim) - ## update the variable's units from the metadata as this is modified - ## inside ICCLIM - variable.units = variable.meta['attrs']['units'] + # update the variable's units from the metadata as this is modified inside ICCLIM + variable.units = variable.attrs['units'] @staticmethod def _get_json_string_(dct): diff --git a/src/ocgis/conv/base.py b/src/ocgis/conv/base.py index 5d0f0980a..5f4e4d018 100644 --- a/src/ocgis/conv/base.py +++ b/src/ocgis/conv/base.py @@ -1,40 +1,43 @@ -from ocgis.conv.meta import MetaConverter import os.path import abc import csv -from ocgis.util.helpers import get_ordered_dicts_from_records_array -from ocgis.util.inspect import Inspect -from ocgis.util.logging_ocgis import ocgis_lh import logging +from csv import DictWriter + from shapely.geometry.multipolygon import MultiPolygon from shapely.geometry.polygon import Polygon import fiona from shapely.geometry.geo import mapping -from csv import DictWriter + +from ocgis.interface.base.field import Field +from ocgis.conv.meta import MetaConverter +from ocgis.util.helpers import get_ordered_dicts_from_records_array +from ocgis.util.inspect import Inspect +from ocgis.util.logging_ocgis import ocgis_lh class AbstractConverter(object): - '''Base converter object. Intended for subclassing. - - :param colls: A sequence of `~ocgis.OcgCollection` objects. - :type colls: sequence of `~ocgis.OcgCollection` objects + """ + Base converter object. Intended for subclassing. + + :param colls: A sequence of :class:`~ocgis.SpatialCollection` objects. + :type colls: sequence of :class:`~ocgis.SpatialCollection` :param str outdir: Path to the output directory. :param str prefix: The string prepended to the output file or directory. - :param :class:~`ocgis.OcgOperations ops: Optional operations definition. This - is required for some converters. + :param :class:~`ocgis.OcgOperations ops: Optional operations definition. 
This is required for some converters. :param bool add_meta: If False, do not add a source and OCGIS metadata file. - :param bool add_auxiliary_files: If False, do not create an output folder. Write - only the target ouput file. + :param bool add_auxiliary_files: If False, do not create an output folder. Write only the target ouput file. :parm bool overwrite: If True, attempt to overwrite any existing output files. - ''' + """ + __metaclass__ = abc.ABCMeta _ext = None - _add_did_file = True ## add a descriptor file for the request datasets - _add_ugeom = False ## added user geometry in the output folder - _add_ugeom_nest = True ## nest the user geometry in a shp folder - _add_source_meta = True ## add a source metadata file - - def __init__(self,colls,outdir,prefix,ops=None,add_meta=True,add_auxiliary_files=True, + _add_did_file = True # add a descriptor file for the request datasets + _add_ugeom = False # added user geometry in the output folder + _add_ugeom_nest = True # nest the user geometry in a shp folder + _add_source_meta = True # add a source metadata file + + def __init__(self, colls, outdir=None, prefix=None, ops=None, add_meta=True, add_auxiliary_files=True, overwrite=False): self.colls = colls self.ops = ops @@ -44,17 +47,17 @@ def __init__(self,colls,outdir,prefix,ops=None,add_meta=True,add_auxiliary_files self.add_auxiliary_files = add_auxiliary_files self.overwrite = overwrite self._log = ocgis_lh.get_logger('conv') - + if self._ext is None: self.path = self.outdir else: - self.path = os.path.join(self.outdir,prefix+'.'+self._ext) + self.path = os.path.join(self.outdir, prefix + '.' + self._ext) if os.path.exists(self.path): if not self.overwrite: msg = 'Output path exists "{0}" and must be removed before proceeding. 
Set "overwrite" argument or env.OVERWRITE to True to overwrite.'.format(self.path) - ocgis_lh(logger=self._log,exc=IOError(msg)) - - ocgis_lh('converter initialized',level=logging.DEBUG,logger=self._log) + raise IOError(msg) + + ocgis_lh('converter initialized', level=logging.DEBUG, logger=self._log) def _build_(self,*args,**kwds): raise(NotImplementedError) @@ -216,41 +219,55 @@ def write(self): with open(out_path,'w') as f: f.write(lines) - ## add the dataset descriptor file if specified and OCGIS operations - ## are present. + # add the dataset descriptor file if requested if self._add_did_file: - ocgis_lh('writing dataset description (DID) file','conv',logging.DEBUG) + ocgis_lh('writing dataset description (DID) file', 'conv', logging.DEBUG) from ocgis.conv.csv_ import OcgDialect - - headers = ['DID','VARIABLE','ALIAS','URI','STANDARD_NAME','UNITS','LONG_NAME'] - out_path = os.path.join(self.outdir,self.prefix+'_did.csv') - with open(out_path,'w') as f: - writer = csv.writer(f,dialect=OcgDialect) + + headers = ['DID', 'VARIABLE', 'ALIAS', 'URI', 'STANDARD_NAME', 'UNITS', 'LONG_NAME'] + out_path = os.path.join(self.outdir, self.prefix + '_did.csv') + with open(out_path, 'w') as f: + writer = csv.writer(f, dialect=OcgDialect) writer.writerow(headers) for rd in self.ops.dataset.itervalues(): - for d in rd: - row = [rd.did,d['variable'],d['alias'],rd.uri] - ref_variable = rd.source_metadata['variables'][d['variable']]['attrs'] - row.append(ref_variable.get('standard_name',None)) - row.append(ref_variable.get('units',None)) - row.append(ref_variable.get('long_name',None)) - writer.writerow(row) - - ## add source metadata if requested + try: + for d in rd: + row = [rd.did, d['variable'], d['alias'], rd.uri] + ref_variable = rd.source_metadata['variables'][d['variable']]['attrs'] + row.append(ref_variable.get('standard_name', None)) + row.append(ref_variable.get('units', None)) + row.append(ref_variable.get('long_name', None)) + writer.writerow(row) + except 
NotImplementedError: + if isinstance(rd, Field): + for variable in rd.variables.itervalues(): + row = [rd.uid, variable.name, variable.alias, None, variable.attrs.get('standard_name'), variable.units, variable.attrs.get('long_name')] + writer.writerow(row) + else: + raise + + # add source metadata if requested if self._add_source_meta: - ocgis_lh('writing source metadata file','conv',logging.DEBUG) - out_path = os.path.join(self.outdir,self.prefix+'_source_metadata.txt') + ocgis_lh('writing source metadata file', 'conv', logging.DEBUG) + out_path = os.path.join(self.outdir, self.prefix + '_source_metadata.txt') to_write = [] + for rd in self.ops.dataset.itervalues(): - ip = Inspect(meta=rd.source_metadata, uri=rd.uri) - to_write += ip.get_report_no_variable() - with open(out_path,'w') as f: + try: + metadata = rd.source_metadata + except AttributeError: + # assume field object and do not write anything + continue + else: + ip = Inspect(meta=metadata, uri=rd.uri) + to_write += ip.get_report_no_variable() + with open(out_path, 'w') as f: f.writelines('\n'.join(to_write)) - + ## return the internal path unless overloaded by subclasses. 
ret = self._get_return_() - return(ret) + return ret @classmethod def get_converter_map(cls): @@ -260,16 +277,17 @@ def get_converter_map(cls): # from ocgis.conv.shpidx import ShpIdxConverter # from ocgis.conv.keyed import KeyedConverter from ocgis.conv.nc import NcConverter - - mmap = {'shp':ShpConverter, - 'csv':CsvConverter, - 'csv+':CsvPlusConverter, - 'numpy':NumpyConverter, - 'geojson':GeoJsonConverter, -# 'shpidx':ShpIdxConverter, -# 'keyed':KeyedConverter, - 'nc':NcConverter} - return(mmap) + + mmap = {'shp': ShpConverter, + 'csv': CsvConverter, + 'csv+': CsvPlusConverter, + 'numpy': NumpyConverter, + 'geojson': GeoJsonConverter, + # 'shpidx':ShpIdxConverter, + # 'keyed':KeyedConverter, + 'nc': NcConverter, + 'meta': MetaConverter} + return mmap @classmethod def get_converter(cls,output_format): @@ -282,3 +300,13 @@ def get_converter(cls,output_format): AbstractConverter''' return(cls.get_converter_map()[output_format]) + + @classmethod + def validate_ops(cls, ops): + """ + Validate an operations object. + + :param ops: The input operations object to validate. + :type ops: :class:`ocgis.OcgOperations` + :raises: DefinitionValidationError + """ diff --git a/src/ocgis/conv/csv_.py b/src/ocgis/conv/csv_.py index cb1f7a079..2d5f5eb7e 100644 --- a/src/ocgis/conv/csv_.py +++ b/src/ocgis/conv/csv_.py @@ -53,7 +53,15 @@ def _build_(self,coll): if not self.ops.aggregate: fiona_path = os.path.join(self._get_or_create_shp_folder_(),self.prefix+'_gid.shp') archetype_field = coll._archetype_field - fiona_crs = archetype_field.spatial.crs.value + + try: + fiona_crs = archetype_field.spatial.crs.value + except AttributeError: + if archetype_field.spatial.crs is None: + raise ValueError('"crs" is None. 
A coordinate systems is required for writing to Fiona output.') + else: + raise + fiona_schema = {'geometry':archetype_field.spatial.abstraction_geometry._geom_type, 'properties':OrderedDict([['DID','int'],['UGID','int'],['GID','int']])} fiona_object = fiona.open(fiona_path,'w',driver='ESRI Shapefile',crs=fiona_crs,schema=fiona_schema) diff --git a/src/ocgis/conv/esmpy.py b/src/ocgis/conv/esmpy.py new file mode 100644 index 000000000..190f9f243 --- /dev/null +++ b/src/ocgis/conv/esmpy.py @@ -0,0 +1,53 @@ +import ESMF +from ocgis.conv.base import AbstractConverter +from ocgis.exc import DefinitionValidationError +from ocgis.regrid.base import get_esmf_grid_from_sdim + + +class ESMPyConverter(AbstractConverter): + #todo: doc + + def __init__(self, *args, **kwargs): + self.with_corners = kwargs.pop('with_corners', True) + self.value_mask = kwargs.pop('value_mask', None) + self.esmf_field_name = kwargs.pop('esmf_field_name', None) + super(ESMPyConverter, self).__init__(*args, **kwargs) + + def __iter__(self): + for coll in self.colls: + yield coll + + @classmethod + def validate_ops(cls, ops): + msg = None + if len(ops.dataset) > 1: + msg = 'Only one requested dataset may be written for "esmpy" output.' + target = 'dataset' + elif ops.spatial_operation == 'clip': + msg = 'Clip operations not allowed for "esmpy" output.' + target = 'spatial_operation' + elif ops.select_ugid is not None and not ops.agg_selection and len(ops.select_ugid) > 1: + msg = 'Only one selection geometry allowed for "esmpy" output.' + target = 'select_ugid' + elif ops.aggregate: + msg = 'No spatial aggregation for "esmpy" output.' 
+ target = 'aggregate' + + if msg is not None: + raise DefinitionValidationError(target, msg) + + def write(self): + #todo: doc + + for coll in self.colls: + """:type coll: :class:`ocgis.api.collection.SpatialCollection`""" + for row in coll.get_iter_melted(): + field = row['field'] + variable = row['variable'] + egrid = get_esmf_grid_from_sdim(field.spatial, with_corners=self.with_corners, + value_mask=self.value_mask) + esmf_field_name = self.esmf_field_name or variable.alias + efield = ESMF.Field(egrid, esmf_field_name, ndbounds=field.shape[0:-2], mask_values=[0]) + efield[:] = variable.value + + return efield \ No newline at end of file diff --git a/src/ocgis/conv/fiona_.py b/src/ocgis/conv/fiona_.py index 1a193becf..9dc3eeb99 100644 --- a/src/ocgis/conv/fiona_.py +++ b/src/ocgis/conv/fiona_.py @@ -88,7 +88,16 @@ def _build_(self, coll): # pull the fiona schema properties together by mapping fiona types to the data types of the first row of the # output data file archetype_field = coll._archetype_field - fiona_crs = archetype_field.spatial.crs.value + + try: + crs = archetype_field.spatial.crs + fiona_crs = crs.value + except AttributeError: + if crs is None: + raise ValueError('"crs" is None. 
A coordinate system is required for writing to Fiona output.') + else: + raise + geom, arch_row = coll.get_iter_dict().next() fiona_properties = OrderedDict() for header in coll.headers: @@ -147,10 +156,7 @@ def _write_coll_(self, f, coll): fiona_object = f['fiona_object'] for geom, properties in coll.get_iter_dict(use_upper_keys=True, conversion_map=f['fiona_conversion']): to_write = {'geometry': mapping(geom), 'properties': properties} - try: - fiona_object.write(to_write) - except Exception as e: - import ipdb;ipdb.set_trace() + fiona_object.write(to_write) class ShpConverter(FionaConverter): diff --git a/src/ocgis/conv/meta.py b/src/ocgis/conv/meta.py index a7cc63356..96eb61d06 100644 --- a/src/ocgis/conv/meta.py +++ b/src/ocgis/conv/meta.py @@ -62,6 +62,16 @@ def get_rows(self): else: ret.append(line) return(ret) + + @classmethod + def validate_ops(cls, ops): + """ + Validate an operations object. + + :param ops: The input operations object to validate. + :type ops: :class:`ocgis.OcgOperations` + :raises: DefinitionValidationError + """ def write(self): return('\n'.join(self.get_rows())) diff --git a/src/ocgis/conv/nc.py b/src/ocgis/conv/nc.py index 774897e66..5fa46788c 100644 --- a/src/ocgis/conv/nc.py +++ b/src/ocgis/conv/nc.py @@ -3,207 +3,225 @@ from ocgis.conv.base import AbstractConverter import netCDF4 as nc from ocgis import constants -from ocgis.util.logging_ocgis import ocgis_lh -from ocgis.interface.base.crs import CFWGS84 -from ocgis.interface.nc.temporal import NcTemporalGroupDimension - + class NcConverter(AbstractConverter): _ext = 'nc' - - def _finalize_(self,ds): + + def _finalize_(self, ds): ds.close() - - def _build_(self,coll): - ds = nc.Dataset(self.path,'w',format=self._get_file_format_()) - return(ds) - + + def _build_(self, coll): + ds = nc.Dataset(self.path, 'w', format=self._get_file_format_()) + return ds + def _get_file_format_(self): file_format = set() - ## if no operations are present, use the default data model + # if no 
operations are present, use the default data model if self.ops is None: ret = constants.netCDF_default_data_model else: - for rd in self.ops.dataset.itervalues(): + for rd in self.ops.dataset.iter_request_datasets(): rr = rd.source_metadata['file_format'] - if isinstance(rr,basestring): + if isinstance(rr, basestring): tu = [rr] else: tu = rr file_format.update(tu) if len(file_format) > 1: - exc = ValueError('Multiple file formats found: {0}'.format(file_format)) - ocgis_lh(exc=exc,logger='conv.nc') + raise ValueError('Multiple file formats found: {0}'.format(file_format)) else: - ret = list(file_format)[0] - return(ret) + try: + ret = list(file_format)[0] + except IndexError: + # likely all field objects in the dataset. use the default netcdf data model + ret = constants.netCDF_default_data_model + return ret - def _write_coll_(self,ds,coll): - - ## get the target field from the collection - arch = coll._archetype_field - - ## reference the interfaces - grid = arch.spatial.grid - temporal = arch.temporal - level = arch.level - meta = arch.meta - - # loop through the dimension map, look for a bounds variable, and choose the bounds dimension if possible - bounds_name = None - for k, v in meta['dim_map'].iteritems(): - # it is possible the dimension itself is none - if v is not None and v['bounds'] is not None: - bounds_name = meta['variables'][v['bounds']]['dimensions'][1] - break - # if the name of the bounds dimension was not found, choose the default - bounds_name = bounds_name or constants.ocgis_bounds - - ## add dataset/global attributes - for key,value in meta['dataset'].iteritems(): - setattr(ds,key,value) + def _write_coll_(self, ds, coll): + """ + Write a spatial collection to an open netCDF4 dataset object. 
- ## make dimensions ##################################################### - - ## time dimensions - name_dim_temporal = meta['dim_map']['T']['dimension'] - name_bounds_temporal = meta['dim_map']['T']['bounds'] - name_variable_temporal = meta['dim_map']['T']['variable'] - - dim_temporal = ds.createDimension(name_dim_temporal) + :param ds: An open dataset object. + :type ds: :class:`netCDF4.Dataset` + :param coll: The collection containing data to write. + :type coll: :class:`~ocgis.SpatialCollection` + """ - ## spatial dimensions - dim_row = ds.createDimension(grid.row.meta['dimensions'][0],grid.row.shape[0]) - dim_col = ds.createDimension(grid.col.meta['dimensions'][0],grid.col.shape[0]) - if grid.row.bounds is None: - dim_bnds = None - else: - dim_bnds = ds.createDimension(bounds_name,2) - - ## set data + attributes ############################################### - - ## time variable - time_nc_value = arch.temporal.value + # get the target field from the collection + arch = coll._archetype_field + """:type arch: :class:`ocgis.Field`""" - ## if bounds are available for the time vector transform those as well - - ## flag to indicate climatology bounds are present and hence the normal - ## bounds attribute should be not be added. 
- has_climatology_bounds = False - - if isinstance(temporal,NcTemporalGroupDimension): - ## update flag to indicate climatology bounds are present on the - ## output dataset - has_climatology_bounds = True - if dim_bnds is None: - dim_bnds = ds.createDimension(bounds_name,2) - times_bounds = ds.createVariable('climatology_bounds',time_nc_value.dtype, - (dim_temporal._name,bounds_name)) - times_bounds[:] = temporal.bounds - ## place units and calendar on time dimensions - times_bounds.units = temporal.units - times_bounds.calendar = temporal.calendar - elif temporal.bounds is not None: - if dim_bnds is None: - dim_bnds = ds.createDimension(bounds_name,2) - time_bounds_nc_value = temporal.bounds - times_bounds = ds.createVariable(name_bounds_temporal,time_bounds_nc_value.dtype,(dim_temporal._name,bounds_name)) - times_bounds[:] = time_bounds_nc_value - for key,value in meta['variables'][name_bounds_temporal]['attrs'].iteritems(): - setattr(times_bounds,key,value) - ## place units and calendar on time dimensions - times_bounds.units = temporal.units - times_bounds.calendar = temporal.calendar - times = ds.createVariable(name_variable_temporal,time_nc_value.dtype,(dim_temporal._name,)) - times[:] = time_nc_value - - ## always place calendar and units on time dimension - times.units = temporal.units - times.calendar = temporal.calendar + # get from operations if this is file only. 
+ try: + is_file_only = self.ops.file_only + except AttributeError: + # no operations object available + is_file_only = False - ## add time attributes - for key,value in meta['variables'][name_variable_temporal]['attrs'].iteritems(): - ## leave off the normal bounds attribute - if has_climatology_bounds and key == 'bounds': - if key == 'bounds': - continue - setattr(times,key,value) + arch.write_to_netcdf_dataset(ds, file_only=is_file_only) - ## add climatology bounds - if isinstance(temporal,NcTemporalGroupDimension): - setattr(times,'climatology','climatology_bounds') - - ## level variable - ## if there is no level on the variable no need to build one. - if level is None: - dim_level = None - ## if there is a level, create the dimension and set the variable. - else: - name_dim_level = meta['dim_map']['Z']['dimension'] - name_bounds_level = meta['dim_map']['Z']['bounds'] - name_variable_level = meta['dim_map']['Z']['variable'] - - dim_level = ds.createDimension(name_dim_level,len(arch.level.value)) - levels = ds.createVariable(name_variable_level,arch.level.value.dtype,(dim_level._name,)) - levels[:] = arch.level.value - for key,value in meta['variables'][name_variable_level]['attrs'].iteritems(): - setattr(levels,key,value) - if level.bounds is not None: - if dim_bnds is None: - dim_bnds = ds.createDimension(bounds_name,2) - levels_bounds = ds.createVariable(name_bounds_level,arch.level.value.dtype,(dim_level._name,bounds_name)) - levels_bounds[:] = arch.level.bounds - for key,value in meta['variables'][name_bounds_level]['attrs'].iteritems(): - setattr(levels,key,value) - if dim_level is not None: - value_dims = (dim_temporal._name,dim_level._name,dim_row._name,dim_col._name) - else: - value_dims = (dim_temporal._name,dim_row._name,dim_col._name) - - ## spatial variables ################################################### - - ## create and fill a spatial variable - def _make_spatial_variable_(ds,name,values,dimension_tuple,meta): - ret = 
ds.createVariable(name,values.dtype,[d._name for d in dimension_tuple]) - ret[:] = values - ## add variable attributes - try: - for key,value in meta['variables'][name]['attrs'].iteritems(): - setattr(ret,key,value) - except KeyError: - pass - return(ret) - ## set the spatial data - _make_spatial_variable_(ds,grid.row.meta['axis']['variable'],grid.row.value,(dim_row,),meta) - _make_spatial_variable_(ds,grid.col.meta['axis']['variable'],grid.col.value,(dim_col,),meta) - if grid.row.bounds is not None: - _make_spatial_variable_(ds,grid.row.meta['axis']['bounds'],grid.row.bounds,(dim_row,dim_bnds),meta) - _make_spatial_variable_(ds,grid.col.meta['axis']['bounds'],grid.col.bounds,(dim_col,dim_bnds),meta) - - ## set the variable(s) ################################################# - - ## loop through variables - for variable in arch.variables.itervalues(): - value = ds.createVariable(variable.alias, variable.dtype, value_dims, - fill_value=variable.fill_value) - ## if this is a file only operation, set the value, otherwise leave - ## it empty for now. - try: - is_file_only = self.ops.file_only - ## this will happen if there is no operations object. - except AttributeError: - is_file_only = False - if not is_file_only: - value[:] = variable.value.reshape(*value.shape) - value.setncatts(variable.meta['attrs']) - ## and the units, converting to string as passing a NoneType will raise - ## an exception. 
- value.units = '' if variable.units is None else variable.units - - ## add projection variable if applicable ############################### - - if not isinstance(arch.spatial.crs, CFWGS84): - arch.spatial.crs.write_to_rootgrp(ds, meta) + # ## reference the interfaces + # grid = arch.spatial.grid + # temporal = arch.temporal + # level = arch.level + # meta = arch.meta + # + # # loop through the dimension map, look for a bounds variable, and choose the bounds dimension if possible + # bounds_name = None + # for k, v in meta['dim_map'].iteritems(): + # # it is possible the dimension itself is none + # if v is not None and v['bounds'] is not None: + # bounds_name = meta['variables'][v['bounds']]['dimensions'][1] + # break + # # if the name of the bounds dimension was not found, choose the default + # bounds_name = bounds_name or constants.ocgis_bounds + # + # ## add dataset/global attributes + # for key,value in meta['dataset'].iteritems(): + # setattr(ds,key,value) + # + # ## make dimensions ##################################################### + # + # ## time dimensions + # name_dim_temporal = meta['dim_map']['T']['dimension'] + # name_bounds_temporal = meta['dim_map']['T']['bounds'] + # name_variable_temporal = meta['dim_map']['T']['variable'] + # + # dim_temporal = ds.createDimension(name_dim_temporal) + # + # ## spatial dimensions + # dim_row = ds.createDimension(grid.row.meta['dimensions'][0],grid.row.shape[0]) + # dim_col = ds.createDimension(grid.col.meta['dimensions'][0],grid.col.shape[0]) + # if grid.row.bounds is None: + # dim_bnds = None + # else: + # dim_bnds = ds.createDimension(bounds_name,2) + # + # ## set data + attributes ############################################### + # + # ## time variable + # time_nc_value = arch.temporal.value + # + # ## if bounds are available for the time vector transform those as well + # + # ## flag to indicate climatology bounds are present and hence the normal + # ## bounds attribute should be not be added. 
+ # has_climatology_bounds = False + # + # if isinstance(temporal,TemporalGroupDimension): + # ## update flag to indicate climatology bounds are present on the + # ## output dataset + # has_climatology_bounds = True + # if dim_bnds is None: + # dim_bnds = ds.createDimension(bounds_name,2) + # times_bounds = ds.createVariable('climatology_bounds',time_nc_value.dtype, + # (dim_temporal._name,bounds_name)) + # times_bounds[:] = temporal.bounds + # ## place units and calendar on time dimensions + # times_bounds.units = temporal.units + # times_bounds.calendar = temporal.calendar + # elif temporal.bounds is not None: + # if dim_bnds is None: + # dim_bnds = ds.createDimension(bounds_name,2) + # time_bounds_nc_value = temporal.bounds + # times_bounds = ds.createVariable(name_bounds_temporal,time_bounds_nc_value.dtype,(dim_temporal._name,bounds_name)) + # times_bounds[:] = time_bounds_nc_value + # for key,value in meta['variables'][name_bounds_temporal]['attrs'].iteritems(): + # setattr(times_bounds,key,value) + # ## place units and calendar on time dimensions + # times_bounds.units = temporal.units + # times_bounds.calendar = temporal.calendar + # times = ds.createVariable(name_variable_temporal,time_nc_value.dtype,(dim_temporal._name,)) + # times[:] = time_nc_value + # + # ## always place calendar and units on time dimension + # times.units = temporal.units + # times.calendar = temporal.calendar + # + # ## add time attributes + # for key,value in meta['variables'][name_variable_temporal]['attrs'].iteritems(): + # ## leave off the normal bounds attribute + # if has_climatology_bounds and key == 'bounds': + # if key == 'bounds': + # continue + # setattr(times,key,value) + # + # ## add climatology bounds + # if isinstance(temporal,TemporalGroupDimension): + # setattr(times,'climatology','climatology_bounds') + # + # ## level variable + # ## if there is no level on the variable no need to build one. 
+ # if level is None: + # dim_level = None + # ## if there is a level, create the dimension and set the variable. + # else: + # name_dim_level = meta['dim_map']['Z']['dimension'] + # name_bounds_level = meta['dim_map']['Z']['bounds'] + # name_variable_level = meta['dim_map']['Z']['variable'] + # + # dim_level = ds.createDimension(name_dim_level,len(arch.level.value)) + # levels = ds.createVariable(name_variable_level,arch.level.value.dtype,(dim_level._name,)) + # levels[:] = arch.level.value + # for key,value in meta['variables'][name_variable_level]['attrs'].iteritems(): + # setattr(levels,key,value) + # if level.bounds is not None: + # if dim_bnds is None: + # dim_bnds = ds.createDimension(bounds_name,2) + # levels_bounds = ds.createVariable(name_bounds_level,arch.level.value.dtype,(dim_level._name,bounds_name)) + # levels_bounds[:] = arch.level.bounds + # for key,value in meta['variables'][name_bounds_level]['attrs'].iteritems(): + # setattr(levels,key,value) + # if dim_level is not None: + # value_dims = (dim_temporal._name,dim_level._name,dim_row._name,dim_col._name) + # else: + # value_dims = (dim_temporal._name,dim_row._name,dim_col._name) + # + # ## spatial variables ################################################### + # + # ## create and fill a spatial variable + # def _make_spatial_variable_(ds,name,values,dimension_tuple,meta): + # ret = ds.createVariable(name,values.dtype,[d._name for d in dimension_tuple]) + # ret[:] = values + # ## add variable attributes + # try: + # for key,value in meta['variables'][name]['attrs'].iteritems(): + # setattr(ret,key,value) + # except KeyError: + # pass + # return(ret) + # ## set the spatial data + # _make_spatial_variable_(ds,grid.row.meta['axis']['variable'],grid.row.value,(dim_row,),meta) + # _make_spatial_variable_(ds,grid.col.meta['axis']['variable'],grid.col.value,(dim_col,),meta) + # if grid.row.bounds is not None: + # 
_make_spatial_variable_(ds,grid.row.meta['axis']['bounds'],grid.row.bounds,(dim_row,dim_bnds),meta) + # _make_spatial_variable_(ds,grid.col.meta['axis']['bounds'],grid.col.bounds,(dim_col,dim_bnds),meta) + # + # ## set the variable(s) ################################################# + # + # ## loop through variables + # for variable in arch.variables.itervalues(): + # value = ds.createVariable(variable.alias, variable.dtype, value_dims, + # fill_value=variable.fill_value) + # ## if this is a file only operation, set the value, otherwise leave + # ## it empty for now. + # try: + # is_file_only = self.ops.file_only + # ## this will happen if there is no operations object. + # except AttributeError: + # is_file_only = False + # if not is_file_only: + # value[:] = variable.value.reshape(*value.shape) + # value.setncatts(variable.meta['attrs']) + # ## and the units, converting to string as passing a NoneType will raise + # ## an exception. + # value.units = '' if variable.units is None else variable.units + # + # ## add projection variable if applicable ############################### + # + # if not isinstance(arch.spatial.crs, CFWGS84): + # arch.spatial.crs.write_to_rootgrp(ds, meta) ## append to the history attribute history_str = '\n{dt} UTC ocgis-{release}'.format(dt=datetime.datetime.utcnow(), release=ocgis.__release__) diff --git a/src/ocgis/exc.py b/src/ocgis/exc.py index a583209f1..cc96aa199 100644 --- a/src/ocgis/exc.py +++ b/src/ocgis/exc.py @@ -8,6 +8,30 @@ def __str__(self): return self.message +######################################################################################################################## + + +class BoundsAlreadyAvailableError(OcgException): + """Raised when an attempt is made to extrapolate bounds and they are already present.""" + + def __str__(self): + msg = 'Bounds/corners already available.' 
+ return msg + + +class CannotFormatTimeError(OcgException): + """ + Raised when datetime objects from numeric are blocked by "format_time". + """ + + def __init__(self, property_name): + self.property_name = property_name + + def __str__(self): + msg = 'Attempted to retrieve datetime values from "{0}" with "format_time" as "False". Set "format_time" to "True".'.format(self.property_name) + return msg + + class MultipleElementsFound(OcgException): """ Raised when multiple elements are encountered in a :class:`ocgis.interface.base.dimension.spatial.SpatialDimension` @@ -98,7 +122,6 @@ def __str__(self): return msg - class DefinitionValidationError(OcgException): """Raised when validation fails on :class:`~ocgis.OcgOperations`. diff --git a/src/ocgis/interface/base/attributes.py b/src/ocgis/interface/base/attributes.py new file mode 100644 index 000000000..84ba465f5 --- /dev/null +++ b/src/ocgis/interface/base/attributes.py @@ -0,0 +1,32 @@ +from collections import OrderedDict + + +class Attributes(object): + """ + Adds an ``attrs`` attribute and writes to an open netCDF object. + + :param dict attrs: A dictionary of arbitrary attributes to write to a netCDF object. + """ + + def __init__(self, attrs=None): + self.attrs = attrs + + @property + def attrs(self): + return self._attrs + + @attrs.setter + def attrs(self, value): + if value is None: + self._attrs = OrderedDict() + else: + self._attrs = OrderedDict(value) + + def write_attributes_to_netcdf_object(self, target): + """ + :param target: A netCDF data object to write attributes to. 
+ :type target: :class:`netCDF4.Variable` or :class:`netCDF4.Dataset` + """ + + for k, v in self.attrs.iteritems(): + setattr(target, k, v) diff --git a/src/ocgis/interface/base/crs.py b/src/ocgis/interface/base/crs.py index ea97a0e22..679559e5b 100644 --- a/src/ocgis/interface/base/crs.py +++ b/src/ocgis/interface/base/crs.py @@ -1,26 +1,31 @@ from copy import copy, deepcopy import tempfile import itertools +import abc +import logging + from osgeo.osr import SpatialReference + from fiona.crs import from_string, to_string import numpy as np from shapely.geometry import Point, Polygon from shapely.geometry.base import BaseMultipartGeometry +from shapely.geometry.multipolygon import MultiPolygon +from shapely.geometry.multipoint import MultiPoint + from ocgis import constants from ocgis.util.logging_ocgis import ocgis_lh -from ocgis.exc import SpatialWrappingError, ProjectionCoordinateNotFound,\ - ProjectionDoesNotMatch +from ocgis.exc import SpatialWrappingError, ProjectionCoordinateNotFound, ProjectionDoesNotMatch + from ocgis.util.spatial.wrap import Wrapper from ocgis.util.helpers import iter_array -from shapely.geometry.multipolygon import MultiPolygon -import abc -import logging -from shapely.geometry.multipoint import MultiPoint class CoordinateReferenceSystem(object): - def __init__(self, value=None, proj4=None, epsg=None): + def __init__(self, value=None, proj4=None, epsg=None, name=None): + self.name = name or constants.default_coordinate_system_name + if value is None: if proj4 is not None: value = from_string(proj4) @@ -83,6 +88,20 @@ def sr(self): sr.ImportFromProj4(to_string(self.value)) return sr + def write_to_rootgrp(self, rootgrp): + """ + Write the coordinate system to an open netCDF file. + + :param rootgrp: An open netCDF dataset object for writing. + :type rootgrp: :class:`netCDF4.Dataset` + :returns: The netCDF variable object created to hold the coordinate system metadata. 
+ :rtype: :class:`netCDF4.Variable` + """ + + variable = rootgrp.createVariable(self.name, 'c') + variable.proj4 = self.proj4 + return variable + class WrappableCoordinateReferenceSystem(object): """Meant to be used in mixin classes for coordinate systems that can be wrapped.""" @@ -405,7 +424,7 @@ class Spherical(CoordinateReferenceSystem, WrappableCoordinateReferenceSystem): def __init__(self, semi_major_axis=6370997.0): value = {'proj': 'longlat', 'towgs84': '0,0,0,0,0,0,0', 'no_defs': '', 'a': semi_major_axis, 'b': semi_major_axis} - CoordinateReferenceSystem.__init__(self, value=value) + CoordinateReferenceSystem.__init__(self, value=value, name='latitude_longitude') self.major_axis = semi_major_axis @@ -415,7 +434,7 @@ class WGS84(CoordinateReferenceSystem, WrappableCoordinateReferenceSystem): """ def __init__(self): - CoordinateReferenceSystem.__init__(self, epsg=4326) + CoordinateReferenceSystem.__init__(self, epsg=4326, name='latitude_longitude') class CFCoordinateReferenceSystem(CoordinateReferenceSystem): @@ -424,29 +443,30 @@ class CFCoordinateReferenceSystem(CoordinateReferenceSystem): ## if False, no attempt to read projection coordinates will be made. they ## will be set to a None default. 
_find_projection_coordinates = True - - def __init__(self,**kwds): - self.projection_x_coordinate = kwds.pop('projection_x_coordinate',None) - self.projection_y_coordinate = kwds.pop('projection_y_coordinate',None) - + + def __init__(self, **kwds): + self.projection_x_coordinate = kwds.pop('projection_x_coordinate', None) + self.projection_y_coordinate = kwds.pop('projection_y_coordinate', None) + + name = kwds.pop('name', None) + check_keys = kwds.keys() for key in kwds.keys(): check_keys.remove(key) if len(check_keys) > 0: - exc = ValueError('The keyword parameter(s) "{0}" was/were not provided.'.format(check_keys)) - ocgis_lh(exc=exc,logger='crs') - + raise ValueError('The keyword parameter(s) "{0}" was/were not provided.') + self.map_parameters_values = kwds - crs = {'proj':self.proj_name} + crs = {'proj': self.proj_name} for k in self.map_parameters.keys(): if k in self.iterable_parameters: - v = getattr(self,self.iterable_parameters[k])(kwds[k]) + v = getattr(self, self.iterable_parameters[k])(kwds[k]) crs.update(v) else: - crs.update({self.map_parameters[k]:kwds[k]}) - - super(CFCoordinateReferenceSystem,self).__init__(value=crs) - + crs.update({self.map_parameters[k]: kwds[k]}) + + super(CFCoordinateReferenceSystem, self).__init__(value=crs, name=name) + @abc.abstractproperty def grid_mapping_name(self): str @@ -510,21 +530,27 @@ def _get_projection_coordinate_(target,meta): kwds.pop('grid_mapping_name',None) kwds['projection_x_coordinate'] = pc_x kwds['projection_y_coordinate'] = pc_y + + # add the correct name to the coordinate system + kwds['name'] = r_grid_mapping['name'] cls._load_from_metadata_finalize_(kwds,var,meta) - - return(cls(**kwds)) - - def write_to_rootgrp(self,rootgrp,meta): - name = meta['grid_mapping_variable_name'] - crs = rootgrp.createVariable(name,meta['variables'][name]['dtype']) - attrs = meta['variables'][name]['attrs'] - crs.setncatts(attrs) + + return cls(**kwds) @classmethod def _load_from_metadata_finalize_(cls,kwds,var,meta): 
pass + def write_to_rootgrp(self, rootgrp): + variable = super(CFCoordinateReferenceSystem, self).write_to_rootgrp(rootgrp) + variable.grid_mapping_name = self.grid_mapping_name + for k, v in self.map_parameters_values.iteritems(): + if v is None: + v = '' + setattr(variable, k, v) + return variable + class CFWGS84(WGS84,CFCoordinateReferenceSystem,): grid_mapping_name = 'latitude_longitude' @@ -532,8 +558,9 @@ class CFWGS84(WGS84,CFCoordinateReferenceSystem,): map_parameters = None proj_name = None - def __init__(self): - WGS84.__init__(self) + def __init__(self, *args, **kwargs): + self.map_parameters_values = {} + WGS84.__init__(self, *args, **kwargs) @classmethod def load_from_metadata(cls,var,meta): @@ -572,7 +599,7 @@ class CFLambertConformal(CFCoordinateReferenceSystem): @classmethod def _load_from_metadata_finalize_(cls,kwds,var,meta): kwds['units'] = meta['variables'][kwds['projection_x_coordinate']]['attrs'].get('units') - + class CFPolarStereographic(CFCoordinateReferenceSystem): grid_mapping_name = 'polar_stereographic' @@ -639,9 +666,11 @@ def get_rotated_pole_transformation(self, spatial, inverse=False): try: rc_original = {'row': {'name': spatial.grid.row.name, - 'meta': spatial.grid.row.meta}, + 'meta': spatial.grid.row.meta, + 'attrs': spatial.grid.row.attrs}, 'col': {'name': spatial.grid.col.name, - 'meta': spatial.grid.col.meta}} + 'meta': spatial.grid.col.meta, + 'attrs': spatial.grid.col.attrs}} # a previously transformed rotated pole spatial dimension will not have row and columns. these should be # available in the state dictionary except AttributeError: @@ -666,6 +695,16 @@ def get_rotated_pole_transformation(self, spatial, inverse=False): return new_spatial + def write_to_rootgrp(self, rootgrp): + """ + .. note:: See :meth:`~ocgis.interface.base.crs.CoordinateReferenceSystem.write_to_rootgrp`. 
+ """ + + variable = super(CFRotatedPole, self).write_to_rootgrp(rootgrp) + variable.proj4 = '' + variable.proj4_transform = self._trans_proj + return variable + def _get_rotated_pole_transformation_for_grid_(self, grid, inverse=False, rc_original=None): """ http://osgeo-org.1560.x6.nabble.com/Rotated-pole-coordinate-system-a-howto-td3885700.html @@ -751,13 +790,16 @@ def _itr_writer_(row, col): new_row = new_row[:, 0] new_col = new_col[0, :] new_grid.row = VectorDimension(value=new_row, name=dict_row['name'], - meta=dict_row['meta']) + meta=dict_row['meta'], + attrs=dict_row['attrs']) new_grid.col = VectorDimension(value=new_col, name=dict_col['name'], - meta=dict_col['meta']) + meta=dict_col['meta'], + attrs=dict_col['attrs']) new_col, new_row = np.meshgrid(new_col, new_row) else: - new_grid._row_src_idx = new_grid.row._src_idx - new_grid._col_src_idx = new_grid.col._src_idx + from ocgis.interface.nc.spatial import NcSpatialGridDimension + assert isinstance(new_grid, NcSpatialGridDimension) + new_grid._src_idx = {'row': new_grid.row._src_idx, 'col': new_grid.col._src_idx} new_grid.row = None new_grid.col = None @@ -774,10 +816,12 @@ def _get_meta_name_(rc_original, key): try: meta = rc_original[key]['meta'] name = rc_original[key]['name'] + attrs = rc_original[key]['attrs'] except TypeError: if rc_original is None: meta = None name = None + attrs = None else: raise - return {'meta': meta, 'name': name} + return {'meta': meta, 'name': name, 'attrs': attrs} diff --git a/src/ocgis/interface/base/dimension/base.py b/src/ocgis/interface/base/dimension/base.py index 7cac238cd..d4817a0fb 100644 --- a/src/ocgis/interface/base/dimension/base.py +++ b/src/ocgis/interface/base/dimension/base.py @@ -1,115 +1,138 @@ import abc +from copy import copy, deepcopy +from operator import mul + import numpy as np + from ocgis import constants +from ocgis.interface.base.attributes import Attributes from ocgis.util.helpers import get_none_or_1d, get_none_or_2d, get_none_or_slice,\ - 
get_formatted_slice, assert_raise, get_bounds_from_1d -from copy import copy, deepcopy -from ocgis.exc import EmptySubsetError, ResolutionError -from operator import mul + get_formatted_slice, get_bounds_from_1d +from ocgis.exc import EmptySubsetError, ResolutionError, BoundsAlreadyAvailableError from ocgis.interface.base.variable import AbstractValueVariable,\ AbstractSourcedVariable class AbstractDimension(object): - __metaclass__ = abc.ABCMeta - ''' + """ :param dict meta: :param str name: :param array-like properties: - ''' - - @abc.abstractproperty - def _axis(self): ['R','T','Z','X','Y','GEOM','GRID','POINT','POLYGON',None] + """ + __metaclass__ = abc.ABCMeta + @abc.abstractproperty - def _ndims(self): int + def _ndims(self): + """int""" + @abc.abstractproperty - def _attrs_slice(self): 'sequence of strings' - - def __init__(self,meta=None,name=None,properties=None): + def _attrs_slice(self): + """sequence of strings""" + + def __init__(self, meta=None, name=None, properties=None): self.meta = meta or {} - self.name = name or self._axis + self.name = name self.properties = properties - + if self.properties is not None: - assert(isinstance(self.properties,np.ndarray)) - - def __getitem__(self,slc): - slc = get_formatted_slice(slc,self._ndims) + assert isinstance(self.properties, np.ndarray) + + def __getitem__(self, slc): + slc = get_formatted_slice(slc, self._ndims) ret = copy(self) for attr in self._attrs_slice: - ref_set = get_none_or_slice(getattr(ret,attr),slc) - setattr(ret,attr,ref_set) + ref_set = get_none_or_slice(getattr(ret, attr), slc) + setattr(ret, attr, ref_set) ret.properties = self._get_sliced_properties_(slc) - ret = self._format_slice_state_(ret,slc) - return(ret) - + ret = self._format_slice_state_(ret, slc) + return ret + def get_iter(self): - raise(NotImplementedError) - - def _format_slice_state_(self,state,slc): - return(state) - - def _get_none_or_array_(self,arr,masked=False): + raise NotImplementedError + + def 
_format_slice_state_(self, state, slc): + return state + + def _get_none_or_array_(self, arr, masked=False): if self._ndims == 1: ret = get_none_or_1d(arr) elif self._ndims == 2: ret = get_none_or_2d(arr) else: - raise(NotImplementedError) - if ret is not None and masked and not isinstance(ret,np.ma.MaskedArray): - ret = np.ma.array(ret,mask=False) - return(ret) - - def _get_sliced_properties_(self,slc): + raise (NotImplementedError) + if ret is not None and masked and not isinstance(ret, np.ma.MaskedArray): + ret = np.ma.array(ret, mask=False) + return ret + + def _get_sliced_properties_(self, slc): if self.properties is not None: - raise(NotImplementedError) + raise NotImplementedError else: - return(None) - - + return None + + class AbstractValueDimension(AbstractValueVariable): - ''' - + - :param str name_value: - ''' + """ + :keyword str name_value: (``=None``) The name of the value for the dimension. + """ __metaclass__ = abc.ABCMeta - - def __init__(self,*args,**kwds): - self.name_value = kwds.pop('name_value',None) - - AbstractValueVariable.__init__(self,*args,**kwds) + _name_value = None + + def __init__(self, *args, **kwargs): + self.name_value = kwargs.pop('name_value', None) + AbstractValueVariable.__init__(self, *args, **kwargs) + + @property + def name_value(self): + if self._name_value is None: + ret = self.name + else: + ret = self._name_value + return ret + + @name_value.setter + def name_value(self, value): + self._name_value = value + - if self.name_value is None: - self.name_value = self.name - - class AbstractUidDimension(AbstractDimension): - - def __init__(self,*args,**kwds): - self.uid = kwds.pop('uid',None) - self.name_uid = kwds.pop('name_uid',None) - - super(AbstractUidDimension,self).__init__(*args,**kwds) - - if self.name_uid is None: - self.name_uid = '{0}_uid'.format(self.name) - + + def __init__(self, *args, **kwargs): + self.uid = kwargs.pop('uid', None) + self.name_uid = kwargs.pop('name_uid', None) + + super(AbstractUidDimension, 
self).__init__(*args, **kwargs) + + @property + def name_uid(self): + if self._name_uid is None: + ret = '{0}_uid'.format(self.name) + else: + ret = self._name_uid + return ret + + @name_uid.setter + def name_uid(self, value): + self._name_uid = value + @property def uid(self): if self._uid is None: self._uid = self._get_uid_() - return(self._uid) + return self._uid + @uid.setter - def uid(self,value): - self._uid = self._get_none_or_array_(value,masked=True) + def uid(self, value): + self._uid = self._get_none_or_array_(value, masked=True) + def _get_uid_(self): if self.value is None: ret = None else: - n = reduce(mul,self.value.shape) - ret = np.arange(1,n+1,dtype=constants.np_int).reshape(self.value.shape) - ret = np.ma.array(ret,mask=False) - return(ret) + n = reduce(mul, self.value.shape) + ret = np.arange(1, n + 1, dtype=constants.np_int).reshape(self.value.shape) + ret = np.ma.array(ret, mask=False) + return ret class AbstractUidValueDimension(AbstractValueDimension,AbstractUidDimension): @@ -132,29 +155,29 @@ def __init__(self,*args,**kwds): AbstractUidDimension.__init__(self,*args,**kwds_uid) -class VectorDimension(AbstractSourcedVariable,AbstractUidValueDimension): - _axis = None +class VectorDimension(AbstractSourcedVariable, AbstractUidValueDimension, Attributes): _attrs_slice = ('uid', '_value', '_src_idx') _ndims = 1 def __init__(self, *args, **kwargs): + if kwargs.get('value') is None and kwargs.get('data') is None: + msg = 'Without a "data" object, "value" is required.' + raise ValueError(msg) + bounds = kwargs.pop('bounds', None) - self.name_bounds = kwargs.pop('name_bounds', None) - self._axis = kwargs.pop('axis', None) - # if True, an attempt will be made to interpolate bounds if None are provided. - self._interpolate_bounds = kwargs.pop('interpolate_bounds', False) - # if True, bounds were interpolated. 
if False, they were loaded from source data + # used for creating name_bounds as well as the name of the bounds dimension in netCDF + self.name_bounds_suffix = kwargs.pop('name_bounds_suffix', None) or constants.ocgis_bounds + self._name_bounds = kwargs.pop('name_bounds', None) + self.axis = kwargs.pop('axis', None) + # if True, bounds were interpolated. if False, they were loaded from source data. used in conforming units. self._has_interpolated_bounds = False - - AbstractSourcedVariable.__init__(self, kwargs.pop('data', None), src_idx=kwargs.pop('src_idx', None), - value=kwargs.get('value'), dtype=kwargs.get('dtype')) + + AbstractSourcedVariable.__init__(self, kwargs.pop('data', None), kwargs.pop('src_idx', None)) + Attributes.__init__(self, attrs=kwargs.pop('attrs', None)) AbstractUidValueDimension.__init__(self, *args, **kwargs) - + # setting bounds requires checking the data type of value set in a superclass. self.bounds = bounds - - if self._axis is None: - self._axis = 'undefined' def __len__(self): return self.shape[0] @@ -165,11 +188,6 @@ def bounds(self): # are meaningless! self.value - # if the bounds are None, check if an attempt should be made to interpolate bounds from the value itself. - if self._interpolate_bounds and self._bounds is None: - self.set_extrapolated_bounds() - self._has_interpolated_bounds = True - # if no error is encountered, then the bounds should have been set during loading from source. simply return the # value. it will be none, if no bounds were present in the source data. 
return self._bounds @@ -189,14 +207,17 @@ def extent(self): else: target = self.bounds return target.min(), target.max() - + @property def name_bounds(self): if self._name_bounds is None: - self._name_bounds = '{0}_bnds'.format(self.name_value) - return(self._name_bounds) + ret = '{0}_{1}'.format(self.name, self.name_bounds_suffix) + else: + ret = self._name_bounds + return ret + @name_bounds.setter - def name_bounds(self,value): + def name_bounds(self, value): self._name_bounds = value @property @@ -303,37 +324,86 @@ def get_between(self,lower,upper,return_indices=False,closed=False,use_bounds=Tr ret = (ret,indices[select]) return(ret) - - def get_iter(self): - ref_value,ref_bounds = self._get_iter_value_bounds_() - + + def get_iter(self): + ref_value, ref_bounds = self._get_iter_value_bounds_() + if ref_bounds is None: has_bounds = False else: has_bounds = True - + ref_uid = self.uid ref_name_value = self.name_value - assert_raise(self.name_value != None,logger='interface.dimension.base', - exc=ValueError('The "name_value" attribute is required for iteration.')) + + if self.name_value is None: + msg = 'The "name_value" attribute is required for iteration.' 
+ raise ValueError(msg) + ref_name_uid = self.name_uid ref_name_bounds_lower = '{0}_lower'.format(self.name_bounds) ref_name_bounds_upper = '{0}_upper'.format(self.name_bounds) - + for ii in range(self.value.shape[0]): - yld = {ref_name_value:ref_value[ii],ref_name_uid:ref_uid[ii]} + yld = {ref_name_value: ref_value[ii], ref_name_uid: ref_uid[ii]} if has_bounds: - yld.update({ref_name_bounds_lower:ref_bounds[ii,0], - ref_name_bounds_upper:ref_bounds[ii,1]}) + yld.update({ref_name_bounds_lower: ref_bounds[ii, 0], + ref_name_bounds_upper: ref_bounds[ii, 1]}) else: - yld.update({ref_name_bounds_lower:None, - ref_name_bounds_upper:None}) - yield(ii,yld) + yld.update({ref_name_bounds_lower: None, + ref_name_bounds_upper: None}) + yield ii, yld def set_extrapolated_bounds(self): """Set the bounds variable using extrapolation.""" + if self.bounds is not None: + raise BoundsAlreadyAvailableError self.bounds = get_bounds_from_1d(self.value) + self._has_interpolated_bounds = True + + def write_to_netcdf_dataset(self, dataset, unlimited=False, bounds_dimension_name=None, **kwargs): + """ + Write the dimension and its associated value and bounds to an open netCDF dataset object. + + :param dataset: An open dataset object. + :type dataset: :class:`netCDF4.Dataset` + :param bool unlimited: If ``True``, create the dimension on the netCDF object with ``size=None``. See + http://unidata.github.io/netcdf4-python/netCDF4.Dataset-class.html#createDimension. + :param str bounds_dimension_name: If ``None``, default to :attrs:`ocgis.constants.ocgis_bounds`. + :param kwargs: Extra keyword arguments in addition to ``dimensions`` to pass to ``createVariable``. See + http://unidata.github.io/netcdf4-python/netCDF4.Dataset-class.html#createVariable + """ + + if self.name is None: + raise ValueError('Writing to netCDF requires a "name" be set to a string value. 
It is currently None.') + + bounds_dimension_name = bounds_dimension_name or self.name_bounds_suffix + + if unlimited: + size = None + else: + size = self.shape[0] + dataset.createDimension(self.name, size=size) + kwargs['dimensions'] = (self.name,) + variable = dataset.createVariable(self.name_value, self.value.dtype, **kwargs) + variable[:] = self.value + variable.axis = self.axis if self.axis is not None else '' + + if self.bounds is not None: + try: + dataset.createDimension(bounds_dimension_name, size=2) + except RuntimeError: + # bounds dimension likely created previously. check for it, then move on + if bounds_dimension_name not in dataset.dimensions: + raise + kwargs['dimensions'] = (self.name, bounds_dimension_name) + bounds_variable = dataset.createVariable(self.name_bounds, self.bounds.dtype, **kwargs) + bounds_variable[:] = self.bounds + variable.setncattr('bounds', self.name_bounds) + + # data mode issues require that this be last...? + self.write_attributes_to_netcdf_object(variable) def _format_private_value_(self,value): return(self._get_none_or_array_(value,masked=False)) diff --git a/src/ocgis/interface/base/dimension/spatial.py b/src/ocgis/interface/base/dimension/spatial.py index df96d2d09..cf9e88bb3 100644 --- a/src/ocgis/interface/base/dimension/spatial.py +++ b/src/ocgis/interface/base/dimension/spatial.py @@ -1,25 +1,27 @@ from collections import deque -import base +import itertools +from copy import copy + import numpy as np -from ocgis.interface.base.crs import CFWGS84, CoordinateReferenceSystem, WGS84, CFRotatedPole -from ocgis.util.logging_ocgis import ocgis_lh -from ocgis.util.helpers import iter_array, get_none_or_slice, \ - get_formatted_slice, get_reduced_slice, get_trimmed_array_by_mask,\ - get_added_slice, make_poly from shapely.geometry.point import Point -from ocgis import constants, env -import itertools from shapely.geometry.polygon import Polygon -from copy import copy from shapely.prepared import prep from 
shapely.geometry.multipoint import MultiPoint from shapely.geometry.multipolygon import MultiPolygon -from ocgis.exc import EmptySubsetError, SpatialWrappingError, MultipleElementsFound from osgeo.ogr import CreateGeometryFromWkb, Geometry, wkbGeometryCollection, wkbPoint from shapely import wkb import fiona from shapely.geometry.geo import mapping, shape +import base +from ocgis.interface.base.crs import CFWGS84, CoordinateReferenceSystem, WGS84 +from ocgis.util.logging_ocgis import ocgis_lh +from ocgis.util.helpers import iter_array, get_none_or_slice, \ + get_formatted_slice, get_reduced_slice, get_trimmed_array_by_mask,\ + get_added_slice, make_poly, set_name_attributes, get_extrapolated_corners_esmf, get_ocgis_corners_from_esmf_corners +from ocgis import constants, env +from ocgis.exc import EmptySubsetError, SpatialWrappingError, MultipleElementsFound, BoundsAlreadyAvailableError + class GeomMapping(object): """Used to simulate a dictionary key look up for data stored in 2-d ndarrays.""" @@ -63,8 +65,8 @@ def properties(self): @property def uid(self): return self.sdim.uid[0, 0] - - + + class SpatialDimension(base.AbstractUidDimension): """ :param grid: :class:`ocgis.interface.base.dimension.spatial.SpatialGridDimension` @@ -74,8 +76,7 @@ class SpatialDimension(base.AbstractUidDimension): """ _ndims = 2 - _axis = 'SPATIAL' - _attrs_slice = ('uid','grid','_geom') + _attrs_slice = ('uid', 'grid', '_geom') def __init__(self, *args, **kwargs): self.grid = kwargs.pop('grid', None) @@ -91,11 +92,15 @@ def __init__(self, *args, **kwargs): row = kwargs.pop('row', None) col = kwargs.pop('col', None) + # always provide a default name for iteration + kwargs['name'] = kwargs.get('name') or 'spatial' + kwargs['name_uid'] = kwargs.get('name_uid') or 'gid' + ## attempt to build the geometry dimension point = kwargs.pop('point', None) polygon = kwargs.pop('polygon', None) geom_kwds = dict(point=point, polygon=polygon) - if any([g != None for g in geom_kwds.values()]): + if 
any([g is not None for g in geom_kwds.values()]): self._geom = SpatialGeometryDimension(**geom_kwds) # attempt to construct some core dimensions if they are not passed at initialization @@ -129,7 +134,7 @@ def geom(self): else: self._geom = SpatialGeometryDimension(grid=self.grid, uid=self.grid.uid, abstraction=self._abstraction) return self._geom - + @property def grid(self): return self._grid @@ -164,7 +169,7 @@ def shape(self): @property def single(self): return SingleElementRetriever(self) - + @property def weights(self): if self.geom is None: @@ -191,6 +196,7 @@ def assert_uniform_mask(self): :raises: AssertionError """ + #todo: check mask on grid corners to_compare = [] if self._grid is not None: to_compare.append(self._grid.value[0].mask) @@ -300,7 +306,7 @@ def from_records(cls, records, crs=None): sdim = SpatialDimension(geom=dim_geom, uid=uid, properties=properties, crs=crs, abstraction=mapping_kwds[klass]) return sdim - + def get_clip(self, polygon, return_indices=False, use_spatial_index=True, select_nearest=False): assert(type(polygon) in (Polygon, MultiPolygon)) @@ -605,59 +611,61 @@ def _update_crs_with_geometry_collection_(self, to_sr, value_row, value_col): value_col[ii] = geom.GetX() value_row[ii] = geom.GetY() - + class SpatialGridDimension(base.AbstractUidValueDimension): - _axis = 'GRID' _ndims = 2 _attrs_slice = None + _name_row = None + def __init__(self, *args, **kwargs): self._corners = None self.row = kwargs.pop('row', None) self.col = kwargs.pop('col', None) + self.corners = kwargs.pop('corners', None) - self._row_src_idx = kwargs.pop('row_src_idx', None) - self._col_src_idx = kwargs.pop('col_src_idx', None) + + kwargs['name'] = kwargs.get('name') or 'grid' + + self.name_row = kwargs.pop('name_row', None) + self.name_col = kwargs.pop('name_col', None) super(SpatialGridDimension, self).__init__(*args, **kwargs) - if self._value is None: - if self.row is None or self.col is None: - msg = 'Without a value, a row and column dimension are 
required.' - raise ValueError(msg) - + self._validate_() + + # set names of row and column if available + name_mapping = {self.row: 'yc', self.col: 'xc'} + set_name_attributes(name_mapping) + def __getitem__(self,slc): slc = get_formatted_slice(slc,2) - + uid = self.uid[slc] - + if self._value is not None: value = self._value[:,slc[0],slc[1]] else: value = None - + if self.row is not None: row = self.row[slc[0]] col = self.col[slc[1]] else: row = None col = None - + ret = copy(self) if ret._corners is not None: ret._corners = ret._corners[:, slc[0], slc[1], :] - - if self._row_src_idx is not None: - ret._row_src_idx = self._row_src_idx[slc[0]] - ret._col_src_idx = self._col_src_idx[slc[1]] - + ret.uid = uid ret._value = value ret.row = row ret.col = col - + return(ret) @property @@ -719,7 +727,7 @@ def corners_esmf(self): ref[:, 1, 1] = _corners[:, ii, jj, 2] ref[:, 1, 0] = _corners[:, ii, jj, 3] return fill - + @property def extent(self): if self.row is None: @@ -750,7 +758,7 @@ def extent(self): def extent_polygon(self): minx, miny, maxx, maxy = self.extent return make_poly([miny, maxy], [minx, maxx]) - + @property def resolution(self): try: @@ -762,27 +770,27 @@ def resolution(self): cols = np.mean(np.diff(r_value[1,:,:],axis=1)) ret = np.mean([rows,cols]) return(ret) - + @property def shape(self): try: ret = (len(self.row), len(self.col)) # occurs if either of these are empty. get the shape from the grid value. 
except TypeError: - ret = (self.value.shape[1], self.value.shape[2]) + ret = (self.uid.shape[0], self.uid.shape[1]) return ret - + def get_subset_bbox(self,min_col,min_row,max_col,max_row,return_indices=False,closed=True, use_bounds=True): assert(min_row <= max_row) assert(min_col <= max_col) - + if self.row is None: r_row = self.value[0,:,:] real_idx_row = np.arange(0,r_row.shape[0]) r_col = self.value[1,:,:] real_idx_col = np.arange(0,r_col.shape[1]) - + if closed: lower_row = r_row > min_row upper_row = r_row < max_row @@ -793,13 +801,13 @@ def get_subset_bbox(self,min_col,min_row,max_col,max_row,return_indices=False,cl upper_row = r_row <= max_row lower_col = r_col >= min_col upper_col = r_col <= max_col - + idx_row = np.logical_and(lower_row,upper_row) idx_col = np.logical_and(lower_col,upper_col) - + keep_row = np.any(idx_row,axis=1) keep_col = np.any(idx_col,axis=0) - + ## slice reduction may fail due to empty bounding box returns. catch ## these value errors and repurpose as subset errors. 
try: @@ -816,17 +824,17 @@ def get_subset_bbox(self,min_col,min_row,max_col,max_row,return_indices=False,cl raise(EmptySubsetError(origin='X')) else: raise - + new_mask = np.invert(np.logical_or(idx_row,idx_col)[row_slc,col_slc]) - + else: new_row,row_indices = self.row.get_between(min_row,max_row,return_indices=True,closed=closed,use_bounds=use_bounds) new_col,col_indices = self.col.get_between(min_col,max_col,return_indices=True,closed=closed,use_bounds=use_bounds) row_slc = get_reduced_slice(row_indices) col_slc = get_reduced_slice(col_indices) - + ret = self[row_slc,col_slc] - + try: grid_mask = np.zeros((2,new_mask.shape[0],new_mask.shape[1]),dtype=bool) grid_mask[:,:,:] = new_mask @@ -842,53 +850,102 @@ def get_subset_bbox(self,min_col,min_row,max_col,max_row,return_indices=False,cl ret = (ret,(row_slc,col_slc)) return(ret) - - def _format_private_value_(self,value): + + def set_extrapolated_corners(self): + #todo: doc + if self.corners is not None: + raise BoundsAlreadyAvailableError + else: + data = self.value.data + corners_esmf = get_extrapolated_corners_esmf(data[0]) + corners_esmf.resize(*list([2]+list(corners_esmf.shape))) + corners_esmf[1, :, :] = get_extrapolated_corners_esmf(data[1]) + corners = get_ocgis_corners_from_esmf_corners(corners_esmf) + + # update the corners mask if there are masked values + if self.value.mask.any(): + idx_true = np.where(self.value.mask[0] == True) + corners.mask[:, idx_true[0], idx_true[1], :] = True + + self.corners = corners + + def write_to_netcdf_dataset(self, dataset, **kwargs): + """ + :param dataset: + :type dataset: :class:`netCDF4.Dataset` + """ + + try: + self.row.write_to_netcdf_dataset(dataset, **kwargs) + self.col.write_to_netcdf_dataset(dataset, **kwargs) + except AttributeError: + # likely no row and column. write the grid value. 
+ name_yc = constants.default_name_row_coordinates + name_xc = constants.default_name_col_coordinates + dataset.createDimension(name_yc, size=self.shape[0]) + dataset.createDimension(name_xc, size=self.shape[1]) + value = self.value + dimensions = (name_yc, name_xc) + yc = dataset.createVariable(name_yc, value.dtype, dimensions=dimensions) + yc[:] = value[0, :, :] + yc.axis = 'Y' + xc = dataset.createVariable(name_xc, value.dtype, dimensions=dimensions) + xc[:] = value[1, :, :] + xc.axis = 'X' + + if self.corners is not None: + corners = self.corners + ncorners = constants.default_name_corners_dimension + dataset.createDimension(ncorners, size=4) + name_yc_corner = '{0}_corners'.format(name_yc) + name_xc_corner = '{0}_corners'.format(name_xc) + dimensions = (name_yc, name_xc, ncorners) + for idx, name in zip([0, 1], [name_yc_corner, name_xc_corner]): + var = dataset.createVariable(name, corners.dtype, dimensions=dimensions) + var[:] = corners[idx] + yc.corners = name_yc_corner + xc.corners = name_xc_corner + + def _validate_(self): + if self._value is None: + if self.row is None or self.col is None: + msg = 'Without a value, a row and column dimension are required.' 
+ raise ValueError(msg) + + def _format_private_value_(self, value): if value is None: ret = None else: - assert(len(value.shape) == 3) - assert(value.shape[0] == 2) - assert(isinstance(value,np.ma.MaskedArray)) + assert len(value.shape) == 3 + assert value.shape[0] == 2 + assert isinstance(value, np.ma.MaskedArray) ret = value - return(ret) - - def _get_slice_(self,state,slc): + return ret + + def _get_uid_(self, shp=None): + if shp is None: + if self._value is None: + shp = len(self.row), len(self.col) + else: + shp = self._value.shape[1], self._value.shape[2] + ret = np.arange(1, (shp[0] * shp[1]) + 1, dtype=constants.np_int).reshape(shp) + ret = np.ma.array(ret, mask=False) + return ret - if self._value is None: - state._value = None - else: - state._value = state.value[:,slc[0],slc[1]] - if state.row is not None: - state.row = state.row[slc[0]] - state.col = state.col[slc[1]] - - return(state) - - def _get_uid_(self): - if self._value is None: - shp = len(self.row),len(self.col) - else: - shp = self._value.shape[1],self._value.shape[2] - ret = np.arange(1,(shp[0]*shp[1])+1,dtype=constants.np_int).reshape(shp) - ret = np.ma.array(ret,mask=False) - return(ret) - def _get_value_(self): - ## assert types of row and column are equivalent + # assert types of row and column are equivalent if self.row.value.dtype != self.col.value.dtype: self.col._value = self.col._value.astype(self.row.value.dtype) - ## fill the centroids - fill = np.empty((2,self.row.shape[0],self.col.shape[0]),dtype=self.row.value.dtype) - fill = np.ma.array(fill,mask=False) - col_coords,row_coords = np.meshgrid(self.col.value,self.row.value) - fill[0,:,:] = row_coords - fill[1,:,:] = col_coords - return(fill) - - + # fill the centroids + fill = np.empty((2, self.row.shape[0], self.col.shape[0]), dtype=self.row.value.dtype) + fill = np.ma.array(fill, mask=False) + col_coords, row_coords = np.meshgrid(self.col.value, self.row.value) + fill[0, :, :] = row_coords + fill[1, :, :] = col_coords + return 
fill + + class SpatialGeometryDimension(base.AbstractUidDimension): - _axis = 'GEOM' _ndims = 2 _attrs_slice = ('uid', 'grid', '_point', '_polygon') @@ -898,6 +955,8 @@ def __init__(self, *args, **kwargs): self._polygon = kwargs.pop('polygon', None) self._abstraction = kwargs.pop('abstraction', None) + kwargs['name'] = kwargs.get('name') or 'geometry' + super(SpatialGeometryDimension, self).__init__(*args, **kwargs) if self.grid is None and self._point is None and self._polygon is None: @@ -923,7 +982,7 @@ def point(self): if self._point is None and self.grid is not None: self._point = SpatialGeometryPointDimension(grid=self.grid, uid=self.grid.uid) return self._point - + @property def polygon(self): if self._polygon is None: @@ -939,7 +998,7 @@ def polygon(self): else: raise return self._polygon - + @property def shape(self): if self.point is None: @@ -947,7 +1006,7 @@ def shape(self): else: ret = self.point.shape return(ret) - + def get_highest_order_abstraction(self): """ :returns: Return the highest order abstraction geometry with preference given by: @@ -971,15 +1030,15 @@ def get_highest_order_abstraction(self): raise ValueError(msg) return ret - + def get_iter(self): - raise(NotImplementedError) - - def _get_slice_(self,state,slc): - state._point = get_none_or_slice(state._point,slc) - state._polygon = get_none_or_slice(state._polygon,slc) - return(state) - + raise NotImplementedError + + def _get_slice_(self, state, slc): + state._point = get_none_or_slice(state._point, slc) + state._polygon = get_none_or_slice(state._polygon, slc) + return state + def _get_uid_(self): if self._point is not None: ret = self._point.uid @@ -991,22 +1050,23 @@ def _get_uid_(self): class SpatialGeometryPointDimension(base.AbstractUidValueDimension): - _axis = 'POINT' _ndims = 2 - _attrs_slice = ('uid','_value','grid') + _attrs_slice = ('uid', '_value', 'grid') _geom_type = 'Point' - - def __init__(self,*args,**kwds): - self.grid = kwds.pop('grid',None) - 
super(SpatialGeometryPointDimension,self).__init__(*args,**kwds) - + def __init__(self, *args, **kwargs): + self.grid = kwargs.pop('grid', None) + + kwargs['name'] = kwargs.get('name') or self._geom_type.lower() + + super(SpatialGeometryPointDimension, self).__init__(*args, **kwargs) + @property def weights(self): ret = np.ones(self.value.shape,dtype=constants.np_float) ret = np.ma.array(ret,mask=self.value.mask) return(ret) - + def get_intersects_masked(self,polygon,use_spatial_index=True): """ :param polygon: The Shapely geometry to use for subsetting. @@ -1019,17 +1079,17 @@ def get_intersects_masked(self,polygon,use_spatial_index=True): :raises: NotImplementedError, EmptySubsetError :returns: :class:`ocgis.interface.base.dimension.spatial.SpatialGeometryPointDimension` """ - + # only polygons are acceptable for subsetting. if a point is required, buffer it. if type(polygon) not in (Polygon,MultiPolygon): raise(NotImplementedError(type(polygon))) - + # return a shallow copy of self ret = copy(self) # create the fill array and reference the mask. this is the outpout geometry value array. fill = np.ma.array(ret.value,mask=True) ref_fill_mask = fill.mask - + # this is the path if a spatial index is used. 
if use_spatial_index: # keep this as a local import as it is not a required dependency @@ -1062,18 +1122,18 @@ def get_intersects_masked(self,polygon,use_spatial_index=True): else: bool_value = True ref_fill_mask[ii,jj] = bool_value - + # if everything is masked, this is an empty subset if ref_fill_mask.all(): raise(EmptySubsetError(self.name)) - + # set the returned value to the fill array ret._value = fill # also update the unique identifier array ret.uid = np.ma.array(ret.uid, mask=fill.mask.copy()) - + return ret - + def update_crs(self, to_crs, from_crs): """ :type to_crs: :class:`ocgis.crs.CoordinateReferenceSystem` @@ -1096,22 +1156,19 @@ def write_fiona(self,path,crs,driver='ESRI Shapefile'): 'properties':{'UGID':'int'}} ref_prep = self._write_fiona_prep_geom_ ref_uid = self.uid - + with fiona.open(path,'w',driver=driver,crs=crs,schema=schema) as f: for (ii,jj),geom in iter_array(self.value,return_value=True): geom = ref_prep(geom) - try: - uid = int(ref_uid[ii,jj]) - except Exception as e: - import ipdb;ipdb.set_trace() + uid = int(ref_uid[ii,jj]) feature = {'properties':{'UGID':uid},'geometry':mapping(geom)} f.write(feature) return(path) - + def _write_fiona_prep_geom_(self,geom): return(geom) - + def _format_private_value_(self,value): if value is not None: try: @@ -1123,7 +1180,7 @@ def _format_private_value_(self,value): ret = None ret = self._get_none_or_array_(ret,masked=True) return(ret) - + def _get_geometry_fill_(self,shape=None): if shape is None: shape = (self.grid.shape[0],self.grid.shape[1]) @@ -1133,11 +1190,11 @@ def _get_geometry_fill_(self,shape=None): fill = np.ma.array(np.zeros(shape),mask=mask,dtype=object) return(fill) - + def _get_value_(self): # we are interested in creating geometries for all the underly coordinates regardless if the data is masked ref_grid = self.grid.value.data - + fill = self._get_geometry_fill_() r_data = fill.data for idx_row,idx_col in iter_array(ref_grid[0],use_mask=False): @@ -1146,13 +1203,13 @@ def 
_get_value_(self): pt = Point(x,y) r_data[idx_row,idx_col] = pt return(fill) - - + + class SpatialGeometryPolygonDimension(SpatialGeometryPointDimension): _geom_type = 'MultiPolygon' - _axis = 'POLYGON' def __init__(self, *args, **kwargs): + kwargs['name'] = kwargs.get('name') or 'polygon' super(SpatialGeometryPolygonDimension, self).__init__(*args, **kwargs) if self._value is None: @@ -1183,7 +1240,7 @@ def area(self): for (ii,jj),geom in iter_array(r_value,return_value=True): fill[ii,jj] = geom.area return(fill) - + @property def weights(self): return(self.area/self.area.max()) diff --git a/src/ocgis/interface/base/dimension/temporal.py b/src/ocgis/interface/base/dimension/temporal.py index a2e54c651..3d97a0169 100644 --- a/src/ocgis/interface/base/dimension/temporal.py +++ b/src/ocgis/interface/base/dimension/temporal.py @@ -1,78 +1,294 @@ -import base -import numpy as np from collections import deque import itertools import datetime +from copy import deepcopy +import netCDF4 as nc + +import netcdftime +import numpy as np + +import base from ocgis import constants from ocgis.util.logging_ocgis import ocgis_lh -from ocgis.exc import EmptySubsetError, IncompleteSeasonError -from ocgis.util.helpers import get_is_date_between -from copy import deepcopy +from ocgis.exc import EmptySubsetError, IncompleteSeasonError, CannotFormatTimeError +from ocgis.util.helpers import get_is_date_between, iter_array, get_none_or_slice class TemporalDimension(base.VectorDimension): - _date_parts = ('year','month','day','hour','minute','second') - _axis = 'T' - - def get_grouping(self,grouping): - ## there is no need to go through the process of breaking out datetime - ## parts when the grouping is 'all'. + """ + .. note:: Accepts all parameters to :class:`~ocgis.interface.base.dimension.base.VectorDimension`. + + :keyword str calendar: (``='standard'``) The calendar to use when converting from float to datetime objects. 
Any of + the netCDF-CF calendar tyes: http://unidata.github.io/netcdf4-python/netCDF4-module.html#num2date + :keyword bool format_time: (``=True``) If ``False``, do not allow access to ``value_datetime``, + ``bounds_datetime``, and ``extent_datetime``. If these properties are accessed raise + :class:``~ocgis.exc.CannotFormatTimeError``. + :keyword str units: (``='days since 0000-01-01 00:00:00'``) The units string to use when converting from float to + datetime objects. See: http://unidata.github.io/netcdf4-python/netCDF4-module.html#num2date + """ + + _attrs_slice = ('uid', '_value', '_src_idx', '_value_datetime', '_value_numtime') + _date_parts = ('year', 'month', 'day', 'hour', 'minute', 'second') + + def __init__(self, *args, **kwargs): + self.calendar = kwargs.pop('calendar', constants.default_temporal_calendar) + self.format_time = kwargs.pop('format_time', True) + + kwargs['axis'] = kwargs.get('axis') or 'T' + kwargs['name'] = kwargs.get('name') or 'time' + kwargs['name_uid'] = kwargs.get('name_uid') or 'tid' + + super(TemporalDimension, self).__init__(*args, **kwargs) + + self.units = self.units or constants.default_temporal_units + # test if the units are the special case with months in the time units + if self.units.startswith('months'): + self._has_months_units = True + else: + self._has_months_units = False + + self._value_datetime = None + self._bounds_datetime = None + self._value_numtime = None + self._bounds_numtime = None + + @property + def bounds_datetime(self): + if not self.format_time: + raise CannotFormatTimeError('bounds_datetime') + if self.bounds is not None: + if self._bounds_datetime is None: + if get_datetime_conversion_state(self.bounds[0, 0]): + self._bounds_datetime = np.atleast_2d(self.get_datetime(self.bounds)) + else: + self._bounds_datetime = self.bounds + return self._bounds_datetime + + @property + def bounds_numtime(self): + if self.bounds is not None: + if self._bounds_numtime is None: + if not 
get_datetime_conversion_state(self.bounds[0, 0]): + self._bounds_numtime = np.atleast_2d(self.get_numtime(self.bounds)) + else: + self._bounds_numtime = self.bounds + return self._bounds_numtime + + @property + def extent_datetime(self): + if not self.format_time: + raise CannotFormatTimeError('extent_datetime') + extent = self.extent + if get_datetime_conversion_state(extent[0]): + extent = self.get_datetime(extent) + return tuple(extent) + + @property + def extent_numtime(self): + extent = self.extent + if not get_datetime_conversion_state(extent[0]): + extent = self.get_numtime(extent) + return tuple(extent) + + @property + def value_datetime(self): + if not self.format_time: + raise CannotFormatTimeError('value_datetime') + if self._value_datetime is None: + if get_datetime_conversion_state(self.value[0]): + self._value_datetime = np.atleast_1d(self.get_datetime(self.value)) + else: + self._value_datetime = self.value + return self._value_datetime + + @property + def value_numtime(self): + if self._value_numtime is None: + if not get_datetime_conversion_state(self.value[0]): + self._value_numtime = np.atleast_1d(self.get_numtime(self.value)) + else: + self._value_numtime = self.value + return self._value_numtime + + def get_between(self, lower, upper, return_indices=False): + if get_datetime_conversion_state(self.value[0]): + lower, upper = tuple(self.get_numtime([lower, upper])) + return super(TemporalDimension, self).get_between(lower, upper, return_indices=return_indices) + + def get_datetime(self, arr): + """ + :param arr: An array of floats to convert to datetime objects. + :type arr: :class:`numpy.ndarray` + :returns: An object array of the same shape as ``arr`` with float objects converted to datetime objects. 
+ :rtype: :class:`numpy.ndarray` + """ + + # if there are month units, call the special procedure to convert those to datetime objects + if not self._has_months_units: + arr = np.atleast_1d(nc.num2date(arr, self.units, calendar=self.calendar)) + dt = datetime.datetime + for idx, t in iter_array(arr, return_value=True): + # attempt to convert times to datetime objects + try: + arr[idx] = dt(t.year, t.month, t.day, t.hour, t.minute, t.second) + # this may fail for some calendars, in that case maintain the instance object returned from + # netcdftime see: http://netcdf4-python.googlecode.com/svn/trunk/docs/netcdftime.netcdftime.datetime-class.html + except ValueError: + arr[idx] = arr[idx] + else: + arr = get_datetime_from_months_time_units(arr, self.units, month_centroid=constants.calc_month_centroid) + return arr + + def get_grouping(self, grouping): + """ + :param sequence grouping: The temporal grouping to use when creating the temporal group dimension. + + >>> grouping = ['month'] + + :returns: A temporal group dimension. + :rtype: :class:`~ocgis.interface.base.dimension.temporal.TemporalGroupDimension` + """ + + # there is no need to go through the process of breaking out datetime parts when the grouping is 'all'. 
if grouping == 'all': - new_bounds,date_parts,repr_dt,dgroups = self._get_grouping_all_() - ## the process for getting "unique" seasons is also specialized + new_bounds, date_parts, repr_dt, dgroups = self._get_grouping_all_() + # the process for getting "unique" seasons is also specialized elif 'unique' in grouping: - new_bounds,date_parts,repr_dt,dgroups = self._get_grouping_seasonal_unique_(grouping) - ## for standard groups ("['month']") or seasons across entire time range + new_bounds, date_parts, repr_dt, dgroups = self._get_grouping_seasonal_unique_(grouping) + # for standard groups ("['month']") or seasons across entire time range else: - new_bounds,date_parts,repr_dt,dgroups = self._get_grouping_other_(grouping) - - tgd = self._get_temporal_group_dimension_( - grouping=grouping,date_parts=date_parts,bounds=new_bounds, - dgroups=dgroups,value=repr_dt,name_value='time',name_uid='tid', - name=self.name,meta=self.meta,units=self.units) - - return(tgd) - - def _get_grouping_seasonal_unique_(self, grouping): + new_bounds, date_parts, repr_dt, dgroups = self._get_grouping_other_(grouping) + + tgd = self._get_temporal_group_dimension_(grouping=grouping, date_parts=date_parts, bounds=new_bounds, + dgroups=dgroups, value=repr_dt, name_value='time', name_uid='tid', + name=self.name, meta=self.meta, units=self.units) + + return tgd + + def get_iter(self, *args, **kwargs): + r_name_value = self.name_value + r_set_date_parts = self._set_date_parts_ + for ii, yld in super(TemporalDimension, self).get_iter(*args, **kwargs): + r_value = yld[r_name_value] + r_set_date_parts(yld, r_value) + yield (ii, yld) + + def get_numtime(self, arr): + """ + :param arr: An array of datetime objects to convert to numeric time. + :type array: :class:`numpy.array` + :returns: An array of numeric values with same shape as ``arr``. + :rtype: :class:`numpy.array` """ - :param list grouping: A seasonal list containing the unique flag. 
- >>> grouping = [[12, 1, 2], [3, 4, 5], [6, 7, 8], [9, 10, 11], 'unique'] + try: + ret = np.atleast_1d(nc.date2num(arr, self.units, calendar=self.calendar)) + except ValueError: + # special behavior for conversion of time units with months + if self._has_months_units: + ret = get_num_from_months_time_units(arr, self.units, dtype=None) + else: + raise + return ret - :returns: A tuple of elements necessary to create a :class:`ocgis.interface.base.dimension.temporal.TemporalGroupDimension` - object. - :rtype: tuple + def get_time_region(self,time_region,return_indices=False): + assert(isinstance(time_region,dict)) + + ## return the values to use for the temporal region subsetting. + value = self.value_datetime + bounds = self.bounds_datetime + + ## switch to indicate if bounds or centroid datetimes are to be used. + use_bounds = False if bounds is None else True + + ## remove any none values in the time_region dictionary. this will save + ## time in iteration. + time_region = time_region.copy() + time_region = {k:v for k,v in time_region.iteritems() if v is not None} + assert(len(time_region) > 0) + + ## this is the boolean selection array. + select = np.zeros(self.shape[0],dtype=bool) + + ## for each row, determine if the date criterion are met updating the + ## select matrix accordingly. + row_check = np.zeros(len(time_region),dtype=bool) + + for idx_row in range(select.shape[0]): + ## do the comparison for each time_region element. 
+ if use_bounds: + row = bounds[idx_row,:] + else: + row = value[idx_row] + for ii,(k,v) in enumerate(time_region.iteritems()): + if use_bounds: + to_include = [] + for element in v: + kwds = {k:element} + to_include.append(get_is_date_between(row[0],row[1],**kwds)) + fill = any(to_include) + else: + part = getattr(row,k) + fill = True if part in v else False + row_check[ii] = fill + if row_check.all(): + select[idx_row] = True + + if not select.any(): + ocgis_lh(logger='nc.temporal',exc=EmptySubsetError(origin='temporal')) + + ret = self[select] + + if return_indices: + raw_idx = np.arange(0,self.shape[0])[select] + ret = (ret,raw_idx) + + return(ret) + + def write_to_netcdf_dataset(self, dataset, **kwargs): + """ + Calls superclass write method then adds ``calendar`` and ``units`` attributes to time variable and time bounds + variable. See documentation for :meth:`~ocgis.interface.base.dimension.base.VectorDimension#write_to_netcdf_dataset`. """ - # remove the unique keyword from the list - grouping = list(deepcopy(grouping)) - grouping.remove('unique') - grouping = get_sorted_seasons(grouping) - # turn the seasons into time regions - time_regions = get_time_regions(grouping, self._get_datetime_value_(), raise_if_incomplete=False) - # holds the boolean selection arrays - dgroups = deque() - new_bounds = np.array([], dtype=object).reshape(-1, 2) - repr_dt = np.array([], dtype=object) - # return temporal dimensions and convert to groups - for dgroup, sub in iter_boolean_groups_from_time_regions(time_regions, self, yield_subset=True, - raise_if_incomplete=False): - dgroups.append(dgroup) - sub_value_datetime = sub._get_datetime_value_() - new_bounds = np.vstack((new_bounds, [min(sub_value_datetime), max(sub_value_datetime)])) - repr_dt = np.append(repr_dt, sub_value_datetime[int(sub.shape[0] / 2)]) - # no date parts yet... 
- date_parts = None + # swap the value/bounds references from datetime to numtime for the duration for the write + if not get_datetime_conversion_state(self.value[0]): + self._value = self.value_numtime + self._bounds = self.bounds_numtime + swapped_value_bounds = True + else: + swapped_value_bounds = False + + super(TemporalDimension, self).write_to_netcdf_dataset(dataset, **kwargs) + + # return the value and bounds to their original state + if swapped_value_bounds: + self._value = self.value_datetime + self._bounds = self.bounds_datetime + + for name in [self.name_value, self.name_bounds]: + try: + variable = dataset.variables[name] + except KeyError: + # bounds are likely missing + if self.bounds is not None: + raise + variable.calendar = self.calendar + variable.units = self.units + + def _format_slice_state_(self, state, slc): + state = super(TemporalDimension, self)._format_slice_state_(state, slc) + state._bounds_datetime = get_none_or_slice(state._bounds_datetime, (slc, slice(None))) + state._bounds_numtime = get_none_or_slice(state._bounds_numtime, (slc, slice(None))) + return state - return new_bounds, date_parts, repr_dt, dgroups - def _get_grouping_all_(self): ''' Applied when the grouping is 'all'. ''' - - value = self._get_datetime_value_() - bounds = self._get_datetime_bounds_() + + value = self.value_datetime + bounds = self.bounds_datetime try: lower = bounds.min() upper = bounds.max() @@ -80,7 +296,7 @@ def _get_grouping_all_(self): except AttributeError: lower = value.min() upper = value.max() - + ## new bounds are simply the minimum and maximum values chosen either from ## the value or bounds array. bounds are given preference. new_bounds = np.array([lower,upper]).reshape(-1,2) @@ -90,27 +306,27 @@ def _get_grouping_all_(self): dgroups = [slice(None)] ## the representative datetime is the center of the value array. 
repr_dt = np.array([value[int((self.value.shape[0]/2)-1)]]) - + return(new_bounds,date_parts,repr_dt,dgroups) - + def _get_grouping_other_(self,grouping): ''' Applied to groups other than 'all'. ''' - + ## map date parts to index positions in date part storage array and flip ## they key-value pairs group_map = dict(zip(range(0,len(self._date_parts)),self._date_parts,)) group_map_rev = dict(zip(self._date_parts,range(0,len(self._date_parts)),)) - + ## this array will hold the value data constructed differently depending ## on if temporal bounds are present value = np.empty((self.value.shape[0],3),dtype=object) - + ## reference the value and bounds datetime object arrays - value_datetime = self._get_datetime_value_() - value_datetime_bounds = self._get_datetime_bounds_() - + value_datetime = self.value_datetime + value_datetime_bounds = self.bounds_datetime + ## populate the value array depending on the presence of bounds if self.bounds is None: value[:,:] = value_datetime.reshape(-1,1) @@ -119,15 +335,15 @@ def _get_grouping_other_(self,grouping): value[:,0] = value_datetime_bounds[:,0] value[:,1] = value_datetime value[:,2] = value_datetime_bounds[:,1] - + def _get_attrs_(dt): return([dt.year,dt.month,dt.day,dt.hour,dt.minute,dt.second]) - + ## extract the date parts parts = np.empty((len(self.value),len(self._date_parts)),dtype=int) for row in range(parts.shape[0]): parts[row,:] = _get_attrs_(value[row,1]) - + ## grouping is different for date part combinations v. seasonal ## aggregation. 
if all([isinstance(ii,basestring) for ii in grouping]): @@ -138,16 +354,16 @@ def _get_attrs_(dt): else: fill = [None] unique.append(fill) - + select = deque() idx2_seq = range(len(self._date_parts)) for idx in itertools.product(*[range(len(u)) for u in unique]): select.append([unique[idx2][idx[idx2]] for idx2 in idx2_seq]) select = np.array(select) dgroups = deque() - + idx_cmp = [group_map_rev[group] for group in grouping] - + keep_select = [] for idx in range(select.shape[0]): match = select[idx,idx_cmp] == parts[:,idx_cmp] @@ -157,7 +373,7 @@ def _get_attrs_(dt): dgroups.append(dgrp) select = select[keep_select,:] assert(len(dgroups) == select.shape[0]) - + dtype = [(dp,object) for dp in self._date_parts] ## this is for seasonal aggregations else: @@ -174,7 +390,7 @@ def _get_attrs_(dt): else: has_year = False years = [None] - + dgroups = deque() grouping_season = deque() @@ -195,11 +411,11 @@ def _get_attrs_(dt): grouping_season.append([season,year]) dtype = [('months',object),('year',int)] grouping = grouping_season - + ## init arrays to hold values and bounds for the grouped data new_value = np.empty((len(dgroups),),dtype=dtype) new_bounds = np.empty((len(dgroups),2),dtype=object) - + for idx,dgrp in enumerate(dgroups): ## tuple conversion is required for structure arrays: http://docs.scipy.org/doc/numpy/user/basics.rec.html#filling-structured-arrays try: @@ -214,89 +430,14 @@ def _get_attrs_(dt): new_value[idx]['months'] = grouping[idx][0] sel = value[dgrp][:,(0,2)] new_bounds[idx,:] = [sel.min(),sel.max()] - + new_bounds = np.atleast_2d(new_bounds).reshape(-1,2) date_parts = np.atleast_1d(new_value) ## this is the representative center time for the temporal group repr_dt = self._get_grouping_representative_datetime_(grouping,new_bounds,date_parts) - + return(new_bounds,date_parts,repr_dt,dgroups) - - def get_iter(self,*args,**kwds): - r_name_value = self.name_value - r_set_date_parts = self._set_date_parts_ - for ii,yld in 
super(TemporalDimension,self).get_iter(*args,**kwds): - r_value = yld[r_name_value] - r_set_date_parts(yld,r_value) - yield(ii,yld) - - def _set_date_parts_(self,yld,value): - yld['year'],yld['month'],yld['day'] = value.year,value.month,value.day - - def get_time_region(self,time_region,return_indices=False): - assert(isinstance(time_region,dict)) - - ## return the values to use for the temporal region subsetting. - value = self._get_datetime_value_() - bounds = self._get_datetime_bounds_() - - ## switch to indicate if bounds or centroid datetimes are to be used. - use_bounds = False if bounds is None else True - - ## remove any none values in the time_region dictionary. this will save - ## time in iteration. - time_region = time_region.copy() - time_region = {k:v for k,v in time_region.iteritems() if v is not None} - assert(len(time_region) > 0) - - ## this is the boolean selection array. - select = np.zeros(self.shape[0],dtype=bool) - - ## for each row, determine if the date criterion are met updating the - ## select matrix accordingly. - row_check = np.zeros(len(time_region),dtype=bool) - - for idx_row in range(select.shape[0]): - ## do the comparison for each time_region element. - if use_bounds: - row = bounds[idx_row,:] - else: - row = value[idx_row] - for ii,(k,v) in enumerate(time_region.iteritems()): - if use_bounds: - to_include = [] - for element in v: - kwds = {k:element} - to_include.append(get_is_date_between(row[0],row[1],**kwds)) - fill = any(to_include) - else: - part = getattr(row,k) - fill = True if part in v else False - row_check[ii] = fill - if row_check.all(): - select[idx_row] = True - - if not select.any(): - ocgis_lh(logger='nc.temporal',exc=EmptySubsetError(origin='temporal')) - ret = self[select] - - if return_indices: - raw_idx = np.arange(0,self.shape[0])[select] - ret = (ret,raw_idx) - - return(ret) - - def _get_datetime_bounds_(self): - '''Intended for subclasses to overload the method for accessing the datetime - value. 
For example, netCDF times are floats that must be converted.''' - return(self.bounds) - - def _get_datetime_value_(self): - '''Intended for subclasses to overload the method for accessing the datetime - value. For example, netCDF times are floats that must be converted.''' - return(self.value) - def _get_grouping_representative_datetime_(self,grouping,bounds,value): ref_value = value ref_bounds = bounds @@ -360,87 +501,252 @@ def _get_grouping_representative_datetime_(self,grouping,bounds,value): ret[idx] = fill return(ret) + def _get_grouping_seasonal_unique_(self, grouping): + """ + :param list grouping: A seasonal list containing the unique flag. + + >>> grouping = [[12, 1, 2], [3, 4, 5], [6, 7, 8], [9, 10, 11], 'unique'] + + :returns: A tuple of elements necessary to create a :class:`ocgis.interface.base.dimension.temporal.TemporalGroupDimension` + object. + :rtype: tuple + """ + + # remove the unique keyword from the list + grouping = list(deepcopy(grouping)) + grouping.remove('unique') + grouping = get_sorted_seasons(grouping) + # turn the seasons into time regions + time_regions = get_time_regions(grouping, self.value_datetime, raise_if_incomplete=False) + # holds the boolean selection arrays + dgroups = deque() + new_bounds = np.array([], dtype=object).reshape(-1, 2) + repr_dt = np.array([], dtype=object) + # return temporal dimensions and convert to groups + for dgroup, sub in iter_boolean_groups_from_time_regions(time_regions, self, yield_subset=True, + raise_if_incomplete=False): + dgroups.append(dgroup) + sub_value_datetime = sub.value_datetime + new_bounds = np.vstack((new_bounds, [min(sub_value_datetime), max(sub_value_datetime)])) + repr_dt = np.append(repr_dt, sub_value_datetime[int(sub.shape[0] / 2)]) + # no date parts yet... 
+ date_parts = None + + return new_bounds, date_parts, repr_dt, dgroups + def _get_iter_value_bounds_(self): - return(self._get_datetime_value_(),self._get_datetime_bounds_()) - - def _get_temporal_group_dimension_(self,*args,**kwds): - return(TemporalGroupDimension(*args,**kwds)) + if self.format_time: + ret = self.value_datetime, self.bounds_datetime + else: + ret = self.value_numtime, self.bounds_numtime + return ret + + def _get_temporal_group_dimension_(self, *args, **kwargs): + return TemporalGroupDimension(*args, **kwargs) + + def _set_date_parts_(self, yld, value): + if self.format_time: + fill = (value.year, value.month, value.day) + else: + fill = [None]*3 + yld['year'], yld['month'], yld['day'] = fill class TemporalGroupDimension(TemporalDimension): - - def __init__(self,*args,**kwds): - self.grouping = kwds.pop('grouping') - self.dgroups = kwds.pop('dgroups') - self.date_parts = kwds.pop('date_parts') - - TemporalDimension.__init__(self,*args,**kwds) + def __init__(self, *args, **kwargs): + self.grouping = kwargs.pop('grouping') + self.dgroups = kwargs.pop('dgroups') + self.date_parts = kwargs.pop('date_parts') -def iter_boolean_groups_from_time_regions(time_regions, temporal_dimension, yield_subset=False, - raise_if_incomplete=True): + TemporalDimension.__init__(self, *args, **kwargs) + + def write_to_netcdf_dataset(self, dataset, **kwargs): + """ + For CF-compliance, ensures climatology bounds are correctly attributed. + """ + + previous_name_bounds = self.name_bounds + self.name_bounds = 'climatology_bounds' + try: + super(TemporalGroupDimension, self).write_to_netcdf_dataset(dataset, **kwargs) + variable = dataset.variables[self.name_value] + variable.climatology = variable.bounds + variable.delncattr('bounds') + finally: + self.name_bounds = previous_name_bounds + + +def get_datetime_conversion_state(archetype): + """ + :param archetype: The object to test for conversion to datetime. 
+ :type archetyp: float, :class:`datetime.datetime`, or :class:`netcdftime.datetime` + :returns: ``True`` if the object should be converted to datetime. + :rtype: bool """ - :param time_regions: Sequence of nested time region dictionaries. - >>> [[{'month':[1,2],'year':[2024]},...],...] + if isinstance(archetype, (datetime.datetime, netcdftime.datetime)): + ret = False + else: + ret = True + return ret - :param temporal_dimension: A temporal dimension object. - :type temporal_dimension: :class:`ocgis.interface.base.dimension.temporal.TemporalDimension` - :param bool yield_subset: If ``True``, yield a tuple with the subset of ``temporal_dimension``. - :param bool raise_if_incomplete: If ``True``, raise an exception if the season is incomplete. - :returns: boolean ndarray vector with yld.shape == temporal_dimension.shape - :raises: IncompleteSeasonError + +def get_datetime_from_months_time_units(vec, units, month_centroid=16): """ + Convert a vector of months offsets into :class:``datetime.datetime`` objects. - for sub_time_regions in time_regions: - # incomplete seasons are searched for in the nested loop. this indicates if a time region group should be - # considered a season. - is_complete = True - idx_append = np.array([], dtype=int) - for time_region in sub_time_regions: - sub, idx = temporal_dimension.get_time_region(time_region, return_indices=True) - ## insert a check to ensure there are months present for each time region - months = set([d.month for d in sub._get_datetime_value_()]) - try: - assert (months == set(time_region['month'])) - except AssertionError: - if raise_if_incomplete: - for m in time_region['month']: - if m not in months: - raise IncompleteSeasonError(time_region, month=m) - else: - is_complete = False - idx_append = np.append(idx_append, idx) + :param vec: Vector of integer month offsets. + :type vec: :class:``np.ndarray`` + :param str units: Source units to parse. 
+ :param month_centroid: The center day of the month to use when creating the :class:``datetime.datetime`` objects. - # if the season is complete append, otherwise pass to next iteration. - if is_complete: - dgroup = np.zeros(temporal_dimension.shape[0], dtype=bool) - dgroup[idx_append] = True - else: - continue + >>> units = "months since 1978-12" + >>> vec = np.array([0,1,2,3]) + >>> get_datetime_from_months_time_units(vec,units) + array([1978-12-16 00:00:00, 1979-01-16 00:00:00, 1979-02-16 00:00:00, + 1979-03-16 00:00:00], dtype=object) + """ + + # only work with integer inputs + vec = np.array(vec, dtype=int) + + def _get_datetime_(current_year, origin_month, offset_month, current_month_correction, month_centroid): + return datetime.datetime(current_year, (origin_month + offset_month) - current_month_correction, month_centroid) + + origin = get_origin_datetime_from_months_units(units) + origin_month = origin.month + current_year = origin.year + current_month_correction = 0 + ret = np.ones(len(vec), dtype=object) + for ii, offset_month in enumerate(vec): + try: + fill = _get_datetime_(current_year, origin_month, offset_month, current_month_correction, month_centroid) + except ValueError: + current_month_correction += 12 + current_year += 1 + fill = _get_datetime_(current_year, origin_month, offset_month, current_month_correction, month_centroid) + ret[ii] = fill + return ret + + +def get_difference_in_months(origin, target): + """ + Get the integer difference in months between an origin and target datetime. + + :param :class:``datetime.datetime`` origin: The origin datetime object. + :param :class:``datetime.datetime`` target: The target datetime object. 
+ + >>> get_difference_in_months(datetime.datetime(1978,12,1),datetime.datetime(1979,3,1)) + 3 + >>> get_difference_in_months(datetime.datetime(1978,12,1),datetime.datetime(1978,7,1)) + -5 + """ + + def _count_(start_month, stop_month, start_year, stop_year, direction): + count = 0 + curr_month = start_month + curr_year = start_year + while True: + if curr_month == stop_month and curr_year == stop_year: + break + else: + pass + + if direction == 'forward': + curr_month += 1 + elif direction == 'backward': + curr_month -= 1 + else: + raise (NotImplementedError(direction)) + + if curr_month == 13: + curr_month = 1 + curr_year += 1 + if curr_month == 0: + curr_month = 12 + curr_year -= 1 + + if direction == 'forward': + count += 1 + else: + count -= 1 + + return count + + origin_month, origin_year = origin.month, origin.year + target_month, target_year = target.month, target.year + + if origin <= target: + direction = 'forward' + else: + direction = 'backward' + + diff_months = _count_(origin_month, target_month, origin_year, target_year, direction) + return diff_months - if yield_subset: - yld = (dgroup, temporal_dimension[dgroup]) - else: - yld = dgroup - yield yld - def get_is_interannual(sequence): - ''' - Returns ``True`` if an integer sequence representing a season crosses a year - boundary. - + """ + Returns ``True`` if an integer sequence representing a season crosses a year boundary. + >>> sequence = [11,12,1] >>> get_is_interannual(sequence) True - ''' - + """ + if 12 in sequence and 1 in sequence: ret = True else: ret = False - return(ret) + return ret + + +def get_num_from_months_time_units(vec, units, dtype=None): + """ + Convert a vector of :class:``datetime.datetime`` objects into an integer vector. + + :param vec: Input vector to convert. + :type vec: :class:``np.ndarray`` + :param str units: Source units to parse. + :param type dtype: Output vector array type. 
+ + >>> units = "months since 1978-12" + >>> vec = np.array([datetime.datetime(1978,12,1),datetime.datetime(1979,1,1)]) + >>> get_num_from_months_time_units(vec,units) + array([0, 1]) + """ + + origin = get_origin_datetime_from_months_units(units) + ret = [get_difference_in_months(origin, target) for target in vec] + return np.array(ret, dtype=dtype) + + +def get_origin_datetime_from_months_units(units): + """ + Get the origin Python :class:``datetime.datetime`` object from a month string. + + :param str units: Source units to parse. + :returns: :class:``datetime.datetime`` + + >>> units = "months since 1978-12" + >>> get_origin_datetime_from_months_units(units) + datetime.datetime(1978, 12, 1, 0, 0) + """ + + origin = ' '.join(units.split(' ')[2:]) + to_try = ['%Y-%m', '%Y-%m-%d %H'] + converted = False + for tt in to_try: + try: + origin = datetime.datetime.strptime(origin, tt) + converted = True + break + except ValueError as e: + continue + if not converted: + raise e + return origin def get_sorted_seasons(seasons, method='max'): @@ -459,79 +765,121 @@ def get_sorted_seasons(seasons, method='max'): methods = {'min': min, 'max': max} season_map = {} - for ii,season in enumerate(seasons): + for ii, season in enumerate(seasons): season_map[ii] = season max_map = {} - for key,value in season_map.iteritems(): + for key, value in season_map.iteritems(): max_map[methods[method](value)] = key sorted_maxes = sorted(max_map) ret = [seasons[max_map[s]] for s in sorted_maxes] ret = deepcopy(ret) - return(ret) + return ret -def get_time_regions(seasons,dates,raise_if_incomplete=True): - ''' +def get_time_regions(seasons, dates, raise_if_incomplete=True): + """ >>> seasons = [[6,7,8],[9,10,11],[12,1,2]] >>> dates = - ''' - ## extract the years from the data vector collapsing them to a unique - ## set then sort in ascending order + """ + + # extract the years from the data vector collapsing them to a unique set then sort in ascending order years = list(set([d.year for d in 
dates])) years.sort() - ## determine if any of the seasons are interannual - interannual_check = map(get_is_interannual,seasons) - ## holds the return value + # determine if any of the seasons are interannual + interannual_check = map(get_is_interannual, seasons) + # holds the return value time_regions = [] - ## the interannual cases requires two time region sequences to - ## properly extract + # the interannual cases requires two time region sequences to properly extract if any(interannual_check): - ## loop over years first to ensure each year is accounted for - ## in the time region output - for ii_year,year in enumerate(years): - ## the interannual flag is used internally for simple optimization - for ic,cg in itertools.izip(interannual_check,seasons): - ## if no exception is raised for an incomplete season, - ## this flag indicate whether to append to the output + # loop over years first to ensure each year is accounted for in the time region output + for ii_year, year in enumerate(years): + # the interannual flag is used internally for simple optimization + for ic, cg in itertools.izip(interannual_check, seasons): + # if no exception is raised for an incomplete season, this flag indicate whether to append to the output append_to_time_regions = True if ic: - ## copy and sort in descending order the season because - ## december of the current year should be first. + # copy and sort in descending order the season because december of the current year should be first. _cg = deepcopy(cg) _cg.sort() _cg.reverse() - ## look for the interannual break and split the season - ## into the current year and next year. + # look for the interannual break and split the season into the current year and next year. 
diff = np.abs(np.diff(_cg)) - split_base = np.arange(1,len(_cg)) + split_base = np.arange(1, len(_cg)) split_indices = split_base[diff > 1] - split = np.split(_cg,split_indices) - ## will hold the sub-element time regions + split = np.split(_cg, split_indices) + # will hold the sub-element time regions sub_time_region = [] - for ii_split,s in enumerate(split): + for ii_split, s in enumerate(split): try: - to_append_sub = {'year':[years[ii_year+ii_split]],'month':s.tolist()} + to_append_sub = {'year': [years[ii_year + ii_split]], 'month': s.tolist()} sub_time_region.append(to_append_sub) - ## there may not be another year of data for an - ## interannual season. we DO NOT keep incomplete - ## seasons. + # there may not be another year of data for an interannual season. we DO NOT keep incomplete + # seasons. except IndexError: - ## don't just blow through an incomplete season - ## unless asked to + # don't just blow through an incomplete season unless asked to if raise_if_incomplete: - raise(IncompleteSeasonError(_cg,year)) + raise (IncompleteSeasonError(_cg, year)) else: append_to_time_regions = False continue to_append = sub_time_region else: - to_append = [{'year':[year],'month':cg}] + to_append = [{'year': [year], 'month': cg}] if append_to_time_regions: time_regions.append(to_append) - ## without interannual seasons the time regions are unique combos of - ## the years and seasons designations + # without interannual seasons the time regions are unique combos of the years and seasons designations else: - for year,season in itertools.product(years,seasons): - time_regions.append([{'year':[year],'month':season}]) - - return(time_regions) \ No newline at end of file + for year, season in itertools.product(years, seasons): + time_regions.append([{'year': [year], 'month': season}]) + + return time_regions + + +def iter_boolean_groups_from_time_regions(time_regions, temporal_dimension, yield_subset=False, + raise_if_incomplete=True): + """ + :param time_regions: 
Sequence of nested time region dictionaries. + + >>> [[{'month':[1,2],'year':[2024]},...],...] + + :param temporal_dimension: A temporal dimension object. + :type temporal_dimension: :class:`ocgis.interface.base.dimension.temporal.TemporalDimension` + :param bool yield_subset: If ``True``, yield a tuple with the subset of ``temporal_dimension``. + :param bool raise_if_incomplete: If ``True``, raise an exception if the season is incomplete. + :returns: boolean ndarray vector with yld.shape == temporal_dimension.shape + :raises: IncompleteSeasonError + """ + + for sub_time_regions in time_regions: + # incomplete seasons are searched for in the nested loop. this indicates if a time region group should be + # considered a season. + is_complete = True + idx_append = np.array([], dtype=int) + for time_region in sub_time_regions: + sub, idx = temporal_dimension.get_time_region(time_region, return_indices=True) + ## insert a check to ensure there are months present for each time region + months = set([d.month for d in sub.value_datetime]) + try: + assert (months == set(time_region['month'])) + except AssertionError: + if raise_if_incomplete: + for m in time_region['month']: + if m not in months: + raise IncompleteSeasonError(time_region, month=m) + else: + is_complete = False + idx_append = np.append(idx_append, idx) + + # if the season is complete append, otherwise pass to next iteration. 
+ if is_complete: + dgroup = np.zeros(temporal_dimension.shape[0], dtype=bool) + dgroup[idx_append] = True + else: + continue + + if yield_subset: + yld = (dgroup, temporal_dimension[dgroup]) + else: + yld = dgroup + + yield yld diff --git a/src/ocgis/interface/base/field.py b/src/ocgis/interface/base/field.py index 8f38867fd..a9f6125b1 100644 --- a/src/ocgis/interface/base/field.py +++ b/src/ocgis/interface/base/field.py @@ -1,26 +1,55 @@ -from ocgis.util.helpers import get_default_or_apply, get_none_or_slice,\ - get_formatted_slice, get_reduced_slice, assert_raise -import numpy as np +from contextlib import contextmanager from copy import copy, deepcopy from collections import deque import itertools +import logging + +import numpy as np from shapely.ops import cascaded_union from shapely.geometry.multipoint import MultiPoint from shapely.geometry.multipolygon import MultiPolygon +from shapely.geometry.point import Point + +from ocgis.interface.base.attributes import Attributes +from ocgis.util.helpers import get_default_or_apply, get_none_or_slice, get_formatted_slice, get_reduced_slice, \ + set_name_attributes from ocgis.interface.base.variable import Variable, VariableCollection from ocgis import constants -from shapely.geometry.point import Point -import logging from ocgis.util.logging_ocgis import ocgis_lh - -class Field(object): - _axis_map = {'realization':0,'temporal':1,'level':2} - _axes = ['R','T','Z','Y','X'] - _value_dimension_names = ('realization','temporal','level','row','column') - - def __init__(self,variables=None,realization=None,temporal=None,level=None, - spatial=None,meta=None,uid=None,name=None): + +class Field(Attributes): + """ + :param variables: A variable collection containing the values for the field. + :type variables: :class:`~ocgis.interface.base.variable.VariableCollection` + :param realization: The realization dimension. 
+ :type realization: :class:`~ocgis.interface.base.dimension.base.VectorDimension` + :param temporal: The temporal dimension. + :type temporal: :class:`~ocgis.interface.base.dimension.temporal.TemporalDimension` + :param level: The level dimension. + :type level: :class:`~ocgis.interface.base.dimension.base.VectorDimension` + :param spatial: The spatial dimension. + :type spatial: :class:`~ocgis.interface.base.dimension.spatial.SpatialDimension` + :param dict meta: A dictionary containing additional metadata elements. + :param int uid: A unique identifier for the field. + :param str name: A string name for the field. + :param bool regrid_destination: If ``True``, this field should be used as a regrid destination target. + :param dict attrs: A dictionary of arbitrary key-value attributes. + """ + + _axis_map = {'realization': 0, 'temporal': 1, 'level': 2} + _axes = ['R', 'T', 'Z', 'Y', 'X'] + _value_dimension_names = ('realization', 'temporal', 'level', 'row', 'column') + _variables = None + + def __init__(self, variables=None, realization=None, temporal=None, level=None, spatial=None, meta=None, uid=None, + name=None, regrid_destination=False, attrs=None): + + if spatial is None: + msg = 'At least "spatial" is required.' + raise ValueError(msg) + + Attributes.__init__(self, attrs=attrs) self.realization = realization self.temporal = temporal @@ -28,73 +57,112 @@ def __init__(self,variables=None,realization=None,temporal=None,level=None, self.level = level self.spatial = spatial self.meta = meta or {} - ## holds raw values for aggregated datasets. + self.regrid_destination = regrid_destination + # holds raw values for aggregated datasets. self._raw = None - ## add variables - dimensions are needed first for shape checking + # add variables - dimensions are needed first for shape checking self.variables = variables self._name = name + # flag used in regridding operations. this should be updated by the driver. 
self._should_regrid = False # flag used in regridding to indicate if a coordinate system was assigned by the user in the driver. self._has_assigned_coordinate_system = False - - def __getitem__(self,slc): - slc = get_formatted_slice(slc,5) + + # set default names for the dimensions + name_mapping = {self.realization: 'realization', self.level: 'level'} + set_name_attributes(name_mapping) + + def __iter__(self): + raise NotImplementedError + + def __getitem__(self, slc): + slc = get_formatted_slice(slc, 5) ret = copy(self) - ret.realization = get_none_or_slice(self.realization,slc[0]) - ret.temporal = get_none_or_slice(self.temporal,slc[1]) - ret.level = get_none_or_slice(self.level,slc[2]) - ret.spatial = get_none_or_slice(self.spatial,(slc[3],slc[4])) - + ret.realization = get_none_or_slice(self.realization, slc[0]) + ret.temporal = get_none_or_slice(self.temporal, slc[1]) + ret.level = get_none_or_slice(self.level, slc[2]) + ret.spatial = get_none_or_slice(self.spatial, (slc[3], slc[4])) + ret.variables = self.variables.get_sliced_variables(slc) - return(ret) + return ret + + @property + def crs(self): + return self.spatial.crs @property def name(self): + """ + :returns: The name of the field derived from its variables if not provided. 
+ :rtype str: + """ + if self._name is None: ret = '_'.join([v.alias for v in self.variables.itervalues()]) else: ret = self._name - return(ret) + return ret + + @name.setter + def name(self, value): + self._name = value @property def shape(self): - shape_realization = get_default_or_apply(self.realization,len,1) - shape_temporal = get_default_or_apply(self.temporal,len,1) - shape_level = get_default_or_apply(self.level,len,1) - shape_spatial = get_default_or_apply(self.spatial,lambda x: x.shape,(1,1)) - ret = (shape_realization,shape_temporal,shape_level,shape_spatial[0],shape_spatial[1]) - return(ret) + """ + :returns: The shape of the field as a five-element tuple: (realization, time, level, row, column) + :rtype: tuple + """ + + shape_realization = get_default_or_apply(self.realization, len, 1) + shape_temporal = get_default_or_apply(self.temporal, len, 1) + shape_level = get_default_or_apply(self.level, len, 1) + shape_spatial = get_default_or_apply(self.spatial, lambda x: x.shape, (1, 1)) + ret = (shape_realization, shape_temporal, shape_level, shape_spatial[0], shape_spatial[1]) + return ret @property def shape_as_dict(self): - return(dict(zip(self._axes,self.shape))) - + """ + :returns: The shape of the field as a dictionary with keys corresponding to axis letter designation defined in + :attr:`~ocgis.interface.base.field.Field._axes` and value as the shape. 
+ :rtype dict: + """ + + return dict(zip(self._axes, self.shape)) + @property def variables(self): - return(self._variables) + return self._variables + @variables.setter - def variables(self,value): - if isinstance(value,Variable): - value = VariableCollection(variables=[value]) - assert_raise(isinstance(value,VariableCollection),exc=ValueError('The "variables" keyword must be a Variable object.')) + def variables(self, value): + if value is None: + value = VariableCollection() + else: + if isinstance(value, Variable): + value = VariableCollection(variables=[value]) + + if not isinstance(value, VariableCollection): + raise ValueError('The value must be a Variable or VariableCollection object.') + self._variables = value for v in value.itervalues(): v._field = self if v._value is not None: - assert(v._value.shape == self.shape) - - def get_between(self,dim,lower,upper): + assert v._value.shape == self.shape + + def get_between(self, dim, lower, upper): pos = self._axis_map[dim] - ref = getattr(self,dim) - ## TODO: minor redundancy in slicing and returning dimension - new_dim,indices = ref.get_between(lower,upper,return_indices=True) + ref = getattr(self, dim) + _, indices = ref.get_between(lower, upper, return_indices=True) slc = get_reduced_slice(indices) - slc_field = [slice(None)]*5 + slc_field = [slice(None)] * 5 slc_field[pos] = slc ret = self[slc_field] - return(ret) + return ret def get_clip(self, polygon, use_spatial_index=True, select_nearest=False): return(self._get_spatial_operation_('get_clip', polygon, use_spatial_index=use_spatial_index, @@ -103,27 +171,23 @@ def get_clip(self, polygon, use_spatial_index=True, select_nearest=False): def get_intersects(self, polygon, use_spatial_index=True, select_nearest=False): return(self._get_spatial_operation_('get_intersects', polygon, use_spatial_index=use_spatial_index, select_nearest=select_nearest)) - - def get_iter(self,add_masked_value=True,value_keys=None): - + + def get_iter(self, add_masked_value=True, 
value_keys=None): + def _get_dimension_iterator_1d_(target): - attr = getattr(self,target) + attr = getattr(self, target) if attr is None: - ret = [(0,{})] + ret = [(0, {})] else: ret = attr.get_iter() - return(ret) - + return ret + is_masked = np.ma.is_masked - - ## there is not level, these keys will need to be provided a None value - has_level = True if self.level is not None else False - r_level_defaults = dict.fromkeys(constants.level_headers) - - ## value keys occur when the value array is in fact a structured array with - ## field definitions. this occurs with keyed output functions... + + # value keys occur when the value array is in fact a structured array with field definitions. this occurs with + # keyed output functions... has_value_keys = False if value_keys is None else True - + r_gid_name = self.spatial.name_uid r_name = self.name @@ -132,74 +196,47 @@ def _get_dimension_iterator_1d_(target): yld['name'] = r_name ref_value = variable.value masked_value = ref_value.fill_value - iters = map(_get_dimension_iterator_1d_,['realization','temporal','level']) + iters = map(_get_dimension_iterator_1d_, ['realization', 'temporal', 'level']) iters.append(self.spatial.get_geom_iter()) - for [(ridx,rlz),(tidx,t),(lidx,l),(sridx,scidx,geom,gid)] in itertools.product(*iters): + for [(ridx, rlz), (tidx, t), (lidx, l), (sridx, scidx, geom, gid)] in itertools.product(*iters): to_yld = deepcopy(yld) - ref_idx = ref_value[ridx,tidx,lidx,sridx,scidx] - - ## determine if the data is masked + ref_idx = ref_value[ridx, tidx, lidx, sridx, scidx] + + # determine if the data is masked if is_masked(ref_idx): if add_masked_value: ref_idx = masked_value else: continue - - ## realization, time, and level values. + + # realization, time, and level values. 
to_yld.update(rlz) to_yld.update(t) to_yld.update(l) - - ## add geometries to the output + + # add geometries to the output to_yld['geom'] = geom to_yld[r_gid_name] = gid - - ## if there is no level, defaults are needs to satisfy converters - if not has_level: - to_yld.update(r_level_defaults) - - ## the target value is a structure array, multiple value elements - ## need to be added. these outputs do not a specific value, so - ## it is not added. there may also be multiple elements in the - ## structure which changes how the loop progresses. + + # the target value is a structure array, multiple value elements need to be added. these outputs do not + # a specific value, so it is not added. there may also be multiple elements in the structure which + # changes how the loop progresses. if has_value_keys: for ii in range(ref_idx.shape[0]): for vk in value_keys: try: to_yld[vk] = ref_idx.data[vk][ii] - ## attempt to access the data directly. masked determination - ## is done above. + # attempt to access the data directly. masked determination is done above. 
except ValueError: to_yld[vk] = ref_idx.data[vk][ii] - yield(to_yld) + yield (to_yld) else: to_yld['value'] = ref_idx - yield(to_yld) + yield to_yld def get_shallow_copy(self): return(copy(self)) - def get_time_region(self,time_region): - ret = copy(self) - ret.temporal,indices = self.temporal.get_time_region(time_region,return_indices=True) - slc = [slice(None),indices,slice(None),slice(None),slice(None)] - variables = self.variables.get_sliced_variables(slc) - ret.variables = variables - return(ret) - - def _get_spatial_operation_(self, attr, polygon, use_spatial_index=True, select_nearest=False): - ref = getattr(self.spatial, attr) - ret = copy(self) - ret.spatial, slc = ref(polygon, return_indices=True, use_spatial_index=use_spatial_index, - select_nearest=select_nearest) - slc = [slice(None), slice(None), slice(None)] + list(slc) - ret.variables = self.variables.get_sliced_variables(slc) - - ## we need to update the value mask with the geometry mask - self._set_new_value_mask_(ret, ret.spatial.get_mask()) - - return(ret) - def get_spatially_aggregated(self,new_spatial_uid=None): def _get_geometry_union_(value): @@ -212,15 +249,15 @@ def _get_geometry_union_(value): else: processed_to_union.append(geom) unioned = cascaded_union(processed_to_union) - + ## convert any unioned points to MultiPoint if isinstance(unioned,Point): unioned = MultiPoint([unioned]) - + ret = np.ma.array([[None]],mask=False,dtype=object) ret[0,0] = unioned return(ret) - + ret = copy(self) ## the spatial dimension needs to be deep copied so the grid may be ## dereferenced. 
@@ -232,7 +269,7 @@ def _get_geometry_union_(value): unioned = _get_geometry_union_(ret.spatial.geom.point.value) ret.spatial.geom.point._value = unioned ret.spatial.geom.point.uid = new_spatial_uid - + try: if ret.spatial.geom.polygon is not None: unioned = _get_geometry_union_(ret.spatial.geom.polygon.value) @@ -241,7 +278,7 @@ def _get_geometry_union_(value): except ImproperPolygonBoundsError: msg = 'No polygon representation to aggregate.' ocgis_lh(msg=msg,logger='field',level=logging.WARN) - + ## update the spatial uid ret.spatial.uid = new_spatial_uid ## there are no grid objects for aggregated spatial dimensions. @@ -254,7 +291,7 @@ def _get_geometry_union_(value): itrs = [range(dim) for dim in shp[0:3]] weights = self.spatial.weights ref_average = np.ma.average - + ## old values for the variables will be stored in the _raw container, but ## to avoid reference issues, we need to copy the variables new_variables = [] @@ -267,19 +304,147 @@ def _get_geometry_union_(value): new_variable._value = fill new_variables.append(new_variable) ret.variables = VariableCollection(variables=new_variables) - + ## the geometry type of the point dimension is now MultiPoint ret.spatial.geom.point._geom_type = 'MultiPoint' - + ## we want to keep a copy of the raw data around for later calculations. ret._raw = copy(self) - + return(ret) - def _get_value_from_source_(self,*args,**kwds): - raise(NotImplementedError) - ## TODO: remember to apply the geometry mask to fresh values!! + def get_time_region(self,time_region): + ret = copy(self) + ret.temporal,indices = self.temporal.get_time_region(time_region,return_indices=True) + slc = [slice(None),indices,slice(None),slice(None),slice(None)] + variables = self.variables.get_sliced_variables(slc) + ret.variables = variables + return(ret) + + def write_to_netcdf_dataset(self, dataset, file_only=False, **kwargs): + """ + Write the field object to an open netCDF dataset object. + + :param dataset: The open dataset object. 
+ :type dataset: :class:`netCDF4.Dataset` + :param bool file_only: If ``True``, we are not filling the value variables. Only the file schema and dimension + values will be written. + :param kwargs: Extra keyword arguments in addition to ``dimensions`` to pass to ``createVariable``. See + http://unidata.github.io/netcdf4-python/netCDF4.Dataset-class.html#createVariable + :raises: ValueError + """ + + if self.realization is not None: + msg = 'Fields with a realization dimension may not be written to netCDF.' + raise ValueError(msg) + + @contextmanager + def name_scope(target, name, axis): + previous_name = target.name + previous_axis = target.axis + try: + if target.name is None: + target.name = name + if target.axis is None: + target.axis = axis + yield target + finally: + target.name = previous_name + target.axis = previous_axis + + value_dimensions = [] + try: + with name_scope(self.temporal, 'time', 'T'): + self.temporal.write_to_netcdf_dataset(dataset, **kwargs) + value_dimensions.append(self.temporal.name) + except AttributeError: + if self.temporal is not None: + raise + + try: + with name_scope(self.level, 'level', 'Z'): + self.level.write_to_netcdf_dataset(dataset, **kwargs) + if self.level is not None: + value_dimensions.append(self.level.name) + except AttributeError: + if self.level is not None: + raise + + try: + with name_scope(self.spatial.grid.row, 'yc', 'Y'): + with name_scope(self.spatial.grid.col, 'xc', 'X'): + self.spatial.grid.write_to_netcdf_dataset(dataset, **kwargs) + value_dimensions.append(self.spatial.grid.row.name) + value_dimensions.append(self.spatial.grid.col.name) + except AttributeError: + # write the grid.value directly + if self.spatial.grid.row is None or self.spatial.grid.col is None: + self.spatial.grid.write_to_netcdf_dataset(dataset, **kwargs) + value_dimensions.append('yc') + value_dimensions.append('xc') + else: + raise + + try: + variable_crs = self.spatial.crs.write_to_rootgrp(dataset) + except AttributeError: + if 
self.spatial.crs is not None: + raise + + kwargs['dimensions'] = value_dimensions + for variable in self.variables.itervalues(): + kwargs['fill_value'] = variable.fill_value + nc_variable = dataset.createVariable(variable.alias, variable.dtype, **kwargs) + if not file_only: + nc_variable[:] = variable.value + variable.write_attributes_to_netcdf_object(nc_variable) + try: + nc_variable.grid_mapping = variable_crs._name + except UnboundLocalError: + if self.spatial.crs is not None: + raise + + try: + nc_variable.units = variable.units + except TypeError: + # likely none for the units + if variable.units is None: + nc_variable.units = '' + else: + raise + + self.write_attributes_to_netcdf_object(dataset) + + def _get_spatial_operation_(self, attr, polygon, use_spatial_index=True, select_nearest=False): + ref = getattr(self.spatial, attr) + ret = copy(self) + ret.spatial, slc = ref(polygon, return_indices=True, use_spatial_index=use_spatial_index, + select_nearest=select_nearest) + slc = [slice(None), slice(None), slice(None)] + list(slc) + ret.variables = self.variables.get_sliced_variables(slc) + + ## we need to update the value mask with the geometry mask + self._set_new_value_mask_(ret, ret.spatial.get_mask()) + + return(ret) + + def _get_value_from_source_(self, *args, **kwargs): + raise NotImplementedError + + def _get_variable_iter_yield_(self, variable): + """ + Retrieve variable-level information. Overloaded by derived fields. + + :param variable: The variable containing attributes to extract. + :type variable: :class:`~ocgis.Variable` + :returns: A dictionary containing variable field values mapped to keys. 
+ :rtype: dict + """ + + yld = {'did': self.uid, 'variable': variable.name, 'alias': variable.alias, 'vid': variable.uid} + return yld + @staticmethod def _set_new_value_mask_(field,mask): ret_shp = field.shape @@ -287,37 +452,26 @@ def _set_new_value_mask_(field,mask): rng_temporal = range(ret_shp[1]) rng_level = range(ret_shp[2]) ref_logical_or = np.logical_or - + for var in field.variables.itervalues(): if var._value is not None: v = var._value for idx_r,idx_t,idx_l in itertools.product(rng_realization,rng_temporal,rng_level): ref = v[idx_r,idx_t,idx_l] ref.mask = ref_logical_or(ref.mask,mask) - - def _get_variable_iter_yield_(self,variable): - yld = {} - yld['did'] = self.uid - yld['variable'] = variable.name - yld['alias'] = variable.alias - yld['vid'] = variable.uid - return(yld) class DerivedField(Field): - def _get_variable_iter_yield_(self,variable): - yld = {} - yld['cid'] = variable.uid - yld['calc_key'] = variable.name - yld['calc_alias'] = variable.alias - + def _get_variable_iter_yield_(self, variable): + yld = {'cid': variable.uid, 'calc_key': variable.name, 'calc_alias': variable.alias} + raw_variable = variable.parents.values()[0] yld['did'] = self.uid yld['variable'] = raw_variable.name yld['alias'] = raw_variable.alias yld['vid'] = raw_variable.uid - return(yld) + return yld class DerivedMultivariateField(Field): diff --git a/src/ocgis/interface/base/variable.py b/src/ocgis/interface/base/variable.py index 8dd1d0747..0f7fcaf06 100644 --- a/src/ocgis/interface/base/variable.py +++ b/src/ocgis/interface/base/variable.py @@ -1,4 +1,5 @@ from ocgis.api.collection import AbstractCollection +from ocgis.interface.base.attributes import Attributes from ocgis.util.logging_ocgis import ocgis_lh import abc from collections import OrderedDict @@ -22,13 +23,15 @@ class AbstractValueVariable(object): :type units: str or :class:`cfunits.Units` """ __metaclass__ = abc.ABCMeta + _value = None + _conform_units_to = None def __init__(self, value=None, units=None, 
dtype=None, fill_value=None, name=None, conform_units_to=None): - ## if the units value is not None, then convert to string. cfunits.Units - ## may be easily handled this way without checking for the module presence. + # if the units value is not None, then convert to string. cfunits.Units may be easily handled this way without + # checking for the module presence. self.units = str(units) if units is not None else None self.conform_units_to = conform_units_to - self._value = value + self.value = value self._dtype = dtype self._fill_value = fill_value self.name = name @@ -58,7 +61,7 @@ def conform_units_to(self, value): def dtype(self): if self._dtype is None: if self._value is None: - raise(ValueError('dtype not specified at object initialization and value has not been loaded.')) + raise ValueError('dtype not specified at object initialization and value has not been loaded.') else: ret = self.value.dtype else: @@ -83,19 +86,12 @@ def shape(self): @property def value(self): if self._value is None: - self._value = self._get_value_() + self._value = self._format_private_value_(self._get_value_()) return self._value - def _get_value_(self): - raise NotImplementedError - - @property - def _value(self): - return self.__value - - @_value.setter - def _value(self, value): - self.__value = self._format_private_value_(value) + @value.setter + def value(self, value): + self._value = self._format_private_value_(value) def _format_private_value_(self, value): if value is not None: @@ -105,6 +101,10 @@ def _format_private_value_(self, value): value = self.cfunits_conform(to_units=self.conform_units_to, value=value, from_units=self.cfunits) return value + @abc.abstractmethod + def _get_value_(self): + """Return the value field.""" + def cfunits_conform(self,to_units,value=None,from_units=None): ''' Conform units of value variable in-place using :mod:`cfunits`. 
@@ -138,50 +138,36 @@ def cfunits_conform(self,to_units,value=None,from_units=None): return convert_value -class AbstractSourcedVariable(AbstractValueVariable): +class AbstractSourcedVariable(object): __metaclass__ = abc.ABCMeta - def __init__(self,data,src_idx=None,value=None,debug=False,did=None,units=None, - dtype=None,fill_value=None,name=None,conform_units_to=None): - if not debug and value is None and data is None: - ocgis_lh(exc=ValueError('Sourced variables require a data source if no value is passed.')) + def __init__(self, data, src_idx): self._data = data self._src_idx = src_idx - self._debug = debug - self.did = did - - AbstractValueVariable.__init__(self,value=value,units=units,dtype=dtype,fill_value=fill_value, - name=name,conform_units_to=conform_units_to) - + @property def _src_idx(self): - return(self.__src_idx) + return self.__src_idx + @_src_idx.setter - def _src_idx(self,value): + def _src_idx(self, value): self.__src_idx = self._format_src_idx_(value) - def _format_src_idx_(self,value): - if value is None: - ret = value - else: - ret = value - return(ret) - + def _format_src_idx_(self, value): + return np.array(value) + def _get_value_(self): - if self._data is None and self._value is None: - ocgis_lh(exc=ValueError('Values were requested from data source, but no data source is available.')) - elif self._src_idx is None and self._value is None: - ocgis_lh(exc=ValueError('Values were requested from data source, but no source index source is available.')) - else: + if self._value is None: self._set_value_from_source_() - return(self._value) + return self._value @abc.abstractmethod - def _set_value_from_source_(self): pass + def _set_value_from_source_(self): + """Should set ``_value`` using the data source and index.""" -class Variable(AbstractSourcedVariable): - ''' +class Variable(AbstractSourcedVariable, AbstractValueVariable, Attributes): + """ :param name: Representative name for the variable. 
:type name: str :param alias: Optional unique name for the variable. @@ -205,18 +191,20 @@ class Variable(AbstractSourcedVariable): :type fill_value: int or float :param conform_units_to: Target units for conversion. :type conform_units_to: str convertible to :class:`cfunits.Units` - ''' - - def __init__(self,name=None,alias=None,units=None,meta=None,uid=None, - value=None,did=None,data=None,debug=False,conform_units_to=None, - dtype=None,fill_value=None): + :param dict attrs: A dictionary of arbitrary key-value attributes. + """ + + def __init__(self, name=None, alias=None, units=None, meta=None, uid=None, value=None, did=None, data=None, + conform_units_to=None, dtype=None, fill_value=None, attrs=None): self.alias = alias or name self.meta = meta or {} self.uid = uid + self.did = did - super(Variable,self).__init__(value=value,data=data,debug=debug,did=did, - units=units,dtype=dtype,fill_value=fill_value, - name=name,conform_units_to=conform_units_to) + AbstractSourcedVariable.__init__(self, data, None) + Attributes.__init__(self, attrs=attrs) + AbstractValueVariable.__init__(self, value=value, units=units, dtype=dtype, fill_value=fill_value, name=name, + conform_units_to=conform_units_to) def __getitem__(self,slc): ret = copy(self) @@ -224,9 +212,11 @@ def __getitem__(self,slc): ret._value = self._value[slc] return(ret) - def __repr__(self): - ret = '{0}(alias="{1}",name="{2}",units="{3}")'.format(self.__class__.__name__,self.alias,self.name,self.units) - return(ret) + def __str__(self): + units = '{0}' if self.units is None else '"{0}"' + units = units.format(self.units) + ret = '{0}(name="{1}", alias="{2}", units={3})'.format(self.__class__.__name__, self.alias, self.name, units) + return ret def get_empty_like(self, shape=None): """ @@ -248,7 +238,7 @@ def get_empty_like(self, shape=None): shape = shape or self.value.shape value = np.ma.array(np.zeros(shape), dtype=self.dtype, fill_value=self.fill_value, mask=mask) ret = Variable(name=self.name, 
units=self.units, meta=deepcopy(self.meta), value=value, did=self.did, - alias=self.alias, uid=self.uid) + alias=self.alias, uid=self.uid, attrs=deepcopy(self.attrs)) return ret def _format_private_value_(self,value): diff --git a/src/ocgis/interface/nc/dimension.py b/src/ocgis/interface/nc/dimension.py index 6c1ef4df7..1a3418fc5 100644 --- a/src/ocgis/interface/nc/dimension.py +++ b/src/ocgis/interface/nc/dimension.py @@ -17,7 +17,7 @@ def _set_value_from_source_(self): ## for the realization/projection axis, there may in fact be no ## value associated with it. in it's place, put a standard integer ## array. - if self._axis == 'R': + if self.axis == 'R': var = self._src_idx + 1 else: ocgis_lh(logger='interface.nc',exc=e) @@ -27,7 +27,7 @@ def _set_value_from_source_(self): self._value = var.__getitem__(self._src_idx) ## now, we should check for bounds here as the inheritance for making ## this process more transparent is not in place. - bounds_name = self._data.source_metadata['dim_map'][self._axis].get('bounds') + bounds_name = self._data.source_metadata['dim_map'][self.axis].get('bounds') if bounds_name is not None: try: self.bounds = ds.variables[bounds_name][self._src_idx,:] diff --git a/src/ocgis/interface/nc/field.py b/src/ocgis/interface/nc/field.py index 5b3756f3b..57539ef9a 100644 --- a/src/ocgis/interface/nc/field.py +++ b/src/ocgis/interface/nc/field.py @@ -16,8 +16,8 @@ def _get_value_from_source_(self,data,variable_name): ## if grid and row are not present on the GridDimesion object. the source ## indices are attached to the grid object itself. 
except AttributeError: - axis_slc['Y'] = self.spatial.grid._row_src_idx - axis_slc['X'] = self.spatial.grid._col_src_idx + axis_slc['Y'] = self.spatial.grid._src_idx['row'] + axis_slc['X'] = self.spatial.grid._src_idx['col'] if self.realization is not None: axis_slc['R'] = self.realization._src_idx if self.level is not None: diff --git a/src/ocgis/interface/nc/spatial.py b/src/ocgis/interface/nc/spatial.py new file mode 100644 index 000000000..c60931420 --- /dev/null +++ b/src/ocgis/interface/nc/spatial.py @@ -0,0 +1,112 @@ +from itertools import izip +from ocgis import constants +from ocgis.util.helpers import get_formatted_slice +from ocgis.interface.base.dimension.spatial import SpatialGridDimension +from ocgis.interface.base.variable import AbstractSourcedVariable +import numpy as np + + +class NcSpatialGridDimension(AbstractSourcedVariable, SpatialGridDimension): + + def __init__(self, *args, **kwargs): + + AbstractSourcedVariable.__init__(self, kwargs.pop('data', None), kwargs.pop('src_idx', None)) + SpatialGridDimension.__init__(self, *args, **kwargs) + + def __getitem__(self, item): + ret = SpatialGridDimension.__getitem__(self, item) + if ret._src_idx is not None: + slice_row, slice_col = get_formatted_slice(item, 2) + src_idx = {} + for key, slc in izip(['row', 'col'], [slice_row, slice_col]): + src_idx[key] = np.atleast_1d(ret._src_idx[key][slc]) + ret._src_idx = src_idx + return ret + + @staticmethod + def _format_src_idx_(value): + if value is not None: + assert isinstance(value, dict) + assert value['row'] is not None + assert value['col'] is not None + return value + + def _get_uid_(self): + if self._src_idx is not None: + shp = (self._src_idx['row'].shape[0], self._src_idx['col'].shape[0]) + else: + shp = None + return SpatialGridDimension._get_uid_(self, shp=shp) + + def _set_value_from_source_(self): + try: + self.value = SpatialGridDimension._get_value_(self) + except AttributeError: + if self.row is None or self.col is None: + ds = 
self._data.driver.open() + try: + slices = {k: get_formatted_slice(self._src_idx[k], 1) for k in self._src_idx.keys()} + slice_row = slices['row'] + slice_col = slices['col'] + variable_row = ds.variables[self.name_row] + variable_col = ds.variables[self.name_col] + + # load values ###################################################################################### + + value_row = np.atleast_2d(variable_row[slice_row, slice_col]) + value_col = np.atleast_2d(variable_col[slice_row, slice_col]) + fill = np.zeros([2]+list(value_row.shape), dtype=value_row.dtype) + try: + fill_value = value_row.fill_value + except AttributeError: + fill_value = None + fill = np.ma.array(fill, fill_value=fill_value, mask=False) + fill[0, :, :] = value_row + fill[1, :, :] = value_col + self.value = fill + + # load corners ##################################################################################### + + try: + name_row_corners = variable_row.corners + except AttributeError: + # likely no corners + pass + else: + name_col_corners = variable_col.corners + value_row_corners = ds.variables[name_row_corners][slice_row, slice_col, :] + value_col_corners = ds.variables[name_col_corners][slice_row, slice_col, :] + + # a reshape may be required if this is a singleton slice operation + + def _reshape_corners_(arr): + if arr.ndim < 3: + assert arr.shape == (1, 4) + arr = arr.reshape(1, 1, 4) + return arr + + value_row_corners = _reshape_corners_(value_row_corners) + value_col_corners = _reshape_corners_(value_col_corners) + + fill = np.zeros([2]+list(value_row_corners.shape), dtype=value_row_corners.dtype) + try: + fill_value = value_row_corners.fill_value + except AttributeError: + fill_value = None + fill = np.ma.array(fill, fill_value=fill_value, mask=False) + fill[0, :, :, :] = value_row_corners + fill[1, :, :, :] = value_col_corners + self.corners = fill + + finally: + self._data.driver.close(ds) + else: + raise + + def _validate_(self): + try: + SpatialGridDimension._validate_(self) 
+ except ValueError: + if self._data is None: + msg = 'With no value representations (i.e. row, column, value), a data source is required.' + raise ValueError(msg) \ No newline at end of file diff --git a/src/ocgis/interface/nc/temporal.py b/src/ocgis/interface/nc/temporal.py index 919ce4f32..dd19ec641 100644 --- a/src/ocgis/interface/nc/temporal.py +++ b/src/ocgis/interface/nc/temporal.py @@ -1,289 +1,8 @@ from ocgis.interface.base.dimension.temporal import TemporalDimension from ocgis.interface.nc.dimension import NcVectorDimension -import numpy as np -import netCDF4 as nc -import datetime -from ocgis.util.helpers import iter_array, get_none_or_slice -from ocgis import constants -class NcTemporalDimension(NcVectorDimension,TemporalDimension): - _attrs_slice = ('uid','_value','_src_idx','_value_datetime') - - def __init__(self,*args,**kwds): - self.calendar = kwds.pop('calendar') - self.format_time = kwds.pop('format_time',True) - self._value_datetime = kwds.pop('value_datetime',None) - self._bounds_datetime = kwds.pop('bounds_datetime',None) +class NcTemporalDimension(TemporalDimension, NcVectorDimension): - NcVectorDimension.__init__(self,*args,**kwds) - - assert(self.units != None) - assert(self.calendar != None) - - ## test if the units are the special case with months in the time units - if self.units.startswith('months'): - self._has_months_units = True - else: - self._has_months_units = False - - @property - def bounds_datetime(self): - if self.bounds is not None: - if self._bounds_datetime is None: - self._bounds_datetime = np.atleast_2d(self.get_datetime(self.bounds)) - return(self._bounds_datetime) - @bounds_datetime.setter - def bounds_datetime(self,value): - if value is None: - new = None - else: - new = np.atleast_2d(value).reshape(-1,2) - self._bounds_datetime = new - - @property - def extent_datetime(self): - return(tuple(self.get_datetime(self.extent))) - - @property - def value_datetime(self): - if self._value_datetime is None: - 
self._value_datetime = np.atleast_1d(self.get_datetime(self.value)) - return(self._value_datetime) - - def get_between(self,lower,upper,return_indices=False): - lower,upper = tuple(self.get_nc_time([lower,upper])) - return(NcVectorDimension.get_between(self,lower,upper,return_indices=return_indices)) - - def get_datetime(self,arr): - ## if there are month units, call the special procedure to convert those - ## to datetime objects - if self._has_months_units == False: - arr = np.atleast_1d(nc.num2date(arr,self.units,calendar=self.calendar)) - dt = datetime.datetime - for idx,t in iter_array(arr,return_value=True): - ## attempt to convert times to datetime objects - try: - arr[idx] = dt(t.year,t.month,t.day, - t.hour,t.minute,t.second) - ## this may fail for some calendars, in that case maintain the instance - ## object returned from netcdftime see: - ## http://netcdf4-python.googlecode.com/svn/trunk/docs/netcdftime.netcdftime.datetime-class.html - except ValueError: - arr[idx] = arr[idx] - else: - arr = get_datetime_from_months_time_units(arr,self.units,month_centroid=constants.calc_month_centroid) - return(arr) - - def get_nc_time(self,values): - try: - ret = np.atleast_1d(nc.date2num(values,self.units,calendar=self.calendar)) - except ValueError: - ## special behavior for conversion of time units with months - if self._has_months_units: - ret = get_num_from_months_time_units(values, self.units, dtype=None) - else: - raise - return(ret) - - def _format_slice_state_(self,state,slc): - state = NcVectorDimension._format_slice_state_(self,state,slc) - state.bounds_datetime = get_none_or_slice(state._bounds_datetime,(slc,slice(None))) - return(state) - - def _get_datetime_bounds_(self): - if self.format_time: - ret = self.bounds_datetime - else: - ret = self.bounds - return(ret) - - def _get_datetime_value_(self): - if self.format_time: - ret = self.value_datetime - else: - ret = self.value - return(ret) - - def _get_temporal_group_dimension_(self,*args,**kwds): - 
kwds['calendar'] = self.calendar - kwds['units'] = self.units - value = kwds.pop('value') - bounds = kwds.pop('bounds') - kwds['value'] = self.get_nc_time(value) - - try: - kwds['bounds'] = self.get_nc_time(bounds) - ## this may happen if the data has months in the time units. the functions that compute the datetime-numeric - ## conversions did not anticipate bounds. - except AttributeError: - if self._has_months_units: - bounds_fill = np.empty(bounds.shape) - bounds_fill[:,0] = self.get_nc_time(bounds[:,0]) - bounds_fill[:,1] = self.get_nc_time(bounds[:,1]) - kwds['bounds'] = bounds_fill - else: - raise - - kwds['value_datetime'] = value - kwds['bounds_datetime'] = bounds - return(NcTemporalGroupDimension(*args,**kwds)) - - def _set_date_parts_(self,yld,value): - if self.format_time: - TemporalDimension._set_date_parts_(self,yld,value) - else: - yld['year'],yld['month'],yld['day'] = None,None,None - - -class NcTemporalGroupDimension(NcTemporalDimension): - - def __init__(self,*args,**kwds): - self.grouping = kwds.pop('grouping') - self.dgroups = kwds.pop('dgroups') - self.date_parts = kwds.pop('date_parts') - - NcTemporalDimension.__init__(self,*args,**kwds) - - -def get_origin_datetime_from_months_units(units): - ''' - Get the origin Python :class:``datetime.datetime`` object from a month - string. - - :param str units: Source units to parse. - :returns: :class:``datetime.datetime`` - - >>> units = "months since 1978-12" - >>> get_origin_datetime_from_months_units(units) - datetime.datetime(1978, 12, 1, 0, 0) - ''' - origin = ' '.join(units.split(' ')[2:]) - to_try = ['%Y-%m','%Y-%m-%d %H'] - converted = False - for tt in to_try: - try: - origin = datetime.datetime.strptime(origin,tt) - converted = True - break - except ValueError as e: - continue - if converted == False: - raise(e) - return(origin) - -def get_datetime_from_months_time_units(vec,units,month_centroid=16): - ''' - Convert a vector of months offsets into :class:``datetime.datetime`` objects. 
- - :param vec: Vector of integer month offsets. - :type vec: :class:``np.ndarray`` - :param str units: Source units to parse. - :param month_centroid: The center day of the month to use when creating the - :class:``datetime.datetime`` objects. - - >>> units = "months since 1978-12" - >>> vec = np.array([0,1,2,3]) - >>> get_datetime_from_months_time_units(vec,units) - array([1978-12-16 00:00:00, 1979-01-16 00:00:00, 1979-02-16 00:00:00, - 1979-03-16 00:00:00], dtype=object) - ''' - ## only work with integer inputs - vec = np.array(vec,dtype=int) - - def _get_datetime_(current_year,origin_month,offset_month,current_month_correction,month_centroid): - return(datetime.datetime(current_year,(origin_month+offset_month)-current_month_correction,month_centroid)) - - origin = get_origin_datetime_from_months_units(units) - origin_month = origin.month - current_year = origin.year - current_month_correction = 0 - ret = np.ones(len(vec),dtype=object) - for ii,offset_month in enumerate(vec): - try: - fill = _get_datetime_(current_year,origin_month,offset_month,current_month_correction,month_centroid) - except ValueError: - current_month_correction += 12 - current_year += 1 - fill = _get_datetime_(current_year,origin_month,offset_month,current_month_correction,month_centroid) - ret[ii] = fill - return(ret) - -def get_difference_in_months(origin,target): - ''' - Get the integer difference in months between an origin and target datetime. - - :param :class:``datetime.datetime`` origin: The origin datetime object. - :param :class:``datetime.datetime`` target: The target datetime object. 
- - >>> get_difference_in_months(datetime.datetime(1978,12,1),datetime.datetime(1979,3,1)) - 3 - >>> get_difference_in_months(datetime.datetime(1978,12,1),datetime.datetime(1978,7,1)) - -5 - ''' - - def _count_(start_month,stop_month,start_year,stop_year,direction): - count = 0 - curr_month = start_month - curr_year = start_year - while True: - if curr_month == stop_month and curr_year == stop_year: - break - else: - pass - - if direction == 'forward': - curr_month += 1 - elif direction == 'backward': - curr_month -= 1 - else: - raise(NotImplementedError(direction)) - - if curr_month == 13: - curr_month = 1 - curr_year += 1 - if curr_month == 0: - curr_month = 12 - curr_year -= 1 - - if direction == 'forward': - count += 1 - else: - count -= 1 - - return(count) - - origin_month,origin_year = origin.month,origin.year - target_month,target_year = target.month,target.year - - if origin <= target: - direction = 'forward' - else: - direction = 'backward' - - diff_months = _count_(origin_month,target_month,origin_year,target_year,direction) - return(diff_months) - -def get_num_from_months_time_units(vec,units,dtype=None): - ''' - Convert a vector of :class:``datetime.datetime`` objects into an integer - vector. - - :param vec: Input vector to convert. - :type vec: :class:``np.ndarray`` - :param str units: Source units to parse. - :param type dtype: Output vector array type. 
- - >>> units = "months since 1978-12" - >>> vec = np.array([datetime.datetime(1978,12,1),datetime.datetime(1979,1,1)]) - >>> get_num_from_months_time_units(vec,units) - array([0, 1]) - ''' - origin = get_origin_datetime_from_months_units(units) - ret = [get_difference_in_months(origin,target) for target in vec] - return(np.array(ret,dtype=dtype)) - - -if __name__ == "__main__": - import doctest - doctest.testmod() + def __init__(self, *args, **kwargs): + TemporalDimension.__init__(self, *args, **kwargs) diff --git a/src/ocgis/regrid/base.py b/src/ocgis/regrid/base.py index a5affecd8..287b1b045 100644 --- a/src/ocgis/regrid/base.py +++ b/src/ocgis/regrid/base.py @@ -1,19 +1,23 @@ from copy import deepcopy import ESMF import numpy as np +from ocgis import TemporalDimension, Field from ocgis.exc import RegriddingError, CornersInconsistentError from ocgis.interface.base.crs import Spherical +from ocgis.interface.base.dimension.base import VectorDimension from ocgis.interface.base.dimension.spatial import SpatialGridDimension, SpatialDimension -from ocgis.interface.base.variable import VariableCollection +from ocgis.interface.base.variable import VariableCollection, Variable from ocgis.util.helpers import iter_array, make_poly -def get_sdim_from_esmf_grid(egrid): +def get_sdim_from_esmf_grid(egrid, crs=None): """ Create an OCGIS :class:`~ocgis.interface.base.dimension.spatial.SpatialDimension` object from an ESMF :class:`~ESMF.api.grid.Grid`. :type egrid: :class:`ESMF.api.grid.Grid` + :param crs: The coordinate system to attach to the output spatial dimension. 
+ :type crs: :class:`ocgis.interface.base.crs.CoordinateReferenceSystem` :rtype: :class:`~ocgis.interface.base.dimension.spatial.SpatialDimension` """ @@ -67,11 +71,61 @@ def get_sdim_from_esmf_grid(egrid): # make the spatial dimension object ogrid = SpatialGridDimension(value=grid_value, corners=grid_corners) - sdim = SpatialDimension(grid=ogrid) + sdim = SpatialDimension(grid=ogrid, crs=crs) return sdim +def get_ocgis_field_from_esmpy_field(efield, crs=None, dimensions=None): + #todo: doc dimensions + #todo: doc behavior for singleton dimension + """ + :param efield: The ESMPy field object to convert to an OCGIS field. + :type efield: :class:`ESMF.api.field.Field` + :param crs: The coordinate system of the ESMPy field. If ``None``, this will default to + :class:`ocgis.crs.Spherical`. + :returns: An OCGIS field object. + :rtype: :class:`~ocgis.Field` + """ + + assert len(efield.shape) == 5 + + dimensions = dimensions or {} + + try: + realization = dimensions['realization'] + except KeyError: + if efield.shape[0] > 1: + realization_values = np.arange(1, efield.shape[0]+1) + realization = VectorDimension(value=realization_values) + else: + realization = None + + try: + temporal = dimensions['temporal'] + except KeyError: + if efield.shape[1] > 1: + temporal_values = np.array([1]*efield.shape[1]) + temporal = TemporalDimension(value=temporal_values, format_time=False) + else: + temporal = None + + try: + level = dimensions['level'] + except KeyError: + if efield.shape[2] > 1: + level_values = np.arange(1, efield.shape[2]+1) + level = VectorDimension(value=level_values) + else: + level = None + + variable = Variable(name=efield.name, value=efield) + sdim = get_sdim_from_esmf_grid(efield.grid, crs=crs) + field = Field(variables=variable, realization=realization, temporal=temporal, level=level, spatial=sdim) + + return field + + def get_esmf_grid_from_sdim(sdim, with_corners=True, value_mask=None): """ Create an ESMF :class:`~ESMF.api.grid.Grid` object from an OCGIS @@ 
-103,7 +157,7 @@ def get_esmf_grid_from_sdim(sdim, with_corners=True, value_mask=None): else: value_mask = ogrid.value.mask[0] # follows SCRIP convention where 1 is unmasked and 0 is masked - esmf_mask = np.invert(value_mask).astype(np.int8) + esmf_mask = np.invert(value_mask).astype(np.int32) egrid.add_item(ESMF.GridItem.MASK, staggerloc=ESMF.StaggerLoc.CENTER, from_file=False) egrid.mask[0][:] = esmf_mask @@ -148,6 +202,7 @@ def iter_esmf_fields(ofield, with_corners=True, value_mask=None): :raises: AssertionError """ + #todo: provide other options for calculating value_mask # only one level and realization allowed assert ofield.shape[0] == 1 diff --git a/src/ocgis/test/base.py b/src/ocgis/test/base.py index e7a471320..bb4d9a33a 100644 --- a/src/ocgis/test/base.py +++ b/src/ocgis/test/base.py @@ -1,20 +1,27 @@ +from contextlib import contextmanager import unittest import abc import tempfile import datetime +import subprocess +import itertools +from ocgis.api.collection import SpatialCollection +from ocgis.interface.base.field import Field +from ocgis.interface.base.dimension.spatial import SpatialGridDimension, SpatialDimension from ocgis import env import shutil from copy import deepcopy, copy import os from collections import OrderedDict -import subprocess import ocgis -from warnings import warn -from subprocess import CalledProcessError import numpy as np from ocgis.api.request.base import RequestDataset import netCDF4 as nc +from ocgis.interface.base.dimension.base import VectorDimension +from ocgis.interface.base.dimension.temporal import TemporalDimension +from ocgis.interface.base.variable import Variable from ocgis.util.helpers import get_iter +from ocgis.util.itester import itr_products_keywords class ToTest(Exception): @@ -51,6 +58,9 @@ def path_bin(self): ret = os.path.join(base_dir, 'bin') return ret + def assertAsSetEqual(self, sequence1, sequence2, msg=None): + self.assertSetEqual(set(sequence1), set(sequence2), msg=msg) + def assertDictEqual(self, 
d1, d2, msg=None): """ Asserts two dictionaries are equal. If they are not, identify the first key/value which are not equal. @@ -73,7 +83,7 @@ def assertDictEqual(self, d1, d2, msg=None): self.assertEqual(v, d2[k], msg=msg) self.assertEqual(set(d1.keys()), set(d2.keys())) - def assertNumpyAll(self, arr1, arr2, check_fill_value_dtype=True, check_arr_dtype=True): + def assertNumpyAll(self, arr1, arr2, check_fill_value_dtype=True, check_arr_dtype=True, check_arr_type=True): """ Asserts arrays are equal according to the test criteria. @@ -83,10 +93,15 @@ def assertNumpyAll(self, arr1, arr2, check_fill_value_dtype=True, check_arr_dtyp :type arr2: :class:`numpy.ndarray` :param bool check_fill_value_dtype: If ``True``, check that the data type for masked array fill values are equal. :param bool check_arr_dtype: If ``True``, check the data types of the arrays are equal. + :param bool check_arr_type: If ``True``, check the types of the incoming arrays: + + >>> type(arr1) == type(arr2) + :raises: AssertionError """ - self.assertEqual(type(arr1), type(arr2)) + if check_arr_type: + self.assertEqual(type(arr1), type(arr2)) self.assertEqual(arr1.shape, arr2.shape) if check_arr_dtype: self.assertEqual(arr1.dtype, arr2.dtype) @@ -101,7 +116,7 @@ def assertNumpyAll(self, arr1, arr2, check_fill_value_dtype=True, check_arr_dtyp self.assertTrue(np.all(arr1 == arr2)) def assertNcEqual(self, uri_src, uri_dest, check_types=True, close=False, metadata_only=False, - ignore_attributes=None): + ignore_attributes=None, ignore_variables=None): """ Assert two netCDF files are equal according to the test criteria. @@ -116,9 +131,11 @@ def assertNcEqual(self, uri_src, uri_dest, check_types=True, close=False, metada >>> ignore_attributes = {'global': ['history']} - :raises: AssertionError + :param list ignore_variables: A list of variable names to ignore. 
""" + ignore_variables = ignore_variables or [] + src = nc.Dataset(uri_src) dest = nc.Dataset(uri_dest) @@ -130,21 +147,30 @@ def assertNcEqual(self, uri_src, uri_dest, check_types=True, close=False, metada self.assertEqual(set(src.dimensions.keys()), set(dest.dimensions.keys())) for varname, var in src.variables.iteritems(): + + if varname in ignore_variables: + continue + dvar = dest.variables[varname] + + var_value = var[:] + dvar_value = dvar[:] + try: if not metadata_only: if close: - self.assertNumpyAllClose(var[:], dvar[:]) + self.assertNumpyAllClose(var_value, dvar_value) else: - self.assertNumpyAll(var[:], dvar[:], check_arr_dtype=check_types) + self.assertNumpyAll(var_value, dvar_value, check_arr_dtype=check_types) except AssertionError: - cmp = var[:] == dvar[:] + cmp = var_value == dvar_value if cmp.shape == (1,) and cmp.data[0] == True: pass else: raise + if check_types: - self.assertEqual(var[:].dtype, dvar[:].dtype) + self.assertEqual(var_value.dtype, dvar_value.dtype) # check values of attributes on all variables for k, v in var.__dict__.iteritems(): @@ -164,8 +190,36 @@ def assertNcEqual(self, uri_src, uri_dest, check_types=True, close=False, metada self.assertNumpyAll(v, to_test_attr) except AttributeError: self.assertEqual(v, to_test_attr) + + # check values of attributes on all variables + for k, v in dvar.__dict__.iteritems(): + try: + to_test_attr = getattr(var, k) + except AttributeError: + # if the variable and attribute are flagged to ignore, continue to the next attribute + if var._name in ignore_attributes: + if k in ignore_attributes[var._name]: + continue + + # notify if an attribute is missing + msg = 'The attribute "{0}" is not found on the variable "{1}" for URI "{2}".'\ + .format(k, var._name, uri_src) + raise AttributeError(msg) + try: + self.assertNumpyAll(v, to_test_attr) + except AttributeError: + self.assertEqual(v, to_test_attr) + self.assertEqual(var.dimensions, dvar.dimensions) - self.assertEqual(set(src.variables.keys()), 
set(dest.variables.keys())) + + sets = [set(xx.variables.keys()) for xx in [src, dest]] + for ignore_variable, s in itertools.product(ignore_variables, sets): + try: + s.remove(ignore_variable) + except KeyError: + # likely missing in one or the other + continue + self.assertEqual(*sets) if 'global' not in ignore_attributes: self.assertDictEqual(src.__dict__, dest.__dict__) @@ -237,6 +291,80 @@ def assertNumpyNotAllClose(self, arr1, arr2): else: raise AssertionError('Arrays are equivalent within precision.') + def get_esmf_field(self, **kwargs): + """ + :keyword field: (``=None``) The field object. If ``None``, call :meth:`~ocgis.test.base.TestBase.get_field` + :type field: :class:`~ocgis.Field` + :param kwargs: Other keyword arguments to :meth:`ocgis.test.base.TestBase.get_field`. + :returns: An ESMF field object. + :rtype: :class:`ESMF.Field` + """ + + from ocgis.conv.esmpy import ESMPyConverter + + field = kwargs.pop('field', None) or self.get_field(**kwargs) + coll = SpatialCollection() + coll.add_field(1, None, field) + conv = ESMPyConverter([coll]) + efield = conv.write() + return efield + + def get_field(self, nlevel=None, nrlz=None, crs=None): + """ + :param int nlevel: The number of level elements. + :param int nrlz: The number of realization elements. + :param crs: The coordinate system for the field. + :type crs: :class:`ocgis.interface.base.crs.CoordinateReferenceSystem` + :returns: A small field object for testing. 
+ :rtype: `~ocgis.Field` + """ + + np.random.seed(1) + row = VectorDimension(value=[4., 5.], name='row') + col = VectorDimension(value=[40., 50.], name='col') + grid = SpatialGridDimension(row=row, col=col) + sdim = SpatialDimension(grid=grid, crs=crs) + temporal = TemporalDimension(value=[datetime.datetime(2000, 1, 1), datetime.datetime(2000, 2, 1)]) + + if nlevel is None: + nlevel = 1 + level = None + else: + level = VectorDimension(value=range(1, nlevel+1), name='level') + + if nrlz is None: + nrlz = 1 + realization = None + else: + realization = VectorDimension(value=range(1, nrlz+1), name='realization') + + variable = Variable(name='foo', value=np.random.rand(nrlz, 2, nlevel, 2, 2)) + field = Field(spatial=sdim, temporal=temporal, variables=variable, level=level, realization=realization) + + return field + + def get_netcdf_path_no_row_column(self): + """ + Create a NetCDF with no row and column dimensions. + + :returns: Path to the created NetCDF in the current test directory. + :rtype: str + """ + + field = self.get_field() + field.spatial.grid.row.set_extrapolated_bounds() + field.spatial.grid.col.set_extrapolated_bounds() + field.spatial.grid.value + field.spatial.grid.corners + self.assertIsNotNone(field.spatial.grid.corners) + field.spatial.grid.row = field.spatial.grid.col = None + self.assertIsNone(field.spatial.grid.row) + self.assertIsNone(field.spatial.grid.col) + path = os.path.join(self.current_dir_output, 'foo.nc') + with self.nc_scope(path, 'w') as ds: + field.write_to_netcdf_dataset(ds) + return path + def get_temporary_output_directory(self): """ :returns: A path to a temporary directory with an appropriate prefix. 
@@ -318,6 +446,21 @@ def get_tst_data(): return test_data + def inspect(self, uri, variable=None): + from ocgis.util.inspect import Inspect + print Inspect(uri, variable=None) + + def iter_product_keywords(self, keywords, as_namedtuple=True): + return itr_products_keywords(keywords, as_namedtuple=as_namedtuple) + + def nautilus(self, path): + if not os.path.isdir(path): + path = os.path.split(path)[0] + subprocess.call(['nautilus', path]) + + def nc_scope(self, *args, **kwargs): + return nc_scope(*args, **kwargs) + def setUp(self): self.current_dir_output = None if self.reset_env: @@ -452,3 +595,31 @@ def update(self, collection, variable, filename, key=None): OrderedDict.update(self, {key or filename: {'collection': collection, 'filename': filename, 'variable': variable}}) + + +@contextmanager +def nc_scope(path, mode='r', format=None): + """ + Provide a transactional scope around a :class:`netCDF4.Dataset` object. + + >>> with nc_scope('/my/file.nc') as ds: + >>> print ds.variables + + :param str path: The full path to the netCDF dataset. + :param str mode: The file mode to use when opening the dataset. + :param str format: The NetCDF format. + :returns: An open dataset object that will be closed after leaving the ``with`` statement. 
+ :rtype: :class:`netCDF4.Dataset` + """ + + kwds = {'mode': mode} + if format is not None: + kwds['format'] = format + + ds = nc.Dataset(path, **kwds) + try: + yield ds + except: + raise + finally: + ds.close() diff --git a/src/ocgis/test/test_base.py b/src/ocgis/test/test_base.py index 27e9c5fb2..e13b6a50f 100644 --- a/src/ocgis/test/test_base.py +++ b/src/ocgis/test/test_base.py @@ -1,4 +1,7 @@ import datetime +from netCDF4 import Dataset +import os +from ocgis.interface.base.field import Field from ocgis.test.base import TestBase, TestData import ocgis from unittest.case import SkipTest @@ -30,55 +33,52 @@ def skip(*args): class TestTestBase(TestBase): - def test_get_time_series(self): - start = datetime.datetime(1900, 1, 1) - end = datetime.datetime(1902, 12, 31) - ret = self.get_time_series(start, end) - self.assertEqual(ret[0], start) - self.assertEqual(ret[-1], end) - self.assertEqual(ret[1]-ret[0], datetime.timedelta(days=1)) + def test_assertNcEqual(self): + + def _write_(fn): + path = os.path.join(self.current_dir_output, fn) + with self.nc_scope(path, 'w') as ds: + var = ds.createVariable('crs', 'c') + var.name = 'something' + return path + + path1 = _write_('foo1.nc') + path2 = _write_('foo2.nc') + self.assertNcEqual(path1, path2) def test_assertNumpyAll_bad_mask(self): arr = np.ma.array([1,2,3],mask=[True,False,True]) arr2 = np.ma.array([1,2,3],mask=[False,True,False]) with self.assertRaises(AssertionError): self.assertNumpyAll(arr,arr2) - + def test_assertNumpyAll_type_differs(self): arr = np.ma.array([1,2,3],mask=[True,False,True]) arr2 = np.array([1,2,3]) with self.assertRaises(AssertionError): self.assertNumpyAll(arr,arr2) - @dev - def test_data_download(self): - ocgis.env.DIR_TEST_DATA = self.current_dir_output - rd1 = self.test_data.get_rd('cancm4_tas') - ocgis.env.reset() - rd2 = self.test_data.get_rd('cancm4_tas') - self.assertEqual(rd1,rd2) - - @dev - def test_multifile_data_download(self): - ocgis.env.DIR_TEST_DATA = self.current_dir_output 
- ocgis.env.DEBUG = True - constants.test_data_download_url_prefix = 'https://dl.dropboxusercontent.com/u/867854/test_data_download/' - rd = self.test_data.get_rd('narccap_pr_wrfg_ncep') - - @dev - def test_entirely_bad_location(self): - ocgis.env.DIR_TEST_DATA = self.current_dir_output - with self.assertRaises(ValueError): - self.test_data.get_rd('cancm4_tasmax_2011') - - @dev - def test_copy_files(self): - self.test_data.copy_files('/home/local/WX/ben.koziol/htmp/transfer') + def test_get_field(self): + field = self.get_field() + self.assertIsInstance(field, Field) + + def test_get_time_series(self): + start = datetime.datetime(1900, 1, 1) + end = datetime.datetime(1902, 12, 31) + ret = self.get_time_series(start, end) + self.assertEqual(ret[0], start) + self.assertEqual(ret[-1], end) + self.assertEqual(ret[1]-ret[0], datetime.timedelta(days=1)) def test_multifile(self): rd = self.test_data.get_rd('narccap_pr_wrfg_ncep') self.assertEqual(len(rd.uri),2) + def test_ncscope(self): + path = os.path.join(self.current_dir_output, 'foo.nc') + with self.nc_scope(path, 'w') as ds: + self.assertIsInstance(ds, Dataset) + class TestTestData(TestBase): diff --git a/src/ocgis/test/test_ocgis/test_api/test_collection.py b/src/ocgis/test/test_ocgis/test_api/test_collection.py index 55fc0ba26..90683f06e 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_collection.py +++ b/src/ocgis/test/test_ocgis/test_api/test_collection.py @@ -104,36 +104,6 @@ def test_init(self): self.assertIsInstance(sp.properties[25],dict) self.assertEqual(sp[25]['tmax'].variables['tmax'].value.shape,(2, 31, 2, 3, 4)) - def test_get_iter_melted(self): - sp = self.get_collection() - for row in sp.get_iter_melted(): - self.assertEqual(set(['ugid','field_alias','field','variable_alias','variable']),set(row.keys())) - self.assertIsInstance(row['ugid'],int) - self.assertIsInstance(row['field_alias'],basestring) - self.assertIsInstance(row['field'],Field) - 
self.assertIsInstance(row['variable_alias'],basestring) - self.assertIsInstance(row['variable'],Variable) - - def test_iteration_methods(self): - field = self.get_field(with_value=True) - - field.temporal.name_uid = 'tid' - field.level.name_uid = 'lid' - field.spatial.geom.name_uid = 'gid' - field.spatial.name_uid = 'gid' - - sc = ShpCabinet() - meta = sc.get_meta('state_boundaries') - sp = SpatialCollection(meta=meta,key='state_boundaries') - for row in sc.iter_geoms('state_boundaries'): - sp.add_field(row['properties']['UGID'],row['geom'],field,properties=row['properties']) - for ii,row in enumerate(sp.get_iter_dict()): - if ii == 1: - self.assertDictEqual(row[1],{'lid': 1, 'ugid': 1, 'vid': 1, 'alias': 'tmax', 'did': 1, 'year': 2000, 'value': 0.7203244934421581, 'month': 1, 'variable': 'tmax', 'gid': 2, 'time': datetime.datetime(2000, 1, 1, 12, 0), 'tid': 1, 'level': 50, 'day': 1}) - self.assertIsInstance(row[0],MultiPolygon) - self.assertEqual(len(row),2) - self.assertEqual(len(row[1]),len(constants.raw_headers)) - def test_calculation_iteration(self): field = self.get_field(with_value=True,month_count=2) field.variables.add_variable(Variable(value=field.variables['tmax'].value+5, @@ -141,17 +111,18 @@ def test_calculation_iteration(self): field.temporal.name_uid = 'tid' field.level.name_uid = 'lid' field.spatial.geom.name_uid = 'gid' - + grouping = ['month'] tgd = field.temporal.get_grouping(grouping) mu = Mean(field=field,tgd=tgd,alias='my_mean',dtype=np.float64) ret = mu.execute() - + kwds = copy(field.__dict__) kwds.pop('_raw') kwds.pop('_variables') kwds.pop('_should_regrid') kwds.pop('_has_assigned_coordinate_system') + kwds.pop('_attrs') kwds['name'] = kwds.pop('_name') kwds['temporal'] = tgd kwds['variables'] = ret @@ -159,7 +130,7 @@ def test_calculation_iteration(self): cfield.temporal.name_uid = 'tid' cfield.temporal.name_value = 'time' cfield.spatial.name_uid = 'gid' - + sc = ShpCabinet() meta = sc.get_meta('state_boundaries') sp = 
SpatialCollection(meta=meta,key='state_boundaries',headers=constants.calc_headers) @@ -171,7 +142,7 @@ def test_calculation_iteration(self): self.assertDictEqual(row[1],{'lid': 1, 'ugid': 1, 'vid': 1, 'cid': 1, 'did': 1, 'year': 2000, 'time': datetime.datetime(2000, 1, 16, 0, 0), 'calc_alias': 'my_mean_tmax', 'value': 0.44808476666433006, 'month': 1, 'alias': 'tmax', 'variable': 'tmax', 'gid': 1, 'calc_key': 'mean', 'tid': 1, 'level': 50, 'day': 16}) self.assertEqual(len(row),2) self.assertEqual(len(row[1]),len(constants.calc_headers)) - + def test_calculation_iteration_two_calculations(self): field = self.get_field(with_value=True,month_count=2) field.variables.add_variable(Variable(value=field.variables['tmax'].value+5, @@ -179,19 +150,20 @@ def test_calculation_iteration_two_calculations(self): field.temporal.name_uid = 'tid' field.level.name_uid = 'lid' field.spatial.geom.name_uid = 'gid' - + grouping = ['month'] tgd = field.temporal.get_grouping(grouping) mu = Mean(field=field,tgd=tgd,alias='my_mean',dtype=np.float64) ret = mu.execute() thresh = Threshold(field=field,vc=ret,tgd=tgd,alias='a_treshold',parms={'operation':'gte','threshold':0.5}) ret = thresh.execute() - + kwds = copy(field.__dict__) kwds.pop('_raw') kwds.pop('_variables') kwds.pop('_should_regrid') kwds.pop('_has_assigned_coordinate_system') + kwds.pop('_attrs') kwds['name'] = kwds.pop('_name') kwds['temporal'] = tgd kwds['variables'] = ret @@ -199,13 +171,13 @@ def test_calculation_iteration_two_calculations(self): cfield.temporal.name_uid = 'tid' cfield.temporal.name_value = 'time' cfield.spatial.name_uid = 'gid' - + sc = ShpCabinet() meta = sc.get_meta('state_boundaries') sp = SpatialCollection(meta=meta,key='state_boundaries',headers=constants.calc_headers) for row in sc.iter_geoms('state_boundaries'): sp.add_field(row['properties']['UGID'],row['geom'],cfield,properties=row['properties']) - + cids = set() for ii,row in enumerate(sp.get_iter_dict()): cids.update([row[1]['cid']]) @@ -216,7 
+188,37 @@ def test_calculation_iteration_two_calculations(self): self.assertEqual(len(row[1]),len(constants.calc_headers)) self.assertEqual(ii+1,2*2*2*3*4*51*4) self.assertEqual(len(cids),4) - + + def test_get_iter_melted(self): + sp = self.get_collection() + for row in sp.get_iter_melted(): + self.assertEqual(set(['ugid','field_alias','field','variable_alias','variable']),set(row.keys())) + self.assertIsInstance(row['ugid'],int) + self.assertIsInstance(row['field_alias'],basestring) + self.assertIsInstance(row['field'],Field) + self.assertIsInstance(row['variable_alias'],basestring) + self.assertIsInstance(row['variable'],Variable) + + def test_iteration_methods(self): + field = self.get_field(with_value=True) + + field.temporal.name_uid = 'tid' + field.level.name_uid = 'lid' + field.spatial.geom.name_uid = 'gid' + field.spatial.name_uid = 'gid' + + sc = ShpCabinet() + meta = sc.get_meta('state_boundaries') + sp = SpatialCollection(meta=meta,key='state_boundaries') + for row in sc.iter_geoms('state_boundaries'): + sp.add_field(row['properties']['UGID'],row['geom'],field,properties=row['properties']) + for ii,row in enumerate(sp.get_iter_dict()): + if ii == 1: + self.assertDictEqual(row[1],{'lid': 1, 'ugid': 1, 'vid': 1, 'alias': 'tmax', 'did': 1, 'year': 2000, 'value': 0.7203244934421581, 'month': 1, 'variable': 'tmax', 'gid': 2, 'time': datetime.datetime(2000, 1, 1, 12, 0), 'tid': 1, 'level': 50, 'day': 1}) + self.assertIsInstance(row[0],MultiPolygon) + self.assertEqual(len(row),2) + self.assertEqual(len(row[1]),len(constants.raw_headers)) + def test_multivariate_iteration(self): field = self.get_field(with_value=True,month_count=1) field.variables.add_variable(Variable(value=field.variables['tmax'].value+5, @@ -243,8 +245,3 @@ def test_multivariate_iteration(self): if ii == 0: self.assertDictEqual(row[1],{'lid': 1, 'ugid': 1, 'cid': 1, 'did': None, 'year': 2000, 'time': datetime.datetime(2000, 1, 1, 12, 0), 'calc_alias': 'some_division', 'value': 
12.989774984574424, 'month': 1, 'gid': 1, 'calc_key': 'divide', 'tid': 1, 'level': 50, 'day': 1}) self.assertEqual(ii+1,2*31*2*3*4*51) - - -if __name__ == "__main__": - #import sys;sys.argv = ['', 'Test.testName'] - unittest.main() diff --git a/src/ocgis/test/test_ocgis/test_api/test_operations.py b/src/ocgis/test/test_ocgis/test_api/test_operations.py index 3744e2fd1..c96862c53 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_operations.py +++ b/src/ocgis/test/test_ocgis/test_api/test_operations.py @@ -2,21 +2,22 @@ from datetime import datetime as dt import itertools import datetime +import os +from unittest import SkipTest +import ESMF from numpy import dtype import numpy as np -from ocgis.util.inspect import Inspect -from ocgis.api.parms.definition import RegridOptions +from ocgis.api.parms.definition import RegridOptions, OutputFormat from ocgis.interface.base.crs import CFWGS84 from ocgis.test.base import TestBase from ocgis.exc import DefinitionValidationError, DimensionNotFound, RequestValidationError from ocgis.api.parms import definition -from ocgis import env, constants +from ocgis import constants from ocgis.api.operations import OcgOperations -from ocgis.util.helpers import make_poly, write_geom_dict, bbox_poly +from ocgis.util.helpers import make_poly import ocgis -from ocgis.api.request.base import RequestDataset, RequestDatasetCollection from ocgis.util.shp_cabinet import ShpCabinetIterator @@ -80,10 +81,10 @@ def test_get_base_request_size_test_data(self): rd = self.test_data.get_rd(key) try: ops = OcgOperations(dataset=rd) - ## the project cmip data may raise an exception since projection is - ## not associated with a variable + # the project cmip data may raise an exception since projection is not associated with a variable except DimensionNotFound: - rd = self.test_data.get_rd(key,kwds=dict(dimension_map={'R':'projection','T':'time','X':'longitude','Y':'latitude'})) + rd = self.test_data.get_rd(key, kwds=dict( + dimension_map={'R': 
'projection', 'T': 'time', 'X': 'longitude', 'Y': 'latitude'})) ops = OcgOperations(dataset=rd) ret = ops.get_base_request_size() self.assertTrue(ret['total'] > 1) @@ -257,19 +258,17 @@ def test_keyword_conform_units_to_bad_units(self): with self.assertRaises(RequestValidationError): OcgOperations(dataset=rd, conform_units_to='crap') - def test_keyword_dataset(self): - env.DIR_DATA = ocgis.env.DIR_TEST_DATA - reference_rd = self.test_data.get_rd('cancm4_tas') - rd = RequestDataset(reference_rd.uri,reference_rd.variable) - ds = definition.Dataset(rd) - self.assertEqual(ds.value,RequestDatasetCollection([rd])) - - dsa = {'uri':reference_rd.uri,'variable':reference_rd.variable} - ds = definition.Dataset(dsa) - - reference_rd2 = self.test_data.get_rd('narccap_crcm') - dsb = [dsa,{'uri':reference_rd2.uri,'variable':reference_rd2.variable,'alias':'knight'}] - ds = definition.Dataset(dsb) + def test_keyword_dataset_esmf(self): + """Test with operations on an ESMF Field.""" + raise SkipTest + efield = self.get_esmf_field() + output_format = OutputFormat.iter_possible() + for kk in output_format: + ops = OcgOperations(dataset=efield, output_format=kk, prefix=kk) + ret = ops.execute() + # self.inspect(ret) + raise + import ipdb;ipdb.set_trace() def test_keyword_geom(self): geom = make_poly((37.762,38.222),(-102.281,-101.754)) @@ -337,6 +336,38 @@ def test_keyword_level_range(self): for r in ops.dataset.itervalues(): self.assertEqual(r.level_range,tuple(lr)) + def test_keyword_prefix(self): + # the meta output format should not create an output directory + rd = self.test_data.get_rd('cancm4_tas') + ops = OcgOperations(dataset=rd, output_format='meta') + ops.execute() + self.assertEqual(len(os.listdir(self.current_dir_output)), 0) + + def test_keyword_output_format_esmpy(self): + """Test with the ESMPy output format.""" + raise SkipTest + #todo: test spatial subsetting + #todo: test calculations + slc = [None, None, None, [0, 10], [0, 10]] + kwds = dict(as_field=[False, True], 
+ with_slice=[True, False]) + for k in self.iter_product_keywords(kwds): + rd = self.test_data.get_rd('cancm4_tas') + if k.as_field: + rd = rd.get() + if k.with_slice: + slc = slc + else: + slc = None + ops = OcgOperations(dataset=rd, output_format='esmpy', slice=slc) + ret = ops.execute() + self.assertIsInstance(ret, ESMF.Field) + try: + self.assertEqual(ret.shape, (1, 3650, 1, 10, 10)) + except AssertionError: + self.assertFalse(k.with_slice) + self.assertEqual(ret.shape, (1, 3650, 1, 64, 128)) + def test_keyword_output_format_nc_package_validation_raised_first(self): rd = self.test_data.get_rd('cancm4_tas') rd2 = self.test_data.get_rd('rotated_pole_ichec',kwds={'alias':'tas2'}) @@ -446,3 +477,9 @@ def test_keyword_time_region(self): ops.time_region = tr for r in ops.dataset.itervalues(): self.assertEqual(r.time_region,tr) + + def test_validate(self): + # snippets should be allowed for field objects + field = self.test_data.get_rd('cancm4_tas').get() + ops = OcgOperations(dataset=field, snippet=True) + self.assertTrue(ops.snippet) diff --git a/src/ocgis/test/test_ocgis/test_api/test_parms/test_definition.py b/src/ocgis/test/test_ocgis/test_api/test_parms/test_definition.py index 7e2129059..1c20fd5c9 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_parms/test_definition.py +++ b/src/ocgis/test/test_ocgis/test_api/test_parms/test_definition.py @@ -1,15 +1,19 @@ import unittest +import pickle +import tempfile +from unittest import SkipTest + from cfunits import Units +import numpy as np + +from ocgis import env from ocgis.api.parms.definition import * from ocgis.interface.base.dimension.spatial import SpatialDimension, SpatialGeometryPointDimension from ocgis.util.helpers import make_poly -import pickle -import tempfile from ocgis.test.base import TestBase from ocgis.calc.library.statistics import Mean from ocgis.util.itester import itr_products_keywords from ocgis.util.shp_cabinet import ShpCabinet -import numpy as np from ocgis.calc.eval_function import 
MultivariateEvalFunction @@ -136,56 +140,38 @@ def test_prefix(self): pp.value = ' Old__man ' self.assertEqual(pp.value,'Old__man') - def test_calc_grouping(self): - cg = CalcGrouping(['day','month']) - self.assertEqual(cg.value,('day','month')) - with self.assertRaises(DefinitionValidationError): - cg.value = ['d','foo'] - def test_calc_grouping_all(self): - cg = CalcGrouping('all') - self.assertEqual(cg.value,'all') +class TestAbstraction(TestBase): + create_dir = False - def test_calc_grouping_seasonal_aggregation(self): - cg = CalcGrouping([[1,2,3],[4,5,6]]) - self.assertEqual(cg.value,([1,2,3],[4,5,6])) + def test_init_(self): + K = Abstraction - ## element groups must be composed of unique elements - with self.assertRaises(DefinitionValidationError): - CalcGrouping([[1,2,3],[4,4,6]]) + k = K() + self.assertEqual(k.value,None) + self.assertEqual(str(k),'abstraction="None"') - ## element groups must have an empty intersection - with self.assertRaises(DefinitionValidationError): - CalcGrouping([[1,2,3],[1,4,6]]) + k = K('point') + self.assertEqual(k.value,'point') - ## months must be between 1 and 12 with self.assertRaises(DefinitionValidationError): - CalcGrouping([[1,2,3],[4,5,66]]) + K('pt') - def test_calc_grouping_seasonal_aggregation_with_year(self): - cg = CalcGrouping([[1,2,3],[4,5,6],'year']) - self.assertEqual(cg.value,([1,2,3],[4,5,6],'year')) - def test_calc_grouping_seasonal_aggregation_with_unique(self): - cg = CalcGrouping([[1,2,3],[4,5,6],'unique']) - self.assertEqual(cg.value,([1,2,3],[4,5,6],'unique')) +class TestAggregate(TestBase): + create_dir = False - def test_calc_grouping_seasonal_aggregation_with_bad_flag(self): - with self.assertRaises(DefinitionValidationError): - CalcGrouping([[1,2,3],[4,5,6],'foo']) - with self.assertRaises(DefinitionValidationError): - CalcGrouping([[1,2,3],[4,5,6],'fod']) + def test_init(self): + A = Aggregate - def test_dataset(self): - rd = self.test_data.get_rd('cancm4_tas') - dd = Dataset(rd) + a = A(True) + 
self.assertEqual(a.value,True) - with open('/tmp/dd.pkl','w') as f: - pickle.dump(dd,f) + a = A(False) + self.assertEqual(a.value,False) - uri = '/a/bad/path' - with self.assertRaises(ValueError): - rd = RequestDataset(uri,'foo') + a = A('True') + self.assertEqual(a.value,True) class TestCalc(TestBase): @@ -299,18 +285,47 @@ def test_bad_key(self): class TestCalcGrouping(TestBase): + create_dir = False - def test_init(self): - A = Aggregate + def init(self): + cg = CalcGrouping(['day', 'month']) + self.assertEqual(cg.value, ('day', 'month')) + with self.assertRaises(DefinitionValidationError): + cg.value = ['d', 'foo'] - a = A(True) - self.assertEqual(a.value,True) + def test_all(self): + cg = CalcGrouping('all') + self.assertEqual(cg.value, 'all') - a = A(False) - self.assertEqual(a.value,False) + def test_seasonal_aggregation(self): + cg = CalcGrouping([[1, 2, 3], [4, 5, 6]]) + self.assertEqual(cg.value, ([1, 2, 3], [4, 5, 6])) - a = A('True') - self.assertEqual(a.value,True) + # # element groups must be composed of unique elements + with self.assertRaises(DefinitionValidationError): + CalcGrouping([[1, 2, 3], [4, 4, 6]]) + + ## element groups must have an empty intersection + with self.assertRaises(DefinitionValidationError): + CalcGrouping([[1, 2, 3], [1, 4, 6]]) + + ## months must be between 1 and 12 + with self.assertRaises(DefinitionValidationError): + CalcGrouping([[1, 2, 3], [4, 5, 66]]) + + def test_seasonal_aggregation_with_year(self): + cg = CalcGrouping([[1, 2, 3], [4, 5, 6], 'year']) + self.assertEqual(cg.value, ([1, 2, 3], [4, 5, 6], 'year')) + + def test_seasonal_aggregation_with_unique(self): + cg = CalcGrouping([[1, 2, 3], [4, 5, 6], 'unique']) + self.assertEqual(cg.value, ([1, 2, 3], [4, 5, 6], 'unique')) + + def test_seasonal_aggregation_with_bad_flag(self): + with self.assertRaises(DefinitionValidationError): + CalcGrouping([[1, 2, 3], [4, 5, 6], 'foo']) + with self.assertRaises(DefinitionValidationError): + CalcGrouping([[1, 2, 3], [4, 5, 6], 
'fod']) class TestConformUnitsTo(TestBase): @@ -330,6 +345,141 @@ def test_constructor(self): self.assertTrue(cc.value.equals(Units('celsius'))) +class TestHeaders(TestBase): + create_dir = False + + def test_init(self): + headers = ['did', 'value'] + for htype in [list, tuple]: + hvalue = htype(headers) + hh = Headers(hvalue) + self.assertEqual(hh.value, tuple(constants.required_headers + ['value'])) + + headers = ['foo'] + with self.assertRaises(DefinitionValidationError): + Headers(headers) + + headers = [] + hh = Headers(headers) + self.assertEqual(hh.value, tuple(constants.required_headers)) + + +class TestDataset(TestBase): + create_dir = False + + def test_init(self): + rd = self.test_data.get_rd('cancm4_tas') + dd = Dataset(rd) + + with open('/tmp/dd.pkl', 'w') as f: + pickle.dump(dd, f) + + uri = '/a/bad/path' + with self.assertRaises(ValueError): + RequestDataset(uri, 'foo') + + # test with a dictionary + v = {'uri': rd.uri, 'variable': rd.variable} + dd = Dataset(v) + self.assertEqual(dd.value[rd.variable].variable, rd.variable) + + # test with a list/tuple + v2 = v.copy() + v2['alias'] = 'tas2' + dd = Dataset([v, v2]) + self.assertEqual(set(dd.value.keys()), set([v['variable'], v2['alias']])) + dd = Dataset((v, v2)) + self.assertEqual(set(dd.value.keys()), set([v['variable'], v2['alias']])) + + # test with a request dataset + dd = Dataset(rd) + self.assertIsInstance(dd.value, RequestDatasetCollection) + + # test with a request dataset collection + dd = Dataset(dd.value) + self.assertIsInstance(dd.value, RequestDatasetCollection) + + # test with a bad type + with self.assertRaises(DefinitionValidationError): + Dataset(5) + + # test with field does not load anything + field = self.test_data.get_rd('cancm4_tas').get() + dd = Dataset(field) + rfield = dd.value.first() + self.assertIsNone(rfield.temporal._value) + self.assertIsNone(rfield.spatial.grid._value) + self.assertIsNone(rfield.spatial.grid.row._value) + 
self.assertIsNone(rfield.spatial.grid.col._value) + self.assertIsNone(rfield.variables.first()._value) + + # test with a Field object + field = self.test_data.get_rd('cancm4_tas').get()[:, 0, :, :, :] + field_value = field.variables.first().value + dd = Dataset(field) + self.assertIsInstance(dd.value, RequestDatasetCollection) + rdc_value = dd.value.first().variables.first().value + # do not do a deep copy on the field object... + self.assertTrue(np.may_share_memory(field_value, rdc_value)) + + # we do want a deepcopy on the request dataset object and request dataset collection + rd = self.test_data.get_rd('cancm4_tas') + dd = Dataset(rd) + self.assertIsInstance(dd.value.first(), RequestDataset) + self.assertNotEqual(id(rd), id(dd.value.first())) + rdc = RequestDatasetCollection(target=[rd]) + dd = Dataset(rdc) + self.assertNotEqual(id(rdc), id(dd.value)) + + # test loading dataset directly from uri with some overloads + reference_rd = self.test_data.get_rd('cancm4_tas') + rd = RequestDataset(reference_rd.uri, reference_rd.variable) + ds = Dataset(rd) + self.assertEqual(ds.value, RequestDatasetCollection([rd])) + dsa = {'uri': reference_rd.uri, 'variable': reference_rd.variable} + Dataset(dsa) + reference_rd2 = self.test_data.get_rd('narccap_crcm') + dsb = [dsa, {'uri': reference_rd2.uri, 'variable': reference_rd2.variable, 'alias': 'knight'}] + Dataset(dsb) + + def test_init_esmf(self): + raise SkipTest + #todo: what to do about time values, units, etc. 
+ efield = self.get_esmf_field() + dd = Dataset(efield) + self.assertIsInstance(dd.value, RequestDatasetCollection) + ofield = dd.value.first() + self.assertIsInstance(ofield, Field) + ofield_value = ofield.variables.first().value + self.assertTrue(np.may_share_memory(ofield_value, efield)) + self.assertNumpyAll(ofield_value, efield) + + def test_get_meta(self): + # test with standard request dataset collection + rd = self.test_data.get_rd('cancm4_tas') + dd = Dataset(rd) + self.assertIsInstance(dd.get_meta(), list) + + # test passing a field object + dd = Dataset(rd.get()) + ret = dd.get_meta() + self.assertEqual(ret, ['* dataset=', 'NcField(name=tas, ...)', '']) + + def test_unfiled(self): + env.DIR_DATA = ocgis.env.DIR_TEST_DATA + reference_rd = self.test_data.get_rd('cancm4_tas') + rd = RequestDataset(reference_rd.uri,reference_rd.variable) + ds = Dataset(rd) + self.assertEqual(ds.value,RequestDatasetCollection([rd])) + + dsa = {'uri':reference_rd.uri,'variable':reference_rd.variable} + Dataset(dsa) + + reference_rd2 = self.test_data.get_rd('narccap_crcm') + dsb = [dsa,{'uri':reference_rd2.uri,'variable':reference_rd2.variable,'alias':'knight'}] + Dataset(dsb) + + class TestGeom(TestBase): create_dir = False @@ -444,13 +594,40 @@ def test_geometry_dictionaries(self): self.assertEqual(sdim.properties['COUNTRY'][0], gdict['properties']['COUNTRY']) -class TestRegridDestination(TestBase): +class TestLevelRange(TestBase): + create_dir = False - @property - def possible_values(self): - rd = self.get_rd() - possible = [None, 'tas', rd, rd.get(), rd.get().spatial] - return possible + def test_constructor(self): + LevelRange() + + def test_normal_int(self): + lr = LevelRange([5,10]) + self.assertEqual(lr.value,(5,10)) + + def test_normal_float(self): + value = [4.5,6.5] + lr = LevelRange(value) + self.assertEqual(tuple(value),lr.value) + + def test_bad_length(self): + with self.assertRaises(DefinitionValidationError): + LevelRange([5,6,7,8]) + + def 
test_bad_ordination(self): + with self.assertRaises(DefinitionValidationError): + LevelRange([11,10]) + + +class TestOutputFormat(TestBase): + create_dir = False + + def test_init_esmpy(self): + raise SkipTest + oo = OutputFormat('esmpy') + self.assertEqual(oo.value, 'esmpy') + + +class TestRegridDestination(TestBase): @property def possible_datasets(self): @@ -480,6 +657,12 @@ def possible_datasets(self): return datasets + @property + def possible_values(self): + rd = self.get_rd() + possible = [None, 'tas', rd, rd.get(), rd.get().spatial] + return possible + def get_rd(self, **kwargs): rd = self.test_data.get_rd('cancm4_tas', kwds=kwargs) return rd @@ -553,28 +736,17 @@ def test_get_meta(self): ro = RegridOptions({'value_mask': np.array([True])}) self.assertTrue('numpy.ndarray' in ro._get_meta_()) -class TestLevelRange(TestBase): - create_dir = False - - def test_constructor(self): - LevelRange() - def test_normal_int(self): - lr = LevelRange([5,10]) - self.assertEqual(lr.value,(5,10)) - - def test_normal_float(self): - value = [4.5,6.5] - lr = LevelRange(value) - self.assertEqual(tuple(value),lr.value) +class TestSpatialOperation(TestBase): - def test_bad_length(self): - with self.assertRaises(DefinitionValidationError): - LevelRange([5,6,7,8]) + def test_init(self): + values = (None, 'clip', 'intersects') + ast = ('intersects', 'clip', 'intersects') - def test_bad_ordination(self): - with self.assertRaises(DefinitionValidationError): - LevelRange([11,10]) + klass = SpatialOperation + for v, a in zip(values, ast): + obj = klass(v) + self.assertEqual(obj.value, a) class TestTimeRange(TestBase): diff --git a/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py b/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py index 8f84422d7..6b835e3a2 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py +++ b/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py @@ -1,16 +1,17 @@ +from copy import deepcopy import unittest import 
itertools +from ocgis.interface.base.field import Field from ocgis.exc import DefinitionValidationError, NoUnitsError, VariableNotFoundError, RequestValidationError from ocgis.api.request.base import RequestDataset, RequestDatasetCollection, get_tuple, get_is_none import ocgis from ocgis import env, constants from ocgis.interface.base.crs import CoordinateReferenceSystem, CFWGS84 -from ocgis.test.base import TestBase +from ocgis.test.base import TestBase, nc_scope import os import pickle from datetime import datetime as dt import shutil -from ocgis.test.test_simple.test_simple import nc_scope import datetime from ocgis.api.operations import OcgOperations import numpy as np @@ -427,32 +428,15 @@ def test_time_region(self): class TestRequestDatasetCollection(TestBase): - def test_init(self): + def iter_keywords(self): rd1 = self.test_data.get_rd('cancm4_tas') rd2 = self.test_data.get_rd('cancm4_rhs') - keywords = dict(request_datasets=[None, rd1, [rd1], [rd1, rd2], {'uri': rd1.uri, 'variable': rd1.variable}]) + keywords = dict(target=[None, rd1, [rd1], [rd1, rd2], {'uri': rd1.uri, 'variable': rd1.variable}, rd1.get(), + [rd1.get(), rd2.get()], [rd1, rd2.get()]]) for k in itr_products_keywords(keywords, as_namedtuple=True): - rdc = RequestDatasetCollection(request_datasets=k.request_datasets) - if k.request_datasets is not None: - self.assertEqual(len(rdc), len(list(get_iter(k.request_datasets, dtype=(dict, RequestDataset))))) - else: - self.assertEqual(len(rdc), 0) - - def test_str(self): - rd1 = self.test_data.get_rd('cancm4_tas') - rd2 = self.test_data.get_rd('cancm4_rhs') - rdc = RequestDatasetCollection(request_datasets=[rd1, rd2]) - ss = str(rdc) - self.assertTrue(ss.startswith('RequestDatasetCollection')) - self.assertGreater(len(ss), 900) - - def test_name_attribute_used_for_keys(self): - rd = self.test_data.get_rd('cancm4_tas') - rd.name = 'hi_there' - rdc = RequestDatasetCollection(request_datasets=[rd]) - self.assertEqual(rdc.keys(), ['hi_there']) + yield k 
def test(self): env.DIR_DATA = ocgis.env.DIR_TEST_DATA @@ -488,12 +472,100 @@ def test(self): self.assertIsInstance(rdc.first(), RequestDataset) self.assertIsInstance(rdc['a2'], RequestDataset) + def test_init(self): + for k in self.iter_keywords(): + rdc = RequestDatasetCollection(target=k.target) + if k.target is not None: + self.assertEqual(len(rdc), len(list(get_iter(k.target, dtype=(dict, RequestDataset, Field))))) + self.assertTrue(len(rdc) >= 1) + else: + self.assertEqual(len(rdc), 0) + + def test_get_meta_rows(self): + for k in self.iter_keywords(): + rdc = RequestDatasetCollection(target=k.target) + rows = rdc._get_meta_rows_() + self.assertTrue(len(rows) >= 1) + + def test_get_unique_id(self): + rd = self.test_data.get_rd('cancm4_tas') + field = rd.get() + rd_did = deepcopy(rd) + rd_did.did = 1 + field_uid = deepcopy(field) + field_uid.uid = 1 + + for element in [rd, field, rd_did, field_uid]: + uid = RequestDatasetCollection._get_unique_id_(element) + try: + self.assertEqual(uid, 1) + except AssertionError: + try: + self.assertIsNone(element.did) + except AttributeError: + self.assertIsNone(element.uid) + + def test_iter_request_datasets(self): + rd = self.test_data.get_rd('cancm4_tas') + field = rd.get() + field.name = 'foo' + rdc = RequestDatasetCollection(target=[rd, field]) + tt = list(rdc.iter_request_datasets()) + self.assertEqual(len(tt), 1) + self.assertEqual(len(rdc), 2) + self.assertIsInstance(tt[0], RequestDataset) + + def test_name_attribute_used_for_keys(self): + rd = self.test_data.get_rd('cancm4_tas') + rd.name = 'hi_there' + rdc = RequestDatasetCollection(target=[rd]) + self.assertEqual(rdc.keys(), ['hi_there']) + + def test_set_unique_id(self): + rd = self.test_data.get_rd('cancm4_tas') + field = rd.get() + + for element in [rd, field]: + RequestDatasetCollection._set_unique_id_(element, 5) + uid = RequestDatasetCollection._get_unique_id_(element) + self.assertEqual(uid, 5) + + def test_str(self): + rd1 = 
self.test_data.get_rd('cancm4_tas') + rd2 = self.test_data.get_rd('cancm4_rhs') + rdc = RequestDatasetCollection(target=[rd1, rd2]) + ss = str(rdc) + self.assertTrue(ss.startswith('RequestDatasetCollection')) + self.assertGreater(len(ss), 900) + + def test_update(self): + rd = self.test_data.get_rd('cancm4_tas') + rd.did = 10 + field = rd.get() + self.assertEqual(field.uid, 10) + field.uid = 20 + + rdc = RequestDatasetCollection() + rdc.update(rd) + # name is already in collection and should yield a key error + with self.assertRaises(KeyError): + rdc.update(field) + field.name = 'tas2' + rdc.update(field) + + # add another object and check the increment + field2 = deepcopy(field) + field2.name = 'hanzel' + field2.uid = None + rdc.update(field2) + self.assertEqual(field2.uid, 21) + def test_with_overloads(self): rd = self.test_data.get_rd('cancm4_tas') field = rd.get() - ## loaded calendar should match file metadata + # loaded calendar should match file metadata self.assertEqual(field.temporal.calendar, '365_day') - ## the overloaded calendar in the request dataset should still be None + # the overloaded calendar in the request dataset should still be None self.assertEqual(rd.t_calendar, None) dataset = [{'time_region': None, @@ -504,10 +576,10 @@ def test_with_overloads(self): 't_calendar': u'will_not_work'}] rdc = RequestDatasetCollection(dataset) rd2 = RequestDataset(**dataset[0]) - ## the overloaded calendar should be passed to the request dataset + # the overloaded calendar should be passed to the request dataset self.assertEqual(rd2.t_calendar, 'will_not_work') self.assertEqual(rdc.first().t_calendar, 'will_not_work') - ## when this bad calendar value is used it should raise an exception + # when this bad calendar value is used it should raise an exception with self.assertRaises(ValueError): rdc.first().get().temporal.value_datetime @@ -517,39 +589,37 @@ def test_with_overloads(self): 't_units': u'days since 1940-01-01 00:00:00', 'variable': u'tas'}] rdc = 
RequestDatasetCollection(dataset) - ## ensure the overloaded units are properly passed + # ensure the overloaded units are properly passed self.assertEqual(rdc.first().get().temporal.units, 'days since 1940-01-01 00:00:00') - ## the calendar was not overloaded and the value should be read from - ## the metadata + # the calendar was not overloaded and the value should be read from the metadata self.assertEqual(rdc.first().get().temporal.calendar, '365_day') def test_with_overloads_real_data(self): - ## copy the test file as the calendar attribute will be modified + # copy the test file as the calendar attribute will be modified rd = self.test_data.get_rd('cancm4_tas') filename = os.path.split(rd.uri)[1] dest = os.path.join(self.current_dir_output, filename) shutil.copy2(rd.uri, dest) - ## modify the calendar attribute + # modify the calendar attribute with nc_scope(dest, 'a') as ds: self.assertEqual(ds.variables['time'].calendar, '365_day') ds.variables['time'].calendar = '365_days' - ## assert the calendar is in fact changed on the source file + # assert the calendar is in fact changed on the source file with nc_scope(dest, 'r') as ds: self.assertEqual(ds.variables['time'].calendar, '365_days') rd2 = RequestDataset(uri=dest, variable='tas') field = rd2.get() - ## the bad calendar will raise a value error when the datetimes are - ## converted. + # the bad calendar will raise a value error when the datetimes are converted. 
with self.assertRaises(ValueError): field.temporal.value_datetime - ## overload the calendar and confirm the datetime values are the same - ## as the datetime values from the original good file + # overload the calendar and confirm the datetime values are the same as the datetime values from the original + # good file rd3 = RequestDataset(uri=dest, variable='tas', t_calendar='365_day') field = rd3.get() self.assertNumpyAll(field.temporal.value_datetime, rd.get().temporal.value_datetime) - ## pass as a dataset collection to operations and confirm the data may - ## be written to a flat file. dates are converted in the process. + # pass as a dataset collection to operations and confirm the data may be written to a flat file. dates are + # converted in the process. time_range = (datetime.datetime(2001, 1, 1, 0, 0), datetime.datetime(2011, 1, 1, 0, 0)) dataset = [{'time_region': None, 'uri': dest, @@ -561,8 +631,3 @@ def test_with_overloads_real_data(self): rdc = RequestDatasetCollection(dataset) ops = OcgOperations(dataset=rdc, geom='state_boundaries', select_ugid=[25], output_format='csv+') ops.execute() - - -if __name__ == "__main__": - #import sys;sys.argv = ['', 'Test.testName'] - unittest.main() diff --git a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py index 2ea01690d..505349ba0 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py +++ b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py @@ -1,3 +1,5 @@ +from ocgis.interface.nc.spatial import NcSpatialGridDimension +from ocgis.interface.base.dimension.base import VectorDimension from ocgis import constants from copy import deepcopy import os @@ -7,7 +9,7 @@ from ocgis import RequestDataset from ocgis.api.request.driver.nc import DriverNetcdf, get_dimension_map from ocgis.interface.metadata import NcMetadata -from ocgis.test.base import TestBase +from ocgis.test.base 
import TestBase, nc_scope import netCDF4 as nc from ocgis.interface.base.crs import WGS84, CFWGS84, CFLambertConformal import numpy as np @@ -20,7 +22,6 @@ import datetime from unittest.case import SkipTest import ocgis -from ocgis.test.test_simple.test_simple import nc_scope from importlib import import_module from collections import OrderedDict from ocgis.util.logging_ocgis import ocgis_lh @@ -81,21 +82,34 @@ def test_get_dimensioned_variables_two_variables_in_target_dataset(self): self.assertEqual(rd.variable, ('tas', 'tasmax')) self.assertEqual(rd.variable, rd.alias) - def test_load_dtype_on_dimensions(self): - rd = self.test_data.get_rd('cancm4_tas') - field = rd.get() - with nc_scope(rd.uri) as ds: - test_dtype_temporal = ds.variables['time'].dtype - test_dtype_value = ds.variables['tas'].dtype - self.assertEqual(field.temporal.dtype,test_dtype_temporal) - self.assertEqual(field.variables['tas'].dtype,test_dtype_value) - self.assertEqual(field.temporal.dtype,np.float64) - - def test_load(self): + def test_get_field(self): ref_test = self.test_data['cancm4_tas'] uri = self.test_data.get_uri('cancm4_tas') rd = RequestDataset(variable=ref_test['variable'],uri=uri) field = rd.get() + + self.assertIsInstance(field.spatial.grid, NcSpatialGridDimension) + + # test names are correctly set when creating the field + self.assertEqual(field.temporal.name, 'time') + self.assertEqual(field.temporal.name_value, 'time') + self.assertEqual(field.temporal.name_bounds, 'time_bnds') + row = field.spatial.grid.row + self.assertEqual(row.name, 'lat') + self.assertEqual(row.name_value, 'lat') + self.assertEqual(row.name_bounds, 'lat_bnds') + col = field.spatial.grid.col + self.assertEqual(col.name, 'lon') + self.assertEqual(col.name_value, 'lon') + self.assertEqual(col.name_bounds, 'lon_bnds') + + # test attributes are loaded + self.assertEqual(len(field.attrs), 31) + self.assertEqual(len(field.variables['tas'].attrs), 10) + self.assertEqual(len(field.temporal.attrs), 6) + 
self.assertEqual(len(field.spatial.grid.row.attrs), 5) + self.assertEqual(len(field.spatial.grid.col.attrs), 5) + ds = nc.Dataset(uri,'r') self.assertEqual(field.level,None) @@ -118,14 +132,50 @@ def test_load(self): ds.close() - def test_multifile_load(self): - uri = self.test_data.get_uri('narccap_pr_wrfg_ncep') - rd = RequestDataset(uri,'pr') + def test_get_field_different_dimension_names_and_values(self): + """Test dimension names and dimension values are correctly read from netCDF.""" + + path = os.path.join(self.current_dir_output, 'foo.nc') + with nc_scope(path, 'w') as ds: + ds.createDimension('lat', 1) + ds.createDimension('lon', 1) + ds.createDimension('tme', 1) + ds.createDimension('the_bounds', 2) + latitude = ds.createVariable('latitude', int, dimensions=('lat',)) + longitude = ds.createVariable('longitude', int, dimensions=('lon',)) + time = ds.createVariable('time', int, dimensions=('tme',)) + time_bounds = ds.createVariable('long_live_the_bounds', int, dimensions=('tme', 'the_bounds')) + time.units = 'days since 0000-01-01' + time.bounds = 'long_live_the_bounds' + value = ds.createVariable('value', int, dimensions=('tme', 'lat', 'lon')) + + latitude[:] = 5 + longitude[:] = 6 + time[:] = 6 + value[:] = np.array([7]).reshape(1, 1, 1) + + rd = RequestDataset(path) + driver = DriverNetcdf(rd) + field = driver._get_field_() + self.assertEqual(field.temporal.name, 'tme') + self.assertEqual(field.temporal.name_value, 'time') + self.assertEqual(field.spatial.grid.row.name, 'lat') + self.assertEqual(field.spatial.grid.row.name_value, 'latitude') + self.assertEqual(field.spatial.grid.col.name, 'lon') + self.assertEqual(field.spatial.grid.col.name_value, 'longitude') + self.assertEqual(field.temporal.name_bounds, 'long_live_the_bounds') + + def test_get_field_dtype_on_dimensions(self): + rd = self.test_data.get_rd('cancm4_tas') field = rd.get() - self.assertEqual(field.temporal.extent_datetime,(datetime.datetime(1981, 1, 1, 0, 0), datetime.datetime(1991, 1, 1, 
0, 0))) - self.assertAlmostEqual(field.temporal.resolution,0.125) + with nc_scope(rd.uri) as ds: + test_dtype_temporal = ds.variables['time'].dtype + test_dtype_value = ds.variables['tas'].dtype + self.assertEqual(field.temporal.dtype,test_dtype_temporal) + self.assertEqual(field.variables['tas'].dtype,test_dtype_value) + self.assertEqual(field.temporal.dtype,np.float64) - def test_load_dtype_fill_value(self): + def test_get_field_dtype_fill_value(self): rd = self.test_data.get_rd('cancm4_tas') field = rd.get() ## dtype and fill_value should be read from metadata. when accessed they @@ -134,7 +184,7 @@ def test_load_dtype_fill_value(self): self.assertEqual(field.variables['tas'].fill_value,np.float32(1e20)) self.assertEqual(field.variables['tas']._value,None) - def test_load_datetime_slicing(self): + def test_get_field_datetime_slicing(self): ref_test = self.test_data['cancm4_tas'] uri = self.test_data.get_uri('cancm4_tas') rd = RequestDataset(variable=ref_test['variable'],uri=uri) @@ -147,7 +197,7 @@ def test_load_datetime_slicing(self): self.assertEqual(slced.temporal.value_datetime,np.array([dt(2001,8,28,12)])) self.assertNumpyAll(slced.temporal.bounds_datetime,np.array([dt(2001,8,28),dt(2001,8,29)]).reshape(1, 2)) - def test_load_value_datetime_after_slicing(self): + def test_get_field_value_datetime_after_slicing(self): ref_test = self.test_data['cancm4_tas'] uri = self.test_data.get_uri('cancm4_tas') rd = RequestDataset(variable=ref_test['variable'],uri=uri) @@ -155,7 +205,7 @@ def test_load_value_datetime_after_slicing(self): slced = field[:,10:130,:,4:7,100:37] self.assertEqual(slced.temporal.value_datetime.shape,(120,)) - def test_load_bounds_datetime_after_slicing(self): + def test_get_field_bounds_datetime_after_slicing(self): ref_test = self.test_data['cancm4_tas'] uri = self.test_data.get_uri('cancm4_tas') rd = RequestDataset(variable=ref_test['variable'],uri=uri) @@ -163,7 +213,7 @@ def test_load_bounds_datetime_after_slicing(self): slced = 
field[:,10:130,:,4:7,100:37] self.assertEqual(slced.temporal.bounds_datetime.shape,(120,2)) - def test_load_slice(self): + def test_get_field_slice(self): ref_test = self.test_data['cancm4_tas'] uri = self.test_data.get_uri('cancm4_tas') rd = RequestDataset(variable=ref_test['variable'],uri=uri) @@ -186,7 +236,7 @@ def test_load_slice(self): ds.close() - def test_load_time_range(self): + def test_get_field_time_range(self): ref_test = self.test_data['cancm4_tas'] uri = self.test_data.get_uri('cancm4_tas') rd = RequestDataset(variable=ref_test['variable'],uri=uri,time_range=[dt(2005,2,15),dt(2007,4,18)]) @@ -195,7 +245,7 @@ def test_load_time_range(self): self.assertEqual(field.temporal.value_datetime[-1],dt(2007, 4, 18, 12, 0)) self.assertEqual(field.shape,(1,793,1,64,128)) - def test_load_time_region(self): + def test_get_field_time_region(self): ref_test = self.test_data['cancm4_tas'] uri = self.test_data.get_uri('cancm4_tas') ds = nc.Dataset(uri,'r') @@ -217,7 +267,7 @@ def test_load_time_region(self): ds.close() - def test_load_time_region_with_years(self): + def test_get_field_time_region_with_years(self): ref_test = self.test_data['cancm4_tas'] uri = self.test_data.get_uri('cancm4_tas') ds = nc.Dataset(uri,'r') @@ -239,7 +289,7 @@ def test_load_time_region_with_years(self): ds.close() - def test_load_geometry_subset(self): + def test_get_field_geometry_subset(self): ref_test = self.test_data['cancm4_tas'] uri = self.test_data.get_uri('cancm4_tas') @@ -272,7 +322,7 @@ def test_load_geometry_subset(self): with self.assertRaises(ImportError): import_module('rtree') - def test_load_time_region_slicing(self): + def test_get_field_time_region_slicing(self): ref_test = self.test_data['cancm4_tas'] uri = self.test_data.get_uri('cancm4_tas') @@ -293,7 +343,7 @@ def test_load_time_region_slicing(self): sub2 = field[:,:,:,0,1] self.assertEqual(sub2.shape,(1, 124, 1, 1, 1)) - def test_load_remote(self): + def test_get_field_remote(self): raise(SkipTest("server IO 
errors")) uri = 'http://cida.usgs.gov/thredds/dodsC/maurer/maurer_brekke_w_meta.ncml' variable = 'sresa1b_bccr-bcm2-0_1_Tavg' @@ -313,7 +363,7 @@ def test_load_remote(self): finally: ds.close() - def test_load_with_projection(self): + def test_get_field_with_projection(self): uri = self.test_data.get_uri('narccap_wrfg') rd = RequestDataset(uri,'pr') field = rd.get() @@ -328,7 +378,7 @@ def test_load_with_projection(self): self.assertAlmostEqual(field.spatial.geom.point.value[0,100].x,278.52630062012787) self.assertAlmostEqual(field.spatial.geom.point.value[0,100].y,21.4615681252577) - def test_load_projection_axes(self): + def test_get_field_projection_axes(self): uri = self.test_data.get_uri('cmip3_extraction') variable = 'Tavg' rd = RequestDataset(uri,variable) @@ -347,7 +397,7 @@ def test_load_projection_axes(self): self.assertNumpyAll(to_test[:],field.variables['Tavg'].value.squeeze()) ds.close() - def test_load_projection_axes_slicing(self): + def test_get_field_projection_axes_slicing(self): uri = self.test_data.get_uri('cmip3_extraction') variable = 'Tavg' rd = RequestDataset(uri,variable,dimension_map={'R':'projection','T':'time','X':'longitude','Y':'latitude'}) @@ -360,7 +410,14 @@ def test_load_projection_axes_slicing(self): self.assertNumpyAll(to_test[15,:,:,:],sub.variables[variable].value.squeeze()) ds.close() - def test_load_climatology_bounds(self): + def test_get_field_multifile_load(self): + uri = self.test_data.get_uri('narccap_pr_wrfg_ncep') + rd = RequestDataset(uri,'pr') + field = rd.get() + self.assertEqual(field.temporal.extent_datetime,(datetime.datetime(1981, 1, 1, 0, 0), datetime.datetime(1991, 1, 1, 0, 0))) + self.assertAlmostEqual(field.temporal.resolution,0.125) + + def test_get_field_climatology_bounds(self): rd = self.test_data.get_rd('cancm4_tas') ops = ocgis.OcgOperations(dataset=rd,output_format='nc',geom='state_boundaries', select_ugid=[27],calc=[{'func':'mean','name':'mean'}], @@ -370,6 +427,87 @@ def 
test_load_climatology_bounds(self): field = rd.get() self.assertNotEqual(field.temporal.bounds,None) + def test_get_field_without_row_column_vectors(self): + """Test loading a field objects without row and column vectors.""" + + path = self.get_netcdf_path_no_row_column() + + rd = RequestDataset(path) + driver = DriverNetcdf(rd) + new_field = driver.get_field() + self.assertIsNotNone(new_field.crs) + grid = new_field.spatial.grid + self.assertIsNone(grid.row) + self.assertIsNone(grid.col) + self.assertEqual(grid.name_row, 'yc') + self.assertEqual(grid.name_col, 'xc') + self.assertIsNone(grid._value) + actual = np.ma.array([[[4.0, 4.0], [5.0, 5.0]], [[40.0, 50.0], [40.0, 50.0]]]) + self.assertNumpyAll(grid.value, actual) + var = new_field.variables.first() + self.assertEqual(var.shape, (1, 2, 1, 2, 2)) + self.assertEqual(var.value.shape, (1, 2, 1, 2, 2)) + + new_field = driver.get_field() + grid = new_field.spatial.grid + self.assertEqual(grid.shape, (2, 2)) + self.assertIsNone(grid._value) + sub = new_field[:, :, :, 0, 1] + sub_grid = sub.spatial.grid + self.assertIsInstance(sub_grid, NcSpatialGridDimension) + self.assertEqual(sub.shape, (1, 2, 1, 1, 1)) + self.assertIsNone(sub_grid._value) + self.assertEqual(sub_grid.shape, (1, 1)) + actual = np.ma.array([[[4.0]], [[50.0]]]) + self.assertNumpyAll(actual, sub_grid.value) + sub_var = sub.variables.first() + self.assertEqual(sub_var.shape, (1, 2, 1, 1, 1)) + self.assertEqual(sub_var.value.shape, (1, 2, 1, 1, 1)) + + path2 = os.path.join(self.current_dir_output, 'foo2.nc') + with self.nc_scope(path2, 'w') as ds: + new_field.write_to_netcdf_dataset(ds) + self.assertNcEqual(path, path2, ignore_attributes={'foo': ['grid_mapping']}, + ignore_variables=['latitude_longitude']) + + def test_get_vector_dimension(self): + # test exception raised with no row and column + path = self.get_netcdf_path_no_row_column() + rd = RequestDataset(path) + driver = DriverNetcdf(rd) + k = 'row' + v = {'name_uid': 'yc_id', 'axis': 'Y', 
'adds': {'interpolate_bounds': False}, 'name': 'yc', 'cls': VectorDimension} + source_metadata = rd.source_metadata + res = driver._get_vector_dimension_(k, v, source_metadata) + self.assertEqual(res['name'], 'yc') + + def test_get_name_bounds_suffix(self): + rd = self.test_data.get_rd('cancm4_tas') + source_metadata = rd.source_metadata + res = DriverNetcdf._get_name_bounds_suffix_(source_metadata) + self.assertEqual(res, 'bnds') + + # remove any mention of bounds from the dimension map and try again + for value in source_metadata['dim_map'].itervalues(): + try: + value['bounds'] = None + except TypeError: + # likely a nonetype + if value is not None: + raise + res = DriverNetcdf._get_name_bounds_suffix_(source_metadata) + self.assertIsNone(res) + + # now remove the bounds key completely + for value in source_metadata['dim_map'].itervalues(): + try: + value.pop('bounds') + except AttributeError: + if value is not None: + raise + res = DriverNetcdf._get_name_bounds_suffix_(source_metadata) + self.assertIsNone(res) + def test_open(self): # test a multifile dataset where the variable does not appear in all datasets uri1 = self.test_data.get_uri('cancm4_tas') @@ -422,7 +560,3 @@ def test_get_dimension_map_3(self): self.assertTrue('lat_bnds' in list(ocgis_lh.duplicates)[0]) finally: ocgis_lh.shutdown() - -if __name__ == "__main__": - #import sys;sys.argv = ['', 'Test.testName'] - unittest.main() diff --git a/src/ocgis/test/test_ocgis/test_api/test_subset.py b/src/ocgis/test/test_ocgis/test_api/test_subset.py index eaaaf12db..7b03d2302 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_subset.py +++ b/src/ocgis/test/test_ocgis/test_api/test_subset.py @@ -1,18 +1,25 @@ from copy import deepcopy +import csv +import os import pickle +import itertools + +import ESMF +import numpy as np + +from ocgis.api.parms.definition import OutputFormat +from ocgis.interface.base.field import Field +from ocgis.api.operations import OcgOperations from ocgis.conv.numpy_ import 
NumpyConverter -from ocgis.exc import DimensionNotFound -from ocgis.interface.base.crs import Spherical, CFWGS84, CFPolarStereographic -from ocgis.interface.base.dimension.spatial import SpatialDimension, SpatialGeometryPointDimension +from ocgis.interface.base.crs import Spherical, CFWGS84, CFPolarStereographic, WGS84 +from ocgis.interface.base.dimension.spatial import SpatialDimension from ocgis.test.base import TestBase import ocgis from ocgis.api.subset import SubsetOperation from ocgis.api.collection import SpatialCollection -import itertools from ocgis.test.test_ocgis.test_api.test_parms.test_definition import TestGeom from ocgis.util.itester import itr_products_keywords from ocgis.util.logging_ocgis import ProgressOcgOperations -import numpy as np from ocgis import env @@ -31,6 +38,34 @@ def get_subset_operation(self): subset = SubsetOperation(ops) return subset + def test_init(self): + for rb, p in itertools.product([True, False], [None, ProgressOcgOperations()]): + sub = SubsetOperation(self.get_operations(), request_base_size_only=rb, progress=p) + for ii, coll in enumerate(sub): + self.assertIsInstance(coll, SpatialCollection) + self.assertEqual(ii, 0) + + def test_process_subsettables(self): + # test extrapolating spatial bounds with no row and column + for with_corners in [False, True]: + field = self.get_field() + field.spatial.grid.value + if with_corners: + field.spatial.grid.set_extrapolated_corners() + field.spatial.grid.row = None + field.spatial.grid.col = None + if with_corners: + self.assertIsNotNone(field.spatial.grid.corners) + else: + self.assertIsNone(field.spatial.grid.corners) + ops = OcgOperations(dataset=field, interpolate_spatial_bounds=True) + so = SubsetOperation(ops) + rds = ops.dataset.values() + res = list(so._process_subsettables_(rds)) + self.assertEqual(len(res), 1) + coll = res[0] + self.assertIsNotNone(coll[1][field.name].spatial.grid.corners) + def test_abstraction_not_available(self): """Test appropriate exception is 
raised when a selected abstraction is not available.""" @@ -39,12 +74,104 @@ def test_abstraction_not_available(self): with self.assertRaises(ValueError): ops.execute() - def test_init(self): - for rb, p in itertools.product([True, False], [None, ProgressOcgOperations()]): - sub = SubsetOperation(self.get_operations(), request_base_size_only=rb, progress=p) - for ii, coll in enumerate(sub): - self.assertIsInstance(coll, SpatialCollection) - self.assertEqual(ii, 0) + def test_dataset_as_field(self): + """Test with dataset as field not loaded from file - hence, no metadata.""" + + kwds = dict(output_format=list(OutputFormat.iter_possible()), + crs=[None, WGS84()]) + + for ii, k in enumerate(self.iter_product_keywords(kwds)): + field = self.get_field(crs=k.crs) + + ops = OcgOperations(dataset=field) + ret = ops.execute() + self.assertNumpyAll(ret.gvu(1, 'foo'), field.variables['foo'].value) + + ops = OcgOperations(dataset=field, output_format=k.output_format, prefix=str(ii)) + try: + ret = ops.execute() + except ValueError as ve: + self.assertIsNone(k.crs) + self.assertIn(k.output_format, ['csv', 'csv+', 'geojson', 'shp']) + continue + + if k.output_format == 'numpy': + self.assertIsInstance(ret[1]['foo'], Field) + continue + if k.output_format == 'meta': + self.assertIsInstance(ret, basestring) + self.assertTrue(len(ret) > 50) + continue + if k.output_format == 'esmpy': + self.assertIsInstance(ret, ESMF.Field) + continue + + folder = os.path.split(ret)[0] + + path_did = os.path.join(folder, '{0}_did.csv'.format(ops.prefix)) + with open(path_did, 'r') as f: + rows = list(csv.DictReader(f)) + self.assertEqual(rows, [{'ALIAS': 'foo', 'DID': '1', 'URI': '', 'UNITS': '', 'STANDARD_NAME': '', 'VARIABLE': 'foo', 'LONG_NAME': ''}]) + + path_source_metadata = os.path.join(folder, '{0}_source_metadata.txt'.format(ops.prefix)) + with open(path_source_metadata, 'r') as f: + rows = f.readlines() + self.assertEqual(rows, []) + + if k.output_format == 'nc': + with 
self.nc_scope(ret) as ds: + variables_expected = [u'time', u'row', u'col', u'foo'] + try: + self.assertAsSetEqual(ds.variables.keys(), variables_expected) + except AssertionError: + self.assertIsNotNone(k.crs) + variables_expected.append('latitude_longitude') + self.assertAsSetEqual(ds.variables.keys(), variables_expected) + self.assertNumpyAll(ds.variables['time'][:], field.temporal.value_numtime) + self.assertNumpyAll(ds.variables['row'][:], field.spatial.grid.row.value) + self.assertNumpyAll(ds.variables['col'][:], field.spatial.grid.col.value) + self.assertNumpyAll(ds.variables['foo'][:], field.variables['foo'].value.data.squeeze()) + + contents = os.listdir(folder) + + expected_contents = [xx.format(ops.prefix) for xx in '{0}_source_metadata.txt', '{0}_did.csv', '{0}.log', '{0}_metadata.txt'] + if k.output_format == 'nc': + expected_contents.append('{0}.nc'.format(ops.prefix)) + self.assertAsSetEqual(contents, expected_contents) + elif k.output_format == 'csv+': + expected_contents.append('{0}.csv'.format(ops.prefix)) + expected_contents.append('shp') + self.assertAsSetEqual(contents, expected_contents) + elif k.output_format == 'shp': + expected_contents = ['{0}.shp', '{0}.dbf', '{0}.shx', '{0}.cpg', '{0}.log', '{0}_metadata.txt', '{0}_source_metadata.txt', '{0}_did.csv', '{0}.prj'] + expected_contents = [xx.format(ops.prefix) for xx in expected_contents] + self.assertAsSetEqual(contents, expected_contents) + + def test_dataset_as_field_from_file(self): + """Test with dataset argument coming in as a field as opposed to a request dataset collection.""" + + rd = self.test_data.get_rd('cancm4_tas') + geom = 'state_boundaries' + select_ugid = [23] + field = rd.get() + ops = OcgOperations(dataset=field, snippet=True, geom=geom, select_ugid=select_ugid) + ret = ops.execute() + field_out_from_field = ret[23]['tas'] + self.assertEqual(field_out_from_field.shape, (1, 1, 1, 4, 3)) + ops = OcgOperations(dataset=rd, snippet=True, geom=geom, select_ugid=select_ugid) + ret 
= ops.execute() + field_out_from_rd = ret[23]['tas'] + self.assertNumpyAll(field_out_from_field.variables['tas'].value, field_out_from_rd.variables['tas'].value) + + def test_geometry_dictionary(self): + """Test geometry dictionaries come out properly as collections.""" + + subset = self.get_subset_operation() + conv = NumpyConverter(subset, None, None) + coll = conv.write() + actual = "ccollections\nOrderedDict\np0\n((lp1\n(lp2\ncnumpy.core.multiarray\nscalar\np3\n(cnumpy\ndtype\np4\n(S'i8'\np5\nI0\nI1\ntp6\nRp7\n(I3\nS'<'\np8\nNNNI-1\nI-1\nI0\ntp9\nbS'\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00'\np10\ntp11\nRp12\nacnumpy.core.multiarray\n_reconstruct\np13\n(cnumpy\nndarray\np14\n(I0\ntp15\nS'b'\np16\ntp17\nRp18\n(I1\n(I1\ntp19\ng4\n(S'V16'\np20\nI0\nI1\ntp21\nRp22\n(I3\nS'|'\np23\nN(S'COUNTRY'\np24\nS'UGID'\np25\ntp26\n(dp27\ng24\n(g4\n(S'O8'\np28\nI0\nI1\ntp29\nRp30\n(I3\nS'|'\np31\nNNNI-1\nI-1\nI63\ntp32\nbI0\ntp33\nsg25\n(g7\nI8\ntp34\nsI16\nI1\nI27\ntp35\nbI00\n(lp36\n(S'France'\np37\nI1\ntp38\natp39\nbaa(lp40\ng3\n(g7\nS'\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\x00'\np41\ntp42\nRp43\nag13\n(g14\n(I0\ntp44\ng16\ntp45\nRp46\n(I1\n(I1\ntp47\ng22\nI00\n(lp48\n(S'Germany'\np49\nI2\ntp50\natp51\nbaa(lp52\ng3\n(g7\nS'\\x03\\x00\\x00\\x00\\x00\\x00\\x00\\x00'\np53\ntp54\nRp55\nag13\n(g14\n(I0\ntp56\ng16\ntp57\nRp58\n(I1\n(I1\ntp59\ng22\nI00\n(lp60\n(S'Italy'\np61\nI3\ntp62\natp63\nbaatp64\nRp65\n." 
+ actual = pickle.loads(actual) + self.assertEqual(coll.properties, actual) def test_regridding_bounding_box_wrapped(self): """Test subsetting with a wrapped bounding box with the target as a 0-360 global grid.""" @@ -63,16 +190,6 @@ def test_regridding_bounding_box_wrapped(self): self.assertIsNotNone(field.spatial.grid.corners) self.assertAlmostEqual(field.variables.first().value.mean(), 262.08338758680554) - def test_geometry_dictionary(self): - """Test geometry dictionaries come out properly as collections.""" - - subset = self.get_subset_operation() - conv = NumpyConverter(subset, None, None) - coll = conv.write() - actual = "ccollections\nOrderedDict\np0\n((lp1\n(lp2\ncnumpy.core.multiarray\nscalar\np3\n(cnumpy\ndtype\np4\n(S'i8'\np5\nI0\nI1\ntp6\nRp7\n(I3\nS'<'\np8\nNNNI-1\nI-1\nI0\ntp9\nbS'\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00'\np10\ntp11\nRp12\nacnumpy.core.multiarray\n_reconstruct\np13\n(cnumpy\nndarray\np14\n(I0\ntp15\nS'b'\np16\ntp17\nRp18\n(I1\n(I1\ntp19\ng4\n(S'V16'\np20\nI0\nI1\ntp21\nRp22\n(I3\nS'|'\np23\nN(S'COUNTRY'\np24\nS'UGID'\np25\ntp26\n(dp27\ng24\n(g4\n(S'O8'\np28\nI0\nI1\ntp29\nRp30\n(I3\nS'|'\np31\nNNNI-1\nI-1\nI63\ntp32\nbI0\ntp33\nsg25\n(g7\nI8\ntp34\nsI16\nI1\nI27\ntp35\nbI00\n(lp36\n(S'France'\np37\nI1\ntp38\natp39\nbaa(lp40\ng3\n(g7\nS'\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\x00'\np41\ntp42\nRp43\nag13\n(g14\n(I0\ntp44\ng16\ntp45\nRp46\n(I1\n(I1\ntp47\ng22\nI00\n(lp48\n(S'Germany'\np49\nI2\ntp50\natp51\nbaa(lp52\ng3\n(g7\nS'\\x03\\x00\\x00\\x00\\x00\\x00\\x00\\x00'\np53\ntp54\nRp55\nag13\n(g14\n(I0\ntp56\ng16\ntp57\nRp58\n(I1\n(I1\ntp59\ng22\nI00\n(lp60\n(S'Italy'\np61\nI3\ntp62\natp63\nbaatp64\nRp65\n." 
- actual = pickle.loads(actual) - self.assertEqual(coll.properties, actual) - def test_regridding_same_field(self): """Test regridding operations with same field used to regrid the source.""" diff --git a/src/ocgis/test/test_ocgis/test_calc/test_base.py b/src/ocgis/test/test_ocgis/test_calc/test_base.py index 3019d7f37..17bcdfb7f 100644 --- a/src/ocgis/test/test_ocgis/test_calc/test_base.py +++ b/src/ocgis/test/test_ocgis/test_calc/test_base.py @@ -1,40 +1,70 @@ from copy import deepcopy + +from cfunits.cfunits import Units +import numpy as np + +from ocgis.interface.base.variable import VariableCollection, DerivedVariable + from ocgis.test.base import TestBase from ocgis.test.test_ocgis.test_interface.test_base.test_field import AbstractTestField -from ocgis.calc.base import AbstractUnivariateFunction,\ - AbstractUnivariateSetFunction, AbstractFunction, AbstractMultivariateFunction +from ocgis.calc.base import AbstractUnivariateFunction, AbstractUnivariateSetFunction, AbstractFunction, \ + AbstractMultivariateFunction from ocgis import constants, OcgOperations, FunctionRegistry -from cfunits.cfunits import Units from ocgis.exc import UnitsValidationError, DefinitionValidationError -import numpy as np class FooNeedsUnits(AbstractUnivariateFunction): description = 'calculation with units' dtype = constants.np_float key = 'fnu' - required_units = ['K','kelvin'] + required_units = ['K', 'kelvin'] standard_name = 'foo_needs_units' long_name = 'Foo Needs Units' - def calculate(self,values): - return(values) + def calculate(self, values): + return values class FooNeedsUnitsSet(AbstractUnivariateSetFunction): description = 'calculation with units' dtype = constants.np_float key = 'fnu' - required_units = ['K','kelvin'] + required_units = ['K', 'kelvin'] standard_name = '' long_name = '' def calculate(self,values): - return(np.ma.mean(values,axis=0)) + return np.ma.mean(values,axis=0) + + +class FooSampleSize(FooNeedsUnitsSet): + standard_name = 'the_standard' + long_name 
= 'the_standard_long_name' class TestAbstractFunction(AbstractTestField): + def test_add_to_collection(self): + kwds = dict(calc_sample_size=[False, True]) + + for k in self.iter_product_keywords(kwds): + field = self.get_field(with_value=True) + tgd = field.temporal.get_grouping(['month']) + fb = FooSampleSize(field=field, calc_sample_size=k.calc_sample_size, tgd=tgd) + res = fb.execute() + variable = res.first() + self.assertIsInstance(res, VariableCollection) + self.assertIsInstance(variable, DerivedVariable) + attrs = {'standard_name': fb.standard_name, 'long_name': fb.long_name} + self.assertDictEqual(attrs, variable.attrs) + + if k.calc_sample_size: + alias = 'n_{0}'.format(variable.alias) + ss = res[alias] + attrs = {'standard_name': constants.default_sample_size_standard_name, + 'long_name': constants.default_sample_size_long_name} + self.assertDictEqual(ss.attrs, attrs) + def test_execute_meta_attrs(self): """Test overloaded metadata attributes are appropriately applied.""" @@ -52,12 +82,12 @@ def test_execute_meta_attrs(self): fb = FooNeedsUnits(field=field, meta_attrs=meta_attrs) ret = fb.execute() if oload: - actual = {'attrs': {'long_name': 'Foo Needs Units', 'standard_name': 'never!', - 'something_new': 'is about to happen'}} + actual = {'long_name': 'Foo Needs Units', 'standard_name': 'never!', + 'something_new': 'is about to happen'} else: - actual = {'attrs': {'long_name': 'Foo Needs Units', 'standard_name': 'foo_needs_units', - 'something_new': 'is about to happen'}} - self.assertEqual(ret['fnu'].meta, actual) + actual = {'long_name': 'Foo Needs Units', 'standard_name': 'foo_needs_units', + 'something_new': 'is about to happen'} + self.assertDictEqual(ret['fnu'].attrs, actual) if oload: self.assertDictEqual(meta_attrs, {'something_new': 'is about to happen', 'standard_name': 'never!'}) else: diff --git a/src/ocgis/test/test_ocgis/test_calc/test_calc_general.py b/src/ocgis/test/test_ocgis/test_calc/test_calc_general.py index 115d6fa31..dc858cfba 
100644 --- a/src/ocgis/test/test_ocgis/test_calc/test_calc_general.py +++ b/src/ocgis/test/test_ocgis/test_calc/test_calc_general.py @@ -118,34 +118,36 @@ def test_time_region(self): ret = ops.execute() threshold = ret[2762]['tasmax'].variables['threshold'].value self.assertEqual(threshold.flatten()[0],62) - + def test_computational_nc_output(self): - rd = self.test_data.get_rd('cancm4_tasmax_2011',kwds={'time_range':[datetime.datetime(2011,1,1),datetime.datetime(2011,12,31)]}) - calc = [{'func':'mean','name':'tasmax_mean'}] - calc_grouping = ['month','year'] + """Test writing a computation to netCDF.""" - ops = ocgis.OcgOperations(rd,calc=calc,calc_grouping=calc_grouping, + rd = self.test_data.get_rd('cancm4_tasmax_2011', kwds={ + 'time_range': [datetime.datetime(2011, 1, 1), datetime.datetime(2011, 12, 31)]}) + calc = [{'func': 'mean', 'name': 'tasmax_mean'}] + calc_grouping = ['month', 'year'] + + ops = ocgis.OcgOperations(rd, calc=calc, calc_grouping=calc_grouping, output_format='nc') ret = ops.execute() - ds = nc.Dataset(ret,'r') - ref = ds.variables['time'] - self.assertEqual(ref.climatology,'climatology_bounds') - self.assertEqual(len(ref[:]),12) - ref = ds.variables['climatology_bounds'] - self.assertEqual(ref[:].shape[0],12) - ds.close() - ops = ocgis.OcgOperations(dataset={'uri':ret,'variable':calc[0]['name']}, - output_format='nc',prefix='subset_climatology') + with self.nc_scope(ret) as ds: + ref = ds.variables['time'] + self.assertEqual(ref.climatology, 'climatology_bounds') + self.assertEqual(len(ref[:]), 12) + ref = ds.variables['climatology_bounds'] + self.assertEqual(ref[:].shape[0], 12) + + ops = ocgis.OcgOperations(dataset={'uri': ret, 'variable': calc[0]['name']}, + output_format='nc', prefix='subset_climatology') ret = ops.execute() - - ds = nc.Dataset(ret,'r') - ref = ds.variables['time'][:] - self.assertEqual(len(ref),12) - self.assertEqual(set(ds.variables['tasmax_mean'].ncattrs()), - set([u'_FillValue', u'units', u'long_name', 
u'standard_name'])) - ds.close() - + + with self.nc_scope(ret) as ds: + ref = ds.variables['time'][:] + self.assertEqual(len(ref), 12) + self.assertEqual(set(ds.variables['tasmax_mean'].ncattrs()), + set([u'_FillValue', u'units', u'long_name', u'standard_name', 'grid_mapping'])) + def test_frequency_percentiles(self): ## data comes in as 4-dimensional array. (time,level,row,column) diff --git a/src/ocgis/test/test_ocgis/test_calc/test_library/test_index/test_dynamic_kernel_percentile.py b/src/ocgis/test/test_ocgis/test_calc/test_library/test_index/test_dynamic_kernel_percentile.py index e26f6ab8d..d6d7ad962 100644 --- a/src/ocgis/test/test_ocgis/test_calc/test_library/test_index/test_dynamic_kernel_percentile.py +++ b/src/ocgis/test/test_ocgis/test_calc/test_library/test_index/test_dynamic_kernel_percentile.py @@ -1,12 +1,11 @@ from ocgis.api.operations import OcgOperations from ocgis.api.request.base import RequestDataset import netCDF4 as nc -from ocgis.test.base import TestBase +from ocgis.test.base import TestBase, nc_scope import itertools import datetime import numpy as np from ocgis.calc.library.index.dynamic_kernel_percentile import DynamicDailyKernelPercentileThreshold -from ocgis.test.test_simple.test_simple import nc_scope from ocgis.test.test_base import longrunning diff --git a/src/ocgis/test/test_ocgis/test_calc/test_library/test_math.py b/src/ocgis/test/test_ocgis/test_calc/test_library/test_math.py index 4d4698f37..55609d3cd 100644 --- a/src/ocgis/test/test_ocgis/test_calc/test_library/test_math.py +++ b/src/ocgis/test/test_ocgis/test_calc/test_library/test_math.py @@ -186,7 +186,6 @@ def test_execute_same(self): def test_execute_valid(self): """Test convolution with the 'valid' mode.""" - #todo: add to docs field = self.get_convolve1d_field(slice_stop=4) parms = {'v': np.array([1, 1, 1]), 'mode': 'valid'} cd = Convolve1D(field=field, parms=parms) diff --git a/src/ocgis/test/test_ocgis/test_calc/test_library/test_statistics.py 
b/src/ocgis/test/test_ocgis/test_calc/test_library/test_statistics.py index 5cb742451..e4da7512a 100644 --- a/src/ocgis/test/test_ocgis/test_calc/test_library/test_statistics.py +++ b/src/ocgis/test/test_ocgis/test_calc/test_library/test_statistics.py @@ -6,8 +6,8 @@ from ocgis.api.parms.definition import Calc from ocgis.calc.library.statistics import Mean, FrequencyPercentile, MovingWindow from ocgis.interface.base.variable import DerivedVariable, Variable +from ocgis.test.base import nc_scope from ocgis.test.test_ocgis.test_interface.test_base.test_field import AbstractTestField -from ocgis.test.test_simple.test_simple import nc_scope import ocgis from ocgis.util.itester import itr_products_keywords @@ -28,7 +28,6 @@ def test_calculate(self): self.assertEqual(ret[4], np.mean(values[2:7])) def test_execute(self): - #todo: add to docs field = self.get_field(month_count=1, with_value=True) field = field[:, 0:4, :, :, :] field.variables['tmax'].value[:] = 1 diff --git a/src/ocgis/test/test_ocgis/test_contrib/test_library_icclim.py b/src/ocgis/test/test_ocgis/test_contrib/test_library_icclim.py index 828d0f0fe..0913b543c 100644 --- a/src/ocgis/test/test_ocgis/test_contrib/test_library_icclim.py +++ b/src/ocgis/test/test_ocgis/test_contrib/test_library_icclim.py @@ -1,5 +1,5 @@ import unittest -from ocgis.test.base import TestBase +from ocgis.test.base import TestBase, nc_scope from ocgis.contrib.library_icclim import IcclimTG, IcclimSU, AbstractIcclimFunction,\ IcclimDTR, IcclimETR, IcclimTN, IcclimTX,\ AbstractIcclimUnivariateSetFunction, AbstractIcclimMultivariateFunction @@ -9,7 +9,6 @@ from ocgis.exc import DefinitionValidationError, UnitsValidationError from ocgis.api.operations import OcgOperations from ocgis.calc.library.thresholds import Threshold -from ocgis.test.test_simple.test_simple import nc_scope import ocgis from ocgis.test.test_base import longrunning import numpy as np @@ -196,7 +195,7 @@ def test_calculation_operations_to_nc(self): u'{"institution": 
"CCCma (Canadian Centre for Climate Modelling and Analysis, Victoria, BC, Canada)", "institute_id": "CCCma", "experiment_id": "decadal2000", "source": "CanCM4 2010 atmosphere: CanAM4 (AGCM15i, T63L35) ocean: CanOM4 (OGCM4.0, 256x192L40) sea ice: CanSIM1 (Cavitating Fluid, T63 Gaussian Grid) land: CLASS2.7", "model_id": "CanCM4", "forcing": "GHG,Oz,SA,BC,OC,LU,Sl,Vl (GHG includes CO2,CH4,N2O,CFC11,effective CFC12)", "parent_experiment_id": "N/A", "parent_experiment_rip": "N/A", "branch_time": 0.0, "contact": "cccma_info@ec.gc.ca", "references": "http://www.cccma.ec.gc.ca/models", "initialization_method": 1, "physics_version": 1, "tracking_id": "fac7bd83-dd7a-425b-b4dc-b5ab2e915939", "branch_time_YMDH": "2001:01:01:00", "CCCma_runid": "DHFP1B_E002_I2001_M01", "CCCma_parent_runid": "DHFP1_E002", "CCCma_data_licence": "1) GRANT OF LICENCE - The Government of Canada (Environment Canada) is the \\nowner of all intellectual property rights (including copyright) that may exist in this Data \\nproduct. You (as \\"The Licensee\\") are hereby granted a non-exclusive, non-assignable, \\nnon-transferable unrestricted licence to use this data product for any purpose including \\nthe right to share these data with others and to make value-added and derivative \\nproducts from it. This licence is not a sale of any or all of the owner\'s rights.\\n2) NO WARRANTY - This Data product is provided \\"as-is\\"; it has not been designed or \\nprepared to meet the Licensee\'s particular requirements. Environment Canada makes no \\nwarranty, either express or implied, including but not limited to, warranties of \\nmerchantability and fitness for a particular purpose. 
In no event will Environment Canada \\nbe liable for any indirect, special, consequential or other damages attributed to the \\nLicensee\'s use of the Data product.", "product": "output", "experiment": "10- or 30-year run initialized in year 2000", "frequency": "day", "creation_date": "2011-05-08T01:01:51Z", "history": "2011-05-08T01:01:51Z CMOR rewrote data to comply with CF standards and CMIP5 requirements.", "Conventions": "CF-1.4", "project_id": "CMIP5", "table_id": "Table day (28 March 2011) f9d6cfec5981bb8be1801b35a81002f0", "title": "CanCM4 model output prepared for CMIP5 10- or 30-year run initialized in year 2000", "parent_experiment": "N/A", "modeling_realm": "atmos", "realization": 2, "cmor_version": "2.5.4"}') ## load the original source attributes from the JSON string json.loads(ds.__dict__[AbstractIcclimFunction._global_attribute_source_name]) - self.assertEqual(dict(var.__dict__),{'_FillValue':np.float32(1e20),u'units': u'K', u'standard_name': AbstractIcclimFunction.standard_name, u'long_name': u'Mean of daily mean temperature'}) + self.assertEqual(dict(var.__dict__),{'_FillValue':np.float32(1e20),u'units': u'K', 'grid_mapping': 'latitude_longitude', u'standard_name': AbstractIcclimFunction.standard_name, u'long_name': u'Mean of daily mean temperature'}) def test_calculate(self): rd = self.test_data.get_rd('cancm4_tas') @@ -237,21 +236,28 @@ def test_calculation_operations_bad_units(self): def test_calculation_operations_to_nc(self): rd = self.test_data.get_rd('cancm4_tasmax_2011') - slc = [None,None,None,[0,10],[0,10]] - ops_ocgis = OcgOperations(calc=[{'func':'icclim_SU','name':'SU'}], - calc_grouping=['month'], - slice=slc, - dataset=rd, - output_format='nc') + slc = [None, None, None, [0, 10], [0, 10]] + ops_ocgis = OcgOperations(calc=[{'func': 'icclim_SU', 'name': 'SU'}], calc_grouping=['month'], slice=slc, + dataset=rd, output_format='nc') ret = ops_ocgis.execute() with nc_scope(ret) as ds: to_test = deepcopy(ds.__dict__) history = 
to_test.pop('history') - self.assertEqual(history[111:187],' Calculation of SU indice (monthly climatology) from 2011-1-1 to 2020-12-31.') - self.assertDictEqual(to_test,OrderedDict([(u'source_data_global_attributes', u'{"institution": "CCCma (Canadian Centre for Climate Modelling and Analysis, Victoria, BC, Canada)", "institute_id": "CCCma", "experiment_id": "decadal2010", "source": "CanCM4 2010 atmosphere: CanAM4 (AGCM15i, T63L35) ocean: CanOM4 (OGCM4.0, 256x192L40) sea ice: CanSIM1 (Cavitating Fluid, T63 Gaussian Grid) land: CLASS2.7", "model_id": "CanCM4", "forcing": "GHG,Oz,SA,BC,OC,LU,Sl,Vl (GHG includes CO2,CH4,N2O,CFC11,effective CFC12)", "parent_experiment_id": "N/A", "parent_experiment_rip": "N/A", "branch_time": 0.0, "contact": "cccma_info@ec.gc.ca", "references": "http://www.cccma.ec.gc.ca/models", "initialization_method": 1, "physics_version": 1, "tracking_id": "64384802-3f0f-4ab4-b569-697bd5430854", "branch_time_YMDH": "2011:01:01:00", "CCCma_runid": "DHFP1B_E002_I2011_M01", "CCCma_parent_runid": "DHFP1_E002", "CCCma_data_licence": "1) GRANT OF LICENCE - The Government of Canada (Environment Canada) is the \\nowner of all intellectual property rights (including copyright) that may exist in this Data \\nproduct. You (as \\"The Licensee\\") are hereby granted a non-exclusive, non-assignable, \\nnon-transferable unrestricted licence to use this data product for any purpose including \\nthe right to share these data with others and to make value-added and derivative \\nproducts from it. This licence is not a sale of any or all of the owner\'s rights.\\n2) NO WARRANTY - This Data product is provided \\"as-is\\"; it has not been designed or \\nprepared to meet the Licensee\'s particular requirements. Environment Canada makes no \\nwarranty, either express or implied, including but not limited to, warranties of \\nmerchantability and fitness for a particular purpose. 
In no event will Environment Canada \\nbe liable for any indirect, special, consequential or other damages attributed to the \\nLicensee\'s use of the Data product.", "product": "output", "experiment": "10- or 30-year run initialized in year 2010", "frequency": "day", "creation_date": "2012-03-28T15:32:08Z", "history": "2012-03-28T15:32:08Z CMOR rewrote data to comply with CF standards and CMIP5 requirements.", "Conventions": "CF-1.4", "project_id": "CMIP5", "table_id": "Table day (28 March 2011) f9d6cfec5981bb8be1801b35a81002f0", "title": "CanCM4 model output prepared for CMIP5 10- or 30-year run initialized in year 2010", "parent_experiment": "N/A", "modeling_realm": "atmos", "realization": 2, "cmor_version": "2.8.0"}'), (u'title', u'ECA heat indice SU'), (u'references', u'ATBD of the ECA indices calculation (http://eca.knmi.nl/documents/atbd.pdf)'), (u'institution', u'Climate impact portal (http://climate4impact.eu)'), (u'comment', u' ')])) + self.assertEqual(history[111:187], + ' Calculation of SU indice (monthly climatology) from 2011-1-1 to 2020-12-31.') + self.assertDictEqual(to_test, OrderedDict([(u'source_data_global_attributes', + u'{"institution": "CCCma (Canadian Centre for Climate Modelling and Analysis, Victoria, BC, Canada)", "institute_id": "CCCma", "experiment_id": "decadal2010", "source": "CanCM4 2010 atmosphere: CanAM4 (AGCM15i, T63L35) ocean: CanOM4 (OGCM4.0, 256x192L40) sea ice: CanSIM1 (Cavitating Fluid, T63 Gaussian Grid) land: CLASS2.7", "model_id": "CanCM4", "forcing": "GHG,Oz,SA,BC,OC,LU,Sl,Vl (GHG includes CO2,CH4,N2O,CFC11,effective CFC12)", "parent_experiment_id": "N/A", "parent_experiment_rip": "N/A", "branch_time": 0.0, "contact": "cccma_info@ec.gc.ca", "references": "http://www.cccma.ec.gc.ca/models", "initialization_method": 1, "physics_version": 1, "tracking_id": "64384802-3f0f-4ab4-b569-697bd5430854", "branch_time_YMDH": "2011:01:01:00", "CCCma_runid": "DHFP1B_E002_I2011_M01", "CCCma_parent_runid": "DHFP1_E002", 
"CCCma_data_licence": "1) GRANT OF LICENCE - The Government of Canada (Environment Canada) is the \\nowner of all intellectual property rights (including copyright) that may exist in this Data \\nproduct. You (as \\"The Licensee\\") are hereby granted a non-exclusive, non-assignable, \\nnon-transferable unrestricted licence to use this data product for any purpose including \\nthe right to share these data with others and to make value-added and derivative \\nproducts from it. This licence is not a sale of any or all of the owner\'s rights.\\n2) NO WARRANTY - This Data product is provided \\"as-is\\"; it has not been designed or \\nprepared to meet the Licensee\'s particular requirements. Environment Canada makes no \\nwarranty, either express or implied, including but not limited to, warranties of \\nmerchantability and fitness for a particular purpose. In no event will Environment Canada \\nbe liable for any indirect, special, consequential or other damages attributed to the \\nLicensee\'s use of the Data product.", "product": "output", "experiment": "10- or 30-year run initialized in year 2010", "frequency": "day", "creation_date": "2012-03-28T15:32:08Z", "history": "2012-03-28T15:32:08Z CMOR rewrote data to comply with CF standards and CMIP5 requirements.", "Conventions": "CF-1.4", "project_id": "CMIP5", "table_id": "Table day (28 March 2011) f9d6cfec5981bb8be1801b35a81002f0", "title": "CanCM4 model output prepared for CMIP5 10- or 30-year run initialized in year 2010", "parent_experiment": "N/A", "modeling_realm": "atmos", "realization": 2, "cmor_version": "2.8.0"}'), + (u'title', u'ECA heat indice SU'), ( + u'references', u'ATBD of the ECA indices calculation (http://eca.knmi.nl/documents/atbd.pdf)'), ( + u'institution', + u'Climate impact portal (http://climate4impact.eu)'), + (u'comment', u' ')])) var = ds.variables['SU'] to_test = dict(var.__dict__) - self.assertEqual(to_test,{'_FillValue':999999,u'units': u'days', u'standard_name': 
AbstractIcclimFunction.standard_name, u'long_name': 'Summer days (number of days where daily maximum temperature > 25 degrees)'}) + self.assertEqual(to_test, {'_FillValue': 999999, u'units': u'days', + u'standard_name': AbstractIcclimFunction.standard_name, + u'long_name': 'Summer days (number of days where daily maximum temperature > 25 degrees)', + 'grid_mapping': 'latitude_longitude'}) @longrunning def test_calculate_opendap(self): diff --git a/src/ocgis/test/test_ocgis/test_conv/test_base.py b/src/ocgis/test/test_ocgis/test_conv/test_base.py index d756413ae..691ed9c4d 100644 --- a/src/ocgis/test/test_ocgis/test_conv/test_base.py +++ b/src/ocgis/test/test_ocgis/test_conv/test_base.py @@ -1,5 +1,5 @@ from csv import DictReader -from ocgis.test.base import TestBase +from ocgis.test.base import TestBase, nc_scope from ocgis.api.collection import SpatialCollection from ocgis.conv.csv_ import CsvConverter, CsvPlusConverter import ocgis @@ -9,7 +9,6 @@ import itertools from copy import deepcopy import tempfile -from ocgis.test.test_simple.test_simple import nc_scope import numpy as np @@ -20,7 +19,7 @@ def get_spatial_collection(self, field=None): field = field or rd.get()[:, 0, :, 0, 0] coll = SpatialCollection() coll.add_field(1, None, field) - return(coll) + return coll class TestAbstractConverter(AbstractTestConverter): diff --git a/src/ocgis/test/test_ocgis/test_conv/test_esmpy.py b/src/ocgis/test/test_ocgis/test_conv/test_esmpy.py new file mode 100644 index 000000000..02bbcd81d --- /dev/null +++ b/src/ocgis/test/test_ocgis/test_conv/test_esmpy.py @@ -0,0 +1,97 @@ +from copy import deepcopy +from unittest import SkipTest + +import ESMF +import numpy as np + +from ocgis.exc import DefinitionValidationError +from ocgis import SpatialCollection, OcgOperations +from ocgis.conv.base import AbstractConverter +from ocgis.conv.esmpy import ESMPyConverter +from ocgis.test.test_ocgis.test_conv.test_base import AbstractTestConverter + + +class 
TestESMPyConverter(AbstractTestConverter): + + def setUp(self): + raise SkipTest + + def get_conv(self, with_corners=True, value_mask=None, esmf_field_name=None, field=None): + coll = self.get_spatial_collection(field=field) + conv = ESMPyConverter([coll], with_corners=with_corners, value_mask=value_mask, esmf_field_name=esmf_field_name) + return conv + + def test_init(self): + conv = self.get_conv() + self.assertIsInstance(conv, AbstractConverter) + + def test_iter(self): + conv = self.get_conv() + res = list(conv) + self.assertEqual(len(res), 1) + self.assertIsInstance(res[0], SpatialCollection) + + def test_write(self): + #todo: test with multiple collections + #todo: test with multiple variables + #todo: test with multiple fields + #todo: test with mask on field + + kwds = dict(nlevel=[1, 3], + nrlz=[1, 5], + esmf_field_name=[None, 'foo'], + value_mask=[None, True]) + + for k in self.iter_product_keywords(kwds): + ofield = self.get_field(nlevel=k.nlevel, nrlz=k.nrlz) + ovariable = ofield.variables.first() + + if k.value_mask is not None: + value_mask = np.zeros(ofield.shape[-2:], dtype=bool) + value_mask[0, 1] = True + else: + value_mask = None + + conv = self.get_conv(field=ofield, with_corners=True, value_mask=value_mask, + esmf_field_name=k.esmf_field_name) + efield = conv.write() + + try: + self.assertEqual(efield.name, k.esmf_field_name) + except AssertionError: + self.assertIsNone(k.esmf_field_name) + self.assertEqual(efield.name, ovariable.alias) + self.assertIsInstance(efield, ESMF.Field) + self.assertFalse(np.may_share_memory(ovariable.value, efield)) + # field are currently always 64-bit in ESMF... 
+ self.assertEqual(efield.dtype, np.float64) + self.assertNumpyAll(ovariable.value, efield, check_arr_type=False, check_arr_dtype=False) + + if k.value_mask: + self.assertTrue(np.any(efield.grid.mask[0] == 0)) + self.assertTrue(efield.mask.any()) + + def test_validate_ops(self): + rd = self.test_data.get_rd('cancm4_tas') + rd2 = deepcopy(rd) + rd2.alias = 'tas2' + dataset = [rd, rd2] + + # only one dataset for output + with self.assertRaises(DefinitionValidationError): + OcgOperations(dataset=dataset, output_format='esmpy') + + # clip not allowed + with self.assertRaises(DefinitionValidationError): + OcgOperations(dataset=rd, output_format='esmpy', spatial_operation='clip') + + # only one select_ugid + with self.assertRaises(DefinitionValidationError): + OcgOperations(dataset=rd, output_format='esmpy', geom='state_boundaries', select_ugid=[4, 5]) + # more than one is allowed if agg_selection is true + OcgOperations(dataset=rd, output_format='esmpy', geom='state_boundaries', select_ugid=[4, 5], + agg_selection=True) + + # no spatial aggregation + with self.assertRaises(DefinitionValidationError): + OcgOperations(dataset=rd, output_format='esmpy', aggregate=True) \ No newline at end of file diff --git a/src/ocgis/test/test_ocgis/test_conv/test_meta.py b/src/ocgis/test/test_ocgis/test_conv/test_meta.py index 01cff7f0d..92639fe2a 100644 --- a/src/ocgis/test/test_ocgis/test_conv/test_meta.py +++ b/src/ocgis/test/test_ocgis/test_conv/test_meta.py @@ -1,3 +1,4 @@ +import os from ocgis import OcgOperations from ocgis.conv.meta import MetaConverter from ocgis.test.base import TestBase @@ -6,7 +7,13 @@ class TestMetaConverter(TestBase): def test_init(self): + rd = self.test_data.get_rd('cancm4_tas') + ops = OcgOperations(dataset=rd) + MetaConverter(ops) + + def test_write(self): rd = self.test_data.get_rd('cancm4_tas') ops = OcgOperations(dataset=rd) conv = MetaConverter(ops) self.assertTrue(len(conv.write()) > 4000) + 
self.assertEqual(len(os.listdir(self.current_dir_output)), 0) diff --git a/src/ocgis/test/test_ocgis/test_conv/test_nc.py b/src/ocgis/test/test_ocgis/test_conv/test_nc.py index 071168836..25b7ac38d 100644 --- a/src/ocgis/test/test_ocgis/test_conv/test_nc.py +++ b/src/ocgis/test/test_ocgis/test_conv/test_nc.py @@ -1,11 +1,15 @@ +from ocgis.test.base import nc_scope +from ocgis.util.itester import itr_products_keywords +from ocgis.api.operations import OcgOperations from ocgis.conv.nc import NcConverter import numpy as np -from ocgis.test.test_simple.test_simple import nc_scope from ocgis.test.test_ocgis.test_conv.test_base import AbstractTestConverter import ocgis +from ocgis import constants +from datetime import datetime as dt -class Test(AbstractTestConverter): +class TestNcConverter(AbstractTestConverter): def test_fill_value_modified(self): ## test the fill value is appropriately copied if reset inside the field @@ -29,4 +33,59 @@ def test_fill_value_copied(self): ops = ocgis.OcgOperations(dataset=rd,snippet=True,output_format='nc') ret = ops.execute() with nc_scope(ret) as ds: - self.assertEqual(fill_value_test,ds.variables['tas']._FillValue) \ No newline at end of file + self.assertEqual(fill_value_test,ds.variables['tas']._FillValue) + + def test_get_file_format(self): + # use a field as the input dataset + coll = self.get_spatial_collection(field=self.get_field()) + conv = NcConverter([coll], self.current_dir_output, 'foo') + file_format = conv._get_file_format_() + self.assertEqual(file_format, constants.netCDF_default_data_model) + + # add operations with a field as the dataset + ops = OcgOperations(dataset=coll[1]['foo'], output_format='nc') + conv = NcConverter([coll], self.current_dir_output, 'foo', ops=ops) + file_format = conv._get_file_format_() + self.assertEqual(file_format, constants.netCDF_default_data_model) + + # add operations and use a request dataset + coll = self.get_spatial_collection() + rd = self.test_data.get_rd('cancm4_tas') + ops = 
OcgOperations(dataset=rd, output_format='nc') + conv = NcConverter([coll], self.current_dir_output, 'foo', ops=ops) + file_format = conv._get_file_format_() + with nc_scope(rd.uri) as ds: + self.assertEqual(file_format, ds.file_format) + + def test_write_coll(self): + # use a field as the input dataset + field = self.get_field() + coll = self.get_spatial_collection(field=field) + + kwds = dict(with_ops=[False, True], + file_only=[False, True]) + + for k in itr_products_keywords(kwds, as_namedtuple=True): + + if k.with_ops: + ops = OcgOperations(dataset=self.test_data.get_rd('cancm4_tas'), file_only=k.file_only, + output_format='nc', calc=[{'name': 'mean', 'func': 'mean'}], calc_grouping=['month']) + else: + ops = None + + conv = NcConverter([coll], self.current_dir_output, 'foo', ops=ops, overwrite=True) + + with nc_scope(conv.path, 'w') as ds: + conv._write_coll_(ds, coll) + with nc_scope(conv.path) as ds: + value_nc = ds.variables['foo'][:] + value_field = field.variables['foo'].value.squeeze() + try: + self.assertNumpyAll(value_field, np.ma.array(value_nc)) + except AssertionError: + self.assertTrue(k.file_only) + self.assertTrue(k.with_ops) + self.assertTrue(value_nc.mask.all()) + self.assertIn('ocgis', ds.history) + if k.with_ops: + self.assertIn('OcgOperations', ds.history) diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_attributes.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_attributes.py new file mode 100644 index 000000000..a508aaf67 --- /dev/null +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_attributes.py @@ -0,0 +1,42 @@ +from collections import OrderedDict +import os +from ocgis.interface.base.attributes import Attributes +from ocgis.test.base import TestBase, nc_scope + + +class TestAttributes(TestBase): + + def get_attributes(self): + attrs = {'a': 5, 'b': 6} + a = Attributes(attrs=attrs) + return a, attrs + + def test_init(self): + a = Attributes() + self.assertEqual(a.attrs, OrderedDict()) + + a, 
attrs = self.get_attributes() + self.assertIsInstance(a.attrs, OrderedDict) + self.assertEqual(a.attrs, attrs) + attrs['c'] = 'another' + self.assertNotIn('c', a.attrs) + + def test_write_to_netcdf_object(self): + path = os.path.join(self.current_dir_output, 'foo.nc') + + a, attrs = self.get_attributes() + + # write to dataset object + with nc_scope(path, 'w') as ds: + a.write_attributes_to_netcdf_object(ds) + with nc_scope(path, 'r') as ds: + self.assertDictEqual(ds.__dict__, a.attrs) + + # write to variable object + with nc_scope(path, 'w') as ds: + ds.createDimension('foo') + var = ds.createVariable('foo', int, dimensions=('foo',)) + a.write_attributes_to_netcdf_object(var) + with nc_scope(path, 'r') as ds: + var = ds.variables['foo'] + self.assertDictEqual(var.__dict__, a.attrs) diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_crs.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_crs.py index 546684ba9..a1eeaddae 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_crs.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_crs.py @@ -1,18 +1,21 @@ +import os import unittest -import itertools +from copy import deepcopy +import netCDF4 as nc + from shapely.geometry import Point, MultiPoint +import numpy as np +from shapely.geometry.multipolygon import MultiPolygon + from ocgis.interface.base.crs import CoordinateReferenceSystem, WGS84,\ - CFAlbersEqualArea, CFLambertConformal, CFRotatedPole, CFWGS84, Spherical, WrappableCoordinateReferenceSystem + CFAlbersEqualArea, CFLambertConformal, CFRotatedPole, CFWGS84, Spherical, WrappableCoordinateReferenceSystem, \ + CFCoordinateReferenceSystem from ocgis.interface.base.dimension.base import VectorDimension from ocgis.interface.base.dimension.spatial import SpatialGridDimension,\ SpatialDimension from ocgis.exc import SpatialWrappingError -from ocgis.test.base import TestBase -import numpy as np -from copy import deepcopy -from shapely.geometry.multipolygon import 
MultiPolygon -from ocgis.util.helpers import get_temp_path, write_geom_dict, make_poly -import netCDF4 as nc +from ocgis.test.base import TestBase, nc_scope +from ocgis.util.helpers import make_poly from ocgis.interface.metadata import NcMetadata import ocgis from ocgis.util.itester import itr_products_keywords @@ -41,6 +44,12 @@ def test_init(self): self.assertEqual(crs, prev_crs) prev_crs = deepcopy(crs) + # test with a name parameter + crs = CoordinateReferenceSystem(epsg=4326) + self.assertEqual(crs.name, constants.default_coordinate_system_name) + crs = CoordinateReferenceSystem(epsg=4326, name='foo') + self.assertEqual(crs.name, 'foo') + def test_ne(self): crs1 = CoordinateReferenceSystem(epsg=4326) crs2 = CoordinateReferenceSystem(epsg=2136) @@ -54,6 +63,14 @@ def test_ne(self): self.assertNotEqual(None, crs1) self.assertNotEqual('input', crs1) + def test_write_to_rootgrp(self): + crs = CoordinateReferenceSystem(epsg=4326, name='hello_world') + path = os.path.join(self.current_dir_output, 'foo.nc') + with nc_scope(path, 'w') as ds: + variable = crs.write_to_rootgrp(ds) + self.assertIsInstance(variable, nc.Variable) + self.assertEqual(variable.proj4, crs.proj4) + class TestWrappableCoordinateSystem(TestBase): create_dir = False @@ -201,6 +218,7 @@ def test_init(self): crs = Spherical(semi_major_axis=6370998.1) self.assertDictEqual(crs.value, {'a': 6370998.1, 'no_defs': True, 'b': 6370998.1, 'proj': 'longlat', 'towgs84': '0,0,0,0,0,0,0'}) + self.assertEqual(crs.name, 'latitude_longitude') def test_place_prime_meridian_array(self): arr = np.array([123, 180, 200, 180], dtype=float) @@ -320,6 +338,16 @@ def test_init(self): self.assertEqual(WGS84(), CoordinateReferenceSystem(epsg=4326)) self.assertIsInstance(WGS84(), WrappableCoordinateReferenceSystem) self.assertNotIsInstance(WGS84(), Spherical) + self.assertEqual(WGS84().name, 'latitude_longitude') + + +class TestCFWGS84(TestBase): + + def test_init(self): + crs = CFWGS84() + 
self.assertEqual(crs.map_parameters_values, {}) + self.assertIsInstance(crs, WGS84) + self.assertIsInstance(crs, CFCoordinateReferenceSystem) class TestCFAlbersEqualArea(TestBase): @@ -348,34 +376,49 @@ def test_load_from_metadata(self): ds = nc.Dataset(uri,'r') meta = NcMetadata(ds) crs = CFLambertConformal.load_from_metadata('pr',meta) + self.assertEqual(crs.name, 'Lambert_Conformal') self.assertEqual(crs.value,{'lon_0': -97, 'ellps': 'WGS84', 'y_0': 2700000, 'no_defs': True, 'proj': 'lcc', 'x_0': 3325000, 'units': 'm', 'lat_2': 60, 'lat_1': 30, 'lat_0': 47.5}) self.assertIsInstance(crs,CFLambertConformal) self.assertEqual(['xc','yc'],[crs.projection_x_coordinate,crs.projection_y_coordinate]) self.assertNumpyAll(np.array([ 30., 60.]),crs.map_parameters_values.pop('standard_parallel')) self.assertEqual(crs.map_parameters_values,{u'latitude_of_projection_origin': 47.5, u'longitude_of_central_meridian': -97.0, u'false_easting': 3325000.0, u'false_northing': 2700000.0, 'units': u'm'}) ds.close() - + + def test_write_to_rootgrp(self): + uri = self.test_data.get_uri('narccap_wrfg') + ds = nc.Dataset(uri,'r') + meta = NcMetadata(ds) + ds.close() + crs = CFLambertConformal.load_from_metadata('pr',meta) + path = os.path.join(self.current_dir_output, 'foo.nc') + with nc_scope(path, 'w') as ds: + variable = crs.write_to_rootgrp(ds) + self.assertEqual(variable.grid_mapping_name, crs.grid_mapping_name) + for k, v in crs.map_parameters_values.iteritems(): + variable_v = variable.__dict__[k] + try: + self.assertEqual(variable_v, v) + except ValueError: + self.assertNumpyAll(variable_v, v) + + with nc_scope(path) as ds: + meta2 = NcMetadata(ds) + meta['variables']['Lambert_Conformal'] = meta2['variables']['Lambert_Conformal'] + crs2 = CFLambertConformal.load_from_metadata('pr', meta) + self.assertEqual(crs, crs2) + + path2 = os.path.join(self.current_dir_output, 'foo2.nc') + with nc_scope(path2, 'w') as ds: + crs2.write_to_rootgrp(ds) + class TestCFRotatedPole(TestBase): - def 
test_load_from_metadata(self): - rd = self.test_data.get_rd('rotated_pole_ichec') - self.assertIsInstance(rd.get().spatial.crs, CFRotatedPole) - def test_equal(self): rd = self.test_data.get_rd('rotated_pole_ichec') rd2 = deepcopy(rd) self.assertEqual(rd.get().spatial.crs, rd2.get().spatial.crs) - def test_in_operations(self): - rd = self.test_data.get_rd('rotated_pole_ichec') - rd2 = deepcopy(rd) - rd2.alias = 'tas2' - # # these projections are equivalent so it is okay to write them to a - ## common output file - ops = ocgis.OcgOperations(dataset=[rd, rd2], output_format='csv', snippet=True) - ops.execute() - def test_get_rotated_pole_transformation(self): """Test SpatialDimension objects are appropriately transformed.""" @@ -413,6 +456,10 @@ def test_get_rotated_pole_transformation(self): self.assertNumpyAll(field.variables['tas'].value, field_copy.variables['tas'].value) inverse_spatial = original_crs.get_rotated_pole_transformation(new_spatial, inverse=True) + for attr in ['row', 'col']: + target = getattr(inverse_spatial.grid, attr) + target_actual = getattr(spatial.grid, attr) + self.assertDictEqual(target.attrs, target_actual.attrs) inverse_spatial.assert_uniform_mask() self.assertNumpyAll(inverse_spatial.uid, spatial.uid) @@ -422,8 +469,25 @@ def test_get_rotated_pole_transformation(self): self.assertEqual(spatial.grid.row.name, inverse_spatial.grid.row.name) self.assertDictEqual(spatial.grid.col.meta, inverse_spatial.grid.col.meta) self.assertEqual(spatial.grid.col.name, inverse_spatial.grid.col.name) - -if __name__ == "__main__": - #import sys;sys.argv = ['', 'Test.testName'] - unittest.main() + def test_in_operations(self): + rd = self.test_data.get_rd('rotated_pole_ichec') + rd2 = deepcopy(rd) + rd2.alias = 'tas2' + # these projections are equivalent so it is okay to write them to a common output file + ops = ocgis.OcgOperations(dataset=[rd, rd2], output_format='csv', snippet=True) + ops.execute() + + def test_load_from_metadata(self): + rd = 
self.test_data.get_rd('rotated_pole_ichec') + self.assertIsInstance(rd.get().spatial.crs, CFRotatedPole) + + def test_write_to_rootgrp(self): + rd = self.test_data.get_rd('narccap_rotated_pole') + path = os.path.join(self.current_dir_output, 'foo.nc') + + with nc_scope(path, 'w') as ds: + variable = rd.crs.write_to_rootgrp(ds) + self.assertIsInstance(variable, nc.Variable) + self.assertEqual(variable.proj4, '') + self.assertEqual(variable.proj4_transform, rd.crs._trans_proj) diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_base.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_base.py index 318a47bb1..4c0010b74 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_base.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_base.py @@ -1,24 +1,123 @@ -import unittest -import numpy as np -from ocgis import constants -from ocgis.exc import EmptySubsetError, ResolutionError -from ocgis.interface.base.dimension.base import VectorDimension +from collections import OrderedDict +import os from copy import deepcopy + +import numpy as np from cfunits.cfunits import Units -from ocgis.test.base import TestBase + +from ocgis.interface.base.attributes import Attributes +from ocgis.interface.base.variable import AbstractSourcedVariable, AbstractValueVariable +from ocgis import constants +from ocgis.exc import EmptySubsetError, ResolutionError, BoundsAlreadyAvailableError +from ocgis.interface.base.dimension.base import VectorDimension, AbstractUidValueDimension, AbstractValueDimension, \ + AbstractDimension, AbstractUidDimension +from ocgis.test.base import TestBase, nc_scope from ocgis.util.helpers import get_bounds_from_1d +from ocgis.util.itester import itr_products_keywords -class TestVectorDimension(TestBase): +class FakeAbstractDimension(AbstractDimension): + _ndims = None + _attrs_slice = None + + +class TestAbstractDimension(TestBase): create_dir = False - 
def test_set_extrapolated_bounds(self): - value = np.array([1, 2, 3, 4], dtype=float) - vd = VectorDimension(value=value) - self.assertIsNone(vd.bounds) - vd.set_extrapolated_bounds() - actual = np.array([[0.5, 1.5], [1.5, 2.5], [2.5, 3.5], [3.5, 4.5]], dtype=float) - self.assertNumpyAll(vd.bounds, actual) + def test_init(self): + ad = FakeAbstractDimension() + self.assertEqual(ad.name, None) + + ad = FakeAbstractDimension(name='time') + self.assertEqual(ad.name, 'time') + + self.assertEqual(ad.meta, {}) + + +class FakeAbstractUidDimension(AbstractUidDimension): + _attrs_slice = None + _ndims = 1 + + +class TestAbstractUidDimension(TestBase): + + def test_init(self): + au = FakeAbstractUidDimension(uid=[1, 2, 3]) + self.assertEqual(au.name_uid, 'None_uid') + au = FakeAbstractUidDimension(uid=[1, 2, 3], name='foo') + self.assertEqual(au.name_uid, 'foo_uid') + self.assertIsNone(au._name_uid) + au = FakeAbstractUidDimension(uid=[1, 2, 3], name='foo', name_uid='hello') + self.assertEqual(au.name_uid, 'hello') + + +class FakeAbstractValueDimension(AbstractValueDimension): + + def _get_value_(self): + pass + + +class TestAbstractValueDimension(TestBase): + create_dir = False + + def test_init(self): + FakeAbstractValueDimension() + self.assertEqual(AbstractValueDimension.__bases__, (AbstractValueVariable,)) + + def test_name_value(self): + name_value = 'foo' + avd = FakeAbstractValueDimension(name_value=name_value) + self.assertEqual(avd.name_value, name_value) + + name = 'foobar' + avd = FakeAbstractValueDimension(name=name) + self.assertEqual(avd.name_value, name) + self.assertIsNone(avd._name_value) + + +class TestVectorDimension(TestBase): + + def test_init(self): + self.assertEqual(VectorDimension.__bases__, (AbstractSourcedVariable, AbstractUidValueDimension, Attributes)) + + vd = VectorDimension(value=[4, 5]) + self.assertIsInstance(vd.attrs, OrderedDict) + self.assertIsNone(vd.name) + self.assertIsNone(vd.name_value) + self.assertEqual(vd.name_uid, 'None_uid') + 
self.assertEqual(vd.name_bounds, 'None_bounds') + self.assertEqual(vd.name_bounds_suffix, constants.ocgis_bounds) + self.assertIsNone(vd.axis) + + # test passing attributes to the constructor + attrs = {'something': 'underground'} + vd = VectorDimension(value=[4, 5], attrs=attrs, axis='D') + self.assertEqual(vd.attrs, attrs) + self.assertEqual(vd.axis, 'D') + + with self.assertRaises(ValueError): + VectorDimension() + + def test_bad_dtypes(self): + vd = VectorDimension(value=181.5,bounds=[181,182]) + self.assertEqual(vd.value.dtype,vd.bounds.dtype) + + with self.assertRaises(ValueError): + VectorDimension(value=181.5,bounds=['a','b']) + + def test_bad_keywords(self): + ## there should be keyword checks on the bad keywords names + with self.assertRaises(ValueError): + VectorDimension(value=40,bounds=[38,42],ddtype=float) + + def test_boolean_slice(self): + """Test slicing with boolean values.""" + + vdim = VectorDimension(value=[4, 5, 6], bounds=[[3, 5], [4, 6], [5, 7]]) + vdim_slc = vdim[np.array([True, False, True])] + self.assertFalse(len(vdim_slc) > 2) + self.assertNumpyAll(vdim_slc.value, np.array([4, 6])) + self.assertNumpyAll(vdim_slc.bounds, np.array([[3, 5], [5, 7]])) def test_bounds_only_two_dimensional(self): value = [10,20,30,40,50] @@ -30,45 +129,58 @@ def test_bounds_only_two_dimensional(self): for b in bounds: with self.assertRaises(ValueError): VectorDimension(value=value,bounds=b) - + def test_dtype(self): value = [10,20,30,40,50] vdim = VectorDimension(value=value) self.assertEqual(vdim.dtype,np.array(value).dtype) - - def test_interpolate_bounds(self): - value = [10,20,30,40,50] - - vdim = VectorDimension(value=value) - self.assertEqual(vdim.bounds,None) - - vdim = VectorDimension(value=value,interpolate_bounds=True) - 
self.assertEqual(vdim.bounds.tostring(),'\x05\x00\x00\x00\x00\x00\x00\x00\x0f\x00\x00\x00\x00\x00\x00\x00\x0f\x00\x00\x00\x00\x00\x00\x00\x19\x00\x00\x00\x00\x00\x00\x00\x19\x00\x00\x00\x00\x00\x00\x00#\x00\x00\x00\x00\x00\x00\x00#\x00\x00\x00\x00\x00\x00\x00-\x00\x00\x00\x00\x00\x00\x00-\x00\x00\x00\x00\x00\x00\x007\x00\x00\x00\x00\x00\x00\x00') - + def test_get_iter(self): - vdim = VectorDimension(value=[10,20,30,40,50]) + vdim = VectorDimension(value=[10, 20, 30, 40, 50]) with self.assertRaises(ValueError): list(vdim.get_iter()) - - vdim = VectorDimension(value=[10,20,30,40,50],name='foo') + + vdim = VectorDimension(value=[10, 20, 30, 40, 50], name='foo') tt = list(vdim.get_iter()) - self.assertEqual(tt[3],(3, {'foo_uid': 4, 'foo': 40, 'foo_bnds_lower': None, 'foo_bnds_upper': None})) - - vdim = VectorDimension(value=[10,20,30,40,50],bounds=[(ii-5,ii+5) for ii in [10,20,30,40,50]],name='foo',name_uid='hi') + self.assertEqual(tt[3], (3, {'foo_uid': 4, 'foo': 40, 'foo_bounds_lower': None, 'foo_bounds_upper': None})) + + vdim = VectorDimension(value=[10, 20, 30, 40, 50], bounds=[(ii - 5, ii + 5) for ii in [10, 20, 30, 40, 50]], + name='foo', name_uid='hi') tt = list(vdim.get_iter()) - self.assertEqual(tt[3],(3, {'hi': 4, 'foo': 40, 'foo_bnds_lower': 35, 'foo_bnds_upper': 45})) - - def test_bad_keywords(self): - ## there should be keyword checks on the bad keywords names - with self.assertRaises(ValueError): - VectorDimension(value=40,bounds=[38,42],ddtype=float) - - def test_bad_dtypes(self): - vd = VectorDimension(value=181.5,bounds=[181,182]) - self.assertEqual(vd.value.dtype,vd.bounds.dtype) - - with self.assertRaises(ValueError): - VectorDimension(value=181.5,bounds=['a','b']) + self.assertEqual(tt[3], (3, {'hi': 4, 'foo': 40, 'foo_bounds_lower': 35, 'foo_bounds_upper': 45})) + + def test_interpolate_bounds(self): + value = [10,20,30,40,50] + + vdim = VectorDimension(value=value) + self.assertEqual(vdim.bounds,None) + + vdim = VectorDimension(value=value) + 
vdim.set_extrapolated_bounds() + self.assertEqual(vdim.bounds.tostring(),'\x05\x00\x00\x00\x00\x00\x00\x00\x0f\x00\x00\x00\x00\x00\x00\x00\x0f\x00\x00\x00\x00\x00\x00\x00\x19\x00\x00\x00\x00\x00\x00\x00\x19\x00\x00\x00\x00\x00\x00\x00#\x00\x00\x00\x00\x00\x00\x00#\x00\x00\x00\x00\x00\x00\x00-\x00\x00\x00\x00\x00\x00\x00-\x00\x00\x00\x00\x00\x00\x007\x00\x00\x00\x00\x00\x00\x00') + + def test_load_from_source(self): + """Test loading from a fake data source.""" + + vdim = VectorDimension(src_idx=[0, 1, 2, 3], data='foo') + self.assertNumpyAll(vdim.uid, np.array([1, 2, 3, 4], dtype=constants.np_int)) + with self.assertRaises(NotImplementedError): + vdim.value + with self.assertRaises(NotImplementedError): + vdim.resolution + + def test_name_bounds(self): + vd = VectorDimension(value=[5, 6], name='hello') + self.assertEqual(vd.name_bounds, 'hello_bounds') + self.assertIsNone(vd._name_bounds) + + vd = VectorDimension(value=[5, 6], name='hello', name_bounds_suffix='suffit') + self.assertEqual(vd.name_bounds, 'hello_suffit') + + vd = VectorDimension(value=[5, 6], name_bounds='hello') + self.assertEqual(vd.name_bounds, 'hello') + self.assertEqual(vd._name_bounds, 'hello') + self.assertIsNone(vd.name) def test_one_value(self): """Test passing a single value.""" @@ -87,24 +199,29 @@ def test_one_value(self): self.assertIsNone(vdim[0].bounds) with self.assertRaises(ResolutionError): vdim.resolution - - def test_with_bounds(self): - """Test passing bounds to the constructor.""" - vdim = VectorDimension(value=[4, 5, 6], bounds=[[3, 5], [4, 6], [5, 7]]) - self.assertNumpyAll(vdim.bounds, np.array([[3, 5], [4, 6], [5, 7]])) - self.assertNumpyAll(vdim.uid, np.array([1, 2, 3], dtype=constants.np_int)) - self.assertEqual(vdim.resolution, 2.0) - - def test_boolean_slice(self): - """Test slicing with boolean values.""" + def test_resolution_with_units(self): + vdim = VectorDimension(value=[5,10,15],units='large') + self.assertEqual(vdim.resolution,5.0) + + def 
test_set_extrapolated_bounds(self): + value = np.array([1, 2, 3, 4], dtype=float) + vd = VectorDimension(value=value) + self.assertIsNone(vd.bounds) + self.assertFalse(vd._has_interpolated_bounds) + vd.set_extrapolated_bounds() + self.assertTrue(vd._has_interpolated_bounds) + actual = np.array([[0.5, 1.5], [1.5, 2.5], [2.5, 3.5], [3.5, 4.5]], dtype=float) + self.assertNumpyAll(vd.bounds, actual) + + # attempt to extrapolate when the bound are already present + value = np.array([1.5]) + bounds = np.array([[1.0, 2.0]]) + vd = VectorDimension(value=value, bounds=bounds) + self.assertFalse(vd._has_interpolated_bounds) + with self.assertRaises(BoundsAlreadyAvailableError): + vd.set_extrapolated_bounds() - vdim = VectorDimension(value=[4, 5, 6], bounds=[[3, 5], [4, 6], [5, 7]]) - vdim_slc = vdim[np.array([True, False, True])] - self.assertFalse(len(vdim_slc) > 2) - self.assertNumpyAll(vdim_slc.value, np.array([4, 6])) - self.assertNumpyAll(vdim_slc.bounds, np.array([[3, 5], [5, 7]])) - def test_set_reference(self): """Test setting values on the internal value array using indexing.""" @@ -120,77 +237,144 @@ def test_set_reference(self): self.assertNumpyAll(vdim.value, vdim_slc2.value) vdim_slc2._value[2] = 1000 self.assertNumpyAll(vdim.value, vdim_slc2.value) - + def test_slice_source_idx_only(self): vdim = VectorDimension(src_idx=[4,5,6],data='foo') vdim_slice = vdim[0] self.assertEqual(vdim_slice._src_idx[0],4) - - def test_resolution_with_units(self): - vdim = VectorDimension(value=[5,10,15],units='large') - self.assertEqual(vdim.resolution,5.0) - + + def test_units_with_bounds(self): + value = [5.,10.,15.] 
+ vdim = VectorDimension(value=value,units='celsius', + bounds=get_bounds_from_1d(np.array(value))) + vdim.cfunits_conform(Units('kelvin')) + self.assertNumpyAll(vdim.bounds,np.array([[275.65,280.65],[280.65,285.65],[285.65,290.65]])) + + def test_with_bounds(self): + """Test passing bounds to the constructor.""" + + vdim = VectorDimension(value=[4, 5, 6], bounds=[[3, 5], [4, 6], [5, 7]]) + self.assertNumpyAll(vdim.bounds, np.array([[3, 5], [4, 6], [5, 7]])) + self.assertNumpyAll(vdim.uid, np.array([1, 2, 3], dtype=constants.np_int)) + self.assertEqual(vdim.resolution, 2.0) + def test_with_units(self): vdim = VectorDimension(value=[5,10,15],units='celsius') self.assertEqual(vdim.cfunits,Units('celsius')) vdim.cfunits_conform(Units('kelvin')) self.assertNumpyAll(vdim.value,np.array([278.15,283.15,288.15])) - - def test_with_units_and_bounds_interpolation(self): - vdim = VectorDimension(value=[5.,10.,15.],units='celsius',interpolate_bounds=True) + + def test_with_units_and_bounds_convert_after_load(self): + vdim = VectorDimension(value=[5.,10.,15.],units='celsius') + vdim.set_extrapolated_bounds() vdim.cfunits_conform(Units('kelvin')) self.assertNumpyAll(vdim.bounds,np.array([[275.65,280.65],[280.65,285.65],[285.65,290.65]])) - - def test_with_units_and_bounds_convert_after_load(self): - vdim = VectorDimension(value=[5.,10.,15.],units='celsius',interpolate_bounds=True) - vdim.bounds + + def test_with_units_and_bounds_interpolation(self): + vdim = VectorDimension(value=[5.,10.,15.],units='celsius') + vdim.set_extrapolated_bounds() vdim.cfunits_conform(Units('kelvin')) self.assertNumpyAll(vdim.bounds,np.array([[275.65,280.65],[280.65,285.65],[285.65,290.65]])) - - def test_units_with_bounds(self): - for i in [True,False]: - value = [5.,10.,15.] 
- vdim = VectorDimension(value=value,units='celsius', - bounds=get_bounds_from_1d(np.array(value)), - interpolate_bounds=i) - vdim.cfunits_conform(Units('kelvin')) - self.assertNumpyAll(vdim.bounds,np.array([[275.65,280.65],[280.65,285.65],[285.65,290.65]])) - - def test_load_from_source(self): - """Test loading from a fake data source.""" - vdim = VectorDimension(src_idx=[0, 1, 2, 3], data='foo') - self.assertNumpyAll(vdim.uid, np.array([1, 2, 3, 4], dtype=constants.np_int)) - with self.assertRaises(NotImplementedError): - vdim.value - with self.assertRaises(NotImplementedError): - vdim.resolution + def test_write_to_netcdf_dataset(self): + path = os.path.join(self.current_dir_output, 'foo.nc') + + other_bounds_name = 'bnds' + keywords = dict(with_bounds=[True, False], + with_attrs=[True, False], + unlimited=[False, True], + kwargs=[{}, {'zlib': True}], + bounds_dimension_name=[None, other_bounds_name], + name_bounds_suffix=[None, 'asuffix'], + axis=[None, 'GG'], + name=[None, 'temporal'], + name_bounds=[None, 'time_bounds'], + name_value=[None, 'time'], + format=[None, 'NETCDF4_CLASSIC']) + + for k in itr_products_keywords(keywords, as_namedtuple=True): + if k.with_attrs: + attrs = {'a': 5, 'b': np.array([5, 6])} + else: + attrs = None + vd = VectorDimension(value=[2., 4.], attrs=attrs, name=k.name, name_bounds=k.name_bounds, + name_value=k.name_value, name_bounds_suffix=k.name_bounds_suffix, axis=k.axis) + if k.with_bounds: + vd.set_extrapolated_bounds() + with nc_scope(path, 'w') as ds: + try: + vd.write_to_netcdf_dataset(ds, unlimited=k.unlimited, bounds_dimension_name=k.bounds_dimension_name, + **k.kwargs) + except ValueError: + self.assertIsNone(vd.name) + continue + + with nc_scope(path, 'r') as ds: + var = ds.variables[vd.name_value] + + if k.axis is None: + axis_actual = '' + else: + axis_actual = vd.axis + self.assertEqual(var.axis, axis_actual) + + try: + self.assertIn(constants.ocgis_bounds, ds.dimensions) + except AssertionError: + try: + 
self.assertFalse(k.with_bounds) + except AssertionError: + try: + self.assertEqual(k.bounds_dimension_name, other_bounds_name) + except AssertionError: + self.assertIsNotNone(k.name_bounds_suffix) + self.assertIsNone(k.bounds_dimension_name) + self.assertIn(k.name_bounds_suffix, ds.variables[vd.name_bounds].dimensions) + try: + self.assertFalse(ds.dimensions[vd.name].isunlimited()) + except AssertionError: + self.assertTrue(k.unlimited) + + try: + self.assertEqual(var.a, attrs['a']) + self.assertNumpyAll(var.b, attrs['b']) + except AttributeError: + self.assertFalse(k.with_attrs) + try: + self.assertEqual(var.bounds, vd.name_bounds) + self.assertNumpyAll(vd.bounds, ds.variables[vd.name_bounds][:]) + except (AttributeError, KeyError): + self.assertFalse(k.with_bounds) + self.assertEqual(var._name, vd.name_value) + self.assertEqual(var.dimensions, (vd.name,)) + self.assertNumpyAll(vd.value, var[:]) + + def test_write_to_netcdf_dataset_bounds_dimension_exists(self): + """Test writing with bounds when the bounds dimension has already been created.""" + + vd = VectorDimension(value=[3., 7.], name='one') + vd.set_extrapolated_bounds() + vd2 = VectorDimension(value=[5., 6.], name='two') + vd2.set_extrapolated_bounds() + path = os.path.join(self.current_dir_output, 'foo.nc') + with nc_scope(path, 'w') as ds: + vd.write_to_netcdf_dataset(ds) + vd2.write_to_netcdf_dataset(ds) + self.assertEqual(ds.variables.keys(), ['one', 'one_bounds', 'two', 'two_bounds']) - def test_empty(self): - with self.assertRaises(ValueError): - VectorDimension() - - def test_get_between_use_bounds(self): - value = [3.,5.] 
- bounds = [[2.,4.],[4.,6.]] - vdim = VectorDimension(value=value,bounds=bounds) - ret = vdim.get_between(3,4.5,use_bounds=False) - self.assertNumpyAll(ret.value,np.array([3.])) - self.assertNumpyAll(ret.bounds,np.array([[2.,4.]])) - def test_get_between(self): vdim = VectorDimension(value=[0]) with self.assertRaises(EmptySubsetError): vdim.get_between(100,200) - + vdim = VectorDimension(value=[100,200,300,400]) vdim_between = vdim.get_between(100,200) self.assertEqual(len(vdim_between),2) - + def test_get_between_bounds(self): value = [0.,5.,10.] bounds = [[-2.5,2.5],[2.5,7.5],[7.5,12.5]] - + ## a reversed copy of these bounds are created here value_reverse = deepcopy(value) value_reverse.reverse() @@ -198,13 +382,13 @@ def test_get_between_bounds(self): bounds_reverse.reverse() for ii in range(len(bounds)): bounds_reverse[ii].reverse() - + data = {'original':{'value':value,'bounds':bounds}, 'reversed':{'value':value_reverse,'bounds':bounds_reverse}} for key in ['original','reversed']: vdim = VectorDimension(value=data[key]['value'], bounds=data[key]['bounds']) - + vdim_between = vdim.get_between(1,3) self.assertEqual(len(vdim_between),2) if key == 'original': @@ -212,7 +396,7 @@ def test_get_between_bounds(self): else: self.assertEqual(vdim_between.bounds.tostring(),'\x00\x00\x00\x00\x00\x00\x1e@\x00\x00\x00\x00\x00\x00\x04@\x00\x00\x00\x00\x00\x00\x04@\x00\x00\x00\x00\x00\x00\x04\xc0') self.assertEqual(vdim.resolution,5.0) - + ## preference is given to the lower bound in the case of "ties" where ## the value could be assumed part of the lower or upper cell vdim_between = vdim.get_between(2.5,2.5) @@ -221,19 +405,22 @@ def test_get_between_bounds(self): self.assertNumpyAll(vdim_between.bounds,np.array([[2.5,7.5]])) else: self.assertNumpyAll(vdim_between.bounds,np.array([[7.5,2.5]])) - + ## if the interval is closed and the subset range falls only on bounds ## value then the subset will be empty with self.assertRaises(EmptySubsetError): 
vdim.get_between(2.5,2.5,closed=True) - + vdim_between = vdim.get_between(2.5,7.5) if key == 'original': self.assertEqual(vdim_between.bounds.tostring(),'\x00\x00\x00\x00\x00\x00\x04@\x00\x00\x00\x00\x00\x00\x1e@\x00\x00\x00\x00\x00\x00\x1e@\x00\x00\x00\x00\x00\x00)@') else: self.assertEqual(vdim_between.bounds.tostring(),'\x00\x00\x00\x00\x00\x00)@\x00\x00\x00\x00\x00\x00\x1e@\x00\x00\x00\x00\x00\x00\x1e@\x00\x00\x00\x00\x00\x00\x04@') - -if __name__ == "__main__": - #import sys;sys.argv = ['', 'Test.testName'] - unittest.main() \ No newline at end of file + def test_get_between_use_bounds(self): + value = [3.,5.] + bounds = [[2.,4.],[4.,6.]] + vdim = VectorDimension(value=value,bounds=bounds) + ret = vdim.get_between(3,4.5,use_bounds=False) + self.assertNumpyAll(ret.value,np.array([3.])) + self.assertNumpyAll(ret.bounds,np.array([[2.,4.]])) diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py index c4eca2a98..7639228f2 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py @@ -1,28 +1,27 @@ from copy import deepcopy, copy -import unittest +import os import itertools +from importlib import import_module +from unittest.case import SkipTest + import numpy as np -from ocgis import constants, ShpCabinet from shapely import wkt -from ocgis.interface.base.dimension.spatial import SpatialDimension,\ - SpatialGeometryDimension, SpatialGeometryPolygonDimension,\ - SpatialGridDimension, SpatialGeometryPointDimension, SingleElementRetriever -from ocgis.util.helpers import iter_array, make_poly, get_bounds_from_1d,\ - get_date_list, write_geom_dict, bbox_poly import fiona from fiona.crs import from_epsg from shapely.geometry import shape, mapping, Polygon from shapely.geometry.point import Point -from ocgis.exc import 
EmptySubsetError, SpatialWrappingError, MultipleElementsFound + +from ocgis import constants, ShpCabinet +from ocgis.interface.base.dimension.spatial import SpatialDimension, SpatialGeometryDimension, \ + SpatialGeometryPolygonDimension, SpatialGridDimension, SpatialGeometryPointDimension, SingleElementRetriever +from ocgis.util.helpers import iter_array, make_poly +from ocgis.exc import EmptySubsetError, SpatialWrappingError, MultipleElementsFound, BoundsAlreadyAvailableError from ocgis.test.base import TestBase +from ocgis.interface.base.dimension.base import AbstractUidValueDimension from ocgis.interface.base.crs import CoordinateReferenceSystem, WGS84, CFWGS84, CFRotatedPole, \ WrappableCoordinateReferenceSystem from ocgis.interface.base.dimension.base import VectorDimension -import datetime -from importlib import import_module -from unittest.case import SkipTest from ocgis.util.itester import itr_products_keywords -from ocgis.util.spatial.wrap import Wrapper class AbstractTestSpatialDimension(TestBase): @@ -55,10 +54,10 @@ def get_row(self,bounds=True): row = VectorDimension(value=value,bounds=bounds,name='row') return(row) - def get_sdim(self, bounds=True, crs=None): + def get_sdim(self, bounds=True, crs=None, name=None): row = self.get_row(bounds=bounds) col = self.get_col(bounds=bounds) - sdim = SpatialDimension(row=row, col=col, crs=crs) + sdim = SpatialDimension(row=row, col=col, crs=crs, name=name) return sdim @property @@ -144,6 +143,8 @@ def get_spatial_dimension_from_records(self): def test_init(self): sdim = self.get_sdim(bounds=True) + self.assertEqual(sdim.name, 'spatial') + self.assertEqual(sdim.name_uid, 'gid') self.assertIsNone(sdim.abstraction) self.assertNumpyAll(sdim.grid.value, self.grid_value_regular) @@ -157,6 +158,9 @@ def _almost_equals_(a, b): self.assertTrue(to_test.all()) self.assertFalse(sdim.geom.polygon.value.mask.any()) + sdim = self.get_sdim(name='foobuar') + self.assertEqual(sdim.name, 'foobuar') + def test_abstraction(self): 
sdim = self.get_sdim() self.assertIsNone(sdim.abstraction) @@ -246,28 +250,19 @@ def iter_geom(): self.assertGeometriesAlmostEquals(geom.polygon.value, self.polygon_value_alternate_ordering) self.assertNumpyAll(geom.polygon.uid, self.uid_value) else: - try: - if k['polygon'] is None and k['grid'].corners is None: - if k['grid'].row is None or k['grid'].col is None: - continue - except CornersUnavailable: - continue + if k['polygon'] is None and k['grid'].corners is None: + if k['grid'].row is None or k['grid'].col is None: + continue if geom.grid.corners is None: if geom.grid.row.bounds is None or geom.grid.col.bounds is None: continue raise - try: - polygon = geom.polygon - except ImproperPolygonBoundsError: - self.assertIsNone(k['grid']) - polygon = None - yield(dict(geom=geom, grid=grid_dict.get('grid'), row=grid_dict.get('row'), col=grid_dict.get('col'), - polygon=polygon, + polygon=geom.polygon, point=geom.point)) def geom_iterator(): @@ -1070,6 +1065,10 @@ def test_init(self): gdim2 = SpatialGeometryDimension(point=gdim.point) self.assertIsNone(gdim2.abstraction) + self.assertEqual(gdim.name, 'geometry') + self.assertEqual(gdim.point.name, 'point') + self.assertEqual(gdim.polygon.name, 'polygon') + def test_abstraction(self): gdim = self.get() with self.assertRaises(ValueError): @@ -1112,6 +1111,13 @@ def test_get_highest_order_abstraction(self): class TestSpatialGeometryPointDimension(AbstractTestSpatialDimension): + def test_init(self): + row = VectorDimension(value=[5]) + col = VectorDimension(value=[7]) + grid = SpatialGridDimension(row=row, col=col) + sgpd = SpatialGeometryPointDimension(grid=grid) + self.assertEqual(sgpd.name, 'point') + def test_get_intersects_masked(self): sdim = self.get_sdim(crs=WGS84()) self.assertIsNotNone(sdim.grid) @@ -1148,6 +1154,15 @@ def test_init(self): with self.assertRaises(ValueError): SpatialGeometryPolygonDimension(grid=grid) + row = VectorDimension(value=[2, 3]) + row.set_extrapolated_bounds() + col = 
VectorDimension(value=[4, 5]) + col.set_extrapolated_bounds() + grid = SpatialGridDimension(row=row, col=col) + gd = SpatialGeometryPolygonDimension(grid=grid) + self.assertEqual(gd.name, 'polygon') + self.assertIsInstance(gd, SpatialGeometryPointDimension) + def test_get_value(self): # the ordering of vertices when creating from corners is slightly different @@ -1261,31 +1276,20 @@ def iter_grid_combinations_for_corners(self): yield sdim.grid def test_init(self): + self.assertEqual(SpatialGridDimension.__bases__, (AbstractUidValueDimension,)) + with self.assertRaises(ValueError): SpatialGridDimension() + row = VectorDimension(value=[5]) + col = VectorDimension(value=[6]) + grid = SpatialGridDimension(row=row, col=col) + self.assertEqual(grid.name, 'grid') + self.assertEqual(grid.row.name, 'yc') + self.assertEqual(grid.col.name, 'xc') - def test_corners(self): - for grid in self.iter_grid_combinations_for_corners(): - try: - self.assertGridCorners(grid) - except AssertionError: - if grid.row is None or grid.row.bounds is None: - continue - else: - raise - - def test_extent_and_extent_polygon(self): - for grid in self.iter_grid_combinations_for_corners(): - extent = grid.extent - self.assertEqual(len(extent), 4) - self.assertTrue(extent[0] < extent[2]) - self.assertTrue(extent[1] < extent[3]) - self.assertEqual(extent, grid.extent_polygon.bounds) - - def test_corners_esmf(self): - sdim = self.get_sdim() - actual = np.array([[[40.5, 40.5, 40.5, 40.5, 40.5], [39.5, 39.5, 39.5, 39.5, 39.5], [38.5, 38.5, 38.5, 38.5, 38.5], [37.5, 37.5, 37.5, 37.5, 37.5]], [[-100.5, -99.5, -98.5, -97.5, -96.5], [-100.5, -99.5, -98.5, -97.5, -96.5], [-100.5, -99.5, -98.5, -97.5, -96.5], [-100.5, -99.5, -98.5, -97.5, -96.5]]], dtype=sdim.grid.value.dtype) - self.assertNumpyAll(actual, sdim.grid.corners_esmf) + grid = SpatialGridDimension(row=row, col=col, name_row='foo', name_col='whatever') + self.assertEqual(grid.name_row, 'foo') + self.assertEqual(grid.name_col, 'whatever') def 
test_assert_uniform_mask(self): """Test masks are uniform across major spatial components.""" @@ -1316,7 +1320,17 @@ def test_assert_uniform_mask(self): sdim.assert_uniform_mask() sdim.geom.polygon.value.mask[2, 2] = False - def test_with_corners(self): + def test_corners(self): + for grid in self.iter_grid_combinations_for_corners(): + try: + self.assertGridCorners(grid) + except AssertionError: + if grid.row is None or grid.row.bounds is None: + continue + else: + raise + + def test_corners_as_parameter(self): """Test passing bounds during initialization.""" grid = SpatialGridDimension(value=self.grid_value_regular, corners=self.grid_corners_regular) @@ -1325,35 +1339,62 @@ def test_with_corners(self): actual = np.ma.array([[[[39.5, 39.5, 38.5, 38.5]]], [[[-98.5, -97.5, -97.5, -98.5]]]], mask=False) self.assertNumpyAll(sub.corners, actual) - def test_without_row_and_column(self): - row = np.arange(39,42.5,0.5) - col = np.arange(-104,-95,0.5) - x,y = np.meshgrid(col,row) - value = np.zeros([2]+list(x.shape)) - value = np.ma.array(value,mask=False) - value[0,:,:] = y - value[1,:,:] = x - minx,miny,maxx,maxy = x.min(),y.min(),x.max(),y.max() - grid = SpatialGridDimension(value=value) - sub = grid.get_subset_bbox(minx,miny,maxx,maxy,closed=False) - self.assertNumpyAll(sub.value,value) - + def test_corners_esmf(self): + sdim = self.get_sdim() + actual = np.array([[[40.5, 40.5, 40.5, 40.5, 40.5], [39.5, 39.5, 39.5, 39.5, 39.5], [38.5, 38.5, 38.5, 38.5, 38.5], [37.5, 37.5, 37.5, 37.5, 37.5]], [[-100.5, -99.5, -98.5, -97.5, -96.5], [-100.5, -99.5, -98.5, -97.5, -96.5], [-100.5, -99.5, -98.5, -97.5, -96.5], [-100.5, -99.5, -98.5, -97.5, -96.5]]], dtype=sdim.grid.value.dtype) + self.assertNumpyAll(actual, sdim.grid.corners_esmf) + + def test_extent_and_extent_polygon(self): + for grid in self.iter_grid_combinations_for_corners(): + extent = grid.extent + self.assertEqual(len(extent), 4) + self.assertTrue(extent[0] < extent[2]) + self.assertTrue(extent[1] < extent[3]) + 
self.assertEqual(extent, grid.extent_polygon.bounds) + def test_load_from_source_grid_slicing(self): - row = VectorDimension(src_idx=[10,20,30,40],name='row',data='foo') - self.assertEqual(row.name,'row') - col = VectorDimension(src_idx=[100,200,300],name='col',data='foo') - grid = SpatialGridDimension(row=row,col=col,name='grid') - self.assertEqual(grid.shape,(4,3)) - grid_slc = grid[1,2] - self.assertEqual(grid_slc.shape,(1,1)) + row = VectorDimension(src_idx=[10, 20, 30, 40], name='row', data='foo') + self.assertEqual(row.name, 'row') + col = VectorDimension(src_idx=[100, 200, 300], name='col', data='foo') + grid = SpatialGridDimension(row=row, col=col, name='grid') + self.assertEqual(grid.shape, (4, 3)) + grid_slc = grid[1, 2] + self.assertEqual(grid_slc.shape, (1, 1)) with self.assertRaises(NotImplementedError): grid_slc.value with self.assertRaises(NotImplementedError): grid_slc.row.bounds - self.assertNumpyAll(grid_slc.row._src_idx,np.array([20])) - self.assertNumpyAll(grid_slc.col._src_idx,np.array([300])) - self.assertEqual(grid_slc.row.name,'row') - self.assertEqual(grid_slc.uid,np.array([[6]],dtype=np.int32)) + self.assertNumpyAll(grid_slc.row._src_idx, np.array([20])) + self.assertNumpyAll(grid_slc.col._src_idx, np.array([300])) + self.assertEqual(grid_slc.row.name, 'row') + self.assertEqual(grid_slc.uid, np.array([[6]], dtype=np.int32)) + + def test_set_extrapolated_corners(self): + sdim = self.get_sdim(bounds=False) + self.assertIsNone(sdim.grid.corners) + sdim.grid.set_extrapolated_corners() + sdim2 = self.get_sdim() + self.assertNumpyAll(sdim2.grid.corners, sdim.grid.corners) + + # test with a mask + np.random.seed(1) + sdim = self.get_sdim(bounds=False) + mask = np.random.randint(0, 2, size=sdim.shape).astype(bool) + sdim.set_mask(mask) + self.assertIsNone(sdim.grid.corners) + sdim.grid.set_extrapolated_corners() + self.assertTrue(sdim.grid.corners.mask.any()) + for (ii, jj), val in iter_array(mask, return_value=True): + ref = sdim.grid.corners[:, 
ii, jj, :] + if val: + self.assertTrue(ref.mask.all()) + else: + self.assertFalse(ref.mask.any()) + + # test with corners already available + sdim = self.get_sdim() + with self.assertRaises(BoundsAlreadyAvailableError): + sdim.grid.set_extrapolated_corners() def test_singletons(self): row = VectorDimension(value=10,name='row') @@ -1361,7 +1402,63 @@ def test_singletons(self): grid = SpatialGridDimension(row=row,col=col,name='grid') self.assertNumpyAll(grid.value,np.ma.array([[[10]],[[100]]],mask=False)) + def test_validate(self): + with self.assertRaises(ValueError): + SpatialGridDimension() + + def test_without_row_and_column(self): + row = np.arange(39,42.5,0.5) + col = np.arange(-104,-95,0.5) + x,y = np.meshgrid(col,row) + value = np.zeros([2]+list(x.shape)) + value = np.ma.array(value,mask=False) + value[0,:,:] = y + value[1,:,:] = x + minx,miny,maxx,maxy = x.min(),y.min(),x.max(),y.max() + grid = SpatialGridDimension(value=value) + sub = grid.get_subset_bbox(minx,miny,maxx,maxy,closed=False) + self.assertNumpyAll(sub.value,value) -if __name__ == "__main__": - #import sys;sys.argv = ['', 'Test.testName'] - unittest.main() + def test_write_to_netcdf_dataset(self): + path = os.path.join(self.current_dir_output, 'foo.nc') + + kwds = dict(with_rc=[True, False], + with_corners=[False, True]) + + for k in self.iter_product_keywords(kwds): + row = VectorDimension(value=[4., 5.]) + col = VectorDimension(value=[6., 7.]) + grid = SpatialGridDimension(row=row, col=col) + + if k.with_corners: + row.set_extrapolated_bounds() + col.set_extrapolated_bounds() + grid.corners + + if not k.with_rc: + grid.value + grid.row = None + grid.col = None + + with self.nc_scope(path, mode='w') as ds: + grid.write_to_netcdf_dataset(ds) + with self.nc_scope(path) as ds: + if k.with_rc: + self.assertNumpyAll(ds.variables[grid.row.name][:], row.value) + self.assertNumpyAll(ds.variables[grid.col.name][:], col.value) + else: + yc = ds.variables[constants.default_name_row_coordinates] + xc = 
ds.variables[constants.default_name_col_coordinates] + self.assertNumpyAll(yc[:], grid.value[0].data) + self.assertNumpyAll(xc[:], grid.value[1].data) + self.assertEqual(yc.axis, 'Y') + self.assertEqual(xc.axis, 'X') + if k.with_corners and not k.with_rc: + name_yc_corners, name_xc_corners = ['{0}_corners'.format(xx) for xx in + [constants.default_name_row_coordinates, + constants.default_name_col_coordinates]] + for idx, name in zip([0, 1], [name_yc_corners, name_xc_corners]): + var = ds.variables[name] + self.assertNumpyAll(var[:], grid.corners[idx].data) + self.assertEqual(ds.variables[constants.default_name_row_coordinates].corners, name_yc_corners) + self.assertEqual(ds.variables[constants.default_name_col_coordinates].corners, name_xc_corners) diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_temporal.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_temporal.py index f5cd105dd..4312c6c31 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_temporal.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_temporal.py @@ -1,18 +1,98 @@ -from ocgis import constants -from ocgis.test.base import TestBase +from copy import deepcopy +from netCDF4 import num2date, date2num +import os from datetime import datetime as dt -from ocgis.interface.base.dimension.temporal import TemporalDimension,\ - get_is_interannual, get_sorted_seasons, get_time_regions,\ - iter_boolean_groups_from_time_regions -import numpy as np -from ocgis.util.helpers import get_date_list import datetime from collections import deque import itertools -from ocgis.exc import IncompleteSeasonError +import netcdftime +import numpy as np + +from ocgis.util.itester import itr_products_keywords +from ocgis import constants +from ocgis.test.base import TestBase, nc_scope +from ocgis.interface.base.dimension.temporal import TemporalDimension, get_is_interannual, get_sorted_seasons, \ + 
get_time_regions, iter_boolean_groups_from_time_regions, get_datetime_conversion_state, \ + get_datetime_from_months_time_units, get_difference_in_months, get_num_from_months_time_units, \ + get_origin_datetime_from_months_units +from ocgis.util.helpers import get_date_list +from ocgis.exc import IncompleteSeasonError, CannotFormatTimeError +from ocgis.interface.base.dimension.base import VectorDimension + + +class TestFunctions(TestBase): + + def test_get_datetime_conversion_state(self): + archetypes = [45.5, datetime.datetime(2000, 1, 1), netcdftime.datetime(2000, 4, 5)] + for archetype in archetypes: + res = get_datetime_conversion_state(archetype) + try: + self.assertFalse(res) + except AssertionError: + self.assertEqual(type(archetype), float) + + def test_get_datetime_from_months_time_units(self): + units = "months since 1978-12" + vec = range(0, 36) + datetimes = get_datetime_from_months_time_units(vec, units) + test_datetimes = [datetime.datetime(1978, 12, 16, 0, 0), datetime.datetime(1979, 1, 16, 0, 0), + datetime.datetime(1979, 2, 16, 0, 0), datetime.datetime(1979, 3, 16, 0, 0), + datetime.datetime(1979, 4, 16, 0, 0), datetime.datetime(1979, 5, 16, 0, 0), + datetime.datetime(1979, 6, 16, 0, 0), datetime.datetime(1979, 7, 16, 0, 0), + datetime.datetime(1979, 8, 16, 0, 0), datetime.datetime(1979, 9, 16, 0, 0), + datetime.datetime(1979, 10, 16, 0, 0), datetime.datetime(1979, 11, 16, 0, 0), + datetime.datetime(1979, 12, 16, 0, 0), datetime.datetime(1980, 1, 16, 0, 0), + datetime.datetime(1980, 2, 16, 0, 0), datetime.datetime(1980, 3, 16, 0, 0), + datetime.datetime(1980, 4, 16, 0, 0), datetime.datetime(1980, 5, 16, 0, 0), + datetime.datetime(1980, 6, 16, 0, 0), datetime.datetime(1980, 7, 16, 0, 0), + datetime.datetime(1980, 8, 16, 0, 0), datetime.datetime(1980, 9, 16, 0, 0), + datetime.datetime(1980, 10, 16, 0, 0), datetime.datetime(1980, 11, 16, 0, 0), + datetime.datetime(1980, 12, 16, 0, 0), datetime.datetime(1981, 1, 16, 0, 0), + datetime.datetime(1981, 2, 
16, 0, 0), datetime.datetime(1981, 3, 16, 0, 0), + datetime.datetime(1981, 4, 16, 0, 0), datetime.datetime(1981, 5, 16, 0, 0), + datetime.datetime(1981, 6, 16, 0, 0), datetime.datetime(1981, 7, 16, 0, 0), + datetime.datetime(1981, 8, 16, 0, 0), datetime.datetime(1981, 9, 16, 0, 0), + datetime.datetime(1981, 10, 16, 0, 0), datetime.datetime(1981, 11, 16, 0, 0)] + self.assertNumpyAll(datetimes, np.array(test_datetimes)) + + def test_get_difference_in_months(self): + distance = get_difference_in_months(datetime.datetime(1978, 12, 1), datetime.datetime(1979, 3, 1)) + self.assertEqual(distance, 3) + distance = get_difference_in_months(datetime.datetime(1978, 12, 1), datetime.datetime(1978, 7, 1)) + self.assertEqual(distance, -5) + distance = get_difference_in_months(datetime.datetime(1978, 12, 1), datetime.datetime(1978, 12, 1)) + self.assertEqual(distance, 0) + + def test_get_is_interannual(self): + self.assertTrue(get_is_interannual([11, 12, 1])) + self.assertFalse(get_is_interannual([10, 11, 12])) + + def test_get_num_from_months_time_units_1d_array(self): + units = "months since 1978-12" + vec = range(0, 36) + datetimes = get_datetime_from_months_time_units(vec, units) + num = get_num_from_months_time_units(datetimes, units, dtype=np.int32) + self.assertNumpyAll(num, np.array(vec, dtype=np.int32)) + self.assertEqual(num.dtype, np.int32) + + def test_get_origin_datetime_from_months_units(self): + units = "months since 1978-12" + self.assertEqual(get_origin_datetime_from_months_units(units), datetime.datetime(1978, 12, 1)) + units = "months since 1979-1-1 0" + self.assertEqual(get_origin_datetime_from_months_units(units), datetime.datetime(1979, 1, 1)) + + def test_get_sorted_seasons(self): + calc_grouping = [[9, 10, 11], [12, 1, 2], [6, 7, 8]] + methods = ['max', 'min'] -class Test(TestBase): + for method in methods: + for perm in itertools.permutations(calc_grouping, r=3): + ret = get_sorted_seasons(perm, method=method) + if method == 'max': + self.assertEqual(ret, 
[[6, 7, 8], [9, 10, 11], [12, 1, 2]]) + else: + self.assertEqual(ret, [[12, 1, 2], [6, 7, 8], [9, 10, 11]]) def test_iter_boolean_groups_from_time_regions(self): time_regions = [[{'month': [12], 'year': [1900]}, {'month': [2, 1], 'year': [1901]}]] @@ -34,22 +114,178 @@ def test_iter_boolean_groups_from_time_regions(self): class TestTemporalDimension(TestBase): - - def get_temporal_dimension(self,add_bounds=True,start=None,stop=None,days=1): - start = start or datetime.datetime(1899,1,1,12) - stop = stop or datetime.datetime(1901,12,31,12) - dates = get_date_list(start,stop,days=days) + + def get_temporal_dimension(self, add_bounds=True, start=None, stop=None, days=1, name=None, format_time=True): + start = start or datetime.datetime(1899, 1, 1, 12) + stop = stop or datetime.datetime(1901, 12, 31, 12) + dates = get_date_list(start, stop, days=days) if add_bounds: delta = datetime.timedelta(hours=12) lower = np.array(dates) - delta upper = np.array(dates) + delta - bounds = np.empty((lower.shape[0],2),dtype=object) - bounds[:,0] = lower - bounds[:,1] = upper + bounds = np.empty((lower.shape[0], 2), dtype=object) + bounds[:, 0] = lower + bounds[:, 1] = upper else: bounds = None - td = TemporalDimension(value=dates,bounds=bounds) - return(td) + td = TemporalDimension(value=dates, bounds=bounds, name=name, format_time=format_time) + return td + + def test_init(self): + td = TemporalDimension(value=[datetime.datetime(2000, 1, 1)]) + self.assertEqual(td.axis, 'T') + self.assertEqual(td.name, 'time') + self.assertEqual(td.name_uid, 'tid') + self.assertEqual(td.calendar, constants.default_temporal_calendar) + self.assertEqual(td.units, constants.default_temporal_units) + self.assertIsInstance(td, VectorDimension) + self.assertFalse(td._has_months_units) + self.assertTrue(td.format_time) + + td = TemporalDimension(value=[datetime.datetime(2000, 1, 1)], units="months since 1978-12", axis='foo') + self.assertTrue(td._has_months_units) + self.assertEqual(td.axis, 'foo') + + def 
test_getitem(self): + td = self.get_temporal_dimension() + self.assertIsNotNone(td.value_datetime) + self.assertIsNotNone(td.value_numtime) + sub = td[3] + self.assertEqual(sub.value_datetime.shape, (1,)) + self.assertEqual(sub.value_numtime.shape, (1,)) + + def test_360_day_calendar(self): + months = range(1, 13) + days = range(1, 31) + vec = [] + for month in months: + for day in days: + vec.append(netcdftime.datetime(2000, month, day)) + num = date2num(vec, 'days since 1900-01-01', calendar='360_day') + td = TemporalDimension(value=num, calendar='360_day', units='days since 1900-01-01') + self.assertNumpyAll(np.array(vec), td.value_datetime) + + def test_bounds_datetime_and_bounds_numtime(self): + value_datetime = np.array([dt(2000, 1, 15), dt(2000, 2, 15)]) + bounds_datetime = np.array([[dt(2000, 1, 1), dt(2000, 2, 1)], + [dt(2000, 2, 1), dt(2000, 3, 1)]]) + value = date2num(value_datetime, constants.default_temporal_units, calendar=constants.default_temporal_calendar) + bounds_num = date2num(bounds_datetime, constants.default_temporal_units, calendar=constants.default_temporal_calendar) + bounds_options = [None, bounds_num, bounds_datetime] + value_options = [value, value, value_datetime] + for format_time in [True, False]: + for value, bounds in zip(value_options, bounds_options): + td = TemporalDimension(value=value, bounds=bounds, format_time=format_time) + try: + try: + self.assertNumpyAll(td.bounds_datetime, bounds_datetime) + except CannotFormatTimeError: + self.assertFalse(format_time) + self.assertNumpyAll(td.bounds_numtime, bounds_num) + except AssertionError: + self.assertIsNone(bounds) + self.assertIsNone(td.bounds) + try: + self.assertIsNone(td.bounds_datetime) + except CannotFormatTimeError: + self.assertFalse(format_time) + + def test_extent_datetime_and_extent_numtime(self): + value_numtime = np.array([6000., 6001., 6002]) + value_datetime = TemporalDimension(value=value_numtime).value_datetime + + for format_time in [True, False]: + for value 
in [value_numtime, value_datetime]: + td = TemporalDimension(value=value, format_time=format_time) + try: + self.assertEqual(td.extent_datetime, (min(value_datetime), max(value_datetime))) + except CannotFormatTimeError: + self.assertFalse(format_time) + self.assertEqual(td.extent_numtime, (6000., 6002.)) + + def test_format_slice_state(self): + td = self.get_temporal_dimension() + elements = [td.bounds_datetime, td.bounds_numtime] + for element in elements: + self.assertIsNotNone(element) + sub = td[2] + elements = [sub.bounds_datetime, sub.bounds_numtime] + for element in elements: + self.assertEqual(element.shape, (1, 2)) + + def test_get_between(self): + keywords = dict(as_datetime=[False, True]) + + for k in itr_products_keywords(keywords, as_namedtuple=True): + td = self.get_temporal_dimension() + if not k.as_datetime: + td._value = td.value_numtime + td._bounds = td.bounds_numtime + td._value_datetime = None + td._bounds_datetime = None + self.assertTrue(get_datetime_conversion_state(td.value[0])) + res = td.get_between(dt(1899, 1, 4, 12, 0), dt(1899, 1, 10, 12, 0), return_indices=False) + self.assertEqual(res.shape, (7,)) + self.assertIsNone(td._value_datetime) + self.assertIsNone(td._bounds_datetime) + + def test_get_boolean_groups_from_time_regions(self): + dates = get_date_list(dt(2012,1,1),dt(2013,12,31),1) + seasons = [[3,4,5],[6,7,8],[9,10,11],[12,1,2]] + td = TemporalDimension(value=dates) + time_regions = get_time_regions(seasons,dates,raise_if_incomplete=False) + + dgroups = list(iter_boolean_groups_from_time_regions(time_regions,td)) + ## the last winter season is not complete as it does not have enough years + self.assertEqual(len(dgroups),7) + + to_test = [] + for dgroup in dgroups: + sub = td[dgroup] + ## (upper and lower values of time vector, count of elements in time group, the middle value of the vector) + to_test.append([sub.extent, sub.shape[0], sub[sub.shape[0]/2].value[0]]) + correct = [[(datetime.datetime(2012, 3, 1, 0, 0), 
datetime.datetime(2012, 5, 31, 0, 0)), 92, datetime.datetime(2012, 4, 16, 0, 0)], [(datetime.datetime(2012, 6, 1, 0, 0), datetime.datetime(2012, 8, 31, 0, 0)), 92, datetime.datetime(2012, 7, 17, 0, 0)], [(datetime.datetime(2012, 9, 1, 0, 0), datetime.datetime(2012, 11, 30, 0, 0)), 91, datetime.datetime(2012, 10, 16, 0, 0)], [(datetime.datetime(2012, 12, 1, 0, 0), datetime.datetime(2013, 2, 28, 0, 0)), 90, datetime.datetime(2013, 1, 15, 0, 0)], [(datetime.datetime(2013, 3, 1, 0, 0), datetime.datetime(2013, 5, 31, 0, 0)), 92, datetime.datetime(2013, 4, 16, 0, 0)], [(datetime.datetime(2013, 6, 1, 0, 0), datetime.datetime(2013, 8, 31, 0, 0)), 92, datetime.datetime(2013, 7, 17, 0, 0)], [(datetime.datetime(2013, 9, 1, 0, 0), datetime.datetime(2013, 11, 30, 0, 0)), 91, datetime.datetime(2013, 10, 16, 0, 0)]] + self.assertEqual(to_test,correct) + + def test_get_datetime(self): + td = TemporalDimension(value=[5, 6]) + dts = np.array([dt(2000, 1, 15, 12), dt(2000, 2, 15, 12)]) + arr = date2num(dts, 'days since 0001-01-01 00:00:00') + res = td.get_datetime(arr) + self.assertNumpyAll(dts, res) + + td = TemporalDimension(value=[5, 6], units='months since 1978-12') + res = td.get_datetime(td.value) + self.assertEqual(res[0], dt(1979, 5, 16)) + + units = 'days since 0001-01-01 00:00:00' + calendar = '365_day' + ndt = netcdftime.datetime + ndts = np.array([ndt(0000, 2, 30), ndt(0000, 2, 31)]) + narr = date2num(ndts, units, calendar=calendar) + td = TemporalDimension(value=narr, units=units, calendar=calendar) + res = td.get_datetime(td.value) + self.assertTrue(all([isinstance(element, ndt) for element in res.flat])) + + def test_getiter(self): + for format_time in [True, False]: + td = self.get_temporal_dimension(name='time', format_time=format_time) + for idx, values in td.get_iter(): + to_test = (values['day'], values['month'], values['year']) + try: + self.assertTrue(all([element is not None for element in to_test])) + self.assertIsInstance(values['time'], dt) + except 
AssertionError: + self.assertTrue(all([element is None for element in to_test])) + self.assertIsInstance(values['time'], float) + + def test_get_numtime(self): + units_options = [constants.default_temporal_units, 'months since 1960-5'] + value_options = [np.array([5000., 5001]), np.array([5, 6, 7])] + for units, value in zip(units_options, value_options): + td = TemporalDimension(value=value, units=units) + nums = td.get_numtime(td.value_datetime) + self.assertNumpyAll(nums, value) def test_get_grouping(self): td = self.get_temporal_dimension() @@ -121,17 +357,29 @@ def test_get_grouping_seasonal(self): td = TemporalDimension(value=field.temporal.value_datetime) tg = td.get_grouping([[3,4,5]]) self.assertEqual(tg.value[0],dt(2005,4,16)) - - def test_get_grouping_season_empty_with_year_missing_month(self): - dt1 = datetime.datetime(1900,01,01) - dt2 = datetime.datetime(1903,1,31) - dates = get_date_list(dt1,dt2,days=1) + + def test_get_grouping_seasonal_empty_with_year_missing_month(self): + dt1 = datetime.datetime(1900, 01, 01) + dt2 = datetime.datetime(1903, 1, 31) + dates = get_date_list(dt1, dt2, days=1) td = TemporalDimension(value=dates) - group = [[12,1,2],'unique'] + group = [[12, 1, 2], 'unique'] tg = td.get_grouping(group) - ## there should be a month missing from the last season (february) and it should not be - ## considered complete - self.assertEqual(tg.value.shape[0],2) + # there should be a month missing from the last season (february) and it should not be considered complete + self.assertEqual(tg.value.shape[0], 2) + + def test_get_grouping_seasonal_real_data_all_seasons(self): + """Test with real data and full seasons.""" + + calc_grouping = [[12, 1, 2], [3, 4, 5], [6, 7, 8], [9, 10, 11]] + rd = self.test_data.get_rd('cancm4_tas') + field = rd.get() + tgd = field.temporal.get_grouping(calc_grouping) + self.assertEqual(tgd.shape, (4,)) + self.assertEqual([xx[1] for xx in calc_grouping], [xx.month for xx in tgd.value.flat]) + 
self.assertEqual(set([xx.day for xx in tgd.value.flat]), {constants.calc_month_centroid}) + self.assertEqual([2006, 2005, 2005, 2005], [xx.year for xx in tgd.value.flat]) + self.assertNumpyAll(tgd.bounds_numtime, np.array([[55152.0, 58804.0], [55211.0, 58590.0], [55303.0, 58682.0], [55395.0, 58773.0]])) def test_get_grouping_seasonal_unique_flag(self): """Test the unique flag for seasonal groups.""" @@ -170,18 +418,6 @@ def test_get_grouping_seasonal_unique_flag(self): self.assertNumpyAll(tg.dgroups[ii], dgroups[ii]) self.assertEqual(len(tg.dgroups), len(dgroups)) - def test_get_grouping_seasonal_unique_flag_winter_season(self): - """Test with a single winter season using the unique flag.""" - - dt1 = datetime.datetime(1900, 01, 01) - dt2 = datetime.datetime(1902, 12, 31) - dates = get_date_list(dt1, dt2, days=1) - td = TemporalDimension(value=dates) - group = [[12, 1, 2], 'unique'] - tg = td.get_grouping(group) - self.assertEqual(tg.value.shape[0], 2) - self.assertEqual(tg.bounds.tolist(), [[datetime.datetime(1900, 12, 1, 0, 0), datetime.datetime(1901, 2, 28, 0, 0)], [datetime.datetime(1901, 12, 1, 0, 0), datetime.datetime(1902, 2, 28, 0, 0)]]) - def test_get_grouping_seasonal_unique_flag_all_seasons(self): """Test unique flag with all seasons.""" @@ -203,6 +439,18 @@ def test_get_grouping_seasonal_unique_flag_all_seasons(self): self.assertEqual(group.value.tolist(), [datetime.datetime(1900, 4, 16, 0, 0), datetime.datetime(1900, 7, 17, 0, 0), datetime.datetime(1900, 10, 16, 0, 0), datetime.datetime(1901, 1, 15, 0, 0), datetime.datetime(1901, 4, 16, 0, 0), datetime.datetime(1901, 7, 17, 0, 0), datetime.datetime(1901, 10, 16, 0, 0), datetime.datetime(1902, 1, 15, 0, 0), datetime.datetime(1902, 4, 16, 0, 0), datetime.datetime(1902, 7, 17, 0, 0), datetime.datetime(1902, 10, 16, 0, 0)]) self.assertEqual(group.bounds.tolist(), [[datetime.datetime(1900, 3, 1, 0, 0), datetime.datetime(1900, 5, 31, 0, 0)], [datetime.datetime(1900, 6, 1, 0, 0), datetime.datetime(1900, 8, 
31, 0, 0)], [datetime.datetime(1900, 9, 1, 0, 0), datetime.datetime(1900, 11, 30, 0, 0)], [datetime.datetime(1900, 12, 1, 0, 0), datetime.datetime(1901, 2, 28, 0, 0)], [datetime.datetime(1901, 3, 1, 0, 0), datetime.datetime(1901, 5, 31, 0, 0)], [datetime.datetime(1901, 6, 1, 0, 0), datetime.datetime(1901, 8, 31, 0, 0)], [datetime.datetime(1901, 9, 1, 0, 0), datetime.datetime(1901, 11, 30, 0, 0)], [datetime.datetime(1901, 12, 1, 0, 0), datetime.datetime(1902, 2, 28, 0, 0)], [datetime.datetime(1902, 3, 1, 0, 0), datetime.datetime(1902, 5, 31, 0, 0)], [datetime.datetime(1902, 6, 1, 0, 0), datetime.datetime(1902, 8, 31, 0, 0)], [datetime.datetime(1902, 9, 1, 0, 0), datetime.datetime(1902, 11, 30, 0, 0)]]) + def test_get_grouping_seasonal_unique_flag_winter_season(self): + """Test with a single winter season using the unique flag.""" + + dt1 = datetime.datetime(1900, 01, 01) + dt2 = datetime.datetime(1902, 12, 31) + dates = get_date_list(dt1, dt2, days=1) + td = TemporalDimension(value=dates) + group = [[12, 1, 2], 'unique'] + tg = td.get_grouping(group) + self.assertEqual(tg.value.shape[0], 2) + self.assertEqual(tg.bounds.tolist(), [[datetime.datetime(1900, 12, 1, 0, 0), datetime.datetime(1901, 2, 28, 0, 0)], [datetime.datetime(1901, 12, 1, 0, 0), datetime.datetime(1902, 2, 28, 0, 0)]]) + def test_get_grouping_seasonal_year_flag(self): ## test with year flag dates = get_date_list(dt(2012,1,1),dt(2013,12,31),1) @@ -238,23 +486,53 @@ def test_get_grouping_seasonal_year_flag(self): # '[datetime.datetime(2013, 1, 1, 0, 0) datetime.datetime(2013, 1, 2, 0, 0)\n datetime.datetime(2013, 1, 3, 0, 0) datetime.datetime(2013, 1, 4, 0, 0)\n datetime.datetime(2013, 1, 5, 0, 0) datetime.datetime(2013, 1, 6, 0, 0)\n datetime.datetime(2013, 1, 7, 0, 0) datetime.datetime(2013, 1, 8, 0, 0)\n datetime.datetime(2013, 1, 9, 0, 0) datetime.datetime(2013, 1, 10, 0, 0)\n datetime.datetime(2013, 1, 11, 0, 0) datetime.datetime(2013, 1, 12, 0, 0)\n datetime.datetime(2013, 1, 13, 0, 0) 
datetime.datetime(2013, 1, 14, 0, 0)\n datetime.datetime(2013, 1, 15, 0, 0) datetime.datetime(2013, 1, 16, 0, 0)\n datetime.datetime(2013, 1, 17, 0, 0) datetime.datetime(2013, 1, 18, 0, 0)\n datetime.datetime(2013, 1, 19, 0, 0) datetime.datetime(2013, 1, 20, 0, 0)\n datetime.datetime(2013, 1, 21, 0, 0) datetime.datetime(2013, 1, 22, 0, 0)\n datetime.datetime(2013, 1, 23, 0, 0) datetime.datetime(2013, 1, 24, 0, 0)\n datetime.datetime(2013, 1, 25, 0, 0) datetime.datetime(2013, 1, 26, 0, 0)\n datetime.datetime(2013, 1, 27, 0, 0) datetime.datetime(2013, 1, 28, 0, 0)\n datetime.datetime(2013, 1, 29, 0, 0) datetime.datetime(2013, 1, 30, 0, 0)\n datetime.datetime(2013, 1, 31, 0, 0) datetime.datetime(2013, 2, 1, 0, 0)\n datetime.datetime(2013, 2, 2, 0, 0) datetime.datetime(2013, 2, 3, 0, 0)\n datetime.datetime(2013, 2, 4, 0, 0) datetime.datetime(2013, 2, 5, 0, 0)\n datetime.datetime(2013, 2, 6, 0, 0) datetime.datetime(2013, 2, 7, 0, 0)\n datetime.datetime(2013, 2, 8, 0, 0) datetime.datetime(2013, 2, 9, 0, 0)\n datetime.datetime(2013, 2, 10, 0, 0) datetime.datetime(2013, 2, 11, 0, 0)\n datetime.datetime(2013, 2, 12, 0, 0) datetime.datetime(2013, 2, 13, 0, 0)\n datetime.datetime(2013, 2, 14, 0, 0) datetime.datetime(2013, 2, 15, 0, 0)\n datetime.datetime(2013, 2, 16, 0, 0) datetime.datetime(2013, 2, 17, 0, 0)\n datetime.datetime(2013, 2, 18, 0, 0) datetime.datetime(2013, 2, 19, 0, 0)\n datetime.datetime(2013, 2, 20, 0, 0) datetime.datetime(2013, 2, 21, 0, 0)\n datetime.datetime(2013, 2, 22, 0, 0) datetime.datetime(2013, 2, 23, 0, 0)\n datetime.datetime(2013, 2, 24, 0, 0) datetime.datetime(2013, 2, 25, 0, 0)\n datetime.datetime(2013, 2, 26, 0, 0) datetime.datetime(2013, 2, 27, 0, 0)\n datetime.datetime(2013, 2, 28, 0, 0) datetime.datetime(2013, 12, 1, 0, 0)\n datetime.datetime(2013, 12, 2, 0, 0) datetime.datetime(2013, 12, 3, 0, 0)\n datetime.datetime(2013, 12, 4, 0, 0) datetime.datetime(2013, 12, 5, 0, 0)\n datetime.datetime(2013, 12, 6, 0, 0) datetime.datetime(2013, 12, 7, 
0, 0)\n datetime.datetime(2013, 12, 8, 0, 0) datetime.datetime(2013, 12, 9, 0, 0)\n datetime.datetime(2013, 12, 10, 0, 0)\n datetime.datetime(2013, 12, 11, 0, 0)\n datetime.datetime(2013, 12, 12, 0, 0)\n datetime.datetime(2013, 12, 13, 0, 0)\n datetime.datetime(2013, 12, 14, 0, 0)\n datetime.datetime(2013, 12, 15, 0, 0)\n datetime.datetime(2013, 12, 16, 0, 0)\n datetime.datetime(2013, 12, 17, 0, 0)\n datetime.datetime(2013, 12, 18, 0, 0)\n datetime.datetime(2013, 12, 19, 0, 0)\n datetime.datetime(2013, 12, 20, 0, 0)\n datetime.datetime(2013, 12, 21, 0, 0)\n datetime.datetime(2013, 12, 22, 0, 0)\n datetime.datetime(2013, 12, 23, 0, 0)\n datetime.datetime(2013, 12, 24, 0, 0)\n datetime.datetime(2013, 12, 25, 0, 0)\n datetime.datetime(2013, 12, 26, 0, 0)\n datetime.datetime(2013, 12, 27, 0, 0)\n datetime.datetime(2013, 12, 28, 0, 0)\n datetime.datetime(2013, 12, 29, 0, 0)\n datetime.datetime(2013, 12, 30, 0, 0)\n datetime.datetime(2013, 12, 31, 0, 0)]' self.assertNumpyAll(td.value[tg.dgroups[1]],np.loads('\x80\x02cnumpy.core.multiarray\n_reconstruct\nq\x01cnumpy\nndarray\nq\x02K\x00\x85U\x01b\x87Rq\x03(K\x01KZ\x85cnumpy\ndtype\nq\x04U\x02O8K\x00K\x01\x87Rq\x05(K\x03U\x01|NNNJ\xff\xff\xff\xffJ\xff\xff\xff\xffK?tb\x89]q\x06(cdatetime\ndatetime\nq\x07U\n\x07\xdd\x01\x01\x00\x00\x00\x00\x00\x00\x85Rq\x08h\x07U\n\x07\xdd\x01\x02\x00\x00\x00\x00\x00\x00\x85Rq\th\x07U\n\x07\xdd\x01\x03\x00\x00\x00\x00\x00\x00\x85Rq\nh\x07U\n\x07\xdd\x01\x04\x00\x00\x00\x00\x00\x00\x85Rq\x0bh\x07U\n\x07\xdd\x01\x05\x00\x00\x00\x00\x00\x00\x85Rq\x0ch\x07U\n\x07\xdd\x01\x06\x00\x00\x00\x00\x00\x00\x85Rq\rh\x07U\n\x07\xdd\x01\x07\x00\x00\x00\x00\x00\x00\x85Rq\x0eh\x07U\n\x07\xdd\x01\x08\x00\x00\x00\x00\x00\x00\x85Rq\x0fh\x07U\n\x07\xdd\x01\t\x00\x00\x00\x00\x00\x00\x85Rq\x10h\x07U\n\x07\xdd\x01\n\x00\x00\x00\x00\x00\x00\x85Rq\x11h\x07U\n\x07\xdd\x01\x0b\x00\x00\x00\x00\x00\x00\x85Rq\x12h\x07U\n\x07\xdd\x01\x0c\x00\x00\x00\x00\x00\x00\x85Rq\x13h\x07U\n\x07\xdd\x01\r\x00\x00\x00\x00\x00\x00\x85Rq\x
14h\x07U\n\x07\xdd\x01\x0e\x00\x00\x00\x00\x00\x00\x85Rq\x15h\x07U\n\x07\xdd\x01\x0f\x00\x00\x00\x00\x00\x00\x85Rq\x16h\x07U\n\x07\xdd\x01\x10\x00\x00\x00\x00\x00\x00\x85Rq\x17h\x07U\n\x07\xdd\x01\x11\x00\x00\x00\x00\x00\x00\x85Rq\x18h\x07U\n\x07\xdd\x01\x12\x00\x00\x00\x00\x00\x00\x85Rq\x19h\x07U\n\x07\xdd\x01\x13\x00\x00\x00\x00\x00\x00\x85Rq\x1ah\x07U\n\x07\xdd\x01\x14\x00\x00\x00\x00\x00\x00\x85Rq\x1bh\x07U\n\x07\xdd\x01\x15\x00\x00\x00\x00\x00\x00\x85Rq\x1ch\x07U\n\x07\xdd\x01\x16\x00\x00\x00\x00\x00\x00\x85Rq\x1dh\x07U\n\x07\xdd\x01\x17\x00\x00\x00\x00\x00\x00\x85Rq\x1eh\x07U\n\x07\xdd\x01\x18\x00\x00\x00\x00\x00\x00\x85Rq\x1fh\x07U\n\x07\xdd\x01\x19\x00\x00\x00\x00\x00\x00\x85Rq h\x07U\n\x07\xdd\x01\x1a\x00\x00\x00\x00\x00\x00\x85Rq!h\x07U\n\x07\xdd\x01\x1b\x00\x00\x00\x00\x00\x00\x85Rq"h\x07U\n\x07\xdd\x01\x1c\x00\x00\x00\x00\x00\x00\x85Rq#h\x07U\n\x07\xdd\x01\x1d\x00\x00\x00\x00\x00\x00\x85Rq$h\x07U\n\x07\xdd\x01\x1e\x00\x00\x00\x00\x00\x00\x85Rq%h\x07U\n\x07\xdd\x01\x1f\x00\x00\x00\x00\x00\x00\x85Rq&h\x07U\n\x07\xdd\x02\x01\x00\x00\x00\x00\x00\x00\x85Rq\'h\x07U\n\x07\xdd\x02\x02\x00\x00\x00\x00\x00\x00\x85Rq(h\x07U\n\x07\xdd\x02\x03\x00\x00\x00\x00\x00\x00\x85Rq)h\x07U\n\x07\xdd\x02\x04\x00\x00\x00\x00\x00\x00\x85Rq*h\x07U\n\x07\xdd\x02\x05\x00\x00\x00\x00\x00\x00\x85Rq+h\x07U\n\x07\xdd\x02\x06\x00\x00\x00\x00\x00\x00\x85Rq,h\x07U\n\x07\xdd\x02\x07\x00\x00\x00\x00\x00\x00\x85Rq-h\x07U\n\x07\xdd\x02\x08\x00\x00\x00\x00\x00\x00\x85Rq.h\x07U\n\x07\xdd\x02\t\x00\x00\x00\x00\x00\x00\x85Rq/h\x07U\n\x07\xdd\x02\n\x00\x00\x00\x00\x00\x00\x85Rq0h\x07U\n\x07\xdd\x02\x0b\x00\x00\x00\x00\x00\x00\x85Rq1h\x07U\n\x07\xdd\x02\x0c\x00\x00\x00\x00\x00\x00\x85Rq2h\x07U\n\x07\xdd\x02\r\x00\x00\x00\x00\x00\x00\x85Rq3h\x07U\n\x07\xdd\x02\x0e\x00\x00\x00\x00\x00\x00\x85Rq4h\x07U\n\x07\xdd\x02\x0f\x00\x00\x00\x00\x00\x00\x85Rq5h\x07U\n\x07\xdd\x02\x10\x00\x00\x00\x00\x00\x00\x85Rq6h\x07U\n\x07\xdd\x02\x11\x00\x00\x00\x00\x00\x00\x85Rq7h\x07U\n\x07\xdd\x02\x12\x00\x00\x00\x00\x00\
x00\x85Rq8h\x07U\n\x07\xdd\x02\x13\x00\x00\x00\x00\x00\x00\x85Rq9h\x07U\n\x07\xdd\x02\x14\x00\x00\x00\x00\x00\x00\x85Rq:h\x07U\n\x07\xdd\x02\x15\x00\x00\x00\x00\x00\x00\x85Rq;h\x07U\n\x07\xdd\x02\x16\x00\x00\x00\x00\x00\x00\x85Rqh\x07U\n\x07\xdd\x02\x19\x00\x00\x00\x00\x00\x00\x85Rq?h\x07U\n\x07\xdd\x02\x1a\x00\x00\x00\x00\x00\x00\x85Rq@h\x07U\n\x07\xdd\x02\x1b\x00\x00\x00\x00\x00\x00\x85RqAh\x07U\n\x07\xdd\x02\x1c\x00\x00\x00\x00\x00\x00\x85RqBh\x07U\n\x07\xdd\x0c\x01\x00\x00\x00\x00\x00\x00\x85RqCh\x07U\n\x07\xdd\x0c\x02\x00\x00\x00\x00\x00\x00\x85RqDh\x07U\n\x07\xdd\x0c\x03\x00\x00\x00\x00\x00\x00\x85RqEh\x07U\n\x07\xdd\x0c\x04\x00\x00\x00\x00\x00\x00\x85RqFh\x07U\n\x07\xdd\x0c\x05\x00\x00\x00\x00\x00\x00\x85RqGh\x07U\n\x07\xdd\x0c\x06\x00\x00\x00\x00\x00\x00\x85RqHh\x07U\n\x07\xdd\x0c\x07\x00\x00\x00\x00\x00\x00\x85RqIh\x07U\n\x07\xdd\x0c\x08\x00\x00\x00\x00\x00\x00\x85RqJh\x07U\n\x07\xdd\x0c\t\x00\x00\x00\x00\x00\x00\x85RqKh\x07U\n\x07\xdd\x0c\n\x00\x00\x00\x00\x00\x00\x85RqLh\x07U\n\x07\xdd\x0c\x0b\x00\x00\x00\x00\x00\x00\x85RqMh\x07U\n\x07\xdd\x0c\x0c\x00\x00\x00\x00\x00\x00\x85RqNh\x07U\n\x07\xdd\x0c\r\x00\x00\x00\x00\x00\x00\x85RqOh\x07U\n\x07\xdd\x0c\x0e\x00\x00\x00\x00\x00\x00\x85RqPh\x07U\n\x07\xdd\x0c\x0f\x00\x00\x00\x00\x00\x00\x85RqQh\x07U\n\x07\xdd\x0c\x10\x00\x00\x00\x00\x00\x00\x85RqRh\x07U\n\x07\xdd\x0c\x11\x00\x00\x00\x00\x00\x00\x85RqSh\x07U\n\x07\xdd\x0c\x12\x00\x00\x00\x00\x00\x00\x85RqTh\x07U\n\x07\xdd\x0c\x13\x00\x00\x00\x00\x00\x00\x85RqUh\x07U\n\x07\xdd\x0c\x14\x00\x00\x00\x00\x00\x00\x85RqVh\x07U\n\x07\xdd\x0c\x15\x00\x00\x00\x00\x00\x00\x85RqWh\x07U\n\x07\xdd\x0c\x16\x00\x00\x00\x00\x00\x00\x85RqXh\x07U\n\x07\xdd\x0c\x17\x00\x00\x00\x00\x00\x00\x85RqYh\x07U\n\x07\xdd\x0c\x18\x00\x00\x00\x00\x00\x00\x85RqZh\x07U\n\x07\xdd\x0c\x19\x00\x00\x00\x00\x00\x00\x85Rq[h\x07U\n\x07\xdd\x0c\x1a\x00\x00\x00\x00\x00\x00\x85Rq\\h\x07U\n\x07\xdd\x0c\x1b\x00\x00\x00\x00\x00\x00\x85Rq]h\x07U\n\x07\xdd\x0c\x1c\x00\x00\x00\x00\x00\x00\x85Rq^h\x07U\n\x07\xdd
\x0c\x1d\x00\x00\x00\x00\x00\x00\x85Rq_h\x07U\n\x07\xdd\x0c\x1e\x00\x00\x00\x00\x00\x00\x85Rq`h\x07U\n\x07\xdd\x0c\x1f\x00\x00\x00\x00\x00\x00\x85Rqaetb.')) - def test_get_boolean_groups_from_time_regions(self): - dates = get_date_list(dt(2012,1,1),dt(2013,12,31),1) - seasons = [[3,4,5],[6,7,8],[9,10,11],[12,1,2]] + def test_get_time_region_value_only(self): + dates = get_date_list(dt(2002,1,31),dt(2009,12,31),1) td = TemporalDimension(value=dates) - time_regions = get_time_regions(seasons,dates,raise_if_incomplete=False) - dgroups = list(iter_boolean_groups_from_time_regions(time_regions,td)) - ## the last winter season is not complete as it does not have enough years - self.assertEqual(len(dgroups),7) + ret,indices = td.get_time_region({'month':[8]},return_indices=True) + self.assertEqual(set([8]),set([d.month for d in ret.value.flat])) - to_test = [] - for dgroup in dgroups: - sub = td[dgroup] - ## (upper and lower values of time vector, count of elements in time group, the middle value of the vector) - to_test.append([sub.extent, sub.shape[0], sub[sub.shape[0]/2].value[0]]) - correct = [[(datetime.datetime(2012, 3, 1, 0, 0), datetime.datetime(2012, 5, 31, 0, 0)), 92, datetime.datetime(2012, 4, 16, 0, 0)], [(datetime.datetime(2012, 6, 1, 0, 0), datetime.datetime(2012, 8, 31, 0, 0)), 92, datetime.datetime(2012, 7, 17, 0, 0)], [(datetime.datetime(2012, 9, 1, 0, 0), datetime.datetime(2012, 11, 30, 0, 0)), 91, datetime.datetime(2012, 10, 16, 0, 0)], [(datetime.datetime(2012, 12, 1, 0, 0), datetime.datetime(2013, 2, 28, 0, 0)), 90, datetime.datetime(2013, 1, 15, 0, 0)], [(datetime.datetime(2013, 3, 1, 0, 0), datetime.datetime(2013, 5, 31, 0, 0)), 92, datetime.datetime(2013, 4, 16, 0, 0)], [(datetime.datetime(2013, 6, 1, 0, 0), datetime.datetime(2013, 8, 31, 0, 0)), 92, datetime.datetime(2013, 7, 17, 0, 0)], [(datetime.datetime(2013, 9, 1, 0, 0), datetime.datetime(2013, 11, 30, 0, 0)), 91, datetime.datetime(2013, 10, 16, 0, 0)]] - self.assertEqual(to_test,correct) + 
ret,indices = td.get_time_region({'year':[2008,2004]},return_indices=True) + self.assertEqual(set([2008,2004]),set([d.year for d in ret.value.flat])) + + ret,indices = td.get_time_region({'day':[20,31]},return_indices=True) + self.assertEqual(set([20,31]),set([d.day for d in ret.value.flat])) + + ret,indices = td.get_time_region({'day':[20,31],'month':[9,10],'year':[2003]},return_indices=True) + self.assertNumpyAll(ret.value,np.array([dt(2003,9,20),dt(2003,10,20),dt(2003,10,31,)])) + self.assertEqual(ret.shape,indices.shape) + + self.assertEqual(ret.extent,(datetime.datetime(2003,9,20),datetime.datetime(2003,10,31))) + + def test_months_in_time_units(self): + units = "months since 1978-12" + vec = range(0, 36) + datetimes = get_datetime_from_months_time_units(vec, units) + td = TemporalDimension(value=vec, units=units, calendar='standard') + self.assertTrue(td._has_months_units) + self.assertNumpyAll(td.value_datetime, datetimes) + + def test_months_in_time_units_are_bad_netcdftime(self): + units = "months since 1978-12" + vec = range(0, 36) + calendar = "standard" + with self.assertRaises(ValueError): + num2date(vec, units, calendar=calendar) + + def test_months_in_time_units_between(self): + units = "months since 1978-12" + vec = range(0, 36) + datetimes = get_datetime_from_months_time_units(vec, units) + td = TemporalDimension(value=vec, units=units, calendar='standard') + ret = td.get_between(datetimes[0], datetimes[3]) + self.assertNumpyAll(ret.value, np.array([0, 1, 2, 3])) + + def test_months_not_in_time_units(self): + units = "days since 1900-01-01" + value = np.array([31]) + td = TemporalDimension(value=value, units=units, calendar='standard') + self.assertFalse(td._has_months_units) def test_seasonal_get_time_regions(self): dates = get_date_list(dt(2012,1,1),dt(2013,12,31),1) @@ -292,7 +570,7 @@ def test_seasonal_get_time_regions(self): time_regions = get_time_regions(calc_grouping,dates,raise_if_incomplete=False) correct = [[{'month': [3, 4, 5], 'year': 
[2012]}], [{'month': [6, 7, 8], 'year': [2012]}], [{'month': [9, 10, 11], 'year': [2012]}], [{'month': [12], 'year': [2012]}, {'month': [2, 1], 'year': [2013]}], [{'month': [3, 4, 5], 'year': [2013]}], [{'month': [6, 7, 8], 'year': [2013]}], [{'month': [9, 10, 11], 'year': [2013]}]] self.assertEqual(time_regions,correct) - + def test_time_range_subset(self): dt1 = datetime.datetime(1950,01,01,12) dt2 = datetime.datetime(1950,12,31,12) @@ -311,49 +589,94 @@ def test_time_range_subset(self): td = TemporalDimension(value=dates,bounds=bounds) ret = td.get_between(r1,r2) self.assertEqual(ret.value[-1],datetime.datetime(1950,12,31,12,0)) - - def test_get_sorted_seasons(self): - calc_grouping = [[9, 10, 11], [12, 1, 2], [6, 7, 8]] - methods = ['max', 'min'] - for method in methods: - for perm in itertools.permutations(calc_grouping, r=3): - ret = get_sorted_seasons(perm, method=method) - if method == 'max': - self.assertEqual(ret, [[6, 7, 8], [9, 10, 11], [12, 1, 2]]) - else: - self.assertEqual(ret, [[12, 1, 2], [6, 7, 8], [9, 10, 11]]) - - def test_get_is_interannual(self): - self.assertTrue(get_is_interannual([11,12,1])) - self.assertFalse(get_is_interannual([10,11,12])) - - def test_get_time_region_value_only(self): - dates = get_date_list(dt(2002,1,31),dt(2009,12,31),1) - td = TemporalDimension(value=dates) - - ret,indices = td.get_time_region({'month':[8]},return_indices=True) - self.assertEqual(set([8]),set([d.month for d in ret.value.flat])) - - ret,indices = td.get_time_region({'year':[2008,2004]},return_indices=True) - self.assertEqual(set([2008,2004]),set([d.year for d in ret.value.flat])) - - ret,indices = td.get_time_region({'day':[20,31]},return_indices=True) - self.assertEqual(set([20,31]),set([d.day for d in ret.value.flat])) - - ret,indices = td.get_time_region({'day':[20,31],'month':[9,10],'year':[2003]},return_indices=True) - self.assertNumpyAll(ret.value,np.array([dt(2003,9,20),dt(2003,10,20),dt(2003,10,31,)])) - 
self.assertEqual(ret.shape,indices.shape) - - self.assertEqual(ret.extent,(datetime.datetime(2003,9,20),datetime.datetime(2003,10,31))) + def test_value_datetime_and_value_numtime(self): + value_datetime = np.array([dt(2000, 1, 15), dt(2000, 2, 15)]) + value = date2num(value_datetime, constants.default_temporal_units, calendar=constants.default_temporal_calendar) + keywords = dict(value=[value, value_datetime], + format_time=[True, False]) + for k in itr_products_keywords(keywords, as_namedtuple=True): + td = TemporalDimension(**k._asdict()) + self.assertNumpyAll(td.value, k.value) + try: + self.assertNumpyAll(td.value_datetime, value_datetime) + except CannotFormatTimeError: + self.assertFalse(k.format_time) + self.assertNumpyAll(td.value_numtime, value) + + def test_write_to_netcdf_dataset(self): + rd = self.test_data.get_rd('cancm4_tas') + path = os.path.join(self.current_dir_output, 'foo.nc') + + keywords = dict(with_bounds=[True, False], + as_datetime=[False, True]) + + for k in itr_products_keywords(keywords, as_namedtuple=True): + field = rd.get() + td = field.temporal + if not k.with_bounds: + td.bounds + td.bounds = None + self.assertIsNone(td.bounds) + if k.as_datetime: + td._value = td.value_datetime + td._bounds = td.bounds_datetime + + original_value = deepcopy(td.value) + original_bounds = deepcopy(td.bounds) + + with nc_scope(path, 'w') as ds: + td.write_to_netcdf_dataset(ds) + for name, expected_value in zip([td.name_value, td.name_bounds], [td.value_numtime, td.bounds_numtime]): + try: + variable = ds.variables[name] + except KeyError: + self.assertFalse(k.with_bounds) + continue + self.assertEqual(variable.calendar, td.calendar) + self.assertEqual(variable.units, td.units) + self.assertNumpyAll(original_value, td.value) + try: + self.assertNumpyAll(original_bounds, td.bounds) + except AttributeError: + self.assertFalse(k.with_bounds) + self.assertIsNone(original_bounds) class TestTemporalGroupDimension(TestBase): - - def 
test_constructor_by_temporal_dimension(self): - value = [dt(2012,1,1),dt(2012,1,2)] + + def get_tgd(self): + td = self.test_data.get_rd('cancm4_tas').get().temporal + tgd = td.get_grouping(['month']) + return tgd + + def test_init(self): + tgd = self.get_tgd() + self.assertIsInstance(tgd, TemporalDimension) + + def test_return_from_get_grouping(self): + value = [dt(2012, 1, 1), dt(2012, 1, 2)] td = TemporalDimension(value=value) tgd = td.get_grouping(['month']) - self.assertEqual(tuple(tgd.date_parts[0]),(None,1,None,None,None,None)) + self.assertEqual(tuple(tgd.date_parts[0]), (None, 1, None, None, None, None)) self.assertTrue(tgd.dgroups[0].all()) - self.assertNumpyAll(tgd.uid,np.array([1],dtype=constants.np_int)) + self.assertNumpyAll(tgd.uid, np.array([1], dtype=constants.np_int)) + + def test_write_to_netcdf_dataset(self): + tgd = self.get_tgd() + path = os.path.join(self.current_dir_output, 'foo.nc') + with nc_scope(path, 'w') as ds: + tgd.write_to_netcdf_dataset(ds) + self.assertIn('climatology_bounds', ds.variables) + ncvar = ds.variables[tgd.name_value] + self.assertEqual(ncvar.climatology, 'climatology_bounds') + with self.assertRaises(AttributeError): + ncvar.bounds + + # test failure and make sure original bounds name is preserved + self.assertNotEqual(tgd.name_bounds, 'climatology_bounds') + with nc_scope(path, 'w') as ds: + try: + tgd.write_to_netcdf_dataset(ds, darkness='forever') + except TypeError: + self.assertNotEqual(tgd.name_bounds, 'climatology_bounds') diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py index 6916a1f18..3ca9aad49 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py @@ -1,21 +1,30 @@ +from netCDF4 import date2num +import os import unittest from datetime import datetime as dt -from ocgis.util.helpers import get_date_list, make_poly -from 
ocgis.interface.base.dimension.base import VectorDimension import datetime -from ocgis.interface.base.dimension.spatial import SpatialGridDimension,\ - SpatialDimension -from ocgis.interface.base.field import Field, DerivedField -import numpy as np import itertools -from ocgis.test.base import TestBase -from ocgis.exc import EmptySubsetError +from copy import deepcopy +from importlib import import_module +from collections import OrderedDict + +import numpy as np from shapely import wkt from shapely.ops import cascaded_union + +from ocgis import constants +from ocgis import RequestDataset +from ocgis.interface.base.attributes import Attributes +from ocgis.interface.base.crs import WGS84, Spherical +from ocgis.interface.nc.temporal import NcTemporalDimension +from ocgis.util.helpers import get_date_list, make_poly +from ocgis.interface.base.dimension.base import VectorDimension +from ocgis.interface.base.dimension.spatial import SpatialGridDimension, SpatialDimension +from ocgis.interface.base.field import Field, DerivedField +from ocgis.test.base import TestBase, nc_scope +from ocgis.exc import EmptySubsetError from ocgis.interface.base.variable import Variable, VariableCollection from ocgis.interface.base.dimension.temporal import TemporalDimension -from copy import deepcopy -from importlib import import_module from ocgis.util.itester import itr_products_keywords @@ -24,118 +33,296 @@ class AbstractTestField(TestBase): def setUp(self): np.random.seed(1) super(AbstractTestField,self).setUp() - - def get_col(self,bounds=True): - value = [-100.,-99.,-98.,-97.] + + def get_col(self, bounds=True, with_name=True): + value = [-100., -99., -98., -97.] if bounds: - bounds = [[v-0.5,v+0.5] for v in value] + bounds = [[v - 0.5, v + 0.5] for v in value] else: bounds = None - row = VectorDimension(value=value,bounds=bounds,name='longitude') - return(row) - - def get_row(self,bounds=True): - value = [40.,39.,38.] 
+ name = 'longitude' if with_name else None + col = VectorDimension(value=value, bounds=bounds, name=name) + return col + + def get_row(self, bounds=True, with_name=True): + value = [40., 39., 38.] if bounds: - bounds = [[v+0.5,v-0.5] for v in value] + bounds = [[v + 0.5, v - 0.5] for v in value] else: bounds = None - row = VectorDimension(value=value,bounds=bounds,name='latitude') - return(row) - - def get_field(self,with_bounds=True,with_value=False,with_level=True,with_temporal=True, - with_realization=True,month_count=1,name='tmax',units='kelvin',field_name=None): - + name = 'latitude' if with_name else None + row = VectorDimension(value=value, bounds=bounds, name=name) + return row + + def get_field(self, with_bounds=True, with_value=False, with_level=True, with_temporal=True, with_realization=True, + month_count=1, name='tmax', units='kelvin', field_name=None, crs=None, with_dimension_names=True): + if with_temporal: - temporal_start = dt(2000,1,1,12) + temporal_start = dt(2000, 1, 1, 12) if month_count == 1: - temporal_stop = dt(2000,1,31,12) + temporal_stop = dt(2000, 1, 31, 12) elif month_count == 2: - temporal_stop = dt(2000,2,29,12) + temporal_stop = dt(2000, 2, 29, 12) else: - raise(NotImplementedError) - temporal_value = get_date_list(temporal_start,temporal_stop,1) + raise NotImplementedError + temporal_value = get_date_list(temporal_start, temporal_stop, 1) delta_bounds = datetime.timedelta(hours=12) if with_bounds: - temporal_bounds = [[v-delta_bounds,v+delta_bounds] for v in temporal_value] + temporal_bounds = [[v - delta_bounds, v + delta_bounds] for v in temporal_value] else: temporal_bounds = None - temporal = TemporalDimension(value=temporal_value,bounds=temporal_bounds,name='time', - units='days') + dname = 'time' if with_dimension_names else None + temporal = TemporalDimension(value=temporal_value, bounds=temporal_bounds, name=dname) t_shape = temporal.shape[0] else: temporal = None t_shape = 1 - + if with_level: - level_value = [50,150] + 
level_value = [50, 150] if with_bounds: - level_bounds = [[0,100],[100,200]] + level_bounds = [[0, 100], [100, 200]] else: level_bounds = None - level = VectorDimension(value=level_value,bounds=level_bounds,name='level', - units='meters') + dname = 'level' if with_dimension_names else None + level = VectorDimension(value=level_value, bounds=level_bounds, name=dname, units='meters') l_shape = level.shape[0] else: level = None l_shape = 1 - - row = self.get_row(bounds=with_bounds) - col = self.get_col(bounds=with_bounds) - grid = SpatialGridDimension(row=row,col=col) - spatial = SpatialDimension(grid=grid) + + with_name = True if with_dimension_names else False + row = self.get_row(bounds=with_bounds, with_name=with_name) + col = self.get_col(bounds=with_bounds, with_name=with_name) + grid = SpatialGridDimension(row=row, col=col) + spatial = SpatialDimension(grid=grid, crs=crs) row_shape = row.shape[0] col_shape = col.shape[0] - + if with_realization: - realization = VectorDimension(value=[1,2],name='realization') + dname = 'realization' if with_dimension_names else None + realization = VectorDimension(value=[1, 2], name=dname) r_shape = realization.shape[0] else: realization = None r_shape = 1 - + if with_value: - value = np.random.rand(r_shape,t_shape,l_shape,row_shape,col_shape) + value = np.random.rand(r_shape, t_shape, l_shape, row_shape, col_shape) + data = None else: value = None - - var = Variable(name,units=units,debug=True,data=None,value=value) + data = 'foo' + + var = Variable(name, units=units, data=data, value=value) vc = VariableCollection(variables=var) - field = Field(variables=vc,temporal=temporal,level=level,realization=realization, - spatial=spatial,name=field_name) - - return(field) + field = Field(variables=vc, temporal=temporal, level=level, realization=realization, spatial=spatial, + name=field_name) + + return field class TestField(AbstractTestField): - def test_should_regrid(self): - field = self.get_field() - 
self.assertFalse(field._should_regrid) + def test_init(self): + for b, wv in itertools.product([True, False], [True, False]): + field = self.get_field(with_bounds=b, with_value=wv, with_dimension_names=False) + self.assertEqual(field.level.name, 'level') + self.assertEqual(field.level.name_uid, 'level_uid') + self.assertEqual(field.spatial.grid.row.name, 'yc') + with self.assertRaises(NotImplementedError): + list(field) + self.assertIsInstance(field, Attributes) + self.assertEqual(field.attrs, OrderedDict()) + self.assertFalse(field.regrid_destination) + ref = field.shape + self.assertEqual(ref, (2, 31, 2, 3, 4)) + with self.assertRaises(AttributeError): + field.value + self.assertIsInstance(field.variables, VariableCollection) + self.assertIsInstance(field.variables['tmax'], Variable) + if wv: + self.assertIsInstance(field.variables['tmax'].value, np.ma.MaskedArray) + self.assertEqual(field.variables['tmax'].value.shape, field.shape) + else: + with self.assertRaises(Exception): + field.variables['tmax'].value - def test_loading_from_source_spatial_bounds(self): - """Test row bounds may be set to None when loading from source.""" + def test_init_empty(self): + with self.assertRaises(ValueError): + Field() - field = self.test_data.get_rd('cancm4_tas').get() - field.spatial.grid.row.bounds - field.spatial.grid.row.bounds = None - self.assertIsNone(field.spatial.grid.row.bounds) + def test_crs(self): + field = self.get_field(with_value=True) + self.assertIsNone(field.spatial.crs) + self.assertIsNone(field.crs) + field.spatial.crs = WGS84() + self.assertEqual(field.crs, WGS84()) - def test_name_none_one_variable(self): - field = self.get_field(field_name=None) - self.assertEqual(field.name, field.variables.values()[0].alias) + def test_deepcopy(self): + field = self.get_field(with_value=True) + deepcopy(field) - def test_name_none_two_variables(self): - field = self.get_field() - field2 = self.get_field() - var2 = field2.variables['tmax'] - var2.alias = 'tmax2' - 
field.variables.add_variable(var2, assign_new_uid=True) - self.assertEqual(field.name, 'tmax_tmax2') + def test_fancy_indexing(self): + field = self.get_field(with_value=True) + sub = field[:,(3,5,10,15),:,:,:] + self.assertEqual(sub.shape,(2,4,2,3,4)) + self.assertNumpyAll(sub.variables['tmax'].value,field.variables['tmax'].value[:,(3,5,10,15),:,:,:]) - def test_name(self): - field = self.get_field(field_name='foo') - self.assertEqual(field.name, 'foo') + sub = field[:,(3,15),:,:,:] + self.assertEqual(sub.shape,(2,2,2,3,4)) + self.assertNumpyAll(sub.variables['tmax'].value,field.variables['tmax'].value[:,(3,15),:,:,:]) + + sub = field[:,3:15,:,:,:] + self.assertEqual(sub.shape,(2,12,2,3,4)) + self.assertNumpyAll(sub.variables['tmax'].value,field.variables['tmax'].value[:,3:15,:,:,:]) + + def test_get_aggregated_all(self): + for wv in [True,False]: + field = self.get_field(with_value=wv) + try: + agg = field.get_spatially_aggregated() + except NotImplementedError: + if not wv: + continue + else: + raise + self.assertNotEqual(field.spatial.grid,None) + self.assertEqual(agg.spatial.grid,None) + self.assertEqual(agg.shape,(2,31,2,1,1)) + self.assertNumpyAll(field.variables['tmax'].value,agg._raw.variables['tmax'].value) + self.assertTrue(np.may_share_memory(field.variables['tmax'].value,agg._raw.variables['tmax'].value)) + + to_test = field.variables['tmax'].value[0,0,0,:,:].mean() + self.assertNumpyAll(to_test,agg.variables['tmax'].value[0,0,0,0,0]) + + def test_get_aggregated_irregular(self): + single = wkt.loads('POLYGON((-99.894355 40.230645,-98.725806 40.196774,-97.726613 40.027419,-97.032258 39.942742,-97.681452 39.626613,-97.850806 39.299194,-98.178226 39.643548,-98.844355 39.920161,-99.894355 40.230645))') + field = self.get_field(with_value=True) + for b in [True,False]: + try: + ret = field.get_clip(single,use_spatial_index=b) + agg = ret.get_spatially_aggregated() + to_test = agg.spatial.geom.polygon.value[0,0] + 
self.assertAlmostEqual(to_test.area,single.area) + self.assertAlmostEqual(to_test.bounds,single.bounds) + self.assertAlmostEqual(to_test.exterior.length,single.exterior.length) + except ImportError: + with self.assertRaises(ImportError): + import_module('rtree') + + def test_get_clip_single_cell(self): + single = wkt.loads('POLYGON((-97.997731 39.339322,-97.709012 39.292322,-97.742584 38.996888,-97.668726 38.641026,-98.158876 38.708170,-98.340165 38.916316,-98.273021 39.218463,-97.997731 39.339322))') + field = self.get_field(with_value=True) + for b in [True,False]: + try: + ret = field.get_clip(single,use_spatial_index=b) + self.assertEqual(ret.shape,(2,31,2,1,1)) + self.assertEqual(ret.spatial.grid._value.sum(),-59.0) + self.assertTrue(ret.spatial.geom.polygon.value[0,0].almost_equals(single)) + self.assertEqual(ret.spatial.uid,np.array([[7]])) + + self.assertEqual(ret.spatial.geom.point.value.shape,ret.spatial.geom.polygon.shape) + ref_pt = ret.spatial.geom.point.value[0,0] + ref_poly = ret.spatial.geom.polygon.value[0,0] + self.assertTrue(ref_poly.intersects(ref_pt)) + except ImportError: + with self.assertRaises(ImportError): + import_module('rtree') + + def test_get_clip_irregular(self): + for wv in [True,False]: + single = wkt.loads('POLYGON((-99.894355 40.230645,-98.725806 40.196774,-97.726613 40.027419,-97.032258 39.942742,-97.681452 39.626613,-97.850806 39.299194,-98.178226 39.643548,-98.844355 39.920161,-99.894355 40.230645))') + field = self.get_field(with_value=wv) + for b in [True,False]: + try: + ret = field.get_clip(single,use_spatial_index=b) + self.assertEqual(ret.shape,(2,31,2,2,4)) + unioned = cascaded_union([geom for geom in ret.spatial.geom.polygon.value.compressed().flat]) + self.assertAlmostEqual(unioned.area,single.area) + self.assertAlmostEqual(unioned.bounds,single.bounds) + self.assertAlmostEqual(unioned.exterior.length,single.exterior.length) + self.assertAlmostEqual(ret.spatial.weights[1,2],0.064016424) + 
self.assertAlmostEqual(ret.spatial.weights.sum(),1.776435) + if not wv: + with self.assertRaises(NotImplementedError): + ret.variables['tmax'].value + except ImportError: + with self.assertRaises(ImportError): + import_module('rtree') + + def test_get_iter(self): + field = self.get_field(with_value=True) + rows = list(field.get_iter()) + self.assertEqual(len(rows), 2 * 31 * 2 * 3 * 4) + rows[100]['geom'] = rows[100]['geom'].bounds + real = {'realization_bounds_lower': None, 'vid': 1, 'time_bounds_upper': datetime.datetime(2000, 1, 6, 0, 0), + 'realization_bounds_upper': None, 'year': 2000, 'gid': 5, 'level_bounds_upper': 100, + 'realization_uid': 1, 'realization': 1, 'geom': (-100.5, 38.5, -99.5, 39.5), 'level_bounds_lower': 0, + 'variable': 'tmax', 'month': 1, 'time_bounds_lower': datetime.datetime(2000, 1, 5, 0, 0), 'day': 5, + 'level': 50, 'did': None, 'value': 0.32664490177209615, 'alias': 'tmax', 'level_uid': 1, + 'time': datetime.datetime(2000, 1, 5, 12, 0), 'tid': 5, 'name': 'tmax'} + for k, v in rows[100].iteritems(): + self.assertEqual(real[k], v) + self.assertEqual(set(real.keys()), set(rows[100].keys())) + self.assertEqual(set(field.variables['tmax'].value.flatten().tolist()), set([r['value'] for r in rows])) + + # test without names + field = self.get_field(with_value=True, with_dimension_names=False) + rows = list(field.get_iter()) + self.assertAsSetEqual(rows[10].keys(), ['vid', 'gid', 'month', 'year', 'alias', 'geom', 'realization', 'realization_uid', 'time_bounds_lower', 'level_bounds_upper', 'variable', 'day', 'realization_bounds_lower', 'name', 'level', 'did', 'level_bounds_lower', 'value', 'realization_bounds_upper', 'level_uid', 'time', 'tid', 'time_bounds_upper']) + + def test_get_iter_spatial_only(self): + """Test with only a spatial dimension.""" + + field = self.get_field(with_temporal=False, with_level=False, with_realization=False, with_value=True) + self.assertIsNone(field.level) + rows = list(field.get_iter()) + for xx in ['lid', 
'level']: + self.assertNotIn(xx, rows[0].keys()) + self.assertEqual(len(rows), 12) + + def test_get_intersects_domain_polygon(self): + regular = make_poly((36.61,41.39),(-101.41,-95.47)) + field = self.get_field(with_value=True) + for b in [True,False]: + try: + ret = field.get_intersects(regular,use_spatial_index=b) + self.assertNumpyAll(ret.variables['tmax'].value,field.variables['tmax'].value) + self.assertNumpyAll(field.spatial.grid.value,ret.spatial.grid.value) + except ImportError: + with self.assertRaises(ImportError): + import_module('rtree') + + def test_get_intersects_irregular_polygon(self): + irregular = wkt.loads('POLYGON((-100.106049 38.211305,-99.286894 38.251591,-99.286894 38.258306,-99.286894 38.258306,-99.260036 39.252035,-98.769886 39.252035,-98.722885 37.734583,-100.092620 37.714440,-100.106049 38.211305))') + keywords = dict(b=[True, False], + with_corners=[True, False]) + for k in itr_products_keywords(keywords, as_namedtuple=True): + try: + field = self.get_field(with_value=True) + if k.with_corners: + field.spatial.grid.corners + ret = field.get_intersects(irregular,use_spatial_index=k.b) + self.assertEqual(ret.shape,(2,31,2,2,2)) + self.assertNumpyAll(ret.variables['tmax'].value.mask[0,2,1,:,:],np.array([[True,False],[False,False]])) + self.assertEqual(ret.spatial.uid.data[ret.spatial.get_mask()][0],5) + if k.with_corners: + self.assertNumpyAll(ret.spatial.grid.corners.mask, np.array([[[[True, True, True, True], [False, False, False, False]], [[False, False, False, False], [False, False, False, False]]], [[[True, True, True, True], [False, False, False, False]], [[False, False, False, False], [False, False, False, False]]]])) + else: + self.assertIsNone(ret.spatial.grid._corners) + except ImportError: + with self.assertRaises(ImportError): + import_module('rtree') + + def test_get_intersects_single_bounds_row(self): + field = self.get_field(with_value=True) + sub = field[:,0,:,0,0] + irregular = wkt.loads('POLYGON((-100.106049 
38.211305,-99.286894 38.251591,-99.286894 38.258306,-99.286894 38.258306,-99.260036 39.252035,-98.769886 39.252035,-98.722885 37.734583,-100.092620 37.714440,-100.106049 38.211305))') + ## the intersects operations is empty. this was testing that contiguous + ## bounds check fails appropriately with a single bounds row. + with self.assertRaises(EmptySubsetError): + sub.get_intersects(irregular) def test_get_iter_two_variables(self): field = self.get_field(with_value=True) @@ -154,22 +341,43 @@ def test_get_iter_two_variables(self): self.assertTrue(row['value'] > 3) self.assertEqual(set(vids), set([1, 2])) - def test_get_intersects_single_bounds_row(self): - field = self.get_field(with_value=True) - sub = field[:,0,:,0,0] - irregular = wkt.loads('POLYGON((-100.106049 38.211305,-99.286894 38.251591,-99.286894 38.258306,-99.286894 38.258306,-99.260036 39.252035,-98.769886 39.252035,-98.722885 37.734583,-100.092620 37.714440,-100.106049 38.211305))') - ## the intersects operations is empty. this was testing that contiguous - ## bounds check fails appropriately with a single bounds row. 
- with self.assertRaises(EmptySubsetError): - sub.get_intersects(irregular) - + def test_name(self): + field = self.get_field(field_name='foo') + self.assertEqual(field.name, 'foo') + field.name = 'svelt' + self.assertEqual(field.name, 'svelt') + + def test_name_none_one_variable(self): + field = self.get_field(field_name=None) + self.assertEqual(field.name, field.variables.values()[0].alias) + + def test_name_none_two_variables(self): + field = self.get_field() + field2 = self.get_field() + var2 = field2.variables['tmax'] + var2.alias = 'tmax2' + field.variables.add_variable(var2, assign_new_uid=True) + self.assertEqual(field.name, 'tmax_tmax2') + + def test_loading_from_source_spatial_bounds(self): + """Test row bounds may be set to None when loading from source.""" + + field = self.test_data.get_rd('cancm4_tas').get() + field.spatial.grid.row.bounds + field.spatial.grid.row.bounds = None + self.assertIsNone(field.spatial.grid.row.bounds) + + def test_should_regrid(self): + field = self.get_field() + self.assertFalse(field._should_regrid) + def test_shape_as_dict(self): field = self.get_field(with_value=False) to_test = field.shape_as_dict for variable in field.variables.values(): self.assertEqual(variable._value,None) self.assertEqual(to_test,{'Y': 3, 'X': 4, 'Z': 2, 'R': 2, 'T': 31}) - + def test_slicing(self): field = self.get_field(with_value=True) with self.assertRaises(IndexError): @@ -177,11 +385,7 @@ def test_slicing(self): sub = field[0,0,0,0,0] self.assertEqual(sub.shape,(1,1,1,1,1)) self.assertEqual(sub.variables['tmax'].value.shape,(1,1,1,1,1)) - - def test_deepcopy(self): - field = self.get_field(with_value=True) - deepcopy(field) - + def test_slicing_general(self): """Test slicing on different types of fields.""" @@ -234,153 +438,27 @@ def test_slicing_general(self): else: self.assertEqual(field_slc.variables['tmax']._value, None) self.assertEqual(field_slc.variables['tmax']._value, field.variables['tmax']._value) - - def test_constructor(self): - 
for b,wv in itertools.product([True,False],[True,False]): - field = self.get_field(with_bounds=b,with_value=wv) - ref = field.shape - self.assertEqual(ref,(2,31,2,3,4)) - with self.assertRaises(AttributeError): - field.value - self.assertIsInstance(field.variables,VariableCollection) - self.assertIsInstance(field.variables['tmax'],Variable) - if wv: - self.assertIsInstance(field.variables['tmax'].value,np.ma.MaskedArray) - self.assertEqual(field.variables['tmax'].value.shape,field.shape) - else: - with self.assertRaises(Exception): - field.variables['tmax'].value - - def test_get_iter(self): - field = self.get_field(with_value=True) - rows = list(field.get_iter()) - self.assertEqual(len(rows),2*31*2*3*4) - rows[100]['geom'] = rows[100]['geom'].bounds - real = {'realization_bnds_lower': None, 'vid': 1, 'time_bnds_upper': datetime.datetime(2000, 1, 6, 0, 0), 'realization_bnds_upper': None, 'year': 2000, 'SPATIAL_uid': 5, 'level_bnds_upper': 100, 'realization_uid': 1, 'realization': 1, 'geom': (-100.5, 38.5, -99.5, 39.5), 'level_bnds_lower': 0, 'variable': 'tmax', 'month': 1, 'time_bnds_lower': datetime.datetime(2000, 1, 5, 0, 0), 'day': 5, 'level': 50, 'did': None, 'value': 0.32664490177209615, 'alias': 'tmax', 'level_uid': 1, 'time': datetime.datetime(2000, 1, 5, 12, 0), 'time_uid': 5, 'name': 'tmax'} - for k,v in rows[100].iteritems(): - self.assertEqual(real[k],v) - self.assertEqual(set(real.keys()),set(rows[100].keys())) - self.assertEqual(set(field.variables['tmax'].value.flatten().tolist()),set([r['value'] for r in rows])) - - def test_get_intersects_domain_polygon(self): - regular = make_poly((36.61,41.39),(-101.41,-95.47)) - field = self.get_field(with_value=True) - for b in [True,False]: - try: - ret = field.get_intersects(regular,use_spatial_index=b) - self.assertNumpyAll(ret.variables['tmax'].value,field.variables['tmax'].value) - self.assertNumpyAll(field.spatial.grid.value,ret.spatial.grid.value) - except ImportError: - with 
self.assertRaises(ImportError): - import_module('rtree') - - def test_get_intersects_irregular_polygon(self): - irregular = wkt.loads('POLYGON((-100.106049 38.211305,-99.286894 38.251591,-99.286894 38.258306,-99.286894 38.258306,-99.260036 39.252035,-98.769886 39.252035,-98.722885 37.734583,-100.092620 37.714440,-100.106049 38.211305))') - keywords = dict(b=[True, False], - with_corners=[True, False]) - for k in itr_products_keywords(keywords, as_namedtuple=True): - try: - field = self.get_field(with_value=True) - if k.with_corners: - field.spatial.grid.corners - ret = field.get_intersects(irregular,use_spatial_index=k.b) - self.assertEqual(ret.shape,(2,31,2,2,2)) - self.assertNumpyAll(ret.variables['tmax'].value.mask[0,2,1,:,:],np.array([[True,False],[False,False]])) - self.assertEqual(ret.spatial.uid.data[ret.spatial.get_mask()][0],5) - if k.with_corners: - self.assertNumpyAll(ret.spatial.grid.corners.mask, np.array([[[[True, True, True, True], [False, False, False, False]], [[False, False, False, False], [False, False, False, False]]], [[[True, True, True, True], [False, False, False, False]], [[False, False, False, False], [False, False, False, False]]]])) - else: - self.assertIsNone(ret.spatial.grid._corners) - except ImportError: - with self.assertRaises(ImportError): - import_module('rtree') - - def test_get_clip_single_cell(self): - single = wkt.loads('POLYGON((-97.997731 39.339322,-97.709012 39.292322,-97.742584 38.996888,-97.668726 38.641026,-98.158876 38.708170,-98.340165 38.916316,-98.273021 39.218463,-97.997731 39.339322))') - field = self.get_field(with_value=True) - for b in [True,False]: - try: - ret = field.get_clip(single,use_spatial_index=b) - self.assertEqual(ret.shape,(2,31,2,1,1)) - self.assertEqual(ret.spatial.grid._value.sum(),-59.0) - self.assertTrue(ret.spatial.geom.polygon.value[0,0].almost_equals(single)) - self.assertEqual(ret.spatial.uid,np.array([[7]])) - - 
self.assertEqual(ret.spatial.geom.point.value.shape,ret.spatial.geom.polygon.shape) - ref_pt = ret.spatial.geom.point.value[0,0] - ref_poly = ret.spatial.geom.polygon.value[0,0] - self.assertTrue(ref_poly.intersects(ref_pt)) - except ImportError: - with self.assertRaises(ImportError): - import_module('rtree') - - def test_get_clip_irregular(self): - for wv in [True,False]: - single = wkt.loads('POLYGON((-99.894355 40.230645,-98.725806 40.196774,-97.726613 40.027419,-97.032258 39.942742,-97.681452 39.626613,-97.850806 39.299194,-98.178226 39.643548,-98.844355 39.920161,-99.894355 40.230645))') - field = self.get_field(with_value=wv) - for b in [True,False]: - try: - ret = field.get_clip(single,use_spatial_index=b) - self.assertEqual(ret.shape,(2,31,2,2,4)) - unioned = cascaded_union([geom for geom in ret.spatial.geom.polygon.value.compressed().flat]) - self.assertAlmostEqual(unioned.area,single.area) - self.assertAlmostEqual(unioned.bounds,single.bounds) - self.assertAlmostEqual(unioned.exterior.length,single.exterior.length) - self.assertAlmostEqual(ret.spatial.weights[1,2],0.064016424) - self.assertAlmostEqual(ret.spatial.weights.sum(),1.776435) - if not wv: - with self.assertRaises(NotImplementedError): - ret.variables['tmax'].value - except ImportError: - with self.assertRaises(ImportError): - import_module('rtree') - - def test_get_aggregated_irregular(self): - single = wkt.loads('POLYGON((-99.894355 40.230645,-98.725806 40.196774,-97.726613 40.027419,-97.032258 39.942742,-97.681452 39.626613,-97.850806 39.299194,-98.178226 39.643548,-98.844355 39.920161,-99.894355 40.230645))') + + def test_slicing_specific(self): field = self.get_field(with_value=True) - for b in [True,False]: - try: - ret = field.get_clip(single,use_spatial_index=b) - agg = ret.get_spatially_aggregated() - to_test = agg.spatial.geom.polygon.value[0,0] - self.assertAlmostEqual(to_test.area,single.area) - self.assertAlmostEqual(to_test.bounds,single.bounds) - 
self.assertAlmostEqual(to_test.exterior.length,single.exterior.length) - except ImportError: - with self.assertRaises(ImportError): - import_module('rtree') - - def test_get_aggregated_all(self): - for wv in [True,False]: - field = self.get_field(with_value=wv) - try: - agg = field.get_spatially_aggregated() - except NotImplementedError: - if not wv: - continue - else: - raise - self.assertNotEqual(field.spatial.grid,None) - self.assertEqual(agg.spatial.grid,None) - self.assertEqual(agg.shape,(2,31,2,1,1)) - self.assertNumpyAll(field.variables['tmax'].value,agg._raw.variables['tmax'].value) - self.assertTrue(np.may_share_memory(field.variables['tmax'].value,agg._raw.variables['tmax'].value)) - - to_test = field.variables['tmax'].value[0,0,0,:,:].mean() - self.assertNumpyAll(to_test,agg.variables['tmax'].value[0,0,0,0,0]) - + field_slc = field[:,0:2,0,:,:] + self.assertEqual(field_slc.shape,(2,2,1,3,4)) + self.assertEqual(field_slc.variables['tmax'].value.shape,(2,2,1,3,4)) + ref_field_real_slc = field.variables['tmax'].value[:,0:2,0,:,:] + self.assertNumpyAll(ref_field_real_slc.flatten(),field_slc.variables['tmax'].value.flatten()) + def test_subsetting(self): for wv in [True,False]: field = self.get_field(with_value=wv) self.assertNotIsInstance(field.temporal.value,np.ma.MaskedArray) - + temporal_start = dt(2000,1,1,12) temporal_stop = dt(2000,1,31,12) ret = field.temporal.get_between(temporal_start,temporal_stop) self.assertIsInstance(ret,VectorDimension) self.assertNumpyAll(ret.value,field.temporal.value) self.assertNumpyAll(ret.bounds,field.temporal.bounds) - + ret = field.get_between('temporal',temporal_start,temporal_stop) self.assertIsInstance(ret,Field) self.assertEqual(ret.shape,field.shape) @@ -389,61 +467,178 @@ def test_subsetting(self): else: with self.assertRaises(NotImplementedError): ret.variables['tmax'].value - + ## try empty subset with self.assertRaises(EmptySubsetError): field.get_between('level',100000,2000000000) - + ret = 
field.get_between('realization',1,1) self.assertEqual(ret.shape,(1, 31, 2, 3, 4)) if wv: self.assertNumpyAll(ret.variables['tmax'].value,field.variables['tmax'].value[0:1,:,:,:,:]) - + ret = field.get_between('temporal',dt(2000,1,15),dt(2000,1,30)) self.assertEqual(ret.temporal.value[0],dt(2000,1,15,12)) self.assertEqual(ret.temporal.value[-1],dt(2000,1,30,12)) - - def test_empty(self): + + def test_variables(self): + row = VectorDimension(value=[5, 6]) + col = VectorDimension(value=[7, 8]) + grid = SpatialGridDimension(row=row, col=col) + sdim = SpatialDimension(grid=grid) + temporal = TemporalDimension(value=[5000]) + field = Field(spatial=sdim, temporal=temporal) + self.assertIsInstance(field.variables, VariableCollection) + self.assertEqual(len(field.variables), 0) + self.assertEqual(field.shape, (1, 1, 1, 2, 2)) with self.assertRaises(ValueError): - Field() - - def test_slicing_specific(self): - field = self.get_field(with_value=True) - field_slc = field[:,0:2,0,:,:] - self.assertEqual(field_slc.shape,(2,2,1,3,4)) - self.assertEqual(field_slc.variables['tmax'].value.shape,(2,2,1,3,4)) - ref_field_real_slc = field.variables['tmax'].value[:,0:2,0,:,:] - self.assertNumpyAll(ref_field_real_slc.flatten(),field_slc.variables['tmax'].value.flatten()) - - def test_fancy_indexing(self): - field = self.get_field(with_value=True) - sub = field[:,(3,5,10,15),:,:,:] - self.assertEqual(sub.shape,(2,4,2,3,4)) - self.assertNumpyAll(sub.variables['tmax'].value,field.variables['tmax'].value[:,(3,5,10,15),:,:,:]) - - sub = field[:,(3,15),:,:,:] - self.assertEqual(sub.shape,(2,2,2,3,4)) - self.assertNumpyAll(sub.variables['tmax'].value,field.variables['tmax'].value[:,(3,15),:,:,:]) - - sub = field[:,3:15,:,:,:] - self.assertEqual(sub.shape,(2,12,2,3,4)) - self.assertNumpyAll(sub.variables['tmax'].value,field.variables['tmax'].value[:,3:15,:,:,:]) + field.variables = 'foo' + + def test_write_to_netcdf_dataset(self): + keywords = dict(file_only=[False, True], + 
second_variable_alias=[None, 'tmin_alias'], + with_realization=[False, True], + remove_dimension_names=[False, True], + crs=[None, Spherical()], + with_level=[True, False]) + path = os.path.join(self.current_dir_output, 'foo.nc') + + for k in itr_products_keywords(keywords, as_namedtuple=True): + field = self.get_field(with_value=True, with_realization=k.with_realization, crs=k.crs, + with_level=k.with_level) + + if k.remove_dimension_names: + try: + field.level.name = None + except AttributeError: + self.assertFalse(k.with_level) + field.temporal.name = None + field.spatial.grid.row.name = None + field.spatial.grid.col.name = None + + # add another variable + value = np.random.rand(*field.shape) + second_variable_name = 'tmin' + second_variable_alias = k.second_variable_alias or second_variable_name + variable = Variable(value=value, name=second_variable_name, alias=k.second_variable_alias) + variable.attrs['open'] = 'up' + field.variables.add_variable(variable, assign_new_uid=True) + + # add some attributes + field.attrs['foo'] = 'some information' + field.attrs['another'] = 'some more information' + + with nc_scope(path, 'w') as ds: + try: + field.write_to_netcdf_dataset(ds, file_only=k.file_only) + except ValueError: + self.assertTrue(k.with_realization) + self.assertIsNotNone(field.realization) + continue + + with nc_scope(path) as ds: + self.assertEqual(ds.another, 'some more information') + try: + variable_names = ['time', 'time_bounds', 'latitude', 'latitude_bounds', 'longitude', 'longitude_bounds', 'tmax', second_variable_alias] + dimension_names = ['time', 'bounds', 'latitude', 'longitude'] + if k.crs is not None: + variable_names.append(k.crs.name) + if k.with_level: + variable_names += ['level', 'level_bounds'] + dimension_names.append('level') + self.assertEqual(set(ds.variables.keys()), set(variable_names)) + self.assertEqual(set(ds.dimensions.keys()), set(dimension_names)) + except AssertionError: + self.assertTrue(k.remove_dimension_names) + 
variable_names = ['time', 'time_bounds', 'yc', 'yc_bounds', 'xc', 'xc_bounds', 'tmax', second_variable_alias] + dimension_names = ['time', 'bounds', 'yc', 'xc'] + if k.crs is not None: + variable_names.append(k.crs.name) + if k.with_level: + variable_names += ['level', 'level_bounds'] + dimension_names.append('level') + self.assertEqual(set(ds.variables.keys()), set(variable_names)) + self.assertEqual(set(ds.dimensions.keys()), set(dimension_names)) + nc_second_variable = ds.variables[second_variable_alias] + + try: + for field_variable in field.variables.itervalues(): + self.assertEqual(ds.variables[field_variable.alias].grid_mapping, + k.crs.name) + except AttributeError: + self.assertIsNone(k.crs) + + self.assertEqual(nc_second_variable.open, 'up') + try: + self.assertNumpyAll(nc_second_variable[:], value.squeeze()) + except AssertionError: + self.assertTrue(k.file_only) + self.assertTrue(nc_second_variable[:].mask.all()) + self.assertEqual(ds.variables['tmax'].units, field.variables['tmax'].units) + self.assertEqual(nc_second_variable.units, '') + + new_field = RequestDataset(path).get() + self.assertEqual(new_field.variables.keys(), ['tmax', second_variable_alias]) + if k.with_level: + level_shape = 2 + else: + level_shape = 1 + self.assertEqual(new_field.shape, (1, 31, level_shape, 3, 4)) + + def test_write_to_netcdf_dataset_with_metadata(self): + """Test writing to netCDF with a source metadata dictionary attached and data loaded from file.""" + + rd = self.test_data.get_rd('narccap_lambert_conformal') + field = rd.get()[:, 0:31, :, 20:30, 30:40] + path = os.path.join(self.current_dir_output, 'foo.nc') + with nc_scope(path, 'w') as ds: + field.write_to_netcdf_dataset(ds) + self.assertSetEqual(set(ds.variables.keys()), {u'time', 'time_bnds', u'yc', u'xc', u'Lambert_Conformal', + 'pr'}) + self.assertGreater(len(ds.__dict__), 0) + self.assertGreater(len(ds.variables['time'].__dict__), 0) + + def test_write_to_netcdf_dataset_without_row_column_on_grid(self): + 
"""Test writing a field without rows and columns on the grid.""" + + field = self.get_field(with_value=True, with_realization=False) + field.spatial.grid.value + field.spatial.grid.corners + field.spatial.grid.row = None + field.spatial.grid.col = None + path = os.path.join(self.current_dir_output, 'foo.nc') + with nc_scope(path, 'w') as ds: + field.write_to_netcdf_dataset(ds) + self.assertAsSetEqual(ds.variables.keys(), ['time', 'time_bounds', 'level', 'level_bounds', + constants.default_name_row_coordinates, + constants.default_name_col_coordinates, 'yc_corners', + 'xc_corners', 'tmax']) + self.assertAsSetEqual(ds.dimensions.keys(), + ['time', 'bounds', 'level', constants.default_name_row_coordinates, + constants.default_name_col_coordinates, constants.default_name_corners_dimension]) + + def test_write_to_netcdf_dataset_without_temporal(self): + """Test without a temporal dimensions.""" + + path = os.path.join(self.current_dir_output, 'foo.nc') + field = self.get_field(with_temporal=False, with_realization=False, with_value=True, with_level=False) + with self.nc_scope(path, 'w') as ds: + field.write_to_netcdf_dataset(ds) + with self.nc_scope(path) as ds: + vars = ds.variables.keys() + self.assertAsSetEqual(vars, [u'latitude', u'latitude_bounds', u'longitude', u'longitude_bounds', u'tmax']) class TestDerivedField(AbstractTestField): - def test_constructor(self): + def test_init(self): field = self.get_field(with_value=True,month_count=2) tgd = field.temporal.get_grouping(['month']) new_data = np.random.rand(2,2,2,3,4) mu = Variable(name='mu',value=new_data) df = DerivedField(variables=mu,temporal=tgd,spatial=field.spatial, level=field.level,realization=field.realization) + self.assertIsInstance(df, Field) self.assertIsInstance(df.temporal.value[0],datetime.datetime) self.assertEqual(df.temporal.value.tolist(),[datetime.datetime(2000, 1, 16, 0, 0),datetime.datetime(2000, 2, 16, 0, 0)]) self.assertEqual(df.temporal.bounds[1,1],datetime.datetime(2000, 3, 1, 0, 0)) - 
- -if __name__ == "__main__": - #import sys;sys.argv = ['', 'Test.testName'] - unittest.main() diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_variable.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_variable.py index b765ba4ea..b4cfb8c96 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_variable.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_variable.py @@ -1,32 +1,84 @@ +from collections import OrderedDict from numpy.ma import MaskedArray from cfunits import Units from ocgis.exc import VariableInCollectionError, NoUnitsError +from ocgis.interface.base.attributes import Attributes from ocgis.test.base import TestBase -from ocgis.interface.base.variable import Variable, VariableCollection +from ocgis.interface.base.variable import Variable, VariableCollection, AbstractSourcedVariable, AbstractValueVariable import numpy as np from ocgis.util.helpers import get_iter from ocgis.util.itester import itr_products_keywords +class FakeAbstractSourcedVariable(AbstractSourcedVariable): + + def _set_value_from_source_(self): + self._value = self._src_idx*2 + + +class TestAbstractSourcedVariable(TestBase): + + def iter(self): + src_idx = [1, 2] + data = 'foo' + kwds = dict(src_idx=[src_idx, None], + data=[data, None]) + + for k in self.iter_product_keywords(kwds): + yield k + + def test_init(self): + for k in self.iter(): + FakeAbstractSourcedVariable(k.data, k.src_idx) + + FakeAbstractSourcedVariable(None, None) + + def test_format_src_idx(self): + aa = FakeAbstractSourcedVariable('foo', src_idx=[1, 2]) + self.assertNumpyAll(aa._format_src_idx_([1, 2]), np.array([1, 2])) + + def test_get_value(self): + aa = FakeAbstractSourcedVariable('foo', src_idx=[1, 2]) + aa._value = None + self.assertNumpyAll(aa._get_value_(), np.array([1, 2])*2) + + def test_src_idx(self): + aa = FakeAbstractSourcedVariable('foo', src_idx=[1, 2]) + self.assertNumpyAll(aa._src_idx, np.array([1, 2])) + + + +class 
FakeAbstractValueVariable(AbstractValueVariable): + + def _get_value_(self): + return np.array(self._value) + + +class TestAbstractValueVariable(TestBase): + create_dir = False + + def test_init(self): + kwds = dict(value=[[4, 5, 6]]) + + for k in self.iter_product_keywords(kwds): + av = FakeAbstractValueVariable(value=k.value) + self.assertEqual(av.value, k.value) + + class TestVariable(TestBase): - def test_init_without_value_dtype_fill_value(self): - var = Variable(data='foo') - with self.assertRaises(ValueError): - var.dtype - with self.assertRaises(ValueError): - var.fill_value - - def test_init_without_value_with_dtype_fill_value(self): - var = Variable(data='foo',dtype=np.float,fill_value=9) - self.assertEqual(var.dtype,np.float) - self.assertEqual(var.fill_value,9) - + def test_init(self): + self.assertEqual(Variable.__bases__, (AbstractSourcedVariable, AbstractValueVariable, Attributes)) + + # test passing attributes + var = Variable(attrs={'a': 6}, value=np.array([5])) + self.assertEqual(var.attrs['a'], 6) + def test_init_with_value_with_dtype_fill_value(self): var = Variable(data='foo',dtype=np.float,fill_value=9,value=np.array([1,2,3,4])) self.assertEqual(var.dtype,np.float) self.assertEqual(var.fill_value,9) - + def test_init_with_value_without_dtype_fill_value(self): value = np.array([1,2,3,4]) value = np.ma.array(value) @@ -34,6 +86,22 @@ def test_init_with_value_without_dtype_fill_value(self): self.assertEqual(var.dtype,value.dtype) self.assertEqual(var.fill_value,value.fill_value) + def test_init_without_value_dtype_fill_value(self): + var = Variable(data='foo') + with self.assertRaises(ValueError): + var.dtype + with self.assertRaises(ValueError): + var.fill_value + + def test_init_without_value_with_dtype_fill_value(self): + var = Variable(data='foo',dtype=np.float,fill_value=9) + self.assertEqual(var.dtype,np.float) + self.assertEqual(var.fill_value,9) + + def test_str(self): + var = Variable(name='toon') + self.assertEqual(str(var), 
'Variable(name="toon", alias="toon", units=None)') + def test_conform_units_to(self): """Test using the conform_units_to keyword argument.""" @@ -91,10 +159,14 @@ def test_get_empty_like(self): value = np.array([1, 2, 3, 4, 5]) value = np.ma.array(value, mask=[False, True, False, True, False]) kwargs['value'] = value + kwargs['attrs'] = OrderedDict(foo=5) var = Variable(**kwargs) for shape in [None, (2, 2)]: new_var = var.get_empty_like(shape=shape) + self.assertDictEqual(new_var.attrs, kwargs['attrs']) + new_var.attrs['hi'] = 'wow' + self.assertNotEqual(new_var.attrs, kwargs['attrs']) self.assertEqual(new_var.uid, var.uid) if shape is None: actual = np.ma.array(np.zeros(5), dtype=var.dtype, fill_value=var.fill_value, mask=value.mask) diff --git a/src/ocgis/test/test_ocgis/test_interface/test_nc/test_spatial.py b/src/ocgis/test/test_ocgis/test_interface/test_nc/test_spatial.py new file mode 100644 index 000000000..1ddd024d2 --- /dev/null +++ b/src/ocgis/test/test_ocgis/test_interface/test_nc/test_spatial.py @@ -0,0 +1,95 @@ +from ocgis import RequestDataset +from ocgis.interface.base.dimension.spatial import SpatialGridDimension +from ocgis.interface.base.variable import AbstractSourcedVariable +from ocgis.interface.base.dimension.base import VectorDimension +from ocgis.interface.nc.spatial import NcSpatialGridDimension +from ocgis.test.base import TestBase +import numpy as np + + +class TestNcSpatialGridDimension(TestBase): + + def test_init(self): + self.assertEqual(NcSpatialGridDimension.__bases__, (AbstractSourcedVariable, SpatialGridDimension)) + + row = VectorDimension(value=[4, 5]) + col = VectorDimension(value=[6, 7, 8]) + NcSpatialGridDimension(row=row, col=col) + + def test_getitem(self): + src_idx = {'row': np.array([5, 6, 7, 8]), 'col': np.array([9, 10, 11])} + grid = NcSpatialGridDimension(src_idx=src_idx, data='foo') + self.assertIsNone(grid._uid) + sub = grid[1:3, 1] + self.assertNumpyAll(sub._src_idx['col'], np.array([10])) + 
self.assertNumpyAll(sub._src_idx['row'], np.array([6, 7])) + for k, v in src_idx.iteritems(): + self.assertNumpyAll(grid._src_idx[k], v) + + def test_format_src_idx(self): + ref = NcSpatialGridDimension._format_src_idx_ + value = {'row': np.array([5]), 'col': np.array([6])} + self.assertEqual(value, ref(value)) + + def test_get_uid(self): + src_idx = {'row': np.array([5, 6, 7, 8]), 'col': np.array([9, 10, 11])} + grid = NcSpatialGridDimension(src_idx=src_idx, data='foo') + uid1 = grid._get_uid_() + self.assertEqual(uid1.shape, (4, 3)) + + value = np.ma.array(np.zeros((2, 4, 3))) + grid = NcSpatialGridDimension(value=value) + uid2 = grid._get_uid_() + self.assertEqual(uid2.shape, (4, 3)) + + self.assertNumpyAll(uid1, uid2) + + def test_set_value_from_source(self): + path = self.get_netcdf_path_no_row_column() + rd = RequestDataset(path) + + src_idx = {'row': np.array([0, 1]), 'col': np.array([0])} + grid = NcSpatialGridDimension(data=rd, src_idx=src_idx, name_row='yc', name_col='xc') + self.assertEqual(grid.value.shape, (2, 2, 1)) + with self.nc_scope(path) as ds: + var_row = ds.variables[grid.name_row] + var_col = ds.variables[grid.name_col] + self.assertNumpyAll(var_row[:, 0].reshape(2, 1), grid.value[0].data) + self.assertNumpyAll(var_col[:, 0].reshape(2, 1), grid.value[1].data) + + src_idx = {'row': np.array([0]), 'col': np.array([1])} + grid = NcSpatialGridDimension(data=rd, src_idx=src_idx, name_row='yc', name_col='xc') + self.assertIsNone(grid._value) + self.assertIsNone(grid._corners) + self.assertEqual(grid.value.shape, (2, 1, 1)) + self.assertEqual(grid.corners.shape, (2, 1, 1, 4)) + self.assertEqual(grid.corners_esmf.shape, (2, 2, 2)) + actual = np.ma.array([[[[3.5, 3.5, 4.5, 4.5]]], [[[45.0, 55.0, 55.0, 45.0]]]]) + self.assertNumpyAll(actual, grid.corners) + + def test_shape(self): + src_idx = {'row': np.array([5, 6, 7, 8]), 'col': np.array([9, 10, 11])} + grid = NcSpatialGridDimension(src_idx=src_idx, data='foo') + self.assertEqual(grid.shape, (4, 3)) + 
self.assertIsNone(grid._value) + + row = VectorDimension(value=[4, 5]) + col = VectorDimension(value=[6, 7, 8]) + grid = NcSpatialGridDimension(row=row, col=col) + self.assertEqual(grid.shape, (2, 3)) + + + def test_validate(self): + with self.assertRaises(ValueError): + NcSpatialGridDimension() + NcSpatialGridDimension(data='foo') + + def test_value(self): + row = VectorDimension(value=[4, 5]) + col = VectorDimension(value=[6, 7, 8]) + grid = NcSpatialGridDimension(row=row, col=col) + self.assertEqual(grid.shape, (2, 3)) + + value = grid.value.copy() + grid = NcSpatialGridDimension(value=value) + self.assertNumpyAll(grid.value, value) \ No newline at end of file diff --git a/src/ocgis/test/test_ocgis/test_interface/test_nc/test_temporal.py b/src/ocgis/test/test_ocgis/test_interface/test_nc/test_temporal.py index d184d727e..586f6658a 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_nc/test_temporal.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_nc/test_temporal.py @@ -1,95 +1,18 @@ -import netCDF4 as nc +from ocgis.interface.nc.dimension import NcVectorDimension +from ocgis.interface.base.dimension.temporal import TemporalDimension, TemporalGroupDimension + from ocgis.test.base import TestBase -from ocgis.interface.nc.temporal import NcTemporalDimension, \ - get_origin_datetime_from_months_units, get_datetime_from_months_time_units, \ - get_difference_in_months, get_num_from_months_time_units -import datetime -import numpy as np +from ocgis.interface.nc.temporal import NcTemporalDimension class TestNcTemporalDimension(TestBase): - def test_360_day_calendar(self): - months = range(1, 13) - days = range(1, 31) - vec = [] - for month in months: - for day in days: - vec.append(nc.netcdftime.datetime(2000, month, day)) - num = nc.date2num(vec, 'days since 1900-01-01', calendar='360_day') - td = NcTemporalDimension(value=num, calendar='360_day', units='days since 1900-01-01') - self.assertNumpyAll(np.array(vec), td.value_datetime) - - def 
test_get_origin_datetime_from_months_units(self): - units = "months since 1978-12" - self.assertEqual(get_origin_datetime_from_months_units(units), datetime.datetime(1978, 12, 1)) - units = "months since 1979-1-1 0" - self.assertEqual(get_origin_datetime_from_months_units(units), datetime.datetime(1979, 1, 1)) - - def test_get_datetime_from_months_time_units(self): - units = "months since 1978-12" - vec = range(0, 36) - datetimes = get_datetime_from_months_time_units(vec, units) - test_datetimes = [datetime.datetime(1978, 12, 16, 0, 0), datetime.datetime(1979, 1, 16, 0, 0), - datetime.datetime(1979, 2, 16, 0, 0), datetime.datetime(1979, 3, 16, 0, 0), - datetime.datetime(1979, 4, 16, 0, 0), datetime.datetime(1979, 5, 16, 0, 0), - datetime.datetime(1979, 6, 16, 0, 0), datetime.datetime(1979, 7, 16, 0, 0), - datetime.datetime(1979, 8, 16, 0, 0), datetime.datetime(1979, 9, 16, 0, 0), - datetime.datetime(1979, 10, 16, 0, 0), datetime.datetime(1979, 11, 16, 0, 0), - datetime.datetime(1979, 12, 16, 0, 0), datetime.datetime(1980, 1, 16, 0, 0), - datetime.datetime(1980, 2, 16, 0, 0), datetime.datetime(1980, 3, 16, 0, 0), - datetime.datetime(1980, 4, 16, 0, 0), datetime.datetime(1980, 5, 16, 0, 0), - datetime.datetime(1980, 6, 16, 0, 0), datetime.datetime(1980, 7, 16, 0, 0), - datetime.datetime(1980, 8, 16, 0, 0), datetime.datetime(1980, 9, 16, 0, 0), - datetime.datetime(1980, 10, 16, 0, 0), datetime.datetime(1980, 11, 16, 0, 0), - datetime.datetime(1980, 12, 16, 0, 0), datetime.datetime(1981, 1, 16, 0, 0), - datetime.datetime(1981, 2, 16, 0, 0), datetime.datetime(1981, 3, 16, 0, 0), - datetime.datetime(1981, 4, 16, 0, 0), datetime.datetime(1981, 5, 16, 0, 0), - datetime.datetime(1981, 6, 16, 0, 0), datetime.datetime(1981, 7, 16, 0, 0), - datetime.datetime(1981, 8, 16, 0, 0), datetime.datetime(1981, 9, 16, 0, 0), - datetime.datetime(1981, 10, 16, 0, 0), datetime.datetime(1981, 11, 16, 0, 0)] - self.assertNumpyAll(datetimes, np.array(test_datetimes)) - - def 
test_get_difference_in_months(self): - distance = get_difference_in_months(datetime.datetime(1978, 12, 1), datetime.datetime(1979, 3, 1)) - self.assertEqual(distance, 3) - distance = get_difference_in_months(datetime.datetime(1978, 12, 1), datetime.datetime(1978, 7, 1)) - self.assertEqual(distance, -5) - distance = get_difference_in_months(datetime.datetime(1978, 12, 1), datetime.datetime(1978, 12, 1)) - self.assertEqual(distance, 0) - - def test_get_num_from_months_time_units_1d_array(self): - units = "months since 1978-12" - vec = range(0, 36) - datetimes = get_datetime_from_months_time_units(vec, units) - num = get_num_from_months_time_units(datetimes, units, dtype=np.int32) - self.assertNumpyAll(num, np.array(vec,dtype=np.int32)) - self.assertEqual(num.dtype, np.int32) - - def test_months_in_time_units_are_bad_netcdftime(self): - units = "months since 1978-12" - vec = range(0, 36) - calendar = "standard" - with self.assertRaises(ValueError): - nc.num2date(vec, units, calendar=calendar) - - def test_months_in_time_units(self): - units = "months since 1978-12" - vec = range(0, 36) - datetimes = get_datetime_from_months_time_units(vec, units) - td = NcTemporalDimension(value=vec, units=units, calendar='standard') - self.assertTrue(td._has_months_units) - self.assertNumpyAll(td.value_datetime, datetimes) - def test_months_in_time_units_between(self): - units = "months since 1978-12" - vec = range(0, 36) - datetimes = get_datetime_from_months_time_units(vec, units) - td = NcTemporalDimension(value=vec, units=units, calendar='standard') - ret = td.get_between(datetimes[0], datetimes[3]) - self.assertNumpyAll(ret.value, np.array([0, 1, 2, 3])) + def test_init(self): + ntd = NcTemporalDimension(value=[5]) + self.assertIsInstance(ntd, TemporalDimension) + self.assertIsInstance(ntd, NcVectorDimension) - def test_months_not_in_time_units(self): - units = "days since 1900-01-01" - value = np.array([31]) - td = NcTemporalDimension(value=value, units=units, 
calendar='standard') - self.assertFalse(td._has_months_units) + def test_get_grouping(self): + ntd = NcTemporalDimension(value=[5000., 5001.]) + tgd = ntd.get_grouping(['month']) + self.assertIsInstance(tgd, TemporalGroupDimension) diff --git a/src/ocgis/test/test_ocgis/test_regrid/test_base.py b/src/ocgis/test/test_ocgis/test_regrid/test_base.py index c3209f85c..72f0db88f 100644 --- a/src/ocgis/test/test_ocgis/test_regrid/test_base.py +++ b/src/ocgis/test/test_ocgis/test_regrid/test_base.py @@ -1,18 +1,26 @@ -from copy import deepcopy, copy +from copy import deepcopy +from unittest import SkipTest +import itertools + import ESMF from shapely.geometry import Polygon, MultiPolygon +import numpy as np + +from ocgis.conv.esmpy import ESMPyConverter +from ocgis.api.collection import SpatialCollection +from ocgis.interface.base.dimension.temporal import TemporalDimension +from ocgis.interface.base.dimension.base import VectorDimension import ocgis -from ocgis.exc import RegriddingError, CornersInconsistentError +from ocgis.exc import RegriddingError, CornersInconsistentError, CannotFormatTimeError from ocgis.interface.base.crs import CoordinateReferenceSystem, WGS84, Spherical from ocgis.interface.base.dimension.spatial import SpatialGridDimension, SpatialDimension from ocgis.interface.base.field import Field -from ocgis.interface.base.variable import VariableCollection +from ocgis.interface.base.variable import VariableCollection, Variable from ocgis.regrid.base import check_fields_for_regridding, iter_regridded_fields, get_esmf_grid_from_sdim, \ - iter_esmf_fields, get_sdim_from_esmf_grid + iter_esmf_fields, get_sdim_from_esmf_grid, get_ocgis_field_from_esmpy_field from ocgis.test.test_simple.make_test_data import SimpleNc from ocgis.test.test_simple.test_simple import TestSimpleBase -import numpy as np -from ocgis.util.helpers import iter_array, make_poly +from ocgis.util.helpers import make_poly from ocgis.util.itester import itr_products_keywords @@ -70,7 +78,7 @@ 
def get_coords(mpoly): mpoly_updated = deepcopy(odd.spatial.geom.polygon.value[0, 0]) mpoly_updated_coords = get_coords(mpoly_updated) - import ipdb;ipdb.set_trace() + raise def atest_to_spherical(self): rd = self.test_data.get_rd('cancm4_tas') @@ -100,7 +108,7 @@ def atest_to_spherical(self): # grid_new = field.spatial.grid.value.copy() field.spatial.write_fiona('/tmp/wgs84.shp', target=target) # diff = np.abs(grid_original[0].data - grid_new[0].data).mean() - import ipdb;ipdb.set_trace() + raise def test_check_fields_for_regridding(self): @@ -411,7 +419,8 @@ def test_get_sdim_from_esmf_grid(self): rd = ocgis.RequestDataset(**self.get_dataset()) keywords = dict(has_corners=[True, False], - has_mask=[True, False]) + has_mask=[True, False], + crs=[None, CoordinateReferenceSystem(epsg=4326)]) for k in itr_products_keywords(keywords, as_namedtuple=True): field = rd.get() @@ -432,7 +441,8 @@ def test_get_sdim_from_esmf_grid(self): egrid.coords[ESMF.StaggerLoc.CORNER] = [np.array(0.0), np.array(0.0)] egrid.coords_done[ESMF.StaggerLoc.CORNER] = [False, False] - nsdim = get_sdim_from_esmf_grid(egrid) + nsdim = get_sdim_from_esmf_grid(egrid, crs=k.crs) + self.assertEqual(nsdim.crs, k.crs) self.assertNumpyAll(sdim.grid.value, nsdim.grid.value) if k.has_corners: @@ -608,6 +618,125 @@ def test_get_esmf_grid_from_sdim_value_mask(self): egrid = get_esmf_grid_from_sdim(field.spatial, value_mask=value_mask) self.assertNumpyAll(egrid.mask[0], np.invert(value_mask.astype(bool)).astype(egrid.mask[0].dtype)) + def test_get_ocgis_field_from_esmpy_field(self): + raise SkipTest + #todo: return spherical crs if none is passed. 
check something on the grid + np.random.seed(1) + temporal = TemporalDimension(value=[3000., 4000., 5000.]) + level = VectorDimension(value=[10, 20, 30, 40]) + realization = VectorDimension(value=[100, 200]) + + kwds = dict(crs=[None, CoordinateReferenceSystem(epsg=4326), Spherical()], + with_mask=[False, True], + with_corners=[False, True], + dimensions=[False, True], + drealization=[False, True], + dtemporal=[False, True], + dlevel=[False, True]) + + for k in self.iter_product_keywords(kwds): + row = VectorDimension(value=[1., 2.]) + col = VectorDimension(value=[3., 4.]) + if k.with_corners: + row.set_extrapolated_bounds() + col.set_extrapolated_bounds() + + value_tmin = np.random.rand(2, 3, 4, 2, 2) + tmin = Variable(value=value_tmin, name='tmin') + variables = VariableCollection([tmin]) + grid = SpatialGridDimension(row=row, col=col) + sdim = SpatialDimension(grid=grid, crs=k.crs) + field = Field(variables=variables, spatial=sdim, temporal=temporal, level=level, realization=realization) + if k.with_mask: + mask = np.zeros(value_tmin.shape[-2:], dtype=bool) + mask[0, 1] = True + field._set_new_value_mask_(field, mask) + sdim.set_mask(mask) + self.assertTrue(tmin.value.mask.any()) + self.assertTrue(sdim.get_mask().any()) + else: + self.assertFalse(tmin.value.mask.any()) + self.assertFalse(sdim.get_mask().any()) + coll = SpatialCollection() + coll[1] = {field.name: field} + conv = ESMPyConverter([coll]) + efield = conv.write() + + if k.dimensions: + dimensions = {} + if k.drealization: + dimensions['realization'] = realization + if k.dtemporal: + dimensions['temporal'] = temporal + if k.dlevel: + dimensions['level'] = level + else: + dimensions = None + + ofield = get_ocgis_field_from_esmpy_field(efield, crs=k.crs, dimensions=dimensions) + + self.assertIsInstance(ofield, Field) + self.assertEqual(ofield.shape, efield.shape) + + if k.drealization and k.dimensions: + target = realization.value + else: + target = np.array([1, 2]) + 
self.assertNumpyAll(ofield.realization.value, target) + + if k.dtemporal and k.dimensions: + target = temporal.value + else: + target = np.array([1, 1, 1]) + with self.assertRaises(CannotFormatTimeError): + ofield.temporal.value_datetime + self.assertFalse(ofield.temporal.format_time) + self.assertNumpyAll(ofield.temporal.value, target) + + if k.dlevel and k.dimensions: + target = level.value + else: + target = np.array([1, 2, 3, 4]) + self.assertNumpyAll(ofield.level.value, target) + + self.assertNumpyAll(field.spatial.grid.value, ofield.spatial.grid.value) + if k.with_corners: + self.assertIsNotNone(ofield.spatial.grid.corners) + self.assertNumpyAll(field.spatial.grid.corners, ofield.spatial.grid.corners) + + self.assertEqual(ofield.spatial.crs, sdim.crs) + + ofield_tmin_value = ofield.variables[efield.name].value + for arr1, arr2 in itertools.combinations([tmin.value, efield, ofield_tmin_value], r=2): + self.assertNumpyAll(arr1, arr2, check_arr_type=False) + + rows = list(ofield.get_iter()) + try: + self.assertEqual(len(rows), len(value_tmin.flatten())) + except AssertionError: + self.assertTrue(k.with_mask) + self.assertEqual(len(rows), len(tmin.value.compressed())) + + self.assertTrue(np.may_share_memory(ofield_tmin_value, efield)) + self.assertFalse(np.may_share_memory(ofield_tmin_value, tmin.value)) + + def test_get_ocgis_field_from_esmpy_spatial_only(self): + """Test with spatial information only.""" + raise SkipTest + row = VectorDimension(value=[5, 6]) + col = VectorDimension(value=[7, 8]) + grid = SpatialGridDimension(row=row, col=col) + sdim = SpatialDimension(grid=grid) + field = Field(spatial=sdim) + value = np.random.rand(*field.shape) + variable = Variable(value=value, name='foo') + field.variables.add_variable(variable) + efield = self.get_esmf_field(field=field) + self.assertIsInstance(efield, ESMF.Field) + ofield = get_ocgis_field_from_esmpy_field(efield) + for attr in ['realization', 'temporal', 'level']: + self.assertIsNone(getattr(ofield, 
attr)) + def test_get_esmf_grid_from_sdim_with_corners(self): """Test with the with_corners option set to False.""" diff --git a/src/ocgis/test/test_ocgis/test_util/test_helpers.py b/src/ocgis/test/test_ocgis/test_util/test_helpers.py index 2345b8ac6..6be1406b8 100644 --- a/src/ocgis/test/test_ocgis/test_util/test_helpers.py +++ b/src/ocgis/test/test_ocgis/test_util/test_helpers.py @@ -1,17 +1,19 @@ from collections import OrderedDict import os +import itertools +from datetime import datetime as dt, datetime + import numpy as np from shapely.geometry import Point + from ocgis.exc import SingleElementError, ShapeError from ocgis.test.test_ocgis.test_interface.test_base.test_dimension.test_spatial import AbstractTestSpatialDimension from ocgis.util.helpers import format_bool, iter_array, validate_time_subset,\ get_formatted_slice, get_is_date_between, get_trimmed_array_by_mask,\ get_added_slice, get_iter, get_ordered_dicts_from_records_array, get_sorted_uris_by_time_dimension, \ get_bounds_from_1d, get_date_list, get_bounds_vector_from_centroids, get_extrapolated_corners_esmf, get_is_increasing, \ - get_extrapolated_corners_esmf_vector -import itertools + get_extrapolated_corners_esmf_vector, set_name_attributes, get_ocgis_corners_from_esmf_corners from ocgis.test.base import TestBase -from datetime import datetime as dt, datetime class Test1(AbstractTestSpatialDimension): @@ -196,9 +198,50 @@ def test_get_bounds_vector_from_centroids(self): ret = get_bounds_vector_from_centroids(centroids) self.assertNumpyAll(ret, np.array([3.5, 0.5, -2.5])) + def test_get_ocgis_corners_from_esmf_corners(self): + sdim = self.get_sdim() + ecorners = sdim.grid.corners_esmf + ocorners = get_ocgis_corners_from_esmf_corners(ecorners) + self.assertNumpyAll(ocorners, sdim.grid.corners) + + sdim = self.get_sdim()[0, 0] + self.assertEqual(sdim.shape, (1, 1)) + ecorners = sdim.grid.corners_esmf + ocorners = get_ocgis_corners_from_esmf_corners(ecorners) + self.assertNumpyAll(ocorners, 
sdim.grid.corners) + class Test2(TestBase): + def test_get_iter(self): + element = 'hi' + ret = list(get_iter(element)) + self.assertEqual(ret, ['hi']) + + element = np.array([5, 6, 7]) + ret = list(get_iter(element)) + self.assertNumpyAll(ret[0], np.array([5, 6, 7])) + + ## test dtype ################################################################################################## + + class FooIterable(object): + + def __init__(self): + self.value = [4, 5, 6] + + def __iter__(self): + for element in self.value: + yield element + + element = FooIterable() + ret = list(get_iter(element)) + self.assertEqual(ret, [4, 5, 6]) + for dtype in FooIterable, (FooIterable, list): + ret = list(get_iter(element, dtype=dtype)) + self.assertIsInstance(ret, list) + self.assertEqual(len(ret), 1) + self.assertIsInstance(ret[0], FooIterable) + def test_get_sorted_uris_by_time_dimension(self): rd_2001 = self.test_data.get_rd('cancm4_tasmax_2001') rd_2011 = self.test_data.get_rd('cancm4_tasmax_2011') @@ -350,7 +393,8 @@ def test_get_is_date_between(self): upper = dt(2013, 1, 2, 0, 0) self.assertTrue(get_is_date_between(lower,upper,year=2013)) - def test_get_formatted_slc(self): + def test_get_formatted_slice(self): + ret = get_formatted_slice(slice(None,None,None),10) self.assertEqual(ret,[slice(None,None,None)]*10) @@ -373,6 +417,22 @@ def test_get_formatted_slc(self): ret = get_formatted_slice((1,),1) self.assertEqual(ret,slice(1)) + def test_set_name_attributes(self): + + class Foo(object): + + def __init__(self, name): + self.name = name + + a = Foo(None) + b = Foo('harbringer') + + name_mapping = {a: 'evil_twin', b: 'again', None: 'whatever'} + set_name_attributes(name_mapping) + + self.assertEqual(a.name, 'evil_twin') + self.assertEqual(b.name, 'harbringer') + def test_validate_time_subset(self): time_range = [dt(2000,1,1),dt(2001,1,1)] self.assertTrue(validate_time_subset(time_range,{'year':[2000,2001]})) diff --git a/src/ocgis/test/test_ocgis/test_util/test_inspect.py 
b/src/ocgis/test/test_ocgis/test_util/test_inspect.py index 77cf3ec25..adc40e017 100644 --- a/src/ocgis/test/test_ocgis/test_util/test_inspect.py +++ b/src/ocgis/test/test_ocgis/test_util/test_inspect.py @@ -5,8 +5,9 @@ import ocgis from ocgis.exc import RequestValidationError from ocgis.interface.metadata import NcMetadata +from ocgis.test.base import nc_scope from ocgis.test.test_simple.make_test_data import SimpleNc -from ocgis.test.test_simple.test_simple import nc_scope, TestSimpleBase +from ocgis.test.test_simple.test_simple import TestSimpleBase from ocgis import Inspect, RequestDataset import numpy as np from ocgis.util.itester import itr_products_keywords diff --git a/src/ocgis/test/test_ocgis/test_util/test_spatial/test_spatial_subset.py b/src/ocgis/test/test_ocgis/test_util/test_spatial/test_spatial_subset.py index f14070396..b9b1af73b 100644 --- a/src/ocgis/test/test_ocgis/test_util/test_spatial/test_spatial_subset.py +++ b/src/ocgis/test/test_ocgis/test_util/test_spatial/test_spatial_subset.py @@ -188,7 +188,6 @@ def test_get_spatial_subset(self): # if ctr != 73: # continue # else: - # import ipdb;ipdb.set_trace() try: ret = use_ss.get_spatial_subset(operation, use_subset_sdim, use_spatial_index=True, diff --git a/src/ocgis/test/test_real_data/test_combinatorial.py b/src/ocgis/test/test_real_data/test_combinatorial.py new file mode 100644 index 000000000..65c79ada2 --- /dev/null +++ b/src/ocgis/test/test_real_data/test_combinatorial.py @@ -0,0 +1,75 @@ +import os +import shutil +from ocgis.test.test_base import longrunning +from ocgis import OcgOperations, RequestDataset +from ocgis.test.base import TestBase + + +class TestCombinatorial(TestBase): + + def iter_dataset(self): + for as_request_dataset in [True, False]: + for k in self.test_data.iterkeys(): + kwds = {} + if k == 'cmip3_extraction': + dimension_map = {'R': 'projection', 'T': 'time', 'Y': 'latitude', 'X': 'longitude'} + kwds['dimension_map'] = dimension_map + rd = self.test_data.get_rd(k, 
kwds=kwds) + if as_request_dataset: + yield k, rd + else: + yield k, rd.get() + + @longrunning + def test(self): + import logbook + + log = logbook.Logger(name='combos', level=logbook.INFO) + + for key, dataset in self.iter_dataset(): + + # if key != 'qed_2013_TNn_annual_min': continue + + # these datasets have only one time element + if key in ('qed_2013_TNn_annual_min', + 'qed_2013_TasMin_seasonal_max_of_seasonal_means', + 'qed_2013_climatology_Tas_annual_max_of_annual_means', + 'qed_2013_maurer02v2_median_txxmmedm_january_1971-2000', + 'qed_2013_maurer02v2_median_txxmmedm_february_1971-2000', + 'qed_2013_maurer02v2_median_txxmmedm_march_1971-2000', + 'snippet_maurer_dtr', + 'snippet_seasonalbias'): + slc = None + else: + slc = [None, [10, 20], None, None, None] + + # this has different data types on the bounds for the coordinate variables. they currently get casted by the + # software. + if key == 'maurer_bcca_1991': + check_types = False + else: + check_types = True + + log.debug('processing: {0} ({1})'.format(key, dataset.__class__.__name__)) + ops = OcgOperations(dataset=dataset, output_format='nc', prefix='nc1', slice=slc) + try: + log.debug('initial write...') + ret1 = ops.execute() + except ValueError: + # realization dimensions may not be written to netCDF yet + if key == 'cmip3_extraction': + continue + else: + raise + else: + try: + ops2 = OcgOperations(dataset={'uri': ret1}, output_format='nc', prefix='nc2') + log.debug('second write...') + ret2 = ops2.execute() + log.debug('comparing...') + self.assertNcEqual(ret1, ret2, ignore_attributes={'global': ['history']}, check_types=check_types) + finally: + for path in [ret1, ret2]: + folder = os.path.split(path)[0] + shutil.rmtree(folder) + log.debug('success') diff --git a/src/ocgis/test/test_real_data/test_narccap.py b/src/ocgis/test/test_real_data/test_narccap.py index 06be8a344..10ec7bfc1 100644 --- a/src/ocgis/test/test_real_data/test_narccap.py +++ b/src/ocgis/test/test_real_data/test_narccap.py @@ 
-1,12 +1,11 @@ import unittest -from ocgis.test.base import TestBase +from ocgis.test.base import TestBase, nc_scope import os from ocgis.api.request.base import RequestDataset import ocgis from ocgis.api.operations import OcgOperations import numpy as np from ocgis.exc import DefinitionValidationError, ExtentError -from ocgis.test.test_simple.test_simple import nc_scope from ocgis.test.test_base import longrunning from ocgis.interface.base.crs import CFRotatedPole, CFWGS84 @@ -78,21 +77,21 @@ def test_to_netcdf(self): def test_to_netcdf_with_geometry(self): rd = self.test_data.get_rd('narccap_rotated_pole') - ## this bounding box covers the entire spatial domain. the software will - ## move between rotated pole and CFWGS84 using this operation. it can then - ## be compared against the "null" result which just does a snippet. - geom = [-173.3,8.8,-20.6,79.0] - ops = OcgOperations(dataset=rd,output_format='nc',snippet=True,geom=geom) + # this bounding box covers the entire spatial domain. the software will move between rotated pole and CFWGS84 + # using this operation. it can then be compared against the "null" result which just does a snippet. 
+ geom = [-173.3, 8.8, -20.6, 79.0] + ops = OcgOperations(dataset=rd, output_format='nc', snippet=True, geom=geom) ret = ops.execute() - ops2 = OcgOperations(dataset=rd,output_format='nc',snippet=True,prefix='hi') + ops2 = OcgOperations(dataset=rd, output_format='nc', snippet=True, prefix='hi') ret2 = ops2.execute() - self.assertNcEqual(ret,ret2,metadata_only=True,ignore_attributes={'global': ['history']}) + self.assertNcEqual(ret, ret2, metadata_only=True, ignore_attributes={'global': ['history']}) + with nc_scope(ret) as ds: with nc_scope(ret2) as ds2: - for var_name in ['yc','xc','tas']: + for var_name in ['yc', 'xc', 'tas']: var = ds.variables[var_name][:] var2 = ds2.variables[var_name][:] - diff = np.abs(var-var2) + diff = np.abs(var - var2) self.assertTrue(diff.max() <= 1.02734374963e-06) def test_to_netcdf_with_slice(self): diff --git a/src/ocgis/test/test_real_data/test_random_datasets.py b/src/ocgis/test/test_real_data/test_random_datasets.py index 79d711f98..2ceb91478 100644 --- a/src/ocgis/test/test_real_data/test_random_datasets.py +++ b/src/ocgis/test/test_real_data/test_random_datasets.py @@ -1,6 +1,8 @@ +from netCDF4 import date2num +from ocgis.util.inspect import Inspect import ocgis from ocgis.calc.library.index.dynamic_kernel_percentile import DynamicDailyKernelPercentileThreshold -from ocgis.test.base import TestBase +from ocgis.test.base import TestBase, nc_scope import itertools from ocgis.api.operations import OcgOperations from datetime import datetime as dt @@ -11,7 +13,6 @@ import fiona from csv import DictReader from ocgis.api.request.base import RequestDataset -from ocgis.test.test_simple.test_simple import nc_scope from copy import deepcopy from ocgis.test.test_base import longrunning from shapely.geometry.point import Point @@ -195,35 +196,40 @@ def test_empty_subset_multi_geometry_wrapping(self): self.assertEqual(set(ret.keys()),set([5,6,7])) def test_seasonal_calc(self): - calc = 
[{'func':'mean','name':'my_mean'},{'func':'std','name':'my_std'}] - calc_grouping = [[3,4,5]] + """Test some calculations using a seasonal grouping.""" + + calc = [{'func': 'mean', 'name': 'my_mean'}, {'func': 'std', 'name': 'my_std'}] + calc_grouping = [[3, 4, 5]] rd = self.test_data.get_rd('cancm4_tas') - ops = ocgis.OcgOperations(dataset=rd,calc=calc,calc_grouping=calc_grouping, - calc_sample_size=True,geom='state_boundaries', - select_ugid=[23]) + ops = ocgis.OcgOperations(dataset=rd, calc=calc, calc_grouping=calc_grouping, calc_sample_size=True, + geom='state_boundaries', select_ugid=[23]) ret = ops.execute() - self.assertEqual(ret[23]['tas'].variables['n_my_std'].value.mean(),920.0) - self.assertEqual(ret[23]['tas'].variables['my_std'].value.shape,(1,1,1,4,3)) + self.assertEqual(ret[23]['tas'].variables['n_my_std'].value.mean(), 920.0) + self.assertEqual(ret[23]['tas'].variables['my_std'].value.shape, (1, 1, 1, 4, 3)) - calc = [{'func':'mean','name':'my_mean'},{'func':'std','name':'my_std'}] - calc_grouping = [[12,1,2],[3,4,5],[6,7,8],[9,10,11]] + calc = [{'func': 'mean', 'name': 'my_mean'}, {'func': 'std', 'name': 'my_std'}] + calc_grouping = [[12, 1, 2], [3, 4, 5], [6, 7, 8], [9, 10, 11]] rd = self.test_data.get_rd('cancm4_tas') - ops = ocgis.OcgOperations(dataset=rd,calc=calc,calc_grouping=calc_grouping, - calc_sample_size=True,geom='state_boundaries', - select_ugid=[23]) + ops = ocgis.OcgOperations(dataset=rd, calc=calc, calc_grouping=calc_grouping, calc_sample_size=True, + geom='state_boundaries', select_ugid=[23]) ret = ops.execute() - self.assertEqual(ret[23]['tas'].variables['my_std'].value.shape,(1,4,1,4,3)) - self.assertNumpyAll(ret[23]['tas'].temporal.value,np.array([ 56955., 56680., 56771., 56863.])) - - calc = [{'func':'mean','name':'my_mean'},{'func':'std','name':'my_std'}] - calc_grouping = [[12,1],[2,3]] + self.assertEqual(ret[23]['tas'].variables['my_std'].value.shape, (1, 4, 1, 4, 3)) + temporal = ret[23]['tas'].temporal + numtime = 
temporal.value_numtime + numtime_actual = np.array([56993., 56718., 56809., 56901.]) + self.assertNumpyAll(numtime, numtime_actual) + + calc = [{'func': 'mean', 'name': 'my_mean'}, {'func': 'std', 'name': 'my_std'}] + calc_grouping = [[12, 1], [2, 3]] rd = self.test_data.get_rd('cancm4_tas') - ops = ocgis.OcgOperations(dataset=rd,calc=calc,calc_grouping=calc_grouping, - calc_sample_size=True,geom='state_boundaries', - select_ugid=[23]) + ops = ocgis.OcgOperations(dataset=rd, calc=calc, calc_grouping=calc_grouping, calc_sample_size=True, + geom='state_boundaries', select_ugid=[23]) ret = ops.execute() - self.assertEqual(ret[23]['tas'].variables['my_std'].value.shape,(1,2,1,4,3)) - self.assertNumpyAll(ret[23]['tas'].temporal.bounds,np.array([[ 55115., 58765.],[ 55146., 58490.]])) + self.assertEqual(ret[23]['tas'].variables['my_std'].value.shape, (1, 2, 1, 4, 3)) + temporal = ret[23]['tas'].temporal + bounds_numtime = temporal.bounds_numtime + bounds_numtime_actual = np.array([[55152.0, 58804.0], [55183.0, 58529.0]]) + self.assertNumpyAll(bounds_numtime, bounds_numtime_actual) def test_seasonal_calc_dkp(self): key = 'dynamic_kernel_percentile_threshold' @@ -494,7 +500,10 @@ def test_bad_time_dimension(self): self.assertTrue(float(row['TIME']) < -50000) if output_format == 'nc': - self.assertNcEqual(dataset.uri, ret, check_types=False, ignore_attributes={'global': ['history']}) + self.assertNcEqual(ret, dataset.uri, check_types=False, + ignore_attributes={'global': ['history'], 'bounds_time': ['calendar', 'units'], + 'bias': ['_FillValue', 'grid_mapping', 'units']}, + ignore_variables=['latitude_longitude']) def test_time_region_climatology(self): """Test for reading metadata from QED 2013 climate data files.""" diff --git a/src/ocgis/test/test_simple/make_test_data.py b/src/ocgis/test/test_simple/make_test_data.py index be9385833..cd6f4b9d1 100644 --- a/src/ocgis/test/test_simple/make_test_data.py +++ b/src/ocgis/test/test_simple/make_test_data.py @@ -124,9 +124,12 @@ 
def write(self): bound = rootgrp.createDimension('bound',size=2) ## create the variables times = rootgrp.createVariable(TIME['name'],'f8',('time',)) + times.axis = 'T' bounds_times = rootgrp.createVariable('time_bnds','f8',('time','bound')) cols = rootgrp.createVariable('longitude','f8',('lon',)) + cols.axis = 'X' rows = rootgrp.createVariable('latitude','f8',('lat',)) + rows.axis = 'Y' bounds_col = rootgrp.createVariable(SPACE['col_bnds'],'f8',('lon','bound')) bounds_row = rootgrp.createVariable(SPACE['row_bnds'],'f8',('lat','bound')) value = rootgrp.createVariable(VAR,'f8',('time','lat','lon'),fill_value=1e20) @@ -381,12 +384,16 @@ def write(self): bound = rootgrp.createDimension('bound',size=2) ## create the variables times = rootgrp.createVariable(TIME['name'],'f8',('time',)) + times.axis = 'T' bounds_times = rootgrp.createVariable('time_bnds','f8',('time','bound')) levels = rootgrp.createVariable(LEVEL['name'],'i4',('level',)) + levels.axis = 'Z' bounds_levels = rootgrp.createVariable('level_bnds','i4',('level','bound')) cols = rootgrp.createVariable('longitude','f8',('lon',)) + cols.axis = 'X' cols.standard_name = 'projection_x_coordinate' rows = rootgrp.createVariable('latitude','f8',('lat',)) + rows.axis = 'Y' rows.standard_name = 'projection_y_coordinate' bounds_col = rootgrp.createVariable(SPACE['col_bnds'],'f8',('lon','bound')) bounds_row = rootgrp.createVariable(SPACE['row_bnds'],'f8',('lat','bound')) @@ -409,7 +416,7 @@ def write(self): value.units = 'huge' value.grid_mapping = 'crs' - grid_mapping = rootgrp.createVariable('crs','u1') + grid_mapping = rootgrp.createVariable('crs','c') grid_mapping.grid_mapping_name = "lambert_conformal_conic" grid_mapping.standard_parallel = [30., 60.] grid_mapping.longitude_of_central_meridian = -97. 
diff --git a/src/ocgis/test/test_simple/test_dependencies.py b/src/ocgis/test/test_simple/test_dependencies.py index 0fc4f0282..fc65316e5 100644 --- a/src/ocgis/test/test_simple/test_dependencies.py +++ b/src/ocgis/test/test_simple/test_dependencies.py @@ -1,9 +1,21 @@ +import os from ocgis import CoordinateReferenceSystem from ocgis.test.base import TestBase class TestDependencies(TestBase): + def test_netCDF4(self): + path = os.path.join(self.current_dir_output, 'foo.nc') + with self.nc_scope(path, 'w') as ds: + ds.createDimension('a', 1) + ds.createDimension('b', 1) + var = ds.createVariable('foo', int, dimensions=('a', 'b')) + var[:] = 5 + with self.nc_scope(path) as ds: + var = ds.variables['foo'] + self.assertEqual(var.shape, (1, 1)) + def test_osr(self): crs = CoordinateReferenceSystem(epsg=4326) self.assertNotEqual(crs.value, {}) diff --git a/src/ocgis/test/test_simple/test_optional_dependencies.py b/src/ocgis/test/test_simple/test_optional_dependencies.py index 87f226844..4a4ec16cc 100644 --- a/src/ocgis/test/test_simple/test_optional_dependencies.py +++ b/src/ocgis/test/test_simple/test_optional_dependencies.py @@ -21,8 +21,10 @@ def test_esmf(self): ops = OcgOperations(dataset=rd1, regrid_destination=rd2, output_format='nc') ret = ops.execute() ignore_attributes = {'time_bnds': ['units', 'calendar'], - 'global': ['history']} - self.assertNcEqual(ret, rd1.uri, ignore_attributes=ignore_attributes) + 'global': ['history'], + 'foo': ['grid_mapping']} + ignore_variables = ['latitude_longitude'] + self.assertNcEqual(ret, rd1.uri, ignore_attributes=ignore_attributes, ignore_variables=ignore_variables) def test_rtree(self): from ocgis.util.spatial.index import SpatialIndex diff --git a/src/ocgis/test/test_simple/test_simple.py b/src/ocgis/test/test_simple/test_simple.py index b8249af35..a8d3b7bc9 100644 --- a/src/ocgis/test/test_simple/test_simple.py +++ b/src/ocgis/test/test_simple/test_simple.py @@ -1,38 +1,39 @@ import re import unittest -from fiona.crs 
import from_string -from osgeo.osr import SpatialReference -from ocgis.api.operations import OcgOperations -from ocgis.api.interpreter import OcgInterpreter import itertools -import numpy as np import datetime -from ocgis.api.parms.definition import SpatialOperation -from ocgis.util.helpers import make_poly, project_shapely_geometry -from ocgis import exc, env, constants import os.path from abc import ABCMeta, abstractproperty import netCDF4 as nc -from ocgis.test.base import TestBase +import csv +from collections import OrderedDict +from copy import deepcopy +from csv import DictReader +import tempfile + +from fiona.crs import from_string +from osgeo.osr import SpatialReference +import numpy as np from shapely.geometry.point import Point -import ocgis -from ocgis.exc import ExtentError, DefinitionValidationError from shapely.geometry.polygon import Polygon -import csv import fiona -from collections import OrderedDict -from ocgis.interface.base import crs from shapely.geometry.geo import mapping from shapely import wkt + +from ocgis.api.operations import OcgOperations +from ocgis.api.interpreter import OcgInterpreter +from ocgis.api.parms.definition import SpatialOperation +from ocgis.util.helpers import make_poly, project_shapely_geometry +from ocgis import exc, env, constants +from ocgis.test.base import TestBase, nc_scope +import ocgis +from ocgis.exc import ExtentError, DefinitionValidationError +from ocgis.interface.base import crs from ocgis.interface.base.crs import CoordinateReferenceSystem, WGS84, CFWGS84, WrappableCoordinateReferenceSystem from ocgis.api.request.base import RequestDataset, RequestDatasetCollection -from copy import deepcopy -from contextlib import contextmanager from ocgis.test.test_simple.make_test_data import SimpleNcNoLevel, SimpleNc, SimpleNcNoBounds, SimpleMaskNc, \ SimpleNc360, SimpleNcProjection, SimpleNcNoSpatialBounds, SimpleNcMultivariate -from csv import DictReader from ocgis.test.test_base import longrunning -import tempfile 
from ocgis.api.parms.definition import OutputFormat from ocgis.interface.base.field import DerivedMultivariateField from ocgis.util.itester import itr_products_keywords @@ -40,29 +41,6 @@ from ocgis.util.spatial.fiona_maker import FionaMaker -@contextmanager -def nc_scope(path, mode='r'): - """ - Provide a transactional scope around a :class:`netCDF4.Dataset` object. - - >>> with nc_scope('/my/file.nc') as ds: - >>> print ds.variables - - :param str path: The full path to the netCDF dataset. - :param str mode: The file mode to use when opening the dataset. - :returns: An open dataset object that will be closed after leaving the ``with statement``. - :rtype: :class:`netCDF4.Dataset` - """ - - ds = nc.Dataset(path, mode=mode) - try: - yield ds - except: - raise - finally: - ds.close() - - class TestSimpleBase(TestBase): __metaclass__ = ABCMeta @@ -292,13 +270,13 @@ def test_units_calendar_on_time_bounds(self): """Test units and calendar are copied to the time bounds.""" rd = self.get_dataset() - ops = ocgis.OcgOperations(dataset=rd,output_format='nc') + ops = ocgis.OcgOperations(dataset=rd, output_format='nc') ret = ops.execute() with nc_scope(ret) as ds: time_attrs = deepcopy(ds.variables['time'].__dict__) time_attrs.pop('bounds') - self.assertEqual(dict(time_attrs), - dict(ds.variables['time_bnds'].__dict__)) + time_attrs.pop('axis') + self.assertEqual(dict(time_attrs), dict(ds.variables['time_bnds'].__dict__)) def test_units_calendar_on_time_bounds_calculation(self): rd = self.get_dataset() @@ -411,24 +389,24 @@ def test_slicing(self): ## pass only three slices with self.assertRaises(DefinitionValidationError): self.get_ops(kwds={'slice':[None,[1,3],[1,3]]}) - + def test_file_only(self): - ret = self.get_ret(kwds={'output_format':'nc','file_only':True, - 'calc':[{'func':'mean','name':'my_mean'}], - 'calc_grouping':['month']}) + ret = self.get_ret( + kwds={'output_format': 'nc', 'file_only': True, 'calc': [{'func': 'mean', 'name': 'my_mean'}], + 'calc_grouping': 
['month']}) try: - ds = nc.Dataset(ret,'r') - self.assertTrue(isinstance(ds.variables['my_mean'][:].sum(), - np.ma.core.MaskedConstant)) - self.assertEqual(set(ds.variables['my_mean'].ncattrs()),set([u'_FillValue', u'units', u'long_name', u'standard_name'])) + ds = nc.Dataset(ret, 'r') + self.assertTrue(isinstance(ds.variables['my_mean'][:].sum(), np.ma.core.MaskedConstant)) + self.assertEqual(set(ds.variables['my_mean'].ncattrs()), + set([u'_FillValue', u'units', u'long_name', u'standard_name', 'grid_mapping'])) finally: ds.close() - + with self.assertRaises(DefinitionValidationError): - self.get_ret(kwds={'file_only':True,'output_format':'shp'}) - + self.get_ret(kwds={'file_only': True, 'output_format': 'shp'}) + with self.assertRaises(DefinitionValidationError): - self.get_ret(kwds={'file_only':True}) + self.get_ret(kwds={'file_only': True}) def test_return_all(self): ret = self.get_ret() @@ -595,26 +573,26 @@ def test_snippet_time_region(self): with self.assertRaises(DefinitionValidationError): rd = self.get_dataset(time_region={'month':[1]}) OcgOperations(dataset=rd,snippet=True) - + def test_calc(self): - calc = [{'func':'mean','name':'my_mean'}] - group = ['month','year'] - - ## raw - ret = self.get_ret(kwds={'calc':calc,'calc_grouping':group}) - ref = ret.gvu(1,'my_mean') - self.assertEqual(ref.shape,(1,2,2,4,4)) + calc = [{'func': 'mean', 'name': 'my_mean'}] + group = ['month', 'year'] + + # raw + ret = self.get_ret(kwds={'calc': calc, 'calc_grouping': group}) + ref = ret.gvu(1, 'my_mean') + self.assertEqual(ref.shape, (1, 2, 2, 4, 4)) with self.assertRaises(KeyError): - ret.gvu(1,'n') + ret.gvu(1, 'n') - ## aggregated - for calc_raw in [True,False]: - ret = self.get_ret(kwds={'calc':calc,'calc_grouping':group, - 'aggregate':True,'calc_raw':calc_raw}) - ref = ret.gvu(1,'my_mean') - self.assertEqual(ref.shape,(1,2,2,1,1)) - self.assertEqual(ref.flatten().mean(),2.5) - self.assertDictEqual(ret[1]['foo'].variables['my_mean'].meta['attrs'], {'long_name': 
'Mean', 'standard_name': 'mean'}) + # aggregated + for calc_raw in [True, False]: + ret = self.get_ret(kwds={'calc': calc, 'calc_grouping': group, 'aggregate': True, 'calc_raw': calc_raw}) + ref = ret.gvu(1, 'my_mean') + self.assertEqual(ref.shape, (1, 2, 2, 1, 1)) + self.assertEqual(ref.flatten().mean(), 2.5) + self.assertDictEqual(ret[1]['foo'].variables['my_mean'].attrs, + {'long_name': 'Mean', 'standard_name': 'mean'}) def test_calc_multivariate(self): rd1 = self.get_dataset() @@ -643,7 +621,7 @@ def test_calc_eval(self): if of == 'nc': with nc_scope(ret) as ds: self.assertEqual(ds.variables['foo2'][:].mean(),6.5) - + def test_calc_eval_multivariate(self): rd = self.get_dataset() rd2 = self.get_dataset() @@ -651,14 +629,18 @@ def test_calc_eval_multivariate(self): calc = 'foo3=foo+foo2+4' ocgis.env.OVERWRITE = True for of in OutputFormat.iter_possible(): - ops = ocgis.OcgOperations(dataset=[rd,rd2],calc=calc,output_format=of, - slice=[None,[0,10],None,None,None]) + try: + ops = ocgis.OcgOperations(dataset=[rd, rd2], calc=calc, output_format=of, + slice=[None, [0, 10], None, None, None]) + except DefinitionValidationError: + self.assertEqual(of, 'esmpy') + continue ret = ops.execute() if of == 'numpy': - self.assertIsInstance(ret[1]['foo_foo2'],DerivedMultivariateField) + self.assertIsInstance(ret[1]['foo_foo2'], DerivedMultivariateField) if of == 'nc': with nc_scope(ret) as ds: - self.assertEqual(ds.variables['foo3'][:].mean(),9.0) + self.assertEqual(ds.variables['foo3'][:].mean(), 9.0) @longrunning def test_calc_sample_size(self): @@ -749,10 +731,23 @@ def test_nc_conversion(self): ops = OcgOperations(dataset=rd, output_format='nc') ret = self.get_ret(ops) - self.assertNcEqual(rd['uri'], ret, ignore_attributes={'global': ['history']}) - with nc_scope(ret) as ds: + self.assertNcEqual(ret, rd['uri'], ignore_attributes={'global': ['history'], + 'time_bnds': ['calendar', 'units'], + rd['variable']: ['grid_mapping'], + 'time': ['axis'], + 'level': ['axis'], + 
'latitude': ['axis'], + 'longitude': ['axis']}, + ignore_variables=['latitude_longitude']) + + with self.nc_scope(ret) as ds: + expected = {'time': 'T', 'level': 'Z', 'latitude': 'Y', 'longitude': 'X'} + for k, v in expected.iteritems(): + var = ds.variables[k] + self.assertEqual(var.axis, v) + with self.nc_scope(ret) as ds: self.assertEqual(ds.file_format, constants.netCDF_default_data_model) - + def test_nc_conversion_calc(self): calc_grouping = ['month'] calc = [{'func':'mean','name':'my_mean'}, @@ -789,6 +784,7 @@ def test_nc_conversion_level_subset(self): ops = OcgOperations(dataset={'uri': no_level, 'variable': 'foo'}, output_format='nc', prefix='no_level_again') no_level_again = ops.execute() + self.assertNcEqual(no_level, no_level_again, ignore_attributes={'global': ['history']}) ds = nc.Dataset(no_level_again) @@ -945,11 +941,6 @@ def test_combinatorial_projection_with_geometries(self): continue else: raise - except ImproperPolygonBoundsError: - if ab == 'polygon' and unbounded: - continue - else: - raise except ExtentError: if unbounded or ab == 'point': continue @@ -1191,35 +1182,44 @@ def test_csv_calc_conversion_two_calculations(self): reader2 = csv.DictReader(f2) for row,row2 in zip(reader,reader2): self.assertDictEqual(row,row2) - + def test_calc_multivariate_conversion(self): rd1 = self.get_dataset() rd1['alias'] = 'var1' rd2 = self.get_dataset() rd2['alias'] = 'var2' - calc = [{'name':'divide','func':'divide','kwds':{'arr1':'var1','arr2':'var2'}}] - + calc = [{'name': 'divide', 'func': 'divide', 'kwds': {'arr1': 'var1', 'arr2': 'var2'}}] + for o in constants.output_formats: calc_grouping = ['month'] - ops = OcgOperations(dataset=[rd1,rd2],calc=calc,calc_grouping=calc_grouping,output_format=o, - prefix=o+'yay') + + try: + ops = OcgOperations(dataset=[rd1, rd2], calc=calc, calc_grouping=calc_grouping, output_format=o, + prefix=o + 'yay') + except DefinitionValidationError: + self.assertEqual(o, 'esmpy') + continue + ret = ops.execute() - - if o in 
['csv','csv+']: - with open(ret,'r') as f: + + if o in ['csv', 'csv+']: + with open(ret, 'r') as f: reader = csv.DictReader(f) row = reader.next() - self.assertDictEqual(row,{'LID': '1', 'UGID': '1', 'CID':'1', 'LEVEL': '50', 'DID': '', 'YEAR': '2000', 'TIME': '2000-03-16 00:00:00', 'CALC_ALIAS': 'divide', 'VALUE': '1.0', 'MONTH': '3', 'GID': '1', 'CALC_KEY': 'divide', 'TID': '1', 'DAY': '16'}) - + self.assertDictEqual(row, + {'LID': '1', 'UGID': '1', 'CID': '1', 'LEVEL': '50', 'DID': '', 'YEAR': '2000', + 'TIME': '2000-03-16 00:00:00', 'CALC_ALIAS': 'divide', 'VALUE': '1.0', + 'MONTH': '3', 'GID': '1', 'CALC_KEY': 'divide', 'TID': '1', 'DAY': '16'}) + if o == 'nc': with nc_scope(ret) as ds: - self.assertIn('divide',ds.variables) + self.assertIn('divide', ds.variables) self.assertTrue(np.all(ds.variables['divide'][:] == 1.)) - + if o == 'shp': with fiona.open(ret) as f: row = f.next() - self.assertIn('CID',row['properties']) + self.assertIn('CID', row['properties']) def test_meta_conversion(self): ops = OcgOperations(dataset=self.get_dataset(),output_format='meta') @@ -1389,10 +1389,15 @@ def test_operations_convert_multiple_request_datasets(self): if o == 'nc': continue rds = self.get_multiple_request_datasets() - ops = OcgOperations(dataset=rds, output_format=o, prefix=o, slice=[None, [0, 2], None, None, None]) + try: + ops = OcgOperations(dataset=rds, output_format=o, prefix=o, slice=[None, [0, 2], None, None, None]) + except DefinitionValidationError: + # only one dataset for esmpy output + self.assertEqual(o, 'esmpy') + continue ret = ops.execute() path_source_metadata = os.path.join(self.current_dir_output, ops.prefix, '{0}_source_metadata.txt'.format(ops.prefix)) - if o != 'numpy': + if o not in ['numpy', 'meta']: self.assertTrue(os.path.exists(ret)) with open(path_source_metadata, 'r') as f: lines = f.readlines() @@ -1503,7 +1508,6 @@ def test_spatial_touch_only(self): # field = rd.get() # field.spatial.write_fiona('/tmp/touch.shp') # 
write_geom_dict({1:g},path='/tmp/should_touch.shp') - # import ipdb;ipdb.set_trace() raise def test_spatial(self): @@ -1602,7 +1606,9 @@ def test_differing_projection_with_output_crs(self): def test_nc_projection(self): dataset = self.get_dataset() ret = self.get_ret(kwds={'output_format': 'nc'}) - self.assertNcEqual(dataset['uri'], ret, ignore_attributes={'global': ['history']}) + self.assertNcEqual(dataset['uri'], ret, + ignore_attributes={'global': ['history'], 'time_bnds': ['calendar', 'units'], + 'crs': ['proj4', 'units']}) def test_nc_projection_to_shp(self): ret = self.get_ret(kwds={'output_format':'shp'}) diff --git a/src/ocgis/util/helpers.py b/src/ocgis/util/helpers.py index c370b2391..0dcc90529 100644 --- a/src/ocgis/util/helpers.py +++ b/src/ocgis/util/helpers.py @@ -15,63 +15,58 @@ import fiona from shapely.geometry.geo import mapping from fiona.crs import from_epsg -from ocgis.exc import SingleElementError, ShapeError - -from ocgis.util.logging_ocgis import ocgis_lh +from ocgis.exc import SingleElementError, ShapeError -def get_sorted_uris_by_time_dimension(uris, variable=None): - """ - Sort a sequence of NetCDF URIs by the maximum time extent in ascending order. - :param uris: The sequence of NetCDF URIs to sort. - :type uris: list[str] +class ProgressBar(object): - >>> uris = ['/path/to/file2.nc', 'path/to/file1.nc'] + def __init__(self,title): + sys.stdout.write(title + ": [" + "-"*40 + "]" + chr(8)*41) + sys.stdout.flush() + self.px = 0 +# globals()["progress_x"] = 0 - :param str variable: The target variable for sorting. If ``None`` is provided, then the variable will be - autodiscovered. - :returns: A sequence of sorted URIs. 
- :rtype: list[str] - """ +# def startProgress(title): +# sys.stdout.write(title + ": [" + "-"*40 + "]" + chr(8)*41) +# sys.stdout.flush() +# globals()["progress_x"] = 0 - from ocgis import RequestDataset + def progress(self,x): + x = x*40//100 + sys.stdout.write("#"*(x - self.px)) + sys.stdout.flush() + self.px = x +# globals()["progress_x"] = x - to_sort = {} - for uri in uris: - rd = RequestDataset(uri=uri, variable=variable) - to_sort[rd.get().temporal.extent_datetime[1]] = rd.uri - sorted_keys = sorted(to_sort) - ret = [to_sort[sk] for sk in sorted_keys] - return ret + def endProgress(self): + sys.stdout.write("#"*(40 - self.px)) + sys.stdout.write("]\n") + sys.stdout.flush() -def write_geom_dict(dct, path=None, filename=None, epsg=4326, crs=None): +def format_bool(value): """ - :param dct: - :type dct: dict - - >>> dct = {1: Point(1, 2), 2: Point(3, 4)} + Format a string to boolean. - :param path: - :type path: str - :param filename: - :type filename: str + :param value: The value to convert. 
+ :type value: int or str """ - filename = filename or 'out' - path = path or os.path.join(mkdtemp(), '{0}.shp'.format(filename)) + try: + ret = bool(int(value)) + except ValueError: + value = value.lower() + if value in ['t', 'true']: + ret = True + elif value in ['f', 'false']: + ret = False + else: + raise ValueError('String not recognized for boolean conversion: {0}'.format(value)) + return ret - crs = crs or from_epsg(epsg) - driver = 'ESRI Shapefile' - schema = {'properties': {'UGID': 'int'}, 'geometry': dct.values()[0].geom_type} - with fiona.open(path, 'w', driver=driver, crs=crs, schema=schema) as source: - for k, v in dct.iteritems(): - rec = {'properties': {'UGID': k}, 'geometry': mapping(v)} - source.write(rec) - return path - -def get_added_slice(slice1,slice2): + +def get_added_slice(slice1, slice2): ''' :param slice slice1: :param slice slice2: @@ -80,129 +75,106 @@ def get_added_slice(slice1,slice2): ''' assert(slice1.step == None) assert(slice2.step == None) - + def _add_(a,b): a = a or 0 b = b or 0 return(a+b) - + start = _add_(slice1.start,slice2.start) stop = _add_(slice1.stop,slice2.stop) - + return(slice(start,stop)) - -def get_trimmed_array_by_mask(arr,return_adjustments=False): - ''' - Returns a slice of the masked array ``arr`` with masked rows and columns - removed. - - :param arr: Two-dimensional array object. - :type arr: :class:`numpy.ma.MaskedArray` or bool :class:`numpy.ndarray` - :param bool return_adjustments: If ``True``, return a dictionary with - values of index adjustments that may be added to a slice object. 
- :raises NotImplementedError: - :returns: :class:`numpy.ma.MaskedArray` or (:class:`numpy.ma.MaskedArray', {'row':slice(...),'col':slice(...)}) - ''' - try: - _mask = arr.mask - except AttributeError: - ## likely a boolean array - if arr.dtype == np.dtype(bool): - _mask = arr - else: - raise(NotImplementedError('Array type is not implemented.')) - ## row 0 to end - start_row = 0 - for idx_row in range(arr.shape[0]): - if _mask[idx_row,:].all(): - start_row += 1 - else: - break - - ## row end to 0 - stop_row = 0 - idx_row_adjust = 1 - for __ in range(arr.shape[0]): - if _mask[stop_row-idx_row_adjust,:].all(): - idx_row_adjust += 1 - else: - idx_row_adjust -= 1 - break - if idx_row_adjust == 0: - stop_row = None - else: - stop_row = stop_row - idx_row_adjust - - ## col 0 to end - start_col = 0 - for idx_col in range(arr.shape[1]): - if _mask[:,idx_col].all(): - start_col += 1 - else: - break - - ## col end to 0 - stop_col = 0 - idx_col_adjust = 1 - for __ in range(arr.shape[0]): - if _mask[:,stop_col-idx_col_adjust,].all(): - idx_col_adjust += 1 - else: - idx_col_adjust -= 1 - break - if idx_col_adjust == 0: - stop_col = None - else: - stop_col = stop_col - idx_col_adjust - - ret = arr[start_row:stop_row,start_col:stop_col] - - if return_adjustments: - ret = (ret,{'row':slice(start_row,stop_row),'col':slice(start_col,stop_col)}) - - return(ret) -def get_is_increasing(vec): +def get_bbox_poly(minx, miny, maxx, maxy): + rtup = (miny, maxy) + ctup = (minx, maxx) + return make_poly(rtup, ctup) + + +def get_bounds_from_1d(centroids): """ - :param vec: A vector array. - :type vec: :class:`numpy.ndarray` - :returns: ``True`` if the array is increasing from index 0 to -1. ``False`` otherwise. - :rtype: bool - :raises: SingleElementError, ShapeError + :param centroids: Vector representing center coordinates from which to interpolate bounds. + :type centroids: :class:`numpy.ndarray` + :returns: A *n*-by-2 array with *n* equal to the shape of ``centroids``. 
+ + >>> import numpy as np + >>> centroids = np.array([1,2,3]) + >>> get_bounds_from_1d(centroids) + np.array([[0, 1],[1, 2],[2, 3]]) + + :rtype: :class:`numpy.ndarray` + :raises: NotImplementedError, ValueError """ - if vec.shape == (1,): - raise SingleElementError('Increasing can only be determined with a minimum of two elements.') - if len(vec.shape) > 1: - msg = 'Only vectors allowed.' - raise ShapeError(msg) + mids = get_bounds_vector_from_centroids(centroids) - if vec[0] < vec[-1]: - ret = True - else: - ret = False + # loop to fill the bounds array + bounds = np.zeros((centroids.shape[0], 2), dtype=centroids.dtype) + for ii in range(mids.shape[0]): + try: + bounds[ii, 0] = mids[ii] + bounds[ii, 1] = mids[ii + 1] + except IndexError: + break - return ret + return bounds -def get_extrapolated_corners_esmf_vector(vec): +def get_bounds_vector_from_centroids(centroids): """ - :param vec: A vector. - :type vec: :class:`numpy.ndarray` - :returns: A two-dimensional corners array with dimension ``(2, vec.shape[0]+1)``. - :rtype: :class:`numpy.ndarray` - :raises: ShapeError + :param centroids: Vector representing center coordinates from which to interpolate bounds. + :type centroids: :class:`numpy.ndarray` + :returns: Vector representing upper and lower bounds for centroids with edges extrapolated. + :rtype: :class:`numpy.ndarray` with shape ``centroids.shape[0]+1`` + :raises: NotImplementedError, ValueError """ - if len(vec.shape) > 1: - msg = 'A vector is required.' 
- raise ShapeError(msg) + if len(centroids) < 2: + raise ValueError('Centroid arrays must have length >= 2.') - corners = np.zeros((2, vec.shape[0]+1), dtype=vec.dtype) - corners[:] = get_bounds_vector_from_centroids(vec) + # will hold the mean midpoints between coordinate elements + mids = np.zeros(centroids.shape[0] - 1, dtype=centroids.dtype) + # this is essentially a two-element span moving average kernel + for ii in range(mids.shape[0]): + try: + mids[ii] = np.mean(centroids[ii:ii + 2]) + # if the data type is datetime.datetime raise a more verbose error message + except TypeError: + if isinstance(centroids[ii], datetime.datetime): + raise NotImplementedError('Bounds interpolation is not implemented for datetime.datetime objects.') + else: + raise + # account for edge effects by averaging the difference of the midpoints. if there is only a single value, use the + # different of the original values instead. + if len(mids) == 1: + diff = np.diff(centroids) + else: + diff = np.mean(np.diff(mids)) + # appends for the edges shifting the nearest coordinate by the mean difference + mids = np.append([mids[0] - diff], mids) + mids = np.append(mids, [mids[-1] + diff]) - return corners + return mids + + +def get_date_list(start, stop, days): + ret = [] + delta = datetime.timedelta(days=days) + check = start + while check <= stop: + ret.append(check) + check += delta + return ret + + +def get_default_or_apply(target,f,default=None): + if target is None: + ret = default + else: + ret = f(target) + return ret def get_extrapolated_corners_esmf(arr): @@ -273,70 +245,60 @@ def get_extrapolated_corners_esmf(arr): return corners -def get_bounds_vector_from_centroids(centroids): +def get_extrapolated_corners_esmf_vector(vec): """ - :param centroids: Vector representing center coordinates from which to interpolate bounds. - :type centroids: :class:`numpy.ndarray` - :returns: Vector representing upper and lower bounds for centroids with edges extrapolated. 
- :rtype: :class:`numpy.ndarray` with shape ``centroids.shape[0]+1`` - :raises: NotImplementedError, ValueError + :param vec: A vector. + :type vec: :class:`numpy.ndarray` + :returns: A two-dimensional corners array with dimension ``(2, vec.shape[0]+1)``. + :rtype: :class:`numpy.ndarray` + :raises: ShapeError """ - if len(centroids) < 2: - raise ValueError('Centroid arrays must have length >= 2.') - - # will hold the mean midpoints between coordinate elements - mids = np.zeros(centroids.shape[0] - 1, dtype=centroids.dtype) - # this is essentially a two-element span moving average kernel - for ii in range(mids.shape[0]): - try: - mids[ii] = np.mean(centroids[ii:ii + 2]) - # if the data type is datetime.datetime raise a more verbose error message - except TypeError: - if isinstance(centroids[ii], datetime.datetime): - raise NotImplementedError('Bounds interpolation is not implemented for datetime.datetime objects.') - else: - raise - # account for edge effects by averaging the difference of the midpoints. if there is only a single value, use the - # different of the original values instead. - if len(mids) == 1: - diff = np.diff(centroids) - else: - diff = np.mean(np.diff(mids)) - # appends for the edges shifting the nearest coordinate by the mean difference - mids = np.append([mids[0] - diff], mids) - mids = np.append(mids, [mids[-1] + diff]) - - return mids + if len(vec.shape) > 1: + msg = 'A vector is required.' + raise ShapeError(msg) + corners = np.zeros((2, vec.shape[0]+1), dtype=vec.dtype) + corners[:] = get_bounds_vector_from_centroids(vec) -def get_bounds_from_1d(centroids): - """ - :param centroids: Vector representing center coordinates from which to interpolate bounds. - :type centroids: :class:`numpy.ndarray` - :returns: A *n*-by-2 array with *n* equal to the shape of ``centroids``. 
+ return corners - >>> import numpy as np - >>> centroids = np.array([1,2,3]) - >>> get_bounds_from_1d(centroids) - np.array([[0, 1],[1, 2],[2, 3]]) - :rtype: :class:`numpy.ndarray` - :raises: NotImplementedError, ValueError - """ +def get_formatted_slice(slc, n_dims): - mids = get_bounds_vector_from_centroids(centroids) + def _format_(slc): + if isinstance(slc, int): + ret = slice(slc, slc + 1) + elif isinstance(slc, slice): + ret = slc + elif isinstance(slc, np.ndarray): + ret = slc + else: + if len(slc) == 1: + ret = slice(slc[0]) + elif len(slc) > 1: + ret = np.array(slc) + else: + raise (NotImplementedError(slc, n_dims)) + return ret - # loop to fill the bounds array - bounds = np.zeros((centroids.shape[0], 2), dtype=centroids.dtype) - for ii in range(mids.shape[0]): + if isinstance(slc, slice) and slc == slice(None): + if n_dims == 1: + ret = slc + else: + ret = [slice(None)] * n_dims + elif n_dims == 1: + ret = _format_(slc) + elif n_dims > 1: try: - bounds[ii, 0] = mids[ii] - bounds[ii, 1] = mids[ii + 1] - except IndexError: - break + assert (len(slc) == n_dims) + except (TypeError, AssertionError): + raise IndexError("Only {0}-d slicing allowed.".format(n_dims)) + ret = map(_format_, slc) + else: + raise (NotImplementedError((slc, n_dims))) - return bounds + return ret def get_is_date_between(lower,upper,month=None,year=None): @@ -346,7 +308,7 @@ def get_is_date_between(lower,upper,month=None,year=None): else: attr = 'year' to_test = year - + part_lower,part_upper = getattr(lower,attr),getattr(upper,attr) if part_lower != part_upper: ret = np.logical_and(to_test >= part_lower,to_test < part_upper) @@ -355,140 +317,227 @@ def get_is_date_between(lower,upper,month=None,year=None): return(ret) -def project_shapely_geometry(geom,from_sr,to_sr): - if from_sr.IsSame(to_sr) == 1: - ret = geom +def get_is_increasing(vec): + """ + :param vec: A vector array. + :type vec: :class:`numpy.ndarray` + :returns: ``True`` if the array is increasing from index 0 to -1. 
``False`` otherwise. + :rtype: bool + :raises: SingleElementError, ShapeError + """ + + if vec.shape == (1,): + raise SingleElementError('Increasing can only be determined with a minimum of two elements.') + if len(vec.shape) > 1: + msg = 'Only vectors allowed.' + raise ShapeError(msg) + + if vec[0] < vec[-1]: + ret = True else: - ogr_geom = CreateGeometryFromWkb(geom.wkb) - ogr_geom.AssignSpatialReference(from_sr) - ogr_geom.TransformTo(to_sr) - ret = wkb_loads(ogr_geom.ExportToWkb()) - return(ret) + ret = False -def assert_raise(test,**kwds): - try: - assert(test) - except AssertionError: - ocgis_lh(**kwds) + return ret def get_iter(element, dtype=None): """ - :param element: The element comprising the base iterator. + :param element: The element comprising the base iterator. If the element is a ``basestring`` or :class:`numpy.ndarray` + then the iterator will return the element and stop iteration. + :type element: varying :param dtype: If not ``None``, use this argument as the argument to ``isinstance``. If ``element`` is an instance of ``dtype``, ``element`` will be placed in a list and passed to ``iter``. 
+ :type dtype: type or tuple """ + if dtype is not None: if isinstance(element, dtype): element = (element,) if isinstance(element, (basestring, np.ndarray)): - it = [element] + it = iter([element]) else: try: it = iter(element) except TypeError: it = iter([element]) - - return it + return it -def get_default_or_apply(target,f,default=None): - if target is None: - ret = default - else: - ret = f(target) - return(ret) def get_none_or_1d(target): if target is None: ret = None else: ret = np.atleast_1d(target) - return(ret) + return ret + def get_none_or_2d(target): if target is None: ret = None else: ret = np.atleast_2d(target) - return(ret) + return ret + -def get_none_or_slice(target,slc): +def get_none_or_slice(target, slc): if target is None: ret = None else: ret = target[slc] - return(ret) + return ret + + +def get_ocgis_corners_from_esmf_corners(ecorners): + """ + :param ecorners: An array of ESMF corners. + :type ecorners: :class:`numpy.ndarray` + :returns: A masked array of OCGIS corners. 
+ :rtype: :class:`~numpy.ma.core.MaskedArray` + """ + + base_shape = [xx-1 for xx in ecorners.shape[1:]] + grid_corners = np.zeros([2] + base_shape + [4], dtype=ecorners.dtype) + slices = [(0, 0), (0, 1), (1, 1), (1, 0)] + # collect the corners and insert into ocgis corners array + for ii, jj in itertools.product(range(base_shape[0]), range(base_shape[1])): + row_slice = slice(ii, ii+2) + col_slice = slice(jj, jj+2) + row_corners = ecorners[0][row_slice, col_slice] + col_corners = ecorners[1][row_slice, col_slice] + for kk, slc in enumerate(slices): + grid_corners[:, ii, jj, kk] = row_corners[slc], col_corners[slc] + grid_corners = np.ma.array(grid_corners, mask=False) + return grid_corners + def get_reduced_slice(arr): - arr_min,arr_max = arr.min(),arr.max() - assert(arr_max-arr_min+1 == arr.shape[0]) - ret = slice(arr_min,arr_max+1) - return(ret) + arr_min, arr_max = arr.min(), arr.max() + assert (arr_max - arr_min + 1 == arr.shape[0]) + ret = slice(arr_min, arr_max + 1) + return ret -def get_formatted_slice(slc,n_dims): - - def _format_(slc): - if isinstance(slc,int): - ret = slice(slc,slc+1) - elif isinstance(slc,slice): - ret = slc - elif isinstance(slc,np.ndarray): - ret = slc + +def get_sorted_uris_by_time_dimension(uris, variable=None): + """ + Sort a sequence of NetCDF URIs by the maximum time extent in ascending order. + + :param uris: The sequence of NetCDF URIs to sort. + :type uris: list[str] + + >>> uris = ['/path/to/file2.nc', 'path/to/file1.nc'] + + :param str variable: The target variable for sorting. If ``None`` is provided, then the variable will be + autodiscovered. + :returns: A sequence of sorted URIs. 
+ :rtype: list[str] + """ + + from ocgis import RequestDataset + + to_sort = {} + for uri in uris: + rd = RequestDataset(uri=uri, variable=variable) + to_sort[rd.get().temporal.extent_datetime[1]] = rd.uri + sorted_keys = sorted(to_sort) + ret = [to_sort[sk] for sk in sorted_keys] + return ret + + +def get_trimmed_array_by_mask(arr,return_adjustments=False): + ''' + Returns a slice of the masked array ``arr`` with masked rows and columns + removed. + + :param arr: Two-dimensional array object. + :type arr: :class:`numpy.ma.MaskedArray` or bool :class:`numpy.ndarray` + :param bool return_adjustments: If ``True``, return a dictionary with + values of index adjustments that may be added to a slice object. + :raises NotImplementedError: + :returns: :class:`numpy.ma.MaskedArray` or (:class:`numpy.ma.MaskedArray', {'row':slice(...),'col':slice(...)}) + ''' + try: + _mask = arr.mask + except AttributeError: + ## likely a boolean array + if arr.dtype == np.dtype(bool): + _mask = arr else: - if len(slc) == 1: - ret = slice(slc[0]) - elif len(slc) > 1: - ret = np.array(slc) - else: - raise(NotImplementedError(slc,n_dims)) - return(ret) - - if isinstance(slc,slice) and slc == slice(None): - if n_dims == 1: - ret = slc + raise(NotImplementedError('Array type is not implemented.')) + ## row 0 to end + start_row = 0 + for idx_row in range(arr.shape[0]): + if _mask[idx_row,:].all(): + start_row += 1 else: - ret = [slice(None)]*n_dims - elif n_dims == 1: - ret = _format_(slc) - elif n_dims > 1: - try: - assert(len(slc) == n_dims) - except (TypeError,AssertionError): - raise(IndexError("Only {0}-d slicing allowed.".format(n_dims))) - ret = map(_format_,slc) + break + + ## row end to 0 + stop_row = 0 + idx_row_adjust = 1 + for __ in range(arr.shape[0]): + if _mask[stop_row-idx_row_adjust,:].all(): + idx_row_adjust += 1 + else: + idx_row_adjust -= 1 + break + if idx_row_adjust == 0: + stop_row = None else: - raise(NotImplementedError((slc,n_dims))) - - return(ret) + stop_row = 
stop_row - idx_row_adjust + + ## col 0 to end + start_col = 0 + for idx_col in range(arr.shape[1]): + if _mask[:,idx_col].all(): + start_col += 1 + else: + break -def iter_arg(arg): - if isinstance(arg,basestring): - itr = [arg] + ## col end to 0 + stop_col = 0 + idx_col_adjust = 1 + for __ in range(arr.shape[0]): + if _mask[:,stop_col-idx_col_adjust,].all(): + idx_col_adjust += 1 + else: + idx_col_adjust -= 1 + break + if idx_col_adjust == 0: + stop_col = None else: - try: - itr = iter(arg) - except TypeError: - itr = iter([arg]) - for element in itr: - yield(element) + stop_col = stop_col - idx_col_adjust + ret = arr[start_row:stop_row,start_col:stop_col] + + if return_adjustments: + ret = (ret,{'row':slice(start_row,stop_row),'col':slice(start_col,stop_col)}) -def get_date_list(start,stop,days): - ret = [] - delta = datetime.timedelta(days=days) - check = start - while check <= stop: - ret.append(check) - check += delta return(ret) -def bbox_poly(minx,miny,maxx, maxy): - rtup = (miny,maxy) - ctup = (minx,maxx) - return(make_poly(rtup,ctup)) +def project_shapely_geometry(geom, from_sr, to_sr): + if from_sr.IsSame(to_sr) == 1: + ret = geom + else: + ogr_geom = CreateGeometryFromWkb(geom.wkb) + ogr_geom.AssignSpatialReference(from_sr) + ogr_geom.TransformTo(to_sr) + ret = wkb_loads(ogr_geom.ExportToWkb()) + return ret + + +def set_name_attributes(name_mapping): + """ + Set the name attributes on the keys of ``name_mapping``. + :param dict name_mapping: The keys are objects with a name attribute to set to its value if the attribute is + ``None``. + """ + + for target, name in name_mapping.iteritems(): + if target is not None and target.name is None: + target.name = name def validate_time_subset(time_range,time_region): @@ -496,7 +545,7 @@ def validate_time_subset(time_range,time_region): Ensure `time_range` and `time_region` overlap. If one of the values is `None`, the function always returns `True`. 
Function will return `False` if the two time range descriptions do not overlap. - + :param time_range: Sequence with two datetime elements. :type time_range: sequence :param time_region: Dictionary with two keys 'month' and 'year' each containing @@ -506,14 +555,14 @@ def validate_time_subset(time_range,time_region): :type time_region: dict :rtype: bool ''' - + def _between_(target,lower,upper): if target >= lower and target <= upper: ret = True else: ret = False return(ret) - + def _check_months_(targets,months): check = [target in months for target in targets] if all(check): @@ -521,14 +570,14 @@ def _check_months_(targets,months): else: ret = False return(ret) - + def _check_years_(targets,min_range_year,max_range_year): if all([_between_(year_bound,min_range_year,max_range_year) for year_bound in targets]): ret = True else: ret = False return(ret) - + ## by default we return that it does not validate ret = False ## if any of the parameters are none, then it will validate True @@ -568,55 +617,39 @@ def _check_years_(targets,min_range_year,max_range_year): ret = True return(ret) +def write_geom_dict(dct, path=None, filename=None, epsg=4326, crs=None): + """ + :param dct: + :type dct: dict -def format_bool(value): - '''Format a string to boolean. - - :param value: The value to convert. 
- :type value: int or str''' - - try: - ret = bool(int(value)) - except ValueError: - value = value.lower() - if value in ['t','true']: - ret = True - elif value in ['f','false']: - ret = False - else: - raise(ValueError('String not recognized for boolean conversion: {0}'.format(value))) - return(ret) + >>> dct = {1: Point(1, 2), 2: Point(3, 4)} -class ProgressBar(object): - - def __init__(self,title): - sys.stdout.write(title + ": [" + "-"*40 + "]" + chr(8)*41) - sys.stdout.flush() - self.px = 0 -# globals()["progress_x"] = 0 - -# def startProgress(title): -# sys.stdout.write(title + ": [" + "-"*40 + "]" + chr(8)*41) -# sys.stdout.flush() -# globals()["progress_x"] = 0 + :param path: + :type path: str + :param filename: + :type filename: str + """ + + filename = filename or 'out' + path = path or os.path.join(mkdtemp(), '{0}.shp'.format(filename)) + + crs = crs or from_epsg(epsg) + driver = 'ESRI Shapefile' + schema = {'properties': {'UGID': 'int'}, 'geometry': dct.values()[0].geom_type} + with fiona.open(path, 'w', driver=driver, crs=crs, schema=schema) as source: + for k, v in dct.iteritems(): + rec = {'properties': {'UGID': k}, 'geometry': mapping(v)} + source.write(rec) + return path - def progress(self,x): - x = x*40//100 - sys.stdout.write("#"*(x - self.px)) - sys.stdout.flush() - self.px = x -# globals()["progress_x"] = x - - def endProgress(self): - sys.stdout.write("#"*(40 - self.px)) - sys.stdout.write("]\n") - sys.stdout.flush() def locate(pattern, root=os.curdir, followlinks=True): - '''Locate all files matching supplied filename pattern in and below - supplied root directory.''' - for path, dirs, files in os.walk(os.path.abspath(root),followlinks=followlinks): - for filename in filter(lambda x: x == pattern,files): + """ + Locate all files matching supplied filename pattern in and below supplied root directory. 
+ """ + + for path, dirs, files in os.walk(os.path.abspath(root), followlinks=followlinks): + for filename in filter(lambda x: x == pattern, files): yield os.path.join(path, filename) @@ -639,14 +672,15 @@ def get_ordered_dicts_from_records_array(arr): ret.append(fill) return ret -def iter_array(arr,use_mask=True,return_value=False): + +def iter_array(arr, use_mask=True, return_value=False): try: shp = arr.shape - ## assume array is not a numpy array + # assume array is not a numpy array except AttributeError: - arr = np.array(arr,ndmin=1) + arr = np.array(arr, ndmin=1) shp = arr.shape - iter_args = [range(0,ii) for ii in shp] + iter_args = [range(0, ii) for ii in shp] if use_mask and not np.ma.isMaskedArray(arr): use_mask = False else: @@ -656,10 +690,10 @@ def iter_array(arr,use_mask=True,return_value=False): # referencing the mask. if not use_mask: arr = arr.data - ## array is not masked + # array is not masked except AttributeError: pass - + for ii in itertools.product(*iter_args): if use_mask: try: @@ -667,7 +701,7 @@ def iter_array(arr,use_mask=True,return_value=False): continue else: idx = ii - ## occurs with singleton dimension of masked array + # occurs with singleton dimension of masked array except IndexError: if mask: continue @@ -676,18 +710,11 @@ def iter_array(arr,use_mask=True,return_value=False): else: idx = ii if return_value: - ret = (idx,arr[ii]) + ret = (idx, arr[ii]) else: ret = idx - yield(ret) + yield ret -#def geom_to_mask(coll): -# coll['geom'] = np.ma.array(coll['geom'],mask=coll['geom_mask']) -# return(coll) -# -#def mask_to_geom(coll): -# coll['geom'] = np.array(coll['geom']) -# return(coll) def itersubclasses(cls, _seen=None): """ @@ -792,64 +819,7 @@ def make_poly(rtup,ctup): (ctup[0],rtup[1]), (ctup[1],rtup[1]), (ctup[1],rtup[0]))) - -#def get_sub_range(a): -# """ -# >>> vec = np.array([2,5,9]) -# >>> sub_range(vec) -# array([2, 3, 4, 5, 6, 7, 8, 9]) -# """ -# a = np.array(a) -## ## for the special case of the array with one element 
-## if len(a) == 1: -## ret = np.arange(a[0],a[0]+1) -## else: -# ret = np.arange(a.min(),a.max()+1) -# return(ret) -# -#def bounding_coords(polygon): -# min_x,min_y,max_x,max_y = polygon.bounds -# Bounds = namedtuple('Bounds',['min_x','min_y','max_x','max_y']) -# return(Bounds(min_x=min_x, -# max_x=max_x, -# min_y=min_y, -# max_y=max_y)) -# -#def shapely_to_shp(obj,path,srs=None): -# from osgeo import osr, ogr -# -## path = os.path.join('/tmp',outname+'.shp') -# if srs is None: -# srs = osr.SpatialReference() -# srs.ImportFromEPSG(4326) -# -# if isinstance(obj,MultiPoint): -# test = ogr.CreateGeometryFromWkb(obj[0].wkb) -# ogr_geom = test.GetGeometryType() -# else: -# ogr_geom = 3 -# -# dr = ogr.GetDriverByName('ESRI Shapefile') -# ds = dr.CreateDataSource(path) -# try: -# if ds is None: -# raise IOError('Could not create file on disk. Does it already exist?') -# -# layer = ds.CreateLayer('lyr',srs=srs,geom_type=ogr_geom) -# try: -# feature_def = layer.GetLayerDefn() -# except: -# import ipdb;ipdb.set_trace() -# feat = ogr.Feature(feature_def) -# try: -# iterator = iter(obj) -# except TypeError: -# iterator = iter([obj]) -# for geom in iterator: -# feat.SetGeometry(ogr.CreateGeometryFromWkb(geom.wkb)) -# layer.CreateFeature(feat) -# finally: -# ds.Destroy() + def get_temp_path(suffix='',name=None,nest=False,only_dir=False,wd=None,dir_prefix=None): """Return absolute path to a temporary file.""" diff --git a/src/ocgis/util/shp_cabinet.py b/src/ocgis/util/shp_cabinet.py index 798a0d9d5..248a2298e 100644 --- a/src/ocgis/util/shp_cabinet.py +++ b/src/ocgis/util/shp_cabinet.py @@ -19,30 +19,27 @@ class ShpCabinetIterator(object): """ Iterate over a geometry selected by ``key`` or ``path``. - :param key: Unique key identifier for a shapefile contained in the ShpCabinet - directory. + :param key: Unique key identifier for a shapefile contained in the ShpCabinet directory. 
:type key: str >>> key = 'state_boundaries' - :param select_ugid: Sequence of unique identifiers matching values from the - shapefile's UGID attribute. + :param select_ugid: Sequence of unique identifiers matching values from the shapefile's UGID attribute. :type select_ugid: sequence >>> select_ugid = [23,24] - :param path: Path to the target shapefile to iterate over. If ``key`` is - provided it will override ``path``. + :param path: Path to the target shapefile to iterate over. If ``key`` is provided it will override ``path``. :type path: str >>> path = '/path/to/shapefile.shp' - :param bool load_geoms: If ``False``, do not load geometries, excluding - the ``'geom'`` key from the output dictionary. + :param bool load_geoms: If ``False``, do not load geometries, excluding the ``'geom'`` key from the output + dictionary. + :param bool as_spatial_dimension: If ``True``, yield as spatial dimension objects. """ def __init__(self, key=None, select_ugid=None, path=None, load_geoms=True, as_spatial_dimension=False): - #todo: doc spatial dimension self.key = key self.path = path self.select_ugid = select_ugid @@ -143,7 +140,6 @@ def _get_path_(self, key, ext='shp'): ValueError('a shapefile with key "{0}" was not found under the directory: {1}'.format(key, self.path))) def iter_geoms(self, key=None, select_ugid=None, path=None, load_geoms=True, as_spatial_dimension=False): - #todo: doc spatial dimension """ Iterate over geometries from a shapefile specified by ``key`` or ``path``. @@ -152,29 +148,27 @@ def iter_geoms(self, key=None, select_ugid=None, path=None, load_geoms=True, as_ >>> len(list(geoms)) 2 - :param key: Unique key identifier for a shapefile contained in the ShpCabinet - directory. + :param key: Unique key identifier for a shapefile contained in the ShpCabinet directory. :type key: str >>> key = 'state_boundaries' - :param select_ugid: Sequence of unique identifiers matching values from the - shapefile's UGID attribute. Ascending order only. 
+ :param select_ugid: Sequence of unique identifiers matching values from the shapefile's UGID attribute. + Ascending order only. :type select_ugid: sequence >>> select_ugid = [23,24] - :param path: Path to the target shapefile to iterate over. If ``key`` is - provided it will override ``path``. + :param path: Path to the target shapefile to iterate over. If ``key`` is provided it will override ``path``. :type path: str >>> path = '/path/to/shapefile.shp' - :param bool load_geoms: If ``False``, do not load geometries, excluding - the ``'geom'`` key from the output dictionary. - + :param bool load_geoms: If ``False``, do not load geometries, excluding the ``'geom'`` key from the output + dictionary. + :param bool as_spatial_dimension: If ``True``, yield spatial dimension objects. :raises: ValueError, RuntimeError - :yields: dict + :rtype: dict """ # ensure select ugid is in ascending order From c569047e41d67974f63d766ecd0804317f2e69d6 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Sun, 7 Dec 2014 13:15:57 -0700 Subject: [PATCH 28/71] moved to nose attr for testing Removed dev and longrunning decorators replaced by adding nose attributes to test. Info on how to run the tests is located in ocgis/test/base.py. 
--- src/ocgis/constants.py | 4 - src/ocgis/test/base.py | 41 ++++++- src/ocgis/test/test_base.py | 29 +---- .../test_ocgis/test_api/test_operations.py | 9 +- .../test_api/test_parms/test_definition.py | 8 +- .../test_request/test_driver/test_nc.py | 67 +++++------ .../test_library/test_index/test_duration.py | 14 +-- .../test_dynamic_kernel_percentile.py | 13 ++- .../test_contrib/test_library_icclim.py | 16 +-- .../test/test_ocgis/test_conv/test_esmpy.py | 6 +- .../test_base/test_dimension/test_spatial.py | 109 +++++++----------- .../test_interface/test_base/test_field.py | 106 +++++++---------- .../test/test_ocgis/test_regrid/test_base.py | 7 +- .../test_ocgis/test_util/test_large_array.py | 17 +-- .../test/test_real_data/test_combinatorial.py | 8 +- .../test_real_data/test_multiple_datasets.py | 15 +-- src/ocgis/test/test_real_data/test_narccap.py | 9 +- .../test_real_data/test_random_datasets.py | 38 +++--- src/ocgis/test/test_simple/test_simple.py | 7 +- .../test/test_unfiled/test_remote_data.py | 27 ++--- 20 files changed, 241 insertions(+), 309 deletions(-) diff --git a/src/ocgis/constants.py b/src/ocgis/constants.py index bd41930fa..1c43d0186 100644 --- a/src/ocgis/constants.py +++ b/src/ocgis/constants.py @@ -70,7 +70,3 @@ #: The value for the 180th meridian to use when wrapping. meridian_180th = 180. 
# meridian_180th = 179.9999999999999 - - -test_run_long_tests = False -test_run_dev_tests = False diff --git a/src/ocgis/test/base.py b/src/ocgis/test/base.py index bb4d9a33a..d95120b4b 100644 --- a/src/ocgis/test/base.py +++ b/src/ocgis/test/base.py @@ -5,18 +5,20 @@ import datetime import subprocess import itertools -from ocgis.api.collection import SpatialCollection -from ocgis.interface.base.field import Field -from ocgis.interface.base.dimension.spatial import SpatialGridDimension, SpatialDimension -from ocgis import env import shutil from copy import deepcopy, copy import os from collections import OrderedDict -import ocgis +import netCDF4 as nc + import numpy as np + +from ocgis.api.collection import SpatialCollection +from ocgis.interface.base.field import Field +from ocgis.interface.base.dimension.spatial import SpatialGridDimension, SpatialDimension +from ocgis import env +import ocgis from ocgis.api.request.base import RequestDataset -import netCDF4 as nc from ocgis.interface.base.dimension.base import VectorDimension from ocgis.interface.base.dimension.temporal import TemporalDimension from ocgis.interface.base.variable import Variable @@ -24,6 +26,16 @@ from ocgis.util.itester import itr_products_keywords +""" +Definitions for various "attrs": + * slow: long-running tests that are typically run before a release + * remote: tests relying on remote datasets that are typically run before a release + * esmpy7: tests requiring a branch version of ESMF + +nosetests -vs --with-id -a '!slow,!remote,!esmpy7' ocgis/test +""" + + class ToTest(Exception): """ Useful when wanting to flag things as not tested. @@ -597,6 +609,23 @@ def update(self, collection, variable, filename, key=None): 'filename': filename, 'variable': variable}}) +def attr(*args, **kwargs): + """ + Decorator that adds attributes to classes or functions for use with the Attribute (-a) plugin. 
+ + http://nose.readthedocs.org/en/latest/plugins/attrib.html + """ + + def wrap_ob(ob): + for name in args: + setattr(ob, name, True) + for name, value in kwargs.iteritems(): + setattr(ob, name, value) + return ob + + return wrap_ob + + @contextmanager def nc_scope(path, mode='r', format=None): """ diff --git a/src/ocgis/test/test_base.py b/src/ocgis/test/test_base.py index e13b6a50f..c9a82d182 100644 --- a/src/ocgis/test/test_base.py +++ b/src/ocgis/test/test_base.py @@ -1,34 +1,11 @@ import datetime from netCDF4 import Dataset import os -from ocgis.interface.base.field import Field -from ocgis.test.base import TestBase, TestData -import ocgis -from unittest.case import SkipTest -from ocgis import constants -import numpy as np +import numpy as np -def longrunning(f): - if constants.test_run_long_tests: - ret = f - else: - def skip(*args): - raise SkipTest("long-running test") - skip.__name__ = f.__name__ - ret = skip - return ret - - -def dev(f): - if constants.test_run_dev_tests: - ret = f - else: - def skip(*args): - raise SkipTest("development-only test") - skip.__name__ = f.__name__ - ret = skip - return ret +from ocgis.interface.base.field import Field +from ocgis.test.base import TestBase class TestTestBase(TestBase): diff --git a/src/ocgis/test/test_ocgis/test_api/test_operations.py b/src/ocgis/test/test_ocgis/test_api/test_operations.py index c96862c53..83e93f43d 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_operations.py +++ b/src/ocgis/test/test_ocgis/test_api/test_operations.py @@ -3,7 +3,6 @@ import itertools import datetime import os -from unittest import SkipTest import ESMF from numpy import dtype @@ -11,7 +10,7 @@ from ocgis.api.parms.definition import RegridOptions, OutputFormat from ocgis.interface.base.crs import CFWGS84 -from ocgis.test.base import TestBase +from ocgis.test.base import TestBase, attr from ocgis.exc import DefinitionValidationError, DimensionNotFound, RequestValidationError from ocgis.api.parms import definition from 
ocgis import constants @@ -258,9 +257,10 @@ def test_keyword_conform_units_to_bad_units(self): with self.assertRaises(RequestValidationError): OcgOperations(dataset=rd, conform_units_to='crap') + @attr('esmpy7') def test_keyword_dataset_esmf(self): """Test with operations on an ESMF Field.""" - raise SkipTest + efield = self.get_esmf_field() output_format = OutputFormat.iter_possible() for kk in output_format: @@ -343,9 +343,10 @@ def test_keyword_prefix(self): ops.execute() self.assertEqual(len(os.listdir(self.current_dir_output)), 0) + @attr('esmpy7') def test_keyword_output_format_esmpy(self): """Test with the ESMPy output format.""" - raise SkipTest + #todo: test spatial subsetting #todo: test calculations slc = [None, None, None, [0, 10], [0, 10]] diff --git a/src/ocgis/test/test_ocgis/test_api/test_parms/test_definition.py b/src/ocgis/test/test_ocgis/test_api/test_parms/test_definition.py index 1c20fd5c9..74a8d2a07 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_parms/test_definition.py +++ b/src/ocgis/test/test_ocgis/test_api/test_parms/test_definition.py @@ -1,16 +1,14 @@ import unittest import pickle import tempfile -from unittest import SkipTest from cfunits import Units -import numpy as np from ocgis import env from ocgis.api.parms.definition import * from ocgis.interface.base.dimension.spatial import SpatialDimension, SpatialGeometryPointDimension from ocgis.util.helpers import make_poly -from ocgis.test.base import TestBase +from ocgis.test.base import TestBase, attr from ocgis.calc.library.statistics import Mean from ocgis.util.itester import itr_products_keywords from ocgis.util.shp_cabinet import ShpCabinet @@ -442,8 +440,8 @@ def test_init(self): dsb = [dsa, {'uri': reference_rd2.uri, 'variable': reference_rd2.variable, 'alias': 'knight'}] Dataset(dsb) + @attr('esmpy7') def test_init_esmf(self): - raise SkipTest #todo: what to do about time values, units, etc. 
efield = self.get_esmf_field() dd = Dataset(efield) @@ -621,8 +619,8 @@ def test_bad_ordination(self): class TestOutputFormat(TestBase): create_dir = False + @attr('esmpy7') def test_init_esmpy(self): - raise SkipTest oo = OutputFormat('esmpy') self.assertEqual(oo.value, 'esmpy') diff --git a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py index 505349ba0..7007fa032 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py +++ b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py @@ -1,29 +1,28 @@ -from ocgis.interface.nc.spatial import NcSpatialGridDimension -from ocgis.interface.base.dimension.base import VectorDimension -from ocgis import constants from copy import deepcopy import os import shutil import tempfile -import unittest +import netCDF4 as nc +from datetime import datetime as dt +import datetime +from collections import OrderedDict + +import numpy as np +import fiona +from shapely.geometry.geo import shape + +from ocgis.interface.nc.spatial import NcSpatialGridDimension +from ocgis.interface.base.dimension.base import VectorDimension +from ocgis import constants from ocgis import RequestDataset from ocgis.api.request.driver.nc import DriverNetcdf, get_dimension_map from ocgis.interface.metadata import NcMetadata -from ocgis.test.base import TestBase, nc_scope -import netCDF4 as nc +from ocgis.test.base import TestBase, nc_scope, attr from ocgis.interface.base.crs import WGS84, CFWGS84, CFLambertConformal -import numpy as np -from datetime import datetime as dt from ocgis.interface.base.dimension.spatial import SpatialGeometryPolygonDimension, SpatialGeometryDimension, \ SpatialDimension -import fiona -from shapely.geometry.geo import shape from ocgis.exc import EmptySubsetError, DimensionNotFound -import datetime -from unittest.case import SkipTest import ocgis -from importlib import import_module -from collections 
import OrderedDict from ocgis.util.logging_ocgis import ocgis_lh from ocgis import ShpCabinet @@ -300,27 +299,23 @@ def test_get_field_geometry_subset(self): ca = ca.geom.polygon.value[0,0] for u in [True,False]: - try: - rd = RequestDataset(variable=ref_test['variable'],uri=uri,alias='foo') - field = rd.get() - ca_sub = field.get_intersects(ca,use_spatial_index=u) - self.assertEqual(ca_sub.shape,(1, 3650, 1, 5, 4)) - self.assertTrue(ca_sub.variables['foo'].value.mask.any()) - self.assertFalse(field.spatial.uid.mask.any()) - self.assertFalse(field.spatial.get_mask().any()) - - ca_sub = field.get_intersects(ca.envelope,use_spatial_index=u) - self.assertEqual(ca_sub.shape,(1, 3650, 1, 5, 4)) - self.assertFalse(ca_sub.variables['foo'].value.mask.any()) - - rd = RequestDataset(variable=ref_test['variable'],uri=uri,alias='foo',time_region={'year':[2007]}) - field = rd.get() - ca_sub = field.get_intersects(ca,use_spatial_index=u) - self.assertEqual(ca_sub.shape,(1, 365, 1, 5, 4)) - self.assertEqual(set([2007]),set([d.year for d in ca_sub.temporal.value_datetime])) - except ImportError: - with self.assertRaises(ImportError): - import_module('rtree') + rd = RequestDataset(variable=ref_test['variable'],uri=uri,alias='foo') + field = rd.get() + ca_sub = field.get_intersects(ca,use_spatial_index=u) + self.assertEqual(ca_sub.shape,(1, 3650, 1, 5, 4)) + self.assertTrue(ca_sub.variables['foo'].value.mask.any()) + self.assertFalse(field.spatial.uid.mask.any()) + self.assertFalse(field.spatial.get_mask().any()) + + ca_sub = field.get_intersects(ca.envelope,use_spatial_index=u) + self.assertEqual(ca_sub.shape,(1, 3650, 1, 5, 4)) + self.assertFalse(ca_sub.variables['foo'].value.mask.any()) + + rd = RequestDataset(variable=ref_test['variable'],uri=uri,alias='foo',time_region={'year':[2007]}) + field = rd.get() + ca_sub = field.get_intersects(ca,use_spatial_index=u) + self.assertEqual(ca_sub.shape,(1, 365, 1, 5, 4)) + self.assertEqual(set([2007]),set([d.year for d in 
ca_sub.temporal.value_datetime])) def test_get_field_time_region_slicing(self): ref_test = self.test_data['cancm4_tas'] @@ -343,8 +338,8 @@ def test_get_field_time_region_slicing(self): sub2 = field[:,:,:,0,1] self.assertEqual(sub2.shape,(1, 124, 1, 1, 1)) + @attr('remote') def test_get_field_remote(self): - raise(SkipTest("server IO errors")) uri = 'http://cida.usgs.gov/thredds/dodsC/maurer/maurer_brekke_w_meta.ncml' variable = 'sresa1b_bccr-bcm2-0_1_Tavg' rd = RequestDataset(uri,variable,time_region={'month':[1,10],'year':[2011,2013]}) diff --git a/src/ocgis/test/test_ocgis/test_calc/test_library/test_index/test_duration.py b/src/ocgis/test/test_ocgis/test_calc/test_library/test_index/test_duration.py index ac8c8d68c..57f235eee 100644 --- a/src/ocgis/test/test_ocgis/test_calc/test_library/test_index/test_duration.py +++ b/src/ocgis/test/test_ocgis/test_calc/test_library/test_index/test_duration.py @@ -1,14 +1,12 @@ -from ocgis.test.base import TestBase -from ocgis.calc.library.index.duration import Duration, FrequencyDuration +import csv + import numpy as np + +from ocgis.test.base import attr +from ocgis.calc.library.index.duration import Duration, FrequencyDuration from ocgis.exc import DefinitionValidationError -import ocgis from ocgis.api.operations import OcgOperations -import csv -from ocgis.api.request.base import RequestDataset -import webbrowser from ocgis.test.test_ocgis.test_calc.test_calc_general import AbstractCalcBase -from ocgis.test.test_base import longrunning class TestDuration(AbstractCalcBase): @@ -94,7 +92,7 @@ def test_calculate(self): else: raise(dct['exception']) - @longrunning + @attr('slow') def test_real_data_multiple_datasets(self): kwds = {'time_region': {'year': [1991], 'month': [7]}} rd_tasmax = self.test_data.get_rd('maurer_2010_concatenated_tasmax', kwds=kwds) diff --git a/src/ocgis/test/test_ocgis/test_calc/test_library/test_index/test_dynamic_kernel_percentile.py 
b/src/ocgis/test/test_ocgis/test_calc/test_library/test_index/test_dynamic_kernel_percentile.py index d6d7ad962..b2a364526 100644 --- a/src/ocgis/test/test_ocgis/test_calc/test_library/test_index/test_dynamic_kernel_percentile.py +++ b/src/ocgis/test/test_ocgis/test_calc/test_library/test_index/test_dynamic_kernel_percentile.py @@ -1,12 +1,13 @@ -from ocgis.api.operations import OcgOperations -from ocgis.api.request.base import RequestDataset import netCDF4 as nc -from ocgis.test.base import TestBase, nc_scope import itertools import datetime + import numpy as np + +from ocgis.api.operations import OcgOperations +from ocgis.api.request.base import RequestDataset +from ocgis.test.base import TestBase, nc_scope, attr from ocgis.calc.library.index.dynamic_kernel_percentile import DynamicDailyKernelPercentileThreshold -from ocgis.test.test_base import longrunning class TestDynamicDailyKernelPercentileThreshold(TestBase): @@ -82,7 +83,7 @@ def test_calculate(self): self.assertEqual(ret['tg10p'].value.shape,(1,36,1,64,128)) self.assertAlmostEqual(ret['tg10p'].value.mean(),3.6267225477430554) - @longrunning + @attr('slow') def test_operations(self): uri = self.test_data.get_uri('cancm4_tas') rd = RequestDataset(uri=uri, @@ -99,7 +100,7 @@ def test_operations(self): ref = ds.variables['tg10p'][:] self.assertAlmostEqual(ref.mean(),2.9778004964192708) - @longrunning + @attr('slow') def test_operations_two_steps(self): ## get the request dataset to use as the basis for the percentiles uri = self.test_data.get_uri('cancm4_tas') diff --git a/src/ocgis/test/test_ocgis/test_contrib/test_library_icclim.py b/src/ocgis/test/test_ocgis/test_contrib/test_library_icclim.py index 0913b543c..9672b5537 100644 --- a/src/ocgis/test/test_ocgis/test_contrib/test_library_icclim.py +++ b/src/ocgis/test/test_ocgis/test_contrib/test_library_icclim.py @@ -1,5 +1,11 @@ import unittest -from ocgis.test.base import TestBase, nc_scope +import json +from collections import OrderedDict +from copy import 
deepcopy + +import numpy as np + +from ocgis.test.base import TestBase, nc_scope, attr from ocgis.contrib.library_icclim import IcclimTG, IcclimSU, AbstractIcclimFunction,\ IcclimDTR, IcclimETR, IcclimTN, IcclimTX,\ AbstractIcclimUnivariateSetFunction, AbstractIcclimMultivariateFunction @@ -10,18 +16,12 @@ from ocgis.api.operations import OcgOperations from ocgis.calc.library.thresholds import Threshold import ocgis -from ocgis.test.test_base import longrunning -import numpy as np -import json -from collections import OrderedDict -from copy import deepcopy from ocgis.util.helpers import itersubclasses from ocgis.contrib import library_icclim class TestLibraryIcclim(TestBase): -# @longrunning def test_standard_AbstractIcclimFunction(self): shapes = ([('month',), 12],[('month', 'year'), 24],[('year',),2]) ocgis.env.OVERWRITE = True @@ -259,7 +259,7 @@ def test_calculation_operations_to_nc(self): u'long_name': 'Summer days (number of days where daily maximum temperature > 25 degrees)', 'grid_mapping': 'latitude_longitude'}) - @longrunning + @attr('remote') def test_calculate_opendap(self): ## test against an opendap target ensuring icclim and ocgis operations ## are equivalent in the netcdf output diff --git a/src/ocgis/test/test_ocgis/test_conv/test_esmpy.py b/src/ocgis/test/test_ocgis/test_conv/test_esmpy.py index 02bbcd81d..bdc8b1d12 100644 --- a/src/ocgis/test/test_ocgis/test_conv/test_esmpy.py +++ b/src/ocgis/test/test_ocgis/test_conv/test_esmpy.py @@ -1,5 +1,4 @@ from copy import deepcopy -from unittest import SkipTest import ESMF import numpy as np @@ -8,14 +7,13 @@ from ocgis import SpatialCollection, OcgOperations from ocgis.conv.base import AbstractConverter from ocgis.conv.esmpy import ESMPyConverter +from ocgis.test.base import attr from ocgis.test.test_ocgis.test_conv.test_base import AbstractTestConverter +@attr('esmpy7') class TestESMPyConverter(AbstractTestConverter): - def setUp(self): - raise SkipTest - def get_conv(self, with_corners=True, 
value_mask=None, esmf_field_name=None, field=None): coll = self.get_spatial_collection(field=field) conv = ESMPyConverter([coll], with_corners=with_corners, value_mask=value_mask, esmf_field_name=esmf_field_name) diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py index 7639228f2..96bc449aa 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py @@ -1,8 +1,6 @@ from copy import deepcopy, copy import os import itertools -from importlib import import_module -from unittest.case import SkipTest import numpy as np from shapely import wkt @@ -502,19 +500,15 @@ def test_get_clip(self): poly = make_poly((37.75,38.25),(-100.25,-99.75)) for b in [True,False]: - try: - ret = sdim.get_clip(poly,use_spatial_index=b) - - self.assertEqual(ret.uid,np.array([[9]])) - self.assertTrue(poly.almost_equals(ret.geom.polygon.value[0,0])) - - self.assertEqual(ret.geom.point.value.shape,ret.geom.polygon.shape) - ref_pt = ret.geom.point.value[0,0] - ref_poly = ret.geom.polygon.value[0,0] - self.assertTrue(ref_poly.intersects(ref_pt)) - except ImportError: - with self.assertRaises(ImportError): - import_module('rtree') + ret = sdim.get_clip(poly,use_spatial_index=b) + + self.assertEqual(ret.uid,np.array([[9]])) + self.assertTrue(poly.almost_equals(ret.geom.polygon.value[0,0])) + + self.assertEqual(ret.geom.point.value.shape,ret.geom.polygon.shape) + ref_pt = ret.geom.point.value[0,0] + ref_poly = ret.geom.polygon.value[0,0] + self.assertTrue(ref_poly.intersects(ref_pt)) def test_get_geom_iter(self): sdim = self.get_sdim(bounds=True) @@ -560,52 +554,40 @@ def test_get_intersects_polygon_small(self): sdim = self.get_sdim(bounds=b) poly = make_poly((37.75,38.25),(-100.25,-99.75)) for u in [True,False]: - try: - ret = 
sdim.get_intersects(poly,use_spatial_index=u) - to_test = np.ma.array([[[38.]],[[-100.]]],mask=False) - self.assertNumpyAll(ret.grid.value,to_test) - self.assertNumpyAll(ret.uid,np.ma.array([[9]],dtype=constants.np_int)) - self.assertEqual(ret.shape,(1,1)) - to_test = ret.geom.point.value.compressed()[0] - self.assertTrue(to_test.almost_equals(Point(-100,38))) - if b is False: - self.assertIsNone(ret.geom.polygon) - else: - to_test = ret.geom.polygon.value.compressed()[0].bounds - self.assertEqual((-100.5,37.5,-99.5,38.5),to_test) - except ImportError: - with self.assertRaises(ImportError): - import_module('rtree') + ret = sdim.get_intersects(poly,use_spatial_index=u) + to_test = np.ma.array([[[38.]],[[-100.]]],mask=False) + self.assertNumpyAll(ret.grid.value,to_test) + self.assertNumpyAll(ret.uid,np.ma.array([[9]],dtype=constants.np_int)) + self.assertEqual(ret.shape,(1,1)) + to_test = ret.geom.point.value.compressed()[0] + self.assertTrue(to_test.almost_equals(Point(-100,38))) + if b is False: + self.assertIsNone(ret.geom.polygon) + else: + to_test = ret.geom.polygon.value.compressed()[0].bounds + self.assertEqual((-100.5,37.5,-99.5,38.5),to_test) def test_get_intersects_polygon_no_point_overlap(self): for b in [True,False]: sdim = self.get_sdim(bounds=b) poly = make_poly((39.25,39.75),(-97.75,-97.25)) for u in [True,False]: - try: - if b is False: - with self.assertRaises(EmptySubsetError): - sdim.get_intersects(poly,use_spatial_index=u) - else: - ret = sdim.get_intersects(poly,use_spatial_index=u) - self.assertEqual(ret.shape,(2,2)) - except ImportError: - with self.assertRaises(ImportError): - import_module('rtree') + if b is False: + with self.assertRaises(EmptySubsetError): + sdim.get_intersects(poly,use_spatial_index=u) + else: + ret = sdim.get_intersects(poly,use_spatial_index=u) + self.assertEqual(ret.shape,(2,2)) def test_get_intersects_polygon_all(self): for b in [True,False]: sdim = self.get_sdim(bounds=b) poly = make_poly((37,41),(-101,-96)) for u in 
[True,False]: - try: - ret = sdim.get_intersects(poly,use_spatial_index=u) - self.assertNumpyAll(sdim.grid.value,ret.grid.value) - self.assertNumpyAll(sdim.grid.value.mask[0,:,:],sdim.geom.point.value.mask) - self.assertEqual(ret.shape,(3,4)) - except ImportError: - with self.assertRaises(ImportError): - import_module('rtree') + ret = sdim.get_intersects(poly,use_spatial_index=u) + self.assertNumpyAll(sdim.grid.value,ret.grid.value) + self.assertNumpyAll(sdim.grid.value.mask[0,:,:],sdim.geom.point.value.mask) + self.assertEqual(ret.shape,(3,4)) def test_get_intersects_polygon_empty(self): for b in [True,False]: @@ -632,28 +614,19 @@ def test_geom_mask_by_polygon(self): subset_polygon = sdim[:,select].geom.polygon.value[0,0] for b in [True,False]: - try: - msked = spdim.get_intersects_masked(subset_polygon,use_spatial_index=b) - - self.assertEqual(msked.value.mask.sum(),50) + msked = spdim.get_intersects_masked(subset_polygon, use_spatial_index=b) + + self.assertEqual(msked.value.mask.sum(), 50) + self.assertTrue(msked.value.compressed()[0].almost_equals(subset_polygon)) + + with self.assertRaises(NotImplementedError): + msked = spdim.get_intersects_masked(subset_polygon.centroid) self.assertTrue(msked.value.compressed()[0].almost_equals(subset_polygon)) - - with self.assertRaises(NotImplementedError): - msked = spdim.get_intersects_masked(subset_polygon.centroid) - self.assertTrue(msked.value.compressed()[0].almost_equals(subset_polygon)) - - with self.assertRaises(EmptySubsetError): - spdim.get_intersects_masked(Point(1000,1000).buffer(1)) - except ImportError: - with self.assertRaises(ImportError): - import_module('rtree') + + with self.assertRaises(EmptySubsetError): + spdim.get_intersects_masked(Point(1000, 1000).buffer(1)) def test_geom_mask_by_polygon_equivalent_without_spatial_index(self): - try: - import_module('rtree') - except ImportError: - raise(SkipTest('rtree not available for import')) - sdim = self.get_spatial_dimension_from_records() spdim = 
sdim.geom.polygon ref = spdim.value.mask diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py index 3ca9aad49..80ca65118 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py @@ -1,11 +1,8 @@ -from netCDF4 import date2num import os -import unittest from datetime import datetime as dt import datetime import itertools from copy import deepcopy -from importlib import import_module from collections import OrderedDict import numpy as np @@ -16,7 +13,6 @@ from ocgis import RequestDataset from ocgis.interface.base.attributes import Attributes from ocgis.interface.base.crs import WGS84, Spherical -from ocgis.interface.nc.temporal import NcTemporalDimension from ocgis.util.helpers import get_date_list, make_poly from ocgis.interface.base.dimension.base import VectorDimension from ocgis.interface.base.dimension.spatial import SpatialGridDimension, SpatialDimension @@ -200,56 +196,44 @@ def test_get_aggregated_irregular(self): single = wkt.loads('POLYGON((-99.894355 40.230645,-98.725806 40.196774,-97.726613 40.027419,-97.032258 39.942742,-97.681452 39.626613,-97.850806 39.299194,-98.178226 39.643548,-98.844355 39.920161,-99.894355 40.230645))') field = self.get_field(with_value=True) for b in [True,False]: - try: - ret = field.get_clip(single,use_spatial_index=b) - agg = ret.get_spatially_aggregated() - to_test = agg.spatial.geom.polygon.value[0,0] - self.assertAlmostEqual(to_test.area,single.area) - self.assertAlmostEqual(to_test.bounds,single.bounds) - self.assertAlmostEqual(to_test.exterior.length,single.exterior.length) - except ImportError: - with self.assertRaises(ImportError): - import_module('rtree') + ret = field.get_clip(single,use_spatial_index=b) + agg = ret.get_spatially_aggregated() + to_test = agg.spatial.geom.polygon.value[0,0] + 
self.assertAlmostEqual(to_test.area,single.area) + self.assertAlmostEqual(to_test.bounds,single.bounds) + self.assertAlmostEqual(to_test.exterior.length,single.exterior.length) def test_get_clip_single_cell(self): single = wkt.loads('POLYGON((-97.997731 39.339322,-97.709012 39.292322,-97.742584 38.996888,-97.668726 38.641026,-98.158876 38.708170,-98.340165 38.916316,-98.273021 39.218463,-97.997731 39.339322))') field = self.get_field(with_value=True) for b in [True,False]: - try: - ret = field.get_clip(single,use_spatial_index=b) - self.assertEqual(ret.shape,(2,31,2,1,1)) - self.assertEqual(ret.spatial.grid._value.sum(),-59.0) - self.assertTrue(ret.spatial.geom.polygon.value[0,0].almost_equals(single)) - self.assertEqual(ret.spatial.uid,np.array([[7]])) - - self.assertEqual(ret.spatial.geom.point.value.shape,ret.spatial.geom.polygon.shape) - ref_pt = ret.spatial.geom.point.value[0,0] - ref_poly = ret.spatial.geom.polygon.value[0,0] - self.assertTrue(ref_poly.intersects(ref_pt)) - except ImportError: - with self.assertRaises(ImportError): - import_module('rtree') + ret = field.get_clip(single,use_spatial_index=b) + self.assertEqual(ret.shape,(2,31,2,1,1)) + self.assertEqual(ret.spatial.grid._value.sum(),-59.0) + self.assertTrue(ret.spatial.geom.polygon.value[0,0].almost_equals(single)) + self.assertEqual(ret.spatial.uid,np.array([[7]])) + + self.assertEqual(ret.spatial.geom.point.value.shape,ret.spatial.geom.polygon.shape) + ref_pt = ret.spatial.geom.point.value[0,0] + ref_poly = ret.spatial.geom.polygon.value[0,0] + self.assertTrue(ref_poly.intersects(ref_pt)) def test_get_clip_irregular(self): for wv in [True,False]: single = wkt.loads('POLYGON((-99.894355 40.230645,-98.725806 40.196774,-97.726613 40.027419,-97.032258 39.942742,-97.681452 39.626613,-97.850806 39.299194,-98.178226 39.643548,-98.844355 39.920161,-99.894355 40.230645))') field = self.get_field(with_value=wv) for b in [True,False]: - try: - ret = field.get_clip(single,use_spatial_index=b) - 
self.assertEqual(ret.shape,(2,31,2,2,4)) - unioned = cascaded_union([geom for geom in ret.spatial.geom.polygon.value.compressed().flat]) - self.assertAlmostEqual(unioned.area,single.area) - self.assertAlmostEqual(unioned.bounds,single.bounds) - self.assertAlmostEqual(unioned.exterior.length,single.exterior.length) - self.assertAlmostEqual(ret.spatial.weights[1,2],0.064016424) - self.assertAlmostEqual(ret.spatial.weights.sum(),1.776435) - if not wv: - with self.assertRaises(NotImplementedError): - ret.variables['tmax'].value - except ImportError: - with self.assertRaises(ImportError): - import_module('rtree') + ret = field.get_clip(single,use_spatial_index=b) + self.assertEqual(ret.shape,(2,31,2,2,4)) + unioned = cascaded_union([geom for geom in ret.spatial.geom.polygon.value.compressed().flat]) + self.assertAlmostEqual(unioned.area,single.area) + self.assertAlmostEqual(unioned.bounds,single.bounds) + self.assertAlmostEqual(unioned.exterior.length,single.exterior.length) + self.assertAlmostEqual(ret.spatial.weights[1,2],0.064016424) + self.assertAlmostEqual(ret.spatial.weights.sum(),1.776435) + if not wv: + with self.assertRaises(NotImplementedError): + ret.variables['tmax'].value def test_get_iter(self): field = self.get_field(with_value=True) @@ -286,34 +270,26 @@ def test_get_intersects_domain_polygon(self): regular = make_poly((36.61,41.39),(-101.41,-95.47)) field = self.get_field(with_value=True) for b in [True,False]: - try: - ret = field.get_intersects(regular,use_spatial_index=b) - self.assertNumpyAll(ret.variables['tmax'].value,field.variables['tmax'].value) - self.assertNumpyAll(field.spatial.grid.value,ret.spatial.grid.value) - except ImportError: - with self.assertRaises(ImportError): - import_module('rtree') + ret = field.get_intersects(regular,use_spatial_index=b) + self.assertNumpyAll(ret.variables['tmax'].value,field.variables['tmax'].value) + self.assertNumpyAll(field.spatial.grid.value,ret.spatial.grid.value) def 
test_get_intersects_irregular_polygon(self): irregular = wkt.loads('POLYGON((-100.106049 38.211305,-99.286894 38.251591,-99.286894 38.258306,-99.286894 38.258306,-99.260036 39.252035,-98.769886 39.252035,-98.722885 37.734583,-100.092620 37.714440,-100.106049 38.211305))') keywords = dict(b=[True, False], with_corners=[True, False]) for k in itr_products_keywords(keywords, as_namedtuple=True): - try: - field = self.get_field(with_value=True) - if k.with_corners: - field.spatial.grid.corners - ret = field.get_intersects(irregular,use_spatial_index=k.b) - self.assertEqual(ret.shape,(2,31,2,2,2)) - self.assertNumpyAll(ret.variables['tmax'].value.mask[0,2,1,:,:],np.array([[True,False],[False,False]])) - self.assertEqual(ret.spatial.uid.data[ret.spatial.get_mask()][0],5) - if k.with_corners: - self.assertNumpyAll(ret.spatial.grid.corners.mask, np.array([[[[True, True, True, True], [False, False, False, False]], [[False, False, False, False], [False, False, False, False]]], [[[True, True, True, True], [False, False, False, False]], [[False, False, False, False], [False, False, False, False]]]])) - else: - self.assertIsNone(ret.spatial.grid._corners) - except ImportError: - with self.assertRaises(ImportError): - import_module('rtree') + field = self.get_field(with_value=True) + if k.with_corners: + field.spatial.grid.corners + ret = field.get_intersects(irregular,use_spatial_index=k.b) + self.assertEqual(ret.shape,(2,31,2,2,2)) + self.assertNumpyAll(ret.variables['tmax'].value.mask[0,2,1,:,:],np.array([[True,False],[False,False]])) + self.assertEqual(ret.spatial.uid.data[ret.spatial.get_mask()][0],5) + if k.with_corners: + self.assertNumpyAll(ret.spatial.grid.corners.mask, np.array([[[[True, True, True, True], [False, False, False, False]], [[False, False, False, False], [False, False, False, False]]], [[[True, True, True, True], [False, False, False, False]], [[False, False, False, False], [False, False, False, False]]]])) + else: + 
self.assertIsNone(ret.spatial.grid._corners) def test_get_intersects_single_bounds_row(self): field = self.get_field(with_value=True) diff --git a/src/ocgis/test/test_ocgis/test_regrid/test_base.py b/src/ocgis/test/test_ocgis/test_regrid/test_base.py index 72f0db88f..dc7dffbfb 100644 --- a/src/ocgis/test/test_ocgis/test_regrid/test_base.py +++ b/src/ocgis/test/test_ocgis/test_regrid/test_base.py @@ -1,5 +1,4 @@ from copy import deepcopy -from unittest import SkipTest import itertools import ESMF @@ -18,6 +17,7 @@ from ocgis.interface.base.variable import VariableCollection, Variable from ocgis.regrid.base import check_fields_for_regridding, iter_regridded_fields, get_esmf_grid_from_sdim, \ iter_esmf_fields, get_sdim_from_esmf_grid, get_ocgis_field_from_esmpy_field +from ocgis.test.base import attr from ocgis.test.test_simple.make_test_data import SimpleNc from ocgis.test.test_simple.test_simple import TestSimpleBase from ocgis.util.helpers import make_poly @@ -618,8 +618,8 @@ def test_get_esmf_grid_from_sdim_value_mask(self): egrid = get_esmf_grid_from_sdim(field.spatial, value_mask=value_mask) self.assertNumpyAll(egrid.mask[0], np.invert(value_mask.astype(bool)).astype(egrid.mask[0].dtype)) + @attr('esmpy7') def test_get_ocgis_field_from_esmpy_field(self): - raise SkipTest #todo: return spherical crs if none is passed. 
check something on the grid np.random.seed(1) temporal = TemporalDimension(value=[3000., 4000., 5000.]) @@ -720,9 +720,10 @@ def test_get_ocgis_field_from_esmpy_field(self): self.assertTrue(np.may_share_memory(ofield_tmin_value, efield)) self.assertFalse(np.may_share_memory(ofield_tmin_value, tmin.value)) + @attr('esmpy7') def test_get_ocgis_field_from_esmpy_spatial_only(self): """Test with spatial information only.""" - raise SkipTest + row = VectorDimension(value=[5, 6]) col = VectorDimension(value=[7, 8]) grid = SpatialGridDimension(row=row, col=col) diff --git a/src/ocgis/test/test_ocgis/test_util/test_large_array.py b/src/ocgis/test/test_ocgis/test_util/test_large_array.py index 58d79872b..e7bf1b89d 100644 --- a/src/ocgis/test/test_ocgis/test_util/test_large_array.py +++ b/src/ocgis/test/test_ocgis/test_util/test_large_array.py @@ -1,13 +1,14 @@ -from ocgis.test.base import TestBase -import ocgis -from ocgis.util.large_array import compute import netCDF4 as nc +from copy import deepcopy +import time + import numpy as np + +from ocgis.test.base import TestBase, attr +import ocgis +from ocgis.util.large_array import compute from ocgis.calc import tile from ocgis.api.request.base import RequestDatasetCollection -from ocgis.test.test_base import longrunning -from copy import deepcopy -import time class Test(TestBase): @@ -32,7 +33,7 @@ def callback(a, b): hundreds = hundreds >= 100.0 self.assertEqual(hundreds.sum(), 1) - @longrunning + @attr('slow') def test_timing_use_optimizations(self): n = range(10) t = {True:[],False:[]} @@ -133,7 +134,7 @@ def test_compute_small(self): ret_ocgis = ops.execute() self.assertNcEqual(ret_compute,ret_ocgis,ignore_attributes={'global': ['history']}) - @longrunning + @attr('slow') def test_compute_large(self): """Test calculations using compute are equivalent with standard calculations.""" diff --git a/src/ocgis/test/test_real_data/test_combinatorial.py b/src/ocgis/test/test_real_data/test_combinatorial.py index 
65c79ada2..a713744cb 100644 --- a/src/ocgis/test/test_real_data/test_combinatorial.py +++ b/src/ocgis/test/test_real_data/test_combinatorial.py @@ -1,8 +1,8 @@ import os import shutil -from ocgis.test.test_base import longrunning -from ocgis import OcgOperations, RequestDataset -from ocgis.test.base import TestBase + +from ocgis import OcgOperations +from ocgis.test.base import TestBase, attr class TestCombinatorial(TestBase): @@ -20,7 +20,7 @@ def iter_dataset(self): else: yield k, rd.get() - @longrunning + @attr('slow') def test(self): import logbook diff --git a/src/ocgis/test/test_real_data/test_multiple_datasets.py b/src/ocgis/test/test_real_data/test_multiple_datasets.py index a9526ba2c..cbc4623cf 100644 --- a/src/ocgis/test/test_real_data/test_multiple_datasets.py +++ b/src/ocgis/test/test_real_data/test_multiple_datasets.py @@ -1,15 +1,16 @@ -from ocgis.api.operations import OcgOperations from itertools import izip +import os +from copy import deepcopy + import numpy as np -from ocgis.test.base import TestBase -import ocgis import fiona -import os + +from ocgis.api.operations import OcgOperations +from ocgis.test.base import TestBase, attr +import ocgis from ocgis.exc import DefinitionValidationError from ocgis.util.shp_cabinet import ShpCabinetIterator -from copy import deepcopy from ocgis.interface.base.crs import CFWGS84, CoordinateReferenceSystem -from ocgis.test.test_base import longrunning class Test(TestBase): @@ -133,7 +134,7 @@ def test_same_variable_name(self): values = [v.variables[k] for k,v in ret[1].iteritems()] self.assertTrue(np.all(values[0].value == values[1].value)) - @longrunning + @attr('slow') def test_consolidating_projections(self): def assert_projection(path,check_ugid=True): diff --git a/src/ocgis/test/test_real_data/test_narccap.py b/src/ocgis/test/test_real_data/test_narccap.py index 10ec7bfc1..d544ae717 100644 --- a/src/ocgis/test/test_real_data/test_narccap.py +++ b/src/ocgis/test/test_real_data/test_narccap.py @@ -1,12 +1,13 
@@ import unittest -from ocgis.test.base import TestBase, nc_scope import os + +import numpy as np + +from ocgis.test.base import TestBase, nc_scope, attr from ocgis.api.request.base import RequestDataset import ocgis from ocgis.api.operations import OcgOperations -import numpy as np from ocgis.exc import DefinitionValidationError, ExtentError -from ocgis.test.test_base import longrunning from ocgis.interface.base.crs import CFRotatedPole, CFWGS84 @@ -113,7 +114,7 @@ def test_cf_lambert_conformal(self): crs = field.spatial.crs self.assertDictEqual(crs.value,{'lon_0': -97, 'ellps': 'WGS84', 'y_0': 2700000, 'no_defs': True, 'proj': 'lcc', 'x_0': 3325000, 'units': 'm', 'lat_2': 60, 'lat_1': 30, 'lat_0': 47.5}) - @longrunning + @attr('slow') def test_read_write_projections(self): """Test NARCCAP coordinate systems may be appropriately read and written to NetCDF.""" diff --git a/src/ocgis/test/test_real_data/test_random_datasets.py b/src/ocgis/test/test_real_data/test_random_datasets.py index 2ceb91478..a1088e1b8 100644 --- a/src/ocgis/test/test_real_data/test_random_datasets.py +++ b/src/ocgis/test/test_real_data/test_random_datasets.py @@ -1,28 +1,26 @@ -from netCDF4 import date2num -from ocgis.util.inspect import Inspect -import ocgis -from ocgis.calc.library.index.dynamic_kernel_percentile import DynamicDailyKernelPercentileThreshold -from ocgis.test.base import TestBase, nc_scope import itertools -from ocgis.api.operations import OcgOperations from datetime import datetime as dt -from ocgis.exc import DefinitionValidationError, MaskedDataError, ExtentError, RequestValidationError -import numpy as np import unittest -from ocgis.interface.base.crs import CFWGS84 -import fiona from csv import DictReader -from ocgis.api.request.base import RequestDataset from copy import deepcopy -from ocgis.test.test_base import longrunning -from shapely.geometry.point import Point -from ocgis.util.shp_cabinet import ShpCabinetIterator import os +import numpy as np +import fiona +from 
shapely.geometry.point import Point + +import ocgis +from ocgis.calc.library.index.dynamic_kernel_percentile import DynamicDailyKernelPercentileThreshold +from ocgis.test.base import TestBase, nc_scope, attr +from ocgis.api.operations import OcgOperations +from ocgis.exc import MaskedDataError, ExtentError, RequestValidationError +from ocgis.interface.base.crs import CFWGS84 +from ocgis.api.request.base import RequestDataset + class TestCMIP3Masking(TestBase): - @longrunning + @attr('slow') def test_many_request_datasets(self): rd_base = self.test_data.get_rd('subset_test_Prcp') geom = [-74.0, 40.0, -72.0, 42.0] @@ -289,7 +287,7 @@ def test_selecting_single_value(self): values = np.squeeze(ds.variables['tas'][:]) self.assertNumpyAll(data_values,values) - @longrunning + @attr('slow') def test_value_conversion(self): ## confirm value data types are properly converted ocgis.env.DIR_DATA = ocgis.env.DIR_TEST_DATA @@ -313,7 +311,7 @@ def test_qed_multifile(self): field = rd.get() self.assertEqual(field.shape, (1, 3, 1, 222, 462)) - @longrunning + @attr('slow') def test_maurer_concatenated_shp(self): """Test Maurer concatenated data may be appropriately subsetted.""" @@ -349,7 +347,7 @@ def test_point_shapefile_subset(self): if output_format == 'numpy': self.assertEqual(len(ret), 4) - @longrunning + @attr('slow') def test_maurer_concatenated_tasmax_region(self): rd = self.test_data.get_rd('maurer_2010_concatenated_tasmax') ops = ocgis.OcgOperations(dataset=rd, geom='us_counties', select_ugid=[2778], @@ -420,7 +418,7 @@ def test_time_range_time_region_do_not_overlap(self): with self.assertRaises(RequestValidationError): self.test_data.get_rd('cancm4_rhs',kwds=kwds) - @longrunning + @attr('slow') def test_maurer_2010(self): ## inspect the multi-file maurer datasets keys = ['maurer_2010_pr','maurer_2010_tas','maurer_2010_tasmin','maurer_2010_tasmax'] @@ -457,7 +455,7 @@ def test_clip_aggregate(self): aggregate=False,spatial_operation='clip',output_format='csv+') ret = 
ops.execute() - @longrunning + @attr('slow') def test_narccap_point_subset_small(self): rd = self.test_data.get_rd('narccap_pr_wrfg_ncep') geom = [-97.74278,30.26694] diff --git a/src/ocgis/test/test_simple/test_simple.py b/src/ocgis/test/test_simple/test_simple.py index a8d3b7bc9..f63598cf8 100644 --- a/src/ocgis/test/test_simple/test_simple.py +++ b/src/ocgis/test/test_simple/test_simple.py @@ -25,7 +25,7 @@ from ocgis.api.parms.definition import SpatialOperation from ocgis.util.helpers import make_poly, project_shapely_geometry from ocgis import exc, env, constants -from ocgis.test.base import TestBase, nc_scope +from ocgis.test.base import TestBase, nc_scope, attr import ocgis from ocgis.exc import ExtentError, DefinitionValidationError from ocgis.interface.base import crs @@ -33,7 +33,6 @@ from ocgis.api.request.base import RequestDataset, RequestDatasetCollection from ocgis.test.test_simple.make_test_data import SimpleNcNoLevel, SimpleNc, SimpleNcNoBounds, SimpleMaskNc, \ SimpleNc360, SimpleNcProjection, SimpleNcNoSpatialBounds, SimpleNcMultivariate -from ocgis.test.test_base import longrunning from ocgis.api.parms.definition import OutputFormat from ocgis.interface.base.field import DerivedMultivariateField from ocgis.util.itester import itr_products_keywords @@ -642,7 +641,7 @@ def test_calc_eval_multivariate(self): with nc_scope(ret) as ds: self.assertEqual(ds.variables['foo3'][:].mean(), 9.0) - @longrunning + @attr('slow') def test_calc_sample_size(self): rd1 = self.get_dataset() rd1['alias'] = 'var1' @@ -834,7 +833,7 @@ def test_limiting_headers(self): with self.assertRaises(DefinitionValidationError): OcgOperations(dataset=self.get_dataset(),headers=['foo'],output_format='csv') - @longrunning + @attr('slow') def test_combinatorial_projection_with_geometries(self): # self.get_ret(kwds={'output_format':'shp','prefix':'as_polygon'}) diff --git a/src/ocgis/test/test_unfiled/test_remote_data.py b/src/ocgis/test/test_unfiled/test_remote_data.py index 
5ae7a704b..73f720fdc 100644 --- a/src/ocgis/test/test_unfiled/test_remote_data.py +++ b/src/ocgis/test/test_unfiled/test_remote_data.py @@ -1,27 +1,16 @@ -import unittest -from ocgis.test.base import TestBase +from ocgis.test.base import TestBase, attr import ocgis -from unittest.case import SkipTest class Test(TestBase): - + + @attr('remote') def test_geodataportal_prism(self): - raise(SkipTest('file is missing or unreadable')) uri = 'http://cida.usgs.gov/thredds/dodsC/prism' - for variable in ['tmx','tmn','ppt']: -# ocgis.env.VERBOSE = True -# ocgis.env.DEBUG = True - rd = ocgis.RequestDataset(uri,variable,t_calendar='standard') -# dct = rd.inspect_as_dct() - ops = ocgis.OcgOperations(dataset=rd,geom='state_boundaries',select_ugid=[25], - snippet=True,output_format='numpy',aggregate=False, + for variable in ['tmx', 'tmn', 'ppt']: + rd = ocgis.RequestDataset(uri, variable, t_calendar='standard') + ops = ocgis.OcgOperations(dataset=rd, geom='state_boundaries', select_ugid=[25], + snippet=True, output_format='numpy', aggregate=False, prefix=variable) ret = ops.execute() -# print(ret) - self.assertEqual(ret[25].variables[variable].value.shape,(1,1,227,246)) - - -if __name__ == "__main__": - #import sys;sys.argv = ['', 'Test.testName'] - unittest.main() \ No newline at end of file + self.assertEqual(ret[25].variables[variable].value.shape, (1, 1, 227, 246)) From 4d02e513a4b146b7320df3a49625d0f0b42c7025 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Tue, 9 Dec 2014 14:43:34 -0800 Subject: [PATCH 29/71] add method to write user geometries to collection Added a "write_ugeom" method to "SpatialCollection". The UGID CSV file was also removed. 
--- src/ocgis/api/collection.py | 62 +++++++ src/ocgis/conv/base.py | 34 ++-- src/ocgis/conv/nc.py | 167 +----------------- .../test_ocgis/test_api/test_collection.py | 101 +++++++++-- .../test/test_ocgis/test_conv/test_csv_shp.py | 20 +-- src/ocgis/test/test_simple/test_simple.py | 13 +- 6 files changed, 178 insertions(+), 219 deletions(-) diff --git a/src/ocgis/api/collection.py b/src/ocgis/api/collection.py index 5af860ae9..40bd56da3 100644 --- a/src/ocgis/api/collection.py +++ b/src/ocgis/api/collection.py @@ -1,7 +1,13 @@ import abc from collections import OrderedDict + +import fiona +from shapely.geometry import mapping, MultiPoint, MultiPolygon +from shapely.geometry.base import BaseMultipartGeometry + from ocgis.interface.base.crs import CFWGS84 from ocgis import constants +from ocgis.util.helpers import get_ordered_dicts_from_records_array from ocgis.util.logging_ocgis import ocgis_lh @@ -88,6 +94,7 @@ def values(self): class SpatialCollection(AbstractCollection): _default_headers = constants.raw_headers + _multi_cast = {'Point': MultiPoint, 'Polygon': MultiPolygon} def __init__(self, meta=None, key=None, crs=None, headers=None, value_keys=None): super(SpatialCollection, self).__init__() @@ -173,3 +180,58 @@ def gvu(self,ugid,alias_variable,alias_field=None): else: field = ref[alias_field] return(field.variables[alias_variable].value) + + def write_ugeom(self, path=None, driver='ESRI Shapefile', fobject=None): + """ + Write the user geometries to a ``fiona``-supported file format. + + :param str path: Full path of file to write. If ``None``, ``fobject`` is required. + :param str driver: The ``fiona`` driver to use for writing. Ignored if ``fobject`` is provided. + :param fobject: An open ``fiona`` file object to write to. If ``path`` is provided and this is not ``None``, + then ``path`` will be ignored. 
+ :type fobject: :class:`fiona.collection.Collection` + """ + + from ocgis.conv.fiona_ import FionaConverter + + build = True if fobject is None else False + is_open = False + needs_casting = False + try: + for ugid, geom in self.geoms.iteritems(): + if build: + # it is possible to end with a mix of singleton and multi-geometries + type_check = set() + for check_geom in self.geoms.itervalues(): + type_check.update([check_geom.geom_type]) + if len(type_check) > 1: + needs_casting = True + for xx in type_check: + if xx.startswith('Multi'): + geometry = xx + else: + cast_target_key = xx + else: + geometry = type_check.pop() + + fiona_properties = OrderedDict() + archetype_properties = self.properties[ugid] + for name in archetype_properties.dtype.names: + fiona_properties[name] = FionaConverter.get_field_type(type(archetype_properties[name][0])) + fiona_schema = {'geometry': geometry, 'properties': fiona_properties} + fiona_kwds = {'schema': fiona_schema, 'driver': driver, 'mode': 'w'} + if self.crs is not None: + fiona_kwds['crs'] = self.crs.value + fobject = fiona.open(path, **fiona_kwds) + is_open = True + build = False + properties = get_ordered_dicts_from_records_array(self.properties[ugid])[0] + if needs_casting: + if not isinstance(geom, BaseMultipartGeometry): + geom = self._multi_cast[cast_target_key]([geom]) + mapped_geom = mapping(geom) + record = {'geometry': mapped_geom, 'properties': properties} + fobject.write(record) + finally: + if is_open: + fobject.close() diff --git a/src/ocgis/conv/base.py b/src/ocgis/conv/base.py index 5f4e4d018..5e74be9bb 100644 --- a/src/ocgis/conv/base.py +++ b/src/ocgis/conv/base.py @@ -2,16 +2,13 @@ import abc import csv import logging -from csv import DictWriter from shapely.geometry.multipolygon import MultiPolygon from shapely.geometry.polygon import Polygon import fiona -from shapely.geometry.geo import mapping from ocgis.interface.base.field import Field from ocgis.conv.meta import MetaConverter -from ocgis.util.helpers 
import get_ordered_dicts_from_records_array from ocgis.util.inspect import Inspect from ocgis.util.logging_ocgis import ocgis_lh @@ -27,7 +24,7 @@ class AbstractConverter(object): :param :class:~`ocgis.OcgOperations ops: Optional operations definition. This is required for some converters. :param bool add_meta: If False, do not add a source and OCGIS metadata file. :param bool add_auxiliary_files: If False, do not create an output folder. Write only the target ouput file. - :parm bool overwrite: If True, attempt to overwrite any existing output files. + :param bool overwrite: If True, attempt to overwrite any existing output files. """ __metaclass__ = abc.ABCMeta @@ -121,10 +118,10 @@ def write(self): if self._add_ugeom_nest: fiona_path = os.path.join(self._get_or_create_shp_folder_(),ugid_shp_name) - csv_path = os.path.join(self._get_or_create_shp_folder_(),ugid_csv_name) + # csv_path = os.path.join(self._get_or_create_shp_folder_(),ugid_csv_name) else: fiona_path = os.path.join(self.outdir,ugid_shp_name) - csv_path = os.path.join(self.outdir,ugid_csv_name) + # csv_path = os.path.join(self.outdir,ugid_csv_name) if coll.meta is None: # convert the collection properties to fiona properties @@ -151,11 +148,11 @@ def write(self): fiona_meta['schema']['geometry'] = 'MultiPolygon' fiona_object = fiona.open(fiona_path,'w',**fiona_meta) - csv_file = open(csv_path,'w') + # csv_file = open(csv_path,'w') - from ocgis.conv.csv_ import OcgDialect - csv_object = DictWriter(csv_file,fiona_meta['schema']['properties'].keys(),dialect=OcgDialect) - csv_object.writeheader() + # from ocgis.conv.csv_ import OcgDialect + # csv_object = DictWriter(csv_file,fiona_meta['schema']['properties'].keys(),dialect=OcgDialect) + # csv_object.writeheader() build = False self._write_coll_(f,coll) @@ -174,13 +171,10 @@ def write(self): 'ugid':coll.properties.values()[0]['UGID']}) ## if it is unique write the geometry to the output files - properties_to_append = 
get_ordered_dicts_from_records_array(coll.properties.values()[0])[0] - to_write = {'geometry':mapping(r_geom), - 'properties':properties_to_append} - fiona_object.write(to_write) + coll.write_ugeom(fobject=fiona_object) - ## write the geometry attributes to the corresponding shapefile - csv_object.writerow(properties_to_append) + # ## write the geometry attributes to the corresponding shapefile + # csv_object.writerow(properties_to_append) finally: @@ -203,10 +197,10 @@ def write(self): fiona_object.close() except UnboundLocalError: pass - try: - csv_file.close() - except UnboundLocalError: - pass + # try: + # csv_file.close() + # except UnboundLocalError: + # pass ## the metadata and dataset descriptor files may only be written if ## OCGIS operations are present. diff --git a/src/ocgis/conv/nc.py b/src/ocgis/conv/nc.py index 5fa46788c..a9d0c40ee 100644 --- a/src/ocgis/conv/nc.py +++ b/src/ocgis/conv/nc.py @@ -1,7 +1,8 @@ import datetime +import netCDF4 as nc + import ocgis from ocgis.conv.base import AbstractConverter -import netCDF4 as nc from ocgis import constants @@ -61,169 +62,7 @@ def _write_coll_(self, ds, coll): arch.write_to_netcdf_dataset(ds, file_only=is_file_only) - # ## reference the interfaces - # grid = arch.spatial.grid - # temporal = arch.temporal - # level = arch.level - # meta = arch.meta - # - # # loop through the dimension map, look for a bounds variable, and choose the bounds dimension if possible - # bounds_name = None - # for k, v in meta['dim_map'].iteritems(): - # # it is possible the dimension itself is none - # if v is not None and v['bounds'] is not None: - # bounds_name = meta['variables'][v['bounds']]['dimensions'][1] - # break - # # if the name of the bounds dimension was not found, choose the default - # bounds_name = bounds_name or constants.ocgis_bounds - # - # ## add dataset/global attributes - # for key,value in meta['dataset'].iteritems(): - # setattr(ds,key,value) - # - # ## make dimensions 
##################################################### - # - # ## time dimensions - # name_dim_temporal = meta['dim_map']['T']['dimension'] - # name_bounds_temporal = meta['dim_map']['T']['bounds'] - # name_variable_temporal = meta['dim_map']['T']['variable'] - # - # dim_temporal = ds.createDimension(name_dim_temporal) - # - # ## spatial dimensions - # dim_row = ds.createDimension(grid.row.meta['dimensions'][0],grid.row.shape[0]) - # dim_col = ds.createDimension(grid.col.meta['dimensions'][0],grid.col.shape[0]) - # if grid.row.bounds is None: - # dim_bnds = None - # else: - # dim_bnds = ds.createDimension(bounds_name,2) - # - # ## set data + attributes ############################################### - # - # ## time variable - # time_nc_value = arch.temporal.value - # - # ## if bounds are available for the time vector transform those as well - # - # ## flag to indicate climatology bounds are present and hence the normal - # ## bounds attribute should be not be added. - # has_climatology_bounds = False - # - # if isinstance(temporal,TemporalGroupDimension): - # ## update flag to indicate climatology bounds are present on the - # ## output dataset - # has_climatology_bounds = True - # if dim_bnds is None: - # dim_bnds = ds.createDimension(bounds_name,2) - # times_bounds = ds.createVariable('climatology_bounds',time_nc_value.dtype, - # (dim_temporal._name,bounds_name)) - # times_bounds[:] = temporal.bounds - # ## place units and calendar on time dimensions - # times_bounds.units = temporal.units - # times_bounds.calendar = temporal.calendar - # elif temporal.bounds is not None: - # if dim_bnds is None: - # dim_bnds = ds.createDimension(bounds_name,2) - # time_bounds_nc_value = temporal.bounds - # times_bounds = ds.createVariable(name_bounds_temporal,time_bounds_nc_value.dtype,(dim_temporal._name,bounds_name)) - # times_bounds[:] = time_bounds_nc_value - # for key,value in meta['variables'][name_bounds_temporal]['attrs'].iteritems(): - # setattr(times_bounds,key,value) - 
# ## place units and calendar on time dimensions - # times_bounds.units = temporal.units - # times_bounds.calendar = temporal.calendar - # times = ds.createVariable(name_variable_temporal,time_nc_value.dtype,(dim_temporal._name,)) - # times[:] = time_nc_value - # - # ## always place calendar and units on time dimension - # times.units = temporal.units - # times.calendar = temporal.calendar - # - # ## add time attributes - # for key,value in meta['variables'][name_variable_temporal]['attrs'].iteritems(): - # ## leave off the normal bounds attribute - # if has_climatology_bounds and key == 'bounds': - # if key == 'bounds': - # continue - # setattr(times,key,value) - # - # ## add climatology bounds - # if isinstance(temporal,TemporalGroupDimension): - # setattr(times,'climatology','climatology_bounds') - # - # ## level variable - # ## if there is no level on the variable no need to build one. - # if level is None: - # dim_level = None - # ## if there is a level, create the dimension and set the variable. 
- # else: - # name_dim_level = meta['dim_map']['Z']['dimension'] - # name_bounds_level = meta['dim_map']['Z']['bounds'] - # name_variable_level = meta['dim_map']['Z']['variable'] - # - # dim_level = ds.createDimension(name_dim_level,len(arch.level.value)) - # levels = ds.createVariable(name_variable_level,arch.level.value.dtype,(dim_level._name,)) - # levels[:] = arch.level.value - # for key,value in meta['variables'][name_variable_level]['attrs'].iteritems(): - # setattr(levels,key,value) - # if level.bounds is not None: - # if dim_bnds is None: - # dim_bnds = ds.createDimension(bounds_name,2) - # levels_bounds = ds.createVariable(name_bounds_level,arch.level.value.dtype,(dim_level._name,bounds_name)) - # levels_bounds[:] = arch.level.bounds - # for key,value in meta['variables'][name_bounds_level]['attrs'].iteritems(): - # setattr(levels,key,value) - # if dim_level is not None: - # value_dims = (dim_temporal._name,dim_level._name,dim_row._name,dim_col._name) - # else: - # value_dims = (dim_temporal._name,dim_row._name,dim_col._name) - # - # ## spatial variables ################################################### - # - # ## create and fill a spatial variable - # def _make_spatial_variable_(ds,name,values,dimension_tuple,meta): - # ret = ds.createVariable(name,values.dtype,[d._name for d in dimension_tuple]) - # ret[:] = values - # ## add variable attributes - # try: - # for key,value in meta['variables'][name]['attrs'].iteritems(): - # setattr(ret,key,value) - # except KeyError: - # pass - # return(ret) - # ## set the spatial data - # _make_spatial_variable_(ds,grid.row.meta['axis']['variable'],grid.row.value,(dim_row,),meta) - # _make_spatial_variable_(ds,grid.col.meta['axis']['variable'],grid.col.value,(dim_col,),meta) - # if grid.row.bounds is not None: - # _make_spatial_variable_(ds,grid.row.meta['axis']['bounds'],grid.row.bounds,(dim_row,dim_bnds),meta) - # _make_spatial_variable_(ds,grid.col.meta['axis']['bounds'],grid.col.bounds,(dim_col,dim_bnds),meta) - # 
- # ## set the variable(s) ################################################# - # - # ## loop through variables - # for variable in arch.variables.itervalues(): - # value = ds.createVariable(variable.alias, variable.dtype, value_dims, - # fill_value=variable.fill_value) - # ## if this is a file only operation, set the value, otherwise leave - # ## it empty for now. - # try: - # is_file_only = self.ops.file_only - # ## this will happen if there is no operations object. - # except AttributeError: - # is_file_only = False - # if not is_file_only: - # value[:] = variable.value.reshape(*value.shape) - # value.setncatts(variable.meta['attrs']) - # ## and the units, converting to string as passing a NoneType will raise - # ## an exception. - # value.units = '' if variable.units is None else variable.units - # - # ## add projection variable if applicable ############################### - # - # if not isinstance(arch.spatial.crs, CFWGS84): - # arch.spatial.crs.write_to_rootgrp(ds, meta) - - ## append to the history attribute + # append to the history attribute history_str = '\n{dt} UTC ocgis-{release}'.format(dt=datetime.datetime.utcnow(), release=ocgis.__release__) if self.ops is not None: history_str += ': {0}'.format(self.ops) diff --git a/src/ocgis/test/test_ocgis/test_api/test_collection.py b/src/ocgis/test/test_ocgis/test_api/test_collection.py index 90683f06e..682f29279 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_collection.py +++ b/src/ocgis/test/test_ocgis/test_api/test_collection.py @@ -1,19 +1,24 @@ -import unittest +import os +import datetime +from copy import copy, deepcopy + +import fiona +from shapely.geometry import Point, shape, MultiPoint +from shapely.geometry.multipolygon import MultiPolygon +import numpy as np + from ocgis.api.collection import SpatialCollection, AbstractCollection +from ocgis.interface.base.crs import CoordinateReferenceSystem, Spherical from ocgis.test.base import TestBase from ocgis.util.shp_cabinet import ShpCabinet -from 
shapely.geometry.multipolygon import MultiPolygon -import datetime from ocgis import constants from ocgis.calc.library.statistics import Mean from ocgis.interface.base.variable import Variable from ocgis.interface.base.field import DerivedField, DerivedMultivariateField,\ Field -from copy import copy, deepcopy from ocgis.calc.library.math import Divide from ocgis.test.test_ocgis.test_interface.test_base.test_field import AbstractTestField from ocgis.calc.library.thresholds import Threshold -import numpy as np class TestAbstractCollection(TestBase): @@ -97,13 +102,30 @@ def get_collection(self): sp.add_field(row['properties']['UGID'], row['geom'], field, properties=row['properties']) return sp + def get_collection_for_write_ugeom(self, crs): + pt1 = Point(1, 2) + pt2 = Point(4, 5) + coll = SpatialCollection(crs=crs) + ugid1 = 10 + ugid2 = 11 + coll.geoms[ugid1] = pt1 + coll.geoms[ugid2] = pt2 + pvalue1 = [(ugid1, '06', 25.0, 'California', 'CA')] + pvalue2 = [(ugid2, '08', 26.0, 'Ontario', 'CB')] + pdtype = [('UGID', ' Date: Mon, 15 Dec 2014 12:49:00 -0700 Subject: [PATCH 30/71] added ipython notebook from esgf demo --- examples/ipynb/ESGF-F2F-20141210.ipynb | 785 +++++++++++++++++++++++++ 1 file changed, 785 insertions(+) create mode 100644 examples/ipynb/ESGF-F2F-20141210.ipynb diff --git a/examples/ipynb/ESGF-F2F-20141210.ipynb b/examples/ipynb/ESGF-F2F-20141210.ipynb new file mode 100644 index 000000000..e5e5edbe9 --- /dev/null +++ b/examples/ipynb/ESGF-F2F-20141210.ipynb @@ -0,0 +1,785 @@ +{ + "metadata": { + "name": "", + "signature": "sha256:dccc4310bfeb2cf6e162e3387f02ff940d271f69063f5ce519fa6a9d0a48703c" + }, + "nbformat": 3, + "nbformat_minor": 0, + "worksheets": [ + { + "cells": [ + { + "cell_type": "code", + "collapsed": false, + "input": [ + "import ocgis\n", + "assert(ocgis.__release__ == '1.0.1-next')" + ], + "language": "python", + "metadata": {}, + "outputs": [], + "prompt_number": 1 + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ 
+ "Configure some environment variables to point to the head directory containing climate data files used in the demo as well as the output directory." + ] + }, + { + "cell_type": "code", + "collapsed": true, + "input": [ + "import tempfile\n", + "ocgis.env.DIR_DATA = '/home/ben.koziol/climate_data'\n", + "ocgis.env.DIR_OUTPUT = tempfile.mkdtemp()" + ], + "language": "python", + "metadata": {}, + "outputs": [], + "prompt_number": 2 + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Inspect a target file's metadata." + ] + }, + { + "cell_type": "code", + "collapsed": true, + "input": [ + "uri = 'tas_day_CanCM4_decadal2011_r2i1p1_20120101-20211231.nc'\n", + "variable = 'tas'\n", + "rd = ocgis.RequestDataset(uri=uri,variable=variable)\n", + "rd.inspect()" + ], + "language": "python", + "metadata": {}, + "outputs": [ + { + "metadata": {}, + "output_type": "pyout", + "prompt_number": 3, + "text": [ + "\n", + "URI = /home/ben.koziol/climate_data/CanCM4/tas_day_CanCM4_decadal2011_r2i1p1_20120101-20211231.nc\n", + "VARIABLE = tas\n", + "ALIAS = tas\n", + "DID = None\n", + "\n", + "=== Temporal =============\n", + "\n", + " Start Date = 2012-01-01 00:00:00\n", + " End Date = 2022-01-01 00:00:00\n", + " Calendar = 365_day\n", + " Units = days since 1850-1-1\n", + "Resolution (Days) = 1\n", + " Count = 3650\n", + " Has Bounds = True\n", + "\n", + "=== Spatial ==============\n", + "\n", + "Spatial Reference = CFWGS84\n", + " Proj4 String = +proj=longlat +ellps=WGS84 +towgs84=0,0,0,0,0,0,0 +no_defs \n", + " Extent = (-1.40625, -90.0, 358.59375, 90.0)\n", + " Interface Type = SpatialGeometryPolygonDimension\n", + " Resolution = 2.8125\n", + " Count = 8192\n", + "\n", + "=== Level ================\n", + "\n", + "No level dimension found.\n", + "\n", + "=== Dump =================\n", + "\n", + "dimensions:\n", + " time = ISUNLIMITED ; // 3650 currently\n", + " lat = 64 ;\n", + " lon = 128 ;\n", + " bnds = 2 ;\n", + "\n", + "variables:\n", + " float64 time(time) 
;\n", + " time:bounds = \"time_bnds\" ;\n", + " time:units = \"days since 1850-1-1\" ;\n", + " time:calendar = \"365_day\" ;\n", + " time:axis = \"T\" ;\n", + " time:long_name = \"time\" ;\n", + " time:standard_name = \"time\" ;\n", + " float64 time_bnds(time, bnds) ;\n", + " float64 lat(lat) ;\n", + " lat:bounds = \"lat_bnds\" ;\n", + " lat:units = \"degrees_north\" ;\n", + " lat:axis = \"Y\" ;\n", + " lat:long_name = \"latitude\" ;\n", + " lat:standard_name = \"latitude\" ;\n", + " float64 lat_bnds(lat, bnds) ;\n", + " float64 lon(lon) ;\n", + " lon:bounds = \"lon_bnds\" ;\n", + " lon:units = \"degrees_east\" ;\n", + " lon:axis = \"X\" ;\n", + " lon:long_name = \"longitude\" ;\n", + " lon:standard_name = \"longitude\" ;\n", + " float64 lon_bnds(lon, bnds) ;\n", + " float64 height() ;\n", + " height:units = \"m\" ;\n", + " height:axis = \"Z\" ;\n", + " height:positive = \"up\" ;\n", + " height:long_name = \"height\" ;\n", + " height:standard_name = \"height\" ;\n", + " float32 tas(time, lat, lon) ;\n", + " tas:standard_name = \"air_temperature\" ;\n", + " tas:long_name = \"Near-Surface Air Temperature\" ;\n", + " tas:units = \"K\" ;\n", + " tas:original_name = \"ST\" ;\n", + " tas:cell_methods = \"time: mean (interval: 15 minutes)\" ;\n", + " tas:cell_measures = \"area: areacella\" ;\n", + " tas:history = \"2012-03-29T19:30:46Z altered by CMOR: Treated scalar dimension: 'height'. 
2012-03-29T19:30:46Z altered by CMOR: replaced missing value flag (1e+38) with standard missing value (1e+20).\" ;\n", + " tas:coordinates = \"height\" ;\n", + " tas:missing_value = \"1.00000002004e+20\" ;\n", + " tas:associated_files = \"baseURL: http://cmip-pcmdi.llnl.gov/CMIP5/dataLocation gridspecFile: gridspec_atmos_fx_CanCM4_decadal2011_r0i0p0.nc areacella: areacella_fx_CanCM4_decadal2011_r0i0p0.nc\" ;\n", + "\n", + "// global attributes:\n", + " :institution = CCCma (Canadian Centre for Climate Modelling and Analysis, Victoria, BC, Canada) ;\n", + " :institute_id = CCCma ;\n", + " :experiment_id = decadal2011 ;\n", + " :source = CanCM4 2010 atmosphere: CanAM4 (AGCM15i, T63L35) ocean: CanOM4 (OGCM4.0, 256x192L40) sea ice: CanSIM1 (Cavitating Fluid, T63 Gaussian Grid) land: CLASS2.7 ;\n", + " :model_id = CanCM4 ;\n", + " :forcing = GHG,Oz,SA,BC,OC,LU,Sl,Vl (GHG includes CO2,CH4,N2O,CFC11,effective CFC12) ;\n", + " :parent_experiment_id = N/A ;\n", + " :parent_experiment_rip = N/A ;\n", + " :branch_time = 0.0 ;\n", + " :contact = cccma_info@ec.gc.ca ;\n", + " :references = http://www.cccma.ec.gc.ca/models ;\n", + " :initialization_method = 1 ;\n", + " :physics_version = 1 ;\n", + " :tracking_id = 4c57c1e8-1254-464c-b6a9-baf2133985f9 ;\n", + " :branch_time_YMDH = 2012:01:01:00 ;\n", + " :CCCma_runid = DHFP1B_E002_I2012_M01 ;\n", + " :CCCma_parent_runid = DHFP1_E002 ;\n", + " :CCCma_data_licence = 1) GRANT OF LICENCE - The Government of Canada (Environment Canada) is the \n", + "owner of all intellectual property rights (including copyright) that may exist in this Data \n", + "product. You (as \"The Licensee\") are hereby granted a non-exclusive, non-assignable, \n", + "non-transferable unrestricted licence to use this data product for any purpose including \n", + "the right to share these data with others and to make value-added and derivative \n", + "products from it. 
This licence is not a sale of any or all of the owner's rights.\n", + "2) NO WARRANTY - This Data product is provided \"as-is\"; it has not been designed or \n", + "prepared to meet the Licensee's particular requirements. Environment Canada makes no \n", + "warranty, either express or implied, including but not limited to, warranties of \n", + "merchantability and fitness for a particular purpose. In no event will Environment Canada \n", + "be liable for any indirect, special, consequential or other damages attributed to the \n", + "Licensee's use of the Data product. ;\n", + " :product = output ;\n", + " :experiment = 10- or 30-year run initialized in year 2011 ;\n", + " :frequency = day ;\n", + " :creation_date = 2012-03-29T19:30:46Z ;\n", + " :history = 2012-03-29T19:30:46Z CMOR rewrote data to comply with CF standards and CMIP5 requirements. ;\n", + " :Conventions = CF-1.4 ;\n", + " :project_id = CMIP5 ;\n", + " :table_id = Table day (28 March 2011) f9d6cfec5981bb8be1801b35a81002f0 ;\n", + " :title = CanCM4 model output prepared for CMIP5 10- or 30-year run initialized in year 2011 ;\n", + " :parent_experiment = N/A ;\n", + " :modeling_realm = atmos ;\n", + " :realization = 2 ;\n", + " :cmor_version = 2.8.0 ;\n" + ] + } + ], + "prompt_number": 3 + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Subset a target file by the boundary of California using an intersects GIS operation (the default), and write the data to an ESRI Shapefile. Select the first time coordinate only." 
+ ] + }, + { + "cell_type": "code", + "collapsed": true, + "input": [ + "geom = '/home/ben.koziol/Dropbox/NESII/project/ocg/bin/shp/state_boundaries/state_boundaries.shp'\n", + "ops = ocgis.OcgOperations(dataset=rd,geom=geom,select_ugid=[25],snippet=True,\n", + " output_format='shp',prefix='ca')\n", + "ops.execute()" + ], + "language": "python", + "metadata": {}, + "outputs": [ + { + "metadata": {}, + "output_type": "pyout", + "prompt_number": 4, + "text": [ + "'/tmp/tmpBA3o_B/ca/ca.shp'" + ] + } + ], + "prompt_number": 4 + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + " Also write the model grid to shapefile." + ] + }, + { + "cell_type": "code", + "collapsed": true, + "input": [ + "ocgis.OcgOperations(dataset=rd,output_format='shp',snippet=True,prefix='grid').execute()" + ], + "language": "python", + "metadata": {}, + "outputs": [ + { + "metadata": {}, + "output_type": "pyout", + "prompt_number": 5, + "text": [ + "'/tmp/tmpBA3o_B/grid/grid.shp'" + ] + } + ], + "prompt_number": 5 + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Spatially average the grid cells clipped to the boundary of California for all the June, July, and August months in the target dataset. Write the output data to CSV." 
+ ] + }, + { + "cell_type": "code", + "collapsed": true, + "input": [ + "import webbrowser\n", + "rd = ocgis.RequestDataset(uri=uri,variable=variable,time_region={'month':[6,7,8]})\n", + "ops = ocgis.OcgOperations(dataset=rd,geom=geom,select_ugid=[25],spatial_operation='clip',\n", + " output_format='csv',prefix='ca_spatial_average',aggregate=True)\n", + "ret = ops.execute()\n", + "print(ret)\n", + "webbrowser.open(ret)" + ], + "language": "python", + "metadata": {}, + "outputs": [ + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "/tmp/tmp4JU0OE/ca_spatial_average/ca_spatial_average.csv\n" + ] + }, + { + "metadata": {}, + "output_type": "pyout", + "prompt_number": 9, + "text": [ + "True" + ] + } + ], + "prompt_number": 9 + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Perform a difference calulation between two variables using a string function. Inspect the metadata of the output NetCDF file." + ] + }, + { + "cell_type": "code", + "collapsed": true, + "input": [ + "rd1 = ocgis.RequestDataset(uri='/home/ben.koziol/ocgis_test_data/nc/CanCM4/tasmax_day_CanCM4_decadal2010_r2i1p1_20110101-20201231.nc',\n", + " variable='tasmax')\n", + "rd2 = ocgis.RequestDataset(uri='tasmin_day_CanCM4_decadal2010_r2i1p1_20110101-20201231.nc',\n", + " variable='tasmin')\n", + "calc = 'diff=tasmax-tasmin'\n", + "ops = ocgis.OcgOperations(dataset=[rd1,rd2],calc=calc,output_format='nc',geom='state_boundaries',\n", + " select_ugid=[25],prefix='diff')\n", + "ret = ops.execute()\n", + "print(ocgis.Inspect(ret))" + ], + "language": "python", + "metadata": {}, + "outputs": [ + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "URI = /tmp/tmp4JU0OE/diff/diff.nc\n", + "VARIABLE = None\n", + "\n", + "dimensions:\n", + " time = 3650 ;\n", + " bnds = 2 ;\n", + " lat = 5 ;\n", + " lon = 4 ;\n", + "\n", + "variables:\n", + " float64 time(time) ;\n", + " time:axis = \"T\" ;\n", + " time:bounds = \"time_bnds\" ;\n", + " time:units = \"days 
since 1850-1-1\" ;\n", + " time:calendar = \"365_day\" ;\n", + " time:long_name = \"time\" ;\n", + " time:standard_name = \"time\" ;\n", + " float64 time_bnds(time, bnds) ;\n", + " time_bnds:calendar = \"365_day\" ;\n", + " time_bnds:units = \"days since 1850-1-1\" ;\n", + " float64 lat(lat) ;\n", + " lat:axis = \"Y\" ;\n", + " lat:bounds = \"lat_bnds\" ;\n", + " lat:units = \"degrees_north\" ;\n", + " lat:long_name = \"latitude\" ;\n", + " lat:standard_name = \"latitude\" ;\n", + " float64 lat_bnds(lat, bnds) ;\n", + " float64 lon(lon) ;\n", + " lon:axis = \"X\" ;\n", + " lon:bounds = \"lon_bnds\" ;\n", + " lon:units = \"degrees_east\" ;\n", + " lon:long_name = \"longitude\" ;\n", + " lon:standard_name = \"longitude\" ;\n", + " float64 lon_bnds(lon, bnds) ;\n", + " |S1 latitude_longitude() ;\n", + " latitude_longitude:proj4 = \"+proj=longlat +ellps=WGS84 +towgs84=0,0,0,0,0,0,0 +no_defs \" ;\n", + " latitude_longitude:grid_mapping_name = \"latitude_longitude\" ;\n", + " float32 diff(time, lat, lon) ;\n", + " diff:standard_name = \"\" ;\n", + " diff:long_name = \"\" ;\n", + " diff:grid_mapping = \"latitude_longitude\" ;\n", + " diff:units = \"\" ;\n", + "\n", + "// global attributes:\n", + " :institution = CCCma (Canadian Centre for Climate Modelling and Analysis, Victoria, BC, Canada) ;\n", + " :institute_id = CCCma ;\n", + " :experiment_id = decadal2010 ;\n", + " :source = CanCM4 2010 atmosphere: CanAM4 (AGCM15i, T63L35) ocean: CanOM4 (OGCM4.0, 256x192L40) sea ice: CanSIM1 (Cavitating Fluid, T63 Gaussian Grid) land: CLASS2.7 ;\n", + " :model_id = CanCM4 ;\n", + " :forcing = GHG,Oz,SA,BC,OC,LU,Sl,Vl (GHG includes CO2,CH4,N2O,CFC11,effective CFC12) ;\n", + " :parent_experiment_id = N/A ;\n", + " :parent_experiment_rip = N/A ;\n", + " :branch_time = 0.0 ;\n", + " :contact = cccma_info@ec.gc.ca ;\n", + " :references = http://www.cccma.ec.gc.ca/models ;\n", + " :initialization_method = 1 ;\n", + " :physics_version = 1 ;\n", + " :tracking_id = 
64384802-3f0f-4ab4-b569-697bd5430854 ;\n", + " :branch_time_YMDH = 2011:01:01:00 ;\n", + " :CCCma_runid = DHFP1B_E002_I2011_M01 ;\n", + " :CCCma_parent_runid = DHFP1_E002 ;\n", + " :CCCma_data_licence = 1) GRANT OF LICENCE - The Government of Canada (Environment Canada) is the \n", + "owner of all intellectual property rights (including copyright) that may exist in this Data \n", + "product. You (as \"The Licensee\") are hereby granted a non-exclusive, non-assignable, \n", + "non-transferable unrestricted licence to use this data product for any purpose including \n", + "the right to share these data with others and to make value-added and derivative \n", + "products from it. This licence is not a sale of any or all of the owner's rights.\n", + "2) NO WARRANTY - This Data product is provided \"as-is\"; it has not been designed or \n", + "prepared to meet the Licensee's particular requirements. Environment Canada makes no \n", + "warranty, either express or implied, including but not limited to, warranties of \n", + "merchantability and fitness for a particular purpose. In no event will Environment Canada \n", + "be liable for any indirect, special, consequential or other damages attributed to the \n", + "Licensee's use of the Data product. 
;\n", + " :product = output ;\n", + " :experiment = 10- or 30-year run initialized in year 2010 ;\n", + " :frequency = day ;\n", + " :creation_date = 2012-03-28T15:32:08Z ;\n", + " :history = 2012-03-28T15:32:08Z CMOR rewrote data to comply with CF standards and CMIP5 requirements.\n", + "2014-12-11 21:09:24.613392 UTC ocgis-1.0.1-next: OcgOperations(calc_sample_size=False, optimizations=None, output_format=\"nc\", select_ugid=(25,), format_time=True, select_nearest=False, output_crs=None, time_range=None, calc_grouping=None, prefix=\"diff\", abstraction=\"None\", regrid_destination=None, allow_empty=False, vector_wrap=False, aggregate=False, interpolate_spatial_bounds=False, dataset=RequestDatasetCollection(request_datasets=[RequestDataset(uri=\"/home/ben.koziol/ocgis_test_data/nc/CanCM4/tasmax_day_CanCM4_decadal2010_r2i1p1_20110101-20201231.nc\", variable=\"tasmax\", alias=\"tasmax\", units=None, time_range=None, time_region=None, level_range=None, conform_units_to=None, crs={'no_defs': True, 'ellps': 'WGS84', 'proj': 'longlat', 'towgs84': '0,0,0,0,0,0,0'}, t_units=None, t_calendar=None, did=1, meta={}, s_abstraction=None, dimension_map=None, name=\"tasmax\", driver=\"netCDF\", regrid_source=True, regrid_destination=False), RequestDataset(uri=\"/home/ben.koziol/climate_data/CanCM4/tasmin_day_CanCM4_decadal2010_r2i1p1_20110101-20201231.nc\", variable=\"tasmin\", alias=\"tasmin\", units=None, time_range=None, time_region=None, level_range=None, conform_units_to=None, crs={'no_defs': True, 'ellps': 'WGS84', 'proj': 'longlat', 'towgs84': '0,0,0,0,0,0,0'}, t_units=None, t_calendar=None, did=2, meta={}, s_abstraction=None, dimension_map=None, name=\"tasmin\", driver=\"netCDF\", regrid_source=True, regrid_destination=False)]), dir_output=\"/tmp/tmp4JU0OE\", backend=\"ocg\", search_radius_mult=2.0, add_auxiliary_files=True, slice=None, callback=None, calc_raw=False, agg_selection=False, level_range=None, snippet=False, time_region=None, geom=\"state_boundaries\", 
regrid_options={'value_mask': None, 'with_corners': 'choose'}, conform_units_to=None, spatial_operation=\"intersects\", headers=None, calc=[{'meta_attrs': None, 'name': None, 'func': 'diff=tasmax-tasmin', 'kwds': OrderedDict()}], file_only=False, ) ;\n", + " :Conventions = CF-1.4 ;\n", + " :project_id = CMIP5 ;\n", + " :table_id = Table day (28 March 2011) f9d6cfec5981bb8be1801b35a81002f0 ;\n", + " :title = CanCM4 model output prepared for CMIP5 10- or 30-year run initialized in year 2010 ;\n", + " :parent_experiment = N/A ;\n", + " :modeling_realm = atmos ;\n", + " :realization = 2 ;\n", + " :cmor_version = 2.8.0 ;\n", + "\n" + ] + } + ], + "prompt_number": 10 + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Calculate a sequence of statistics to produce a July time series conforming the target units from Kelvin to Celsius in the process. Perform the calculations on the spatially averaged data for California." + ] + }, + { + "cell_type": "code", + "collapsed": true, + "input": [ + "import webbrowser\n", + "rd = ocgis.RequestDataset(uri=uri,variable=variable,time_region={'month':[7]},conform_units_to='celsius')\n", + "calc = [{'func':'mean','name':'mean'},\n", + " {'func':'std','name':'stdev'},\n", + " {'func':'min','name':'min'},\n", + " {'func':'max','name':'max'},\n", + " {'func':'median','name':'median'},\n", + " {'func':'freq_perc','name':'fp_95','kwds':{'percentile':95.0}},\n", + " {'func':'freq_perc','name':'fp_5','kwds':{'percentile':5.0}},]\n", + "calc_grouping = ['month','year']\n", + "ops = ocgis.OcgOperations(dataset=rd,geom='state_boundaries',select_ugid=[25],spatial_operation='clip',\n", + " output_format='csv',prefix='ca_calcs',aggregate=True,calc=calc,\n", + " calc_grouping=calc_grouping)\n", + "ret = ops.execute()\n", + "print(ret)\n", + "webbrowser.open(ret)" + ], + "language": "python", + "metadata": {}, + "outputs": [ + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "/tmp/tmpBA3o_B/ca_calcs/ca_calcs.csv\n" 
+ ] + }, + { + "metadata": {}, + "output_type": "pyout", + "prompt_number": 6, + "text": [ + "True" + ] + } + ], + "prompt_number": 6 + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Perform the same operation as above again but return the data as a collection. Print the derived variable aliases." + ] + }, + { + "cell_type": "code", + "collapsed": true, + "input": [ + "ops.output_format = 'numpy'\n", + "ret = ops.execute()\n", + "print(ret)\n", + "print(ret[25]['tas'].variables.keys())" + ], + "language": "python", + "metadata": {}, + "outputs": [ + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "SpatialCollection([(25, {'tas': })])\n", + "['mean', 'stdev', 'min', 'max', 'median', 'fp_95', 'fp_5']\n" + ] + } + ], + "prompt_number": 12 + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Variable values are always stored as five dimensions: realization, time, level, row, column" + ] + }, + { + "cell_type": "code", + "collapsed": true, + "input": [ + "print(ret[25]['tas'].variables['mean'])\n", + "print(ret[25]['tas'].variables['mean'].value.shape)" + ], + "language": "python", + "metadata": {}, + "outputs": [ + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "DerivedVariable(name=\"mean\", alias=\"mean\", units=\"celsius\")\n", + "(1, 10, 1, 1, 1)\n" + ] + } + ], + "prompt_number": 13 + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Print some time values from the temporal dimension." 
+ ] + }, + { + "cell_type": "code", + "collapsed": true, + "input": [ + "print(ret[25]['tas'].temporal.value_datetime)\n", + "print(ret[25]['tas'].temporal.bounds_datetime)" + ], + "language": "python", + "metadata": {}, + "outputs": [ + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "[datetime.datetime(2012, 7, 16, 0, 0) datetime.datetime(2013, 7, 16, 0, 0)\n", + " datetime.datetime(2014, 7, 16, 0, 0) datetime.datetime(2015, 7, 16, 0, 0)\n", + " datetime.datetime(2016, 7, 16, 0, 0) datetime.datetime(2017, 7, 16, 0, 0)\n", + " datetime.datetime(2018, 7, 16, 0, 0) datetime.datetime(2019, 7, 16, 0, 0)\n", + " datetime.datetime(2020, 7, 16, 0, 0) datetime.datetime(2021, 7, 16, 0, 0)]\n", + "[[datetime.datetime(2012, 7, 1, 0, 0) datetime.datetime(2012, 8, 1, 0, 0)]\n", + " [datetime.datetime(2013, 7, 1, 0, 0) datetime.datetime(2013, 8, 1, 0, 0)]\n", + " [datetime.datetime(2014, 7, 1, 0, 0) datetime.datetime(2014, 8, 1, 0, 0)]\n", + " [datetime.datetime(2015, 7, 1, 0, 0) datetime.datetime(2015, 8, 1, 0, 0)]\n", + " [datetime.datetime(2016, 7, 1, 0, 0) datetime.datetime(2016, 8, 1, 0, 0)]\n", + " [datetime.datetime(2017, 7, 1, 0, 0) datetime.datetime(2017, 8, 1, 0, 0)]\n", + " [datetime.datetime(2018, 7, 1, 0, 0) datetime.datetime(2018, 8, 1, 0, 0)]\n", + " [datetime.datetime(2019, 7, 1, 0, 0) datetime.datetime(2019, 8, 1, 0, 0)]\n", + " [datetime.datetime(2020, 7, 1, 0, 0) datetime.datetime(2020, 8, 1, 0, 0)]\n", + " [datetime.datetime(2021, 7, 1, 0, 0) datetime.datetime(2021, 8, 1, 0, 0)]]\n" + ] + } + ], + "prompt_number": 14 + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Print example variable values." 
+ ] + }, + { + "cell_type": "code", + "collapsed": true, + "input": [ + "print(ret[25]['tas'].variables['mean'].value.squeeze())" + ], + "language": "python", + "metadata": {}, + "outputs": [ + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "[27.395919799804688 26.349559783935547 24.475841522216797 25.9202938079834\n", + " 26.619115829467773 23.823287963867188 25.99759864807129 25.825233459472656\n", + " 26.737850189208984 24.689342498779297]\n" + ] + } + ], + "prompt_number": 15 + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Geometries are stored as Shapely objects with associated attributes." + ] + }, + { + "cell_type": "code", + "collapsed": true, + "input": [ + "print(type(ret.geoms[25]))\n", + "print(ret.geoms[25]).bounds\n", + "print(ret.properties[25])" + ], + "language": "python", + "metadata": {}, + "outputs": [ + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "(-124.39263831223747, 32.53578135776605, -114.12523030267519, 42.00219136658233)\n", + "[(25, '06', 25.0, 'California', 'CA')]\n" + ] + } + ], + "prompt_number": 16 + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Read a data file with a coordinate system, and inspect its properties." 
+ ] + }, + { + "cell_type": "code", + "collapsed": true, + "input": [ + "uri = 'tas_RCM3_ncep_1981010103.nc'\n", + "variable = 'tas'\n", + "rd = ocgis.RequestDataset(uri=uri,variable=variable)\n", + "field = rd.get()\n", + "print(type(field.spatial.crs))\n", + "print(field.spatial.crs.value)\n", + "print(field.spatial.crs.sr.ExportToProj4())" + ], + "language": "python", + "metadata": {}, + "outputs": [ + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n", + "{'lonc': -97, 'ellps': 'WGS84', 'y_0': 3175000, 'no_defs': True, 'proj': 'omerc', 'x_0': 3925000, 'units': 'm', 'alpha': 360, 'k': 1, 'gamma': 360, 'lat_0': 47.5}\n", + "+proj=omerc +lat_0=47.5 +lonc=-97 +alpha=360 +k=1 +x_0=3925000 +y_0=3175000 +gamma=360 +ellps=WGS84 +units=m +no_defs \n" + ] + } + ], + "prompt_number": 7 + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "For three variables, calculate monthly averages for the year 2014 for each U.S. state boundary." + ] + }, + { + "cell_type": "code", + "collapsed": true, + "input": [ + "rd1 = ocgis.RequestDataset(uri='/home/ben.koziol/ocgis_test_data/nc/CanCM4/tasmax_day_CanCM4_decadal2010_r2i1p1_20110101-20201231.nc',\n", + " variable='tasmax')\n", + "rd2 = ocgis.RequestDataset(uri='tasmin_day_CanCM4_decadal2010_r2i1p1_20110101-20201231.nc',\n", + " variable='tasmin')\n", + "rd3 = ocgis.RequestDataset(uri='tas_day_CanCM4_decadal2010_r2i1p1_20110101-20201231.nc',\n", + " variable='tas')\n", + "calc = [{'func':'mean','name':'mean'}]\n", + "calc_grouping = ['month']\n", + "ops = ocgis.OcgOperations(dataset=[rd1,rd2,rd3],geom='state_boundaries',aggregate=True,\n", + " output_format='shp',spatial_operation='clip',prefix='temps',\n", + " calc=calc,calc_grouping=calc_grouping,time_region={'year':[2014]},\n", + " headers=['value','calc_alias','year','month','alias'],conform_units_to='fahrenheit')\n", + "ret = ops.execute()\n", + "print(ret)" + ], + "language": "python", + "metadata": {}, + "outputs": [ + { + "output_type": 
"stream", + "stream": "stdout", + "text": [ + "/tmp/tmpBA3o_B/temps/temps.shp\n" + ] + } + ], + "prompt_number": 8 + }, + { + "cell_type": "code", + "collapsed": false, + "input": [ + "rd_src = ocgis.RequestDataset(uri='tas_day_CanCM4_decadal2010_r2i1p1_20110101-20201231.nc',\n", + " variable='tas')\n", + "rd_dest = ocgis.RequestDataset(uri='/home/ben.koziol/ocgis_test_data/nc/maurer/2010/nldas_met_update.obs.daily.pr.1991.nc')\n", + "print rd_src.get().spatial.grid.resolution\n", + "print rd_dest.get().spatial.grid.resolution\n", + "\n", + "ops = ocgis.OcgOperations(dataset=rd_src, regrid_destination=rd_dest, select_ugid=[6, 16, 34], \n", + " agg_selection=True, geom='state_boundaries', snippet=True,\n", + " output_format='shp', prefix='regrid')\n", + "print ops.execute()" + ], + "language": "python", + "metadata": {}, + "outputs": [ + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "2.8125\n", + "0.125\n", + "/tmp/tmp4JU0OE/regrid/regrid.shp" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n" + ] + } + ], + "prompt_number": 19 + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Convert a shapefile to a UGRID netCDF and back to shapefile." 
+ ] + }, + { + "cell_type": "code", + "collapsed": false, + "input": [ + "import os\n", + "import sys\n", + "import subprocess\n", + "sys.path.append('/home/ben.koziol/Dropbox/NESII/project/ugrid/git/src')\n", + "import ugrid\n", + "\n", + "shp_path = '/home/ben.koziol/Dropbox/NESII/project/ugrid/bin/catchment_San_Guad_3reaches.shp'\n", + "out_nc_path = os.path.join(ocgis.env.DIR_OUTPUT, 'mesh2.nc')\n", + "print ugrid.shapefile_to_mesh2_nc(out_nc_path, shp_path)\n", + "\n", + "shp_path_reconstruct = os.path.join(ocgis.env.DIR_OUTPUT, 'mesh2.shp')\n", + "print ugrid.mesh2_nc_to_shapefile(out_nc_path, shp_path_reconstruct)" + ], + "language": "python", + "metadata": {}, + "outputs": [ + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "/tmp/tmp4JU0OE/mesh2.nc\n", + "/tmp/tmp4JU0OE/mesh2.shp" + ] + }, + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "\n" + ] + } + ], + "prompt_number": 20 + }, + { + "cell_type": "code", + "collapsed": false, + "input": [], + "language": "python", + "metadata": {}, + "outputs": [] + } + ], + "metadata": {} + } + ] +} \ No newline at end of file From 8da4188880254ea8dd3ec30090c322a119ed991d Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Thu, 18 Dec 2014 12:31:16 -0700 Subject: [PATCH 31/71] added support for shapefiles through request dataset No support for: - time variables in a shapefile - no documentation yet for shapefile use --- src/ocgis/api/parms/definition.py | 31 ++++---- src/ocgis/api/request/base.py | 17 +++-- src/ocgis/api/request/driver/base.py | 6 +- src/ocgis/api/request/driver/nc.py | 4 + src/ocgis/api/request/driver/vector.py | 76 +++++++++++++++++++ src/ocgis/interface/base/crs.py | 64 ---------------- src/ocgis/interface/base/dimension/spatial.py | 30 ++++---- .../test_api/test_request/test_base.py | 35 ++++++--- .../test_request/test_driver/test_nc.py | 5 ++ .../test_request/test_driver/test_vector.py | 73 ++++++++++++++++++ .../test_interface/test_base/test_crs.py | 3 - 
.../test_base/test_dimension/test_spatial.py | 7 ++ src/ocgis/util/shp_cabinet.py | 13 ++-- 13 files changed, 244 insertions(+), 120 deletions(-) create mode 100644 src/ocgis/api/request/driver/vector.py create mode 100644 src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_vector.py diff --git a/src/ocgis/api/parms/definition.py b/src/ocgis/api/parms/definition.py index 68a46efaf..e9f27f1b8 100644 --- a/src/ocgis/api/parms/definition.py +++ b/src/ocgis/api/parms/definition.py @@ -1,31 +1,32 @@ +from collections import OrderedDict +from os.path import exists +from types import NoneType +import logging +import os +from copy import deepcopy +from types import FunctionType +import itertools +import datetime + from shapely.geometry import MultiPoint from shapely.geometry.base import BaseGeometry +from shapely.geometry.polygon import Polygon +from shapely.geometry.multipolygon import MultiPolygon +from shapely.geometry.point import Point +import numpy as np + from ocgis.api.parms import base from ocgis.exc import DefinitionValidationError from ocgis.api.request.base import RequestDataset, RequestDatasetCollection -from shapely.geometry.polygon import Polygon -from collections import OrderedDict import ocgis -from os.path import exists -from shapely.geometry.multipolygon import MultiPolygon -from types import NoneType -from shapely.geometry.point import Point from ocgis import constants from ocgis.interface.base.dimension.spatial import SpatialDimension from ocgis.interface.base.field import Field -from ocgis.util.helpers import make_poly, iter_array -from ocgis.util.shp_cabinet import ShpCabinetIterator, ShpCabinet +from ocgis.util.shp_cabinet import ShpCabinetIterator from ocgis.calc.library import register from ocgis.interface.base.crs import CoordinateReferenceSystem, CFWGS84 from ocgis.util.logging_ocgis import ocgis_lh -import logging -import os -from copy import deepcopy -from types import FunctionType -import itertools from ocgis.calc.eval_function 
import EvalFunction, MultivariateEvalFunction -import datetime -import numpy as np class Abstraction(base.StringOptionParameter): diff --git a/src/ocgis/api/request/base.py b/src/ocgis/api/request/base.py index f926adb60..c79d54e31 100644 --- a/src/ocgis/api/request/base.py +++ b/src/ocgis/api/request/base.py @@ -3,6 +3,8 @@ import logging import os import itertools + +from ocgis.api.request.driver.vector import DriverVector from ocgis.interface.base.field import Field from ocgis.api.collection import AbstractCollection from ocgis.api.request.driver.nc import DriverNetcdf @@ -14,6 +16,7 @@ class RequestDataset(object): + #todo: document vector format """ A :class:`ocgis.RequestDataset` contains all the information necessary to find and subset a variable (by time and/or level) contained in a local or OpenDAP-hosted CF dataset. @@ -103,7 +106,7 @@ class RequestDataset(object): .. _time units: http://netcdf4-python.googlecode.com/svn/trunk/docs/netCDF4-module.html#num2date .. _time calendar: http://netcdf4-python.googlecode.com/svn/trunk/docs/netCDF4-module.html#num2date """ - _Drivers = {d.key: d for d in [DriverNetcdf]} + _Drivers = {d.key: d for d in [DriverNetcdf, DriverVector]} def __init__(self, uri=None, variable=None, alias=None, units=None, time_range=None, time_region=None, level_range=None, conform_units_to=None, crs=None, t_units=None, t_calendar=None, did=None, @@ -247,7 +250,7 @@ def level_range(self, value): @property def name(self): - if self._name is None: + if self._name is None and self.alias is not None: ret = '_'.join(self._alias) else: ret = self._name @@ -340,11 +343,11 @@ def get(self, **kwargs): return self.driver.get_field(**kwargs) def inspect(self): - '''Print inspection output using :class:`~ocgis.Inspect`. This is a - convenience method.''' - from ocgis import Inspect - ip = Inspect(request_dataset=self) - return ip + """ + Print a string containing important information about the source driver. 
+ """ + + return self.driver.inspect() def inspect_as_dct(self): ''' diff --git a/src/ocgis/api/request/driver/base.py b/src/ocgis/api/request/driver/base.py index 3af75f892..a3e5345b9 100644 --- a/src/ocgis/api/request/driver/base.py +++ b/src/ocgis/api/request/driver/base.py @@ -49,4 +49,8 @@ def get_source_metadata(self): @abc.abstractmethod def open(self): - return object \ No newline at end of file + return object + + @abc.abstractmethod + def inspect(self): + pass \ No newline at end of file diff --git a/src/ocgis/api/request/driver/nc.py b/src/ocgis/api/request/driver/nc.py index 376dd946a..c6bb155e6 100644 --- a/src/ocgis/api/request/driver/nc.py +++ b/src/ocgis/api/request/driver/nc.py @@ -124,6 +124,10 @@ def get_source_metadata(self): return metadata + def inspect(self): + from ocgis import Inspect + print Inspect(request_dataset=self.rd) + def _get_vector_dimension_(self, k, v, source_metadata): """ :param str k: The string name/key of the dimension to load. diff --git a/src/ocgis/api/request/driver/vector.py b/src/ocgis/api/request/driver/vector.py new file mode 100644 index 000000000..bac90e2bc --- /dev/null +++ b/src/ocgis/api/request/driver/vector.py @@ -0,0 +1,76 @@ +import numpy as np + +from ocgis.interface.base.variable import Variable, VariableCollection +from ocgis.interface.base.field import Field +from ocgis.api.request.driver.base import AbstractDriver + + +class DriverVector(AbstractDriver): + key = 'vector' + + def close(self, obj): + pass + + def get_crs(self): + from ocgis import CoordinateReferenceSystem + return CoordinateReferenceSystem(self.rd.source_metadata['crs']) + + def get_dimensioned_variables(self): + return None + + def get_source_metadata(self): + try: + data = self.open() + return data.sc.get_meta(path=self.rd.uri) + finally: + self.close(data) + + def inspect(self): + from ocgis import CoordinateReferenceSystem + + meta = self.rd.source_metadata + try: + ds = self.open() + n = len(ds) + finally: + self.close(ds) + + 
lines = [] + lines.append('') + lines.append('URI = {0}'.format(self.rd.uri)) + lines.append('') + lines.append('Geometry Type: {0}'.format(meta['schema']['geometry'])) + lines.append('Geometry Count: {0}'.format(n)) + lines.append('CRS: {0}'.format(CoordinateReferenceSystem(value=meta['crs']).value)) + lines.append('Properties:') + for k, v in meta['schema']['properties'].iteritems(): + lines.append(' {0} {1}'.format(v, k)) + lines.append('') + for line in lines: + print line + + def open(self): + from ocgis import ShpCabinetIterator + return ShpCabinetIterator(path=self.rd.uri) + + def _get_field_(self): + #todo: option to pass select_ugid + #todo: option for time dimension and time subsetting + from ocgis import SpatialDimension + ds = self.open() + try: + records = list(ds) + sdim = SpatialDimension.from_records(records, crs=self.get_crs()) + if self.rd.variable is not None: + vc = VariableCollection() + for xx in self.rd: + value = np.array([yy['properties'][xx['variable']] for yy in records]).reshape(1, 1, 1, 1, -1) + var = Variable(name=xx['variable'], alias=xx['alias'], units=xx['units'], conform_units_to=xx['units'], + value=value) + vc.add_variable(var, assign_new_uid=True) + else: + vc = None + field = Field(spatial=sdim, variables=vc, name=self.rd.name) + return field + finally: + self.close(ds) diff --git a/src/ocgis/interface/base/crs.py b/src/ocgis/interface/base/crs.py index 679559e5b..3183e987b 100644 --- a/src/ocgis/interface/base/crs.py +++ b/src/ocgis/interface/base/crs.py @@ -5,18 +5,14 @@ import logging from osgeo.osr import SpatialReference - from fiona.crs import from_string, to_string import numpy as np from shapely.geometry import Point, Polygon from shapely.geometry.base import BaseMultipartGeometry -from shapely.geometry.multipolygon import MultiPolygon -from shapely.geometry.multipoint import MultiPoint from ocgis import constants from ocgis.util.logging_ocgis import ocgis_lh from ocgis.exc import SpatialWrappingError, 
ProjectionCoordinateNotFound, ProjectionDoesNotMatch - from ocgis.util.spatial.wrap import Wrapper from ocgis.util.helpers import iter_array @@ -175,66 +171,6 @@ def get_wrapped_state(cls, sdim): break return ret - #todo: remove commented code - # @classmethod - # def get_is_360(cls, spatial): - # """ - # :param spatial: - # :type spatial: :class:`~ocgis.interface.base.dimension.spatial.SpatialDimension` - # """ - # - # if not isinstance(spatial.crs, WrappableCoordinateReferenceSystem): - # msg = 'Wrapped state may only be determined for geographic (i.e. spherical) coordinate systems.' - # raise SpatialWrappingError(msg) - # - # try: - # # if spatial.grid.col.bounds is None: - # check = spatial.grid.col.value - # # else: - # # check = spatial.grid.col.bounds - # except AttributeError as e: - # # column dimension is likely missing - # try: - # if spatial.grid.col is None: - # # if spatial.grid.corners is not None: - # # check = spatial.grid.corners[1] - # # else: - # check = spatial.grid.value[1, :, :] - # else: - # ocgis_lh(exc=e) - # except AttributeError: - # # there may be no grid, access the geometries directly - # if spatial.geom.polygon is not None: - # geoms_to_check = spatial.geom.polygon.value - # else: - # geoms_to_check = spatial.geom.point.value - # geoms_to_check = geoms_to_check.compressed() - # - # # if this is switched to true, there are geometries with coordinate values less than 0 - # for geom in geoms_to_check: - # if type(geom) in [MultiPolygon, MultiPoint]: - # it = geom - # else: - # it = [geom] - # for sub_geom in it: - # try: - # coords = np.array(sub_geom.exterior.coords) - # if np.all(coords[:, 0] >= 0.): - # return True - # ## might be checking a point - # except AttributeError: - # coords = np.array(sub_geom) - # if np.all(coords[0] >= 0.): - # return True - # return False - # - # if np.all(check >= 0.): - # ret = True - # else: - # ret = False - # - # return ret - def unwrap(self, spatial): """ :type spatial: 
:class:`ocgis.interface.base.dimension.spatial.SpatialDimension` diff --git a/src/ocgis/interface/base/dimension/spatial.py b/src/ocgis/interface/base/dimension/spatial.py index cf9e88bb3..f9aa6e186 100644 --- a/src/ocgis/interface/base/dimension/spatial.py +++ b/src/ocgis/interface/base/dimension/spatial.py @@ -145,19 +145,6 @@ def grid(self, value): assert(isinstance(value, SpatialGridDimension)) self._grid = value - #todo: remove commented code - # @property - # def is_unwrapped(self): - # """ - # Return ``True`` if the coordinates of the spatial data have a 0 to 360 longitudinal domain.""" - # - # try: - # ret = self.crs.get_is_360(self) - # # None and coordinate systems w/out spherical coordinate systems have no wrapping checks - # except AttributeError: - # ret = False - # return ret - @property def shape(self): if self.grid is None: @@ -211,6 +198,16 @@ def assert_uniform_mask(self): for arr1, arr2 in itertools.combinations(to_compare, 2): assert np.all(arr1 == arr2) + # check the mask on corners + if self._grid is not None and self._grid._corners is not None: + corners_mask = self._grid._corners.mask + for (ii, jj), mask_value in iter_array(to_compare[0], return_value=True): + to_check = corners_mask[:, ii, jj, :] + if mask_value: + assert to_check.all() + else: + assert not to_check.any() + @classmethod def from_records(cls, records, crs=None): """ @@ -852,7 +849,12 @@ def get_subset_bbox(self,min_col,min_row,max_col,max_row,return_indices=False,cl return(ret) def set_extrapolated_corners(self): - #todo: doc + """ + Extrapolate corners from grid centroids. If corners are already available, an exception will be raised. 
+ + :raises: BoundsAlreadyAvailableError + """ + if self.corners is not None: raise BoundsAlreadyAvailableError else: diff --git a/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py b/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py index 6b835e3a2..785a6b2dd 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py +++ b/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py @@ -1,6 +1,15 @@ from copy import deepcopy -import unittest import itertools +import os +import pickle +from datetime import datetime as dt +import shutil +import datetime + +import numpy as np +from cfunits.cfunits import Units + +from ocgis.util.shp_cabinet import ShpCabinet from ocgis.interface.base.field import Field from ocgis.exc import DefinitionValidationError, NoUnitsError, VariableNotFoundError, RequestValidationError from ocgis.api.request.base import RequestDataset, RequestDatasetCollection, get_tuple, get_is_none @@ -8,14 +17,7 @@ from ocgis import env, constants from ocgis.interface.base.crs import CoordinateReferenceSystem, CFWGS84 from ocgis.test.base import TestBase, nc_scope -import os -import pickle -from datetime import datetime as dt -import shutil -import datetime from ocgis.api.operations import OcgOperations -import numpy as np -from cfunits.cfunits import Units from ocgis.util.helpers import get_iter from ocgis.util.itester import itr_products_keywords @@ -60,6 +62,12 @@ def test_init(self): rd = RequestDataset(uri=self.uri, crs=CFWGS84()) self.assertTrue(rd._has_assigned_coordinate_system) + def test_init_driver(self): + uri = ShpCabinet().get_shp_path('state_boundaries') + rd = RequestDataset(uri=uri, driver='vector') + self.assertIsNone(rd.variable) + self.assertIsInstance(rd.get(), Field) + def test_str(self): rd = self.test_data.get_rd('cancm4_tas') ss = str(rd) @@ -73,6 +81,11 @@ def test_crs_overload(self): field = rd.get() self.assertDictEqual(kwds['crs'].value, field.spatial.crs.value) + def test_name(self): + path = 
ShpCabinet().get_shp_path('state_boundaries') + rd = RequestDataset(uri=path, driver='vector') + self.assertIsNone(rd.name) + def test_uri_cannot_be_set(self): rd = self.test_data.get_rd('cancm4_tas') other_uri = self.test_data.get_uri('cancm4_rhs') @@ -346,10 +359,14 @@ def test_pickle(self): rd2 = pickle.load(f) self.assertTrue(rd == rd2) - def test_inspect_method(self): + def test_inspect(self): rd = RequestDataset(self.uri, self.variable) rd.inspect() + uri = ShpCabinet().get_shp_path('state_boundaries') + rd = RequestDataset(uri=uri, driver='vector') + str(rd.inspect()) + def test_inspect_as_dct(self): variables = [ self.variable, diff --git a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py index 7007fa032..02d7b92a0 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py +++ b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py @@ -515,6 +515,11 @@ def test_open(self): with self.assertRaises(KeyError): rd.source_metadata + def test_inspect(self): + rd = self.test_data.get_rd('cancm4_tas') + driver = DriverNetcdf(rd) + driver.inspect() + class Test(TestBase): diff --git a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_vector.py b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_vector.py new file mode 100644 index 000000000..3cfa852c7 --- /dev/null +++ b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_vector.py @@ -0,0 +1,73 @@ +from ocgis import RequestDataset, ShpCabinet, ShpCabinetIterator +from ocgis.api.request.driver.base import AbstractDriver +from ocgis.api.request.driver.vector import DriverVector +from ocgis.interface.base.crs import WGS84 +from ocgis.test.base import TestBase + + +class TestDriverVector(TestBase): + + def get_driver(self, **kwargs): + rd = self.get_rd(**kwargs) + driver = DriverVector(rd) + return driver + + def get_rd(self, variable=None): + 
uri = ShpCabinet().get_shp_path('state_boundaries') + rd = RequestDataset(uri=uri, driver='vector', variable=variable) + return rd + + def test_init(self): + self.assertEqual(DriverVector.__bases__, (AbstractDriver,)) + self.assertIsInstance(self.get_driver(), DriverVector) + + def test_close(self): + driver = self.get_driver() + sci = driver.open() + driver.close(sci) + + def test_get_crs(self): + driver = self.get_driver() + self.assertEqual(WGS84(), driver.get_crs()) + + def test_get_dimensioned_variables(self): + driver = self.get_driver() + self.assertIsNone(driver.get_dimensioned_variables()) + + def test_get_field(self): + driver = self.get_driver() + field = driver.get_field() + sub = field[:, :, :, :, 25] + self.assertEqual(sub.spatial.properties.shape, (1,)) + self.assertTrue(len(sub.spatial.properties.dtype.names) > 2) + + # test with a variable + driver = self.get_driver(variable=['ID', 'STATE_NAME']) + field = driver.get_field() + self.assertIn('ID', field.variables) + self.assertEqual(field.variables['ID'].shape, (1, 1, 1, 1, 51)) + + # test an alias and name + rd = self.get_rd(variable='ID') + rd.alias = 'another' + rd.name = 'something_else' + driver = DriverVector(rd) + field = driver.get_field() + self.assertEqual(field.name, rd.name) + self.assertIn('another', field.variables) + + def test_get_source_metadata(self): + driver = self.get_driver() + meta = driver.get_source_metadata() + self.assertIsInstance(meta, dict) + self.assertTrue(len(meta) > 2) + + def test_open(self): + driver = self.get_driver() + sci = driver.open() + self.assertIsInstance(sci, ShpCabinetIterator) + self.assertFalse(sci.as_spatial_dimension) + + def test_inspect(self): + driver = self.get_driver() + driver.inspect() diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_crs.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_crs.py index a1eeaddae..76b75c6e7 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_crs.py +++ 
b/src/ocgis/test/test_ocgis/test_interface/test_base/test_crs.py @@ -1,5 +1,4 @@ import os -import unittest from copy import deepcopy import netCDF4 as nc @@ -96,8 +95,6 @@ def test_get_wrap_action(self): self.assertIsNone(ret) def test_get_wrapped_state(self): - #todo: test only geometries - refv = WrappableCoordinateReferenceSystem refm = refv.get_wrapped_state diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py index 96bc449aa..6c4b3e49c 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py @@ -1293,6 +1293,13 @@ def test_assert_uniform_mask(self): sdim.assert_uniform_mask() sdim.geom.polygon.value.mask[2, 2] = False + sdim.grid.corners.mask[0, 2, 1, 3] = True + with self.assertRaises(AssertionError): + sdim.assert_uniform_mask() + sdim.grid.corners.mask[0, 2, 1, 3] = False + self.assertIsNotNone(sdim.grid.corners) + sdim.assert_uniform_mask() + def test_corners(self): for grid in self.iter_grid_combinations_for_corners(): try: diff --git a/src/ocgis/util/shp_cabinet.py b/src/ocgis/util/shp_cabinet.py index 248a2298e..561027359 100644 --- a/src/ocgis/util/shp_cabinet.py +++ b/src/ocgis/util/shp_cabinet.py @@ -1,18 +1,17 @@ from collections import OrderedDict import os -from ocgis import env +import csv +from copy import deepcopy + import ogr from shapely.geometry.multipolygon import MultiPolygon -import csv from osgeo.ogr import CreateGeometryFromWkb from shapely.geometry.polygon import Polygon from shapely import wkb import fiona -from ocgis.interface.base.crs import CoordinateReferenceSystem -from copy import deepcopy -from ocgis.interface.base.dimension.spatial import SpatialGeometryPolygonDimension, SpatialGeometryDimension, \ - SpatialDimension, SpatialGeometryPointDimension -import numpy as np + +from 
ocgis import env +from ocgis.interface.base.dimension.spatial import SpatialDimension class ShpCabinetIterator(object): From 9819d2183026ca06dba5531fbe05b00db953ed9d Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Thu, 18 Dec 2014 12:50:01 -0700 Subject: [PATCH 32/71] moved combinatorial simple tests to test_combinatorial --- .../test/test_real_data/test_combinatorial.py | 251 +++++++++++++++++- src/ocgis/test/test_simple/run_simple.py | 6 +- src/ocgis/test/test_simple/test_simple.py | 227 ---------------- 3 files changed, 254 insertions(+), 230 deletions(-) diff --git a/src/ocgis/test/test_real_data/test_combinatorial.py b/src/ocgis/test/test_real_data/test_combinatorial.py index a713744cb..020faf58b 100644 --- a/src/ocgis/test/test_real_data/test_combinatorial.py +++ b/src/ocgis/test/test_real_data/test_combinatorial.py @@ -1,8 +1,21 @@ +from copy import deepcopy +from csv import DictReader import os import shutil +import itertools +import fiona +from ocgis.api.request.base import RequestDataset, RequestDatasetCollection +import ocgis +from ocgis.exc import DefinitionValidationError, ExtentError +from ocgis.interface.base.crs import CFWGS84, CoordinateReferenceSystem +from ocgis.util.spatial.fiona_maker import FionaMaker +from shapely import wkt -from ocgis import OcgOperations +from ocgis import OcgOperations, env from ocgis.test.base import TestBase, attr +from ocgis.test.test_simple.make_test_data import SimpleNc, SimpleNcNoBounds, SimpleNcNoLevel +from ocgis.test.test_simple.test_simple import TestSimpleBase +import numpy as np class TestCombinatorial(TestBase): @@ -73,3 +86,239 @@ def test(self): folder = os.path.split(path)[0] shutil.rmtree(folder) log.debug('success') + + +class TestProjectionCombinations(TestSimpleBase): + base_value = np.array([[1.0, 1.0, 2.0, 2.0], + [1.0, 1.0, 2.0, 2.0], + [3.0, 3.0, 4.0, 4.0], + [3.0, 3.0, 4.0, 4.0]]) + nc_factory = SimpleNc + fn = 'test_simple_spatial_01.nc' + + @attr('slow') + def test_calc_sample_size(self): + rd1 
= self.get_dataset() + rd1['alias'] = 'var1' + rd2 = self.get_dataset() + rd2['alias'] = 'var2' + + dataset = [ + # RequestDatasetCollection([rd1]), + RequestDatasetCollection([rd1,rd2]) + ] + calc_sample_size = [ + True, + # False + ] + calc = [ + [{'func':'mean','name':'mean'},{'func':'max','name':'max'}], + # [{'func':'ln','name':'ln'}], + # None, + # [{'func':'divide','name':'divide','kwds':{'arr1':'var1','arr2':'var2'}}] + ] + calc_grouping = [ + # None, + ['month'], + # ['month','year'] + ] + output_format = ['numpy'] + + for ii,tup in enumerate(itertools.product(dataset,calc_sample_size,calc,calc_grouping,output_format)): + kwds = dict(zip(['dataset','calc_sample_size','calc','calc_grouping','output_format'],tup)) + kwds['prefix'] = str(ii) + + try: + ops = OcgOperations(**kwds) + except DefinitionValidationError: + if kwds['calc'] is not None: + ## set functions require a temporal grouping otherwise the calculation + ## is meaningless + if kwds['calc'][0]['func'] == 'mean' and kwds['calc_grouping'] is None: + continue + ## multivariate functions may not implemented with sample size = True + elif kwds['calc_sample_size'] and kwds['calc'][0]['func'] == 'divide': + continue + ## multivariate functions require the correct number of variables + elif kwds['calc'][0]['func'] == 'divide' and len(kwds['dataset']) == 1: + continue + ## only one request dataset may be written to netCDF at this time + elif kwds['output_format'] == 'nc' and len(kwds['dataset']) == 2: + continue + else: + raise + ## only one request dataset may be written to netCDF at this time + elif kwds['output_format'] == 'nc' and len(ops.dataset) == 2: + continue + else: + raise + + ret = ops.execute() + + if kwds['output_format'] == 'nc': + if kwds['calc_sample_size'] and kwds['calc_grouping']: + if kwds['calc'] is not None and kwds['calc'][0]['func'] == 'mean': + with self.nc_scope(ret) as ds: + self.assertEqual(sum([v.startswith('n_') for v in ds.variables.keys()]),2) + 
self.assertEqual(ds.variables['n_max'][:].mean(),30.5) + + if kwds['output_format'] == 'csv': + if kwds['calc'] is not None and kwds['calc'][0]['func'] == 'mean': + with open(ret,'r') as f: + reader = DictReader(f) + alias_set = set([row['CALC_ALIAS'] for row in reader]) + if len(kwds['dataset']) == 1: + if kwds['calc_sample_size']: + self.assertEqual(alias_set,set(['max','n_max','n_mean','mean'])) + else: + self.assertEqual(alias_set,set(['max','mean'])) + else: + if kwds['calc_sample_size']: + self.assertEqual(alias_set,set(['max_var1','n_max_var1','n_mean_var1','mean_var1', + 'max_var2','n_max_var2','n_mean_var2','mean_var2'])) + else: + self.assertEqual(alias_set,set(['max_var1','mean_var1', + 'max_var2','mean_var2'])) + + @attr('slow') + def test_combinatorial_projection_with_geometries(self): + + # self.get_ret(kwds={'output_format':'shp','prefix':'as_polygon'}) + # self.get_ret(kwds={'output_format':'shp','prefix':'as_point','abstraction':'point'}) + + features = [ + {'NAME':'a','wkt':'POLYGON((-105.020430 40.073118,-105.810753 39.327957,-105.660215 38.831183,-104.907527 38.763441,-104.004301 38.816129,-103.643011 39.802151,-103.643011 39.802151,-103.643011 39.802151,-103.643011 39.802151,-103.959140 40.118280,-103.959140 40.118280,-103.959140 40.118280,-103.959140 40.118280,-104.327957 40.201075,-104.327957 40.201075,-105.020430 40.073118))'}, + {'NAME':'b','wkt':'POLYGON((-102.212903 39.004301,-102.905376 38.906452,-103.311828 37.694624,-103.326882 37.295699,-103.898925 37.220430,-103.846237 36.746237,-102.619355 37.107527,-102.634409 37.724731,-101.874194 37.882796,-102.212903 39.004301))'}, + {'NAME':'c','wkt':'POLYGON((-105.336559 37.175269,-104.945161 37.303226,-104.726882 37.175269,-104.696774 36.844086,-105.043011 36.693548,-105.283871 36.640860,-105.336559 37.175269))'}, + {'NAME':'d','wkt':'POLYGON((-102.318280 39.741935,-103.650538 39.779570,-103.620430 39.448387,-103.349462 39.433333,-103.078495 39.606452,-102.325806 39.613978,-102.325806 
39.613978,-102.333333 39.741935,-102.318280 39.741935))'}, + ] + + for filename in ['polygon','point']: + if filename == 'point': + geometry = 'Point' + to_write = deepcopy(features) + for feature in to_write: + geom = wkt.loads(feature['wkt']) + feature['wkt'] = geom.centroid.wkt + else: + to_write = features + geometry = 'Polygon' + + path = os.path.join(self.current_dir_output,'ab_{0}.shp'.format(filename)) + with FionaMaker(path,geometry=geometry) as fm: + fm.write(to_write) + + no_bounds_nc = SimpleNcNoBounds() + no_bounds_nc.write() + no_bounds_uri = os.path.join(env.DIR_OUTPUT,no_bounds_nc.filename) + + no_level_nc = SimpleNcNoLevel() + no_level_nc.write() + no_level_uri = os.path.join(env.DIR_OUTPUT,no_level_nc.filename) + + ocgis.env.DIR_SHPCABINET = self.current_dir_output +# ocgis.env.DEBUG = True +# ocgis.env.VERBOSE = True + + aggregate = [ + False, + True + ] + spatial_operation = [ + 'intersects', + 'clip' + ] + epsg = [ + 2163, + 4326, + None + ] + output_format = [ + 'nc', + 'shp', + 'csv+' + ] + abstraction = [ + 'polygon', + 'point', + None + ] + dataset = [ + self.get_dataset(), + {'uri':no_bounds_uri,'variable':'foo'}, + {'uri':no_level_uri,'variable':'foo'} + ] + geom = [ + 'ab_polygon', + 'ab_point' + ] + calc = [ + None, + [{'func':'mean','name':'my_mean'}] + ] + calc_grouping = ['month'] + + args = (aggregate,spatial_operation,epsg,output_format,abstraction,geom,calc,dataset) + for ii,tup in enumerate(itertools.product(*args)): + a,s,e,o,ab,g,c,d = tup + + if os.path.split(d['uri'])[1] == 'test_simple_spatial_no_bounds_01.nc': + unbounded = True + else: + unbounded = False + + if o == 'nc' and e == 4326: + output_crs = CFWGS84() + else: + output_crs = CoordinateReferenceSystem(epsg=e) if e is not None else None + + kwds = dict(aggregate=a,spatial_operation=s,output_format=o,output_crs=output_crs, + geom=g,abstraction=ab,dataset=d,prefix=str(ii),calc=c, + calc_grouping=calc_grouping) + + try: + ops = OcgOperations(**kwds) + ret = 
ops.execute() + except DefinitionValidationError: + if o == 'nc': + if e not in [4326,None]: + continue + if s == 'clip': + continue + else: + raise + except ExtentError: + if unbounded or ab == 'point': + continue + else: + raise + + if o == 'shp': + ugid_path = os.path.join(self.current_dir_output,ops.prefix,ops.prefix+'_ugid.shp') + else: + ugid_path = os.path.join(self.current_dir_output,ops.prefix,'shp',ops.prefix+'_ugid.shp') + + if o != 'nc': + with fiona.open(ugid_path,'r') as f: + if e: + second = output_crs + else: + second = CoordinateReferenceSystem(epsg=4326) + self.assertEqual(CoordinateReferenceSystem(value=f.meta['crs']),second) + + if o == 'shp': + with fiona.open(ret,'r') as f: + if a and ab == 'point': + second = 'MultiPoint' + elif ab is None: + field = RequestDataset(uri=d['uri'],variable='foo').get() + second = field.spatial.geom.get_highest_order_abstraction()._geom_type + else: + second = ab.title() + + if second in ['Polygon','MultiPolygon']: + second = ['Polygon','MultiPolygon'] + elif second in ['Point','MultiPoint']: + second = ['Point','MultiPoint'] + + self.assertTrue(f.meta['schema']['geometry'] in second) diff --git a/src/ocgis/test/test_simple/run_simple.py b/src/ocgis/test/test_simple/run_simple.py index a97436b83..97a585b95 100644 --- a/src/ocgis/test/test_simple/run_simple.py +++ b/src/ocgis/test/test_simple/run_simple.py @@ -16,8 +16,11 @@ def main(): simple_suite.run(result) + print + print 'Ran {0} tests.'.format(result.testsRun) + print + if len(result.errors) > 0: - print print '#########################################################' print 'There are {0} errors. See below for tracebacks:'.format(len(result.errors)) print '#########################################################' @@ -29,7 +32,6 @@ def main(): print 'There are {0} errors. See above for tracebacks.'.format(len(result.errors)) print '#########################################################' else: - print print 'All tests passed.' 
print diff --git a/src/ocgis/test/test_simple/test_simple.py b/src/ocgis/test/test_simple/test_simple.py index 01c431900..b13dae016 100644 --- a/src/ocgis/test/test_simple/test_simple.py +++ b/src/ocgis/test/test_simple/test_simple.py @@ -640,90 +640,6 @@ def test_calc_eval_multivariate(self): if of == 'nc': with nc_scope(ret) as ds: self.assertEqual(ds.variables['foo3'][:].mean(), 9.0) - - @attr('slow') - def test_calc_sample_size(self): - rd1 = self.get_dataset() - rd1['alias'] = 'var1' - rd2 = self.get_dataset() - rd2['alias'] = 'var2' - - dataset = [ - # RequestDatasetCollection([rd1]), - RequestDatasetCollection([rd1,rd2]) - ] - calc_sample_size = [ - True, - # False - ] - calc = [ - [{'func':'mean','name':'mean'},{'func':'max','name':'max'}], - # [{'func':'ln','name':'ln'}], - # None, - # [{'func':'divide','name':'divide','kwds':{'arr1':'var1','arr2':'var2'}}] - ] - calc_grouping = [ - # None, - ['month'], - # ['month','year'] - ] - output_format = ['numpy'] - - for ii,tup in enumerate(itertools.product(dataset,calc_sample_size,calc,calc_grouping,output_format)): - kwds = dict(zip(['dataset','calc_sample_size','calc','calc_grouping','output_format'],tup)) - kwds['prefix'] = str(ii) - - try: - ops = OcgOperations(**kwds) - except DefinitionValidationError: - if kwds['calc'] is not None: - ## set functions require a temporal grouping otherwise the calculation - ## is meaningless - if kwds['calc'][0]['func'] == 'mean' and kwds['calc_grouping'] is None: - continue - ## multivariate functions may not implemented with sample size = True - elif kwds['calc_sample_size'] and kwds['calc'][0]['func'] == 'divide': - continue - ## multivariate functions require the correct number of variables - elif kwds['calc'][0]['func'] == 'divide' and len(kwds['dataset']) == 1: - continue - ## only one request dataset may be written to netCDF at this time - elif kwds['output_format'] == 'nc' and len(kwds['dataset']) == 2: - continue - else: - raise - ## only one request dataset may be 
written to netCDF at this time - elif kwds['output_format'] == 'nc' and len(ops.dataset) == 2: - continue - else: - raise - - ret = ops.execute() - - if kwds['output_format'] == 'nc': - if kwds['calc_sample_size'] and kwds['calc_grouping']: - if kwds['calc'] is not None and kwds['calc'][0]['func'] == 'mean': - with nc_scope(ret) as ds: - self.assertEqual(sum([v.startswith('n_') for v in ds.variables.keys()]),2) - self.assertEqual(ds.variables['n_max'][:].mean(),30.5) - - if kwds['output_format'] == 'csv': - if kwds['calc'] is not None and kwds['calc'][0]['func'] == 'mean': - with open(ret,'r') as f: - reader = DictReader(f) - alias_set = set([row['CALC_ALIAS'] for row in reader]) - if len(kwds['dataset']) == 1: - if kwds['calc_sample_size']: - self.assertEqual(alias_set,set(['max','n_max','n_mean','mean'])) - else: - self.assertEqual(alias_set,set(['max','mean'])) - else: - if kwds['calc_sample_size']: - self.assertEqual(alias_set,set(['max_var1','n_max_var1','n_mean_var1','mean_var1', - 'max_var2','n_max_var2','n_mean_var2','mean_var2'])) - else: - self.assertEqual(alias_set,set(['max_var1','mean_var1', - 'max_var2','mean_var2'])) def test_nc_conversion(self): rd = self.get_dataset() @@ -832,149 +748,6 @@ def test_limiting_headers(self): with self.assertRaises(DefinitionValidationError): OcgOperations(dataset=self.get_dataset(),headers=['foo'],output_format='csv') - - @attr('slow') - def test_combinatorial_projection_with_geometries(self): - - # self.get_ret(kwds={'output_format':'shp','prefix':'as_polygon'}) - # self.get_ret(kwds={'output_format':'shp','prefix':'as_point','abstraction':'point'}) - - features = [ - {'NAME':'a','wkt':'POLYGON((-105.020430 40.073118,-105.810753 39.327957,-105.660215 38.831183,-104.907527 38.763441,-104.004301 38.816129,-103.643011 39.802151,-103.643011 39.802151,-103.643011 39.802151,-103.643011 39.802151,-103.959140 40.118280,-103.959140 40.118280,-103.959140 40.118280,-103.959140 40.118280,-104.327957 40.201075,-104.327957 
40.201075,-105.020430 40.073118))'}, - {'NAME':'b','wkt':'POLYGON((-102.212903 39.004301,-102.905376 38.906452,-103.311828 37.694624,-103.326882 37.295699,-103.898925 37.220430,-103.846237 36.746237,-102.619355 37.107527,-102.634409 37.724731,-101.874194 37.882796,-102.212903 39.004301))'}, - {'NAME':'c','wkt':'POLYGON((-105.336559 37.175269,-104.945161 37.303226,-104.726882 37.175269,-104.696774 36.844086,-105.043011 36.693548,-105.283871 36.640860,-105.336559 37.175269))'}, - {'NAME':'d','wkt':'POLYGON((-102.318280 39.741935,-103.650538 39.779570,-103.620430 39.448387,-103.349462 39.433333,-103.078495 39.606452,-102.325806 39.613978,-102.325806 39.613978,-102.333333 39.741935,-102.318280 39.741935))'}, - ] - - for filename in ['polygon','point']: - if filename == 'point': - geometry = 'Point' - to_write = deepcopy(features) - for feature in to_write: - geom = wkt.loads(feature['wkt']) - feature['wkt'] = geom.centroid.wkt - else: - to_write = features - geometry = 'Polygon' - - path = os.path.join(self.current_dir_output,'ab_{0}.shp'.format(filename)) - with FionaMaker(path,geometry=geometry) as fm: - fm.write(to_write) - - no_bounds_nc = SimpleNcNoBounds() - no_bounds_nc.write() - no_bounds_uri = os.path.join(env.DIR_OUTPUT,no_bounds_nc.filename) - - no_level_nc = SimpleNcNoLevel() - no_level_nc.write() - no_level_uri = os.path.join(env.DIR_OUTPUT,no_level_nc.filename) - - ocgis.env.DIR_SHPCABINET = self.current_dir_output -# ocgis.env.DEBUG = True -# ocgis.env.VERBOSE = True - - aggregate = [ - False, - True - ] - spatial_operation = [ - 'intersects', - 'clip' - ] - epsg = [ - 2163, - 4326, - None - ] - output_format = [ - 'nc', - 'shp', - 'csv+' - ] - abstraction = [ - 'polygon', - 'point', - None - ] - dataset = [ - self.get_dataset(), - {'uri':no_bounds_uri,'variable':'foo'}, - {'uri':no_level_uri,'variable':'foo'} - ] - geom = [ - 'ab_polygon', - 'ab_point' - ] - calc = [ - None, - [{'func':'mean','name':'my_mean'}] - ] - calc_grouping = ['month'] - - args = 
(aggregate,spatial_operation,epsg,output_format,abstraction,geom,calc,dataset) - for ii,tup in enumerate(itertools.product(*args)): - a,s,e,o,ab,g,c,d = tup - - if os.path.split(d['uri'])[1] == 'test_simple_spatial_no_bounds_01.nc': - unbounded = True - else: - unbounded = False - - if o == 'nc' and e == 4326: - output_crs = CFWGS84() - else: - output_crs = CoordinateReferenceSystem(epsg=e) if e is not None else None - - kwds = dict(aggregate=a,spatial_operation=s,output_format=o,output_crs=output_crs, - geom=g,abstraction=ab,dataset=d,prefix=str(ii),calc=c, - calc_grouping=calc_grouping) - - try: - ops = OcgOperations(**kwds) - ret = ops.execute() - except DefinitionValidationError: - if o == 'nc': - if e not in [4326,None]: - continue - if s == 'clip': - continue - else: - raise - except ExtentError: - if unbounded or ab == 'point': - continue - else: - raise - - if o == 'shp': - ugid_path = os.path.join(self.current_dir_output,ops.prefix,ops.prefix+'_ugid.shp') - else: - ugid_path = os.path.join(self.current_dir_output,ops.prefix,'shp',ops.prefix+'_ugid.shp') - - if o != 'nc': - with fiona.open(ugid_path,'r') as f: - if e: - second = output_crs - else: - second = CoordinateReferenceSystem(epsg=4326) - self.assertEqual(CoordinateReferenceSystem(value=f.meta['crs']),second) - - if o == 'shp': - with fiona.open(ret,'r') as f: - if a and ab == 'point': - second = 'MultiPoint' - elif ab is None: - field = RequestDataset(uri=d['uri'],variable='foo').get() - second = field.spatial.geom.get_highest_order_abstraction()._geom_type - else: - second = ab.title() - - if second in ['Polygon','MultiPolygon']: - second = ['Polygon','MultiPolygon'] - elif second in ['Point','MultiPoint']: - second = ['Point','MultiPoint'] - - self.assertTrue(f.meta['schema']['geometry'] in second) def test_empty_dataset_for_operations(self): with self.assertRaises(DefinitionValidationError): From e396180754585bba17252d08c4a5ddb7f22457c7 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Fri, 19 Dec 
2014 10:24:42 -0700 Subject: [PATCH 33/71] added "add_parents" keyword argument to AbstractFunction This keyword argument is set to False by default to not maintain parent variables following a calculation. For operations, the value is still set to True to enable iteration to the various output formats. --- src/ocgis/calc/base.py | 15 +++-- src/ocgis/calc/engine.py | 13 ++-- src/ocgis/interface/base/variable.py | 37 +++++++----- .../test_ocgis/test_api/test_collection.py | 59 +++++++++++-------- .../test/test_ocgis/test_calc/test_base.py | 14 ++++- .../test_calc/test_eval_function.py | 43 +++++++------- .../test_interface/test_base/test_variable.py | 18 +++++- 7 files changed, 126 insertions(+), 73 deletions(-) diff --git a/src/ocgis/calc/base.py b/src/ocgis/calc/base.py index 271ad2c01..c75ee9035 100644 --- a/src/ocgis/calc/base.py +++ b/src/ocgis/calc/base.py @@ -1,13 +1,15 @@ from collections import OrderedDict from copy import deepcopy -import numpy as np import abc import itertools +import logging + +import numpy as np + from ocgis.interface.base.variable import DerivedVariable, VariableCollection from ocgis.util.helpers import get_default_or_apply from ocgis.util.logging_ocgis import ocgis_lh from ocgis import constants -import logging from ocgis.exc import SampleSizeNotImplemented, DefinitionValidationError, UnitsValidationError from ocgis.util.units import get_are_units_equal_by_string_or_cfunits @@ -50,6 +52,7 @@ class AbstractFunction(object): >>> meta_attrs = {'standard_name': 'the_real', 'long_name': 'The Real Long Name', 'note_count': 55} :type meta_attrs: dict + :param bool add_parents: If ``True``, maintain parent variables following a calculation. 
""" __metaclass__ = abc.ABCMeta @@ -87,7 +90,7 @@ def standard_name(self): _empty_fill = {'fill': None, 'sample_size': None} def __init__(self, alias=None, dtype=None, field=None, file_only=False, vc=None, parms=None, tgd=None, - use_raw_values=False, calc_sample_size=False, fill_value=None, meta_attrs=None): + use_raw_values=False, calc_sample_size=False, fill_value=None, meta_attrs=None, add_parents=False): self.alias = alias or self.key self.dtype = dtype or self.dtype self.fill_value = fill_value @@ -99,6 +102,7 @@ def __init__(self, alias=None, dtype=None, field=None, file_only=False, vc=None, self.use_raw_values = use_raw_values self.calc_sample_size = calc_sample_size self.meta_attrs = deepcopy(meta_attrs) + self.add_parents = add_parents def aggregate_spatial(self, values, weights): """ @@ -286,7 +290,10 @@ def _add_to_collection_(self, units=None, value=None, parent_variables=None, ali attrs['standard_name'] = self.standard_name attrs['long_name'] = self.long_name - parents = VariableCollection(variables=parent_variables) + if self.add_parents: + parents = VariableCollection(variables=parent_variables) + else: + parents = None # if the operation is file only, creating a variable with an empty value will raise an exception. pass a dummy # data source because even if the value is trying to be loaded it should not be accessible! 
diff --git a/src/ocgis/calc/engine.py b/src/ocgis/calc/engine.py index 6b5a21258..e01e9e124 100644 --- a/src/ocgis/calc/engine.py +++ b/src/ocgis/calc/engine.py @@ -1,11 +1,13 @@ -from copy import deepcopy +import logging + +import numpy as np + from ocgis.util.logging_ocgis import ocgis_lh + from ocgis.interface.base.variable import VariableCollection from ocgis.interface.base.field import DerivedMultivariateField, DerivedField from ocgis.calc.base import AbstractMultivariateFunction -import logging from ocgis.calc.eval_function import EvalFunction -import numpy as np class OcgCalculationEngine(object): @@ -127,12 +129,13 @@ def execute(self,coll,file_only=False,tgds=None): ## initialize the function function = f['ref'](alias=f['name'],dtype=dtype,field=field,file_only=file_only,vc=out_vc, parms=f['kwds'],tgd=new_temporal,use_raw_values=self.use_raw_values, - calc_sample_size=self.calc_sample_size,meta_attrs=f.get('meta_attrs')) + calc_sample_size=self.calc_sample_size,meta_attrs=f.get('meta_attrs'), add_parents=True) except KeyError: ## likely an eval function which does not have the name ## key function = EvalFunction(field=field,file_only=file_only,vc=out_vc, - expr=self.funcs[0]['func'],meta_attrs=self.funcs[0].get('meta_attrs')) + expr=self.funcs[0]['func'],meta_attrs=self.funcs[0].get('meta_attrs'), + add_parents=True) ocgis_lh('calculation initialized',logger='calc.engine',level=logging.DEBUG) diff --git a/src/ocgis/interface/base/variable.py b/src/ocgis/interface/base/variable.py index 0f7fcaf06..cb107d14c 100644 --- a/src/ocgis/interface/base/variable.py +++ b/src/ocgis/interface/base/variable.py @@ -1,13 +1,12 @@ +import abc +from copy import copy, deepcopy + +import numpy as np + from ocgis.api.collection import AbstractCollection from ocgis.interface.base.attributes import Attributes -from ocgis.util.logging_ocgis import ocgis_lh -import abc -from collections import OrderedDict from ocgis.util.helpers import get_iter -import numpy as np -from ocgis 
import constants from ocgis.exc import NoUnitsError, VariableInCollectionError -from copy import copy, deepcopy class AbstractValueVariable(object): @@ -301,12 +300,22 @@ def get_sliced_variables(self, slc): variables = [v.__getitem__(slc) for v in self.itervalues()] ret = VariableCollection(variables=variables) return ret - - + + class DerivedVariable(Variable): - - def __init__(self,**kwds): - self.fdef = kwds.pop('fdef') - self.parents = kwds.pop('parents') - - super(DerivedVariable,self).__init__(**kwds) + """ + Variable class for derived variables. + + :param dict fdef: The function definition dictionary. + + >>> fdef = {'name': 'mean', 'func': 'mean'} + + :param parents: The parent variables used to derive the current variable. + :type parents: :class:`ocgis.interface.base.variable.VariableCollection` + """ + + def __init__(self, **kwargs): + self.fdef = kwargs.pop('fdef') + self.parents = kwargs.pop('parents', None) + + super(DerivedVariable, self).__init__(**kwargs) diff --git a/src/ocgis/test/test_ocgis/test_api/test_collection.py b/src/ocgis/test/test_ocgis/test_api/test_collection.py index 682f29279..ceda7c259 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_collection.py +++ b/src/ocgis/test/test_ocgis/test_api/test_collection.py @@ -1,5 +1,4 @@ import os -import datetime from copy import copy, deepcopy import fiona @@ -7,6 +6,7 @@ from shapely.geometry.multipolygon import MultiPolygon import numpy as np +import datetime from ocgis.api.collection import SpatialCollection, AbstractCollection from ocgis.interface.base.crs import CoordinateReferenceSystem, Spherical from ocgis.test.base import TestBase @@ -127,16 +127,15 @@ def test_init(self): self.assertEqual(sp[25]['tmax'].variables['tmax'].value.shape,(2, 31, 2, 3, 4)) def test_calculation_iteration(self): - field = self.get_field(with_value=True,month_count=2) - field.variables.add_variable(Variable(value=field.variables['tmax'].value+5, - name='tmin',alias='tmin')) + field = 
self.get_field(with_value=True, month_count=2) + field.variables.add_variable(Variable(value=field.variables['tmax'].value + 5, name='tmin', alias='tmin')) field.temporal.name_uid = 'tid' field.level.name_uid = 'lid' field.spatial.geom.name_uid = 'gid' grouping = ['month'] tgd = field.temporal.get_grouping(grouping) - mu = Mean(field=field,tgd=tgd,alias='my_mean',dtype=np.float64) + mu = Mean(field=field, tgd=tgd, alias='my_mean', dtype=np.float64, add_parents=True) ret = mu.execute() kwds = copy(field.__dict__) @@ -155,29 +154,33 @@ def test_calculation_iteration(self): sc = ShpCabinet() meta = sc.get_meta('state_boundaries') - sp = SpatialCollection(meta=meta,key='state_boundaries',headers=constants.calc_headers) + sp = SpatialCollection(meta=meta, key='state_boundaries', headers=constants.calc_headers) for row in sc.iter_geoms('state_boundaries'): - sp.add_field(row['properties']['UGID'],row['geom'],cfield,properties=row['properties']) - for ii,row in enumerate(sp.get_iter_dict()): + sp.add_field(row['properties']['UGID'], row['geom'], cfield, properties=row['properties']) + for ii, row in enumerate(sp.get_iter_dict()): if ii == 0: - self.assertEqual(row[0].bounds,(-100.5, 39.5, -99.5, 40.5)) - self.assertDictEqual(row[1],{'lid': 1, 'ugid': 1, 'vid': 1, 'cid': 1, 'did': 1, 'year': 2000, 'time': datetime.datetime(2000, 1, 16, 0, 0), 'calc_alias': 'my_mean_tmax', 'value': 0.44808476666433006, 'month': 1, 'alias': 'tmax', 'variable': 'tmax', 'gid': 1, 'calc_key': 'mean', 'tid': 1, 'level': 50, 'day': 16}) - self.assertEqual(len(row),2) - self.assertEqual(len(row[1]),len(constants.calc_headers)) + self.assertEqual(row[0].bounds, (-100.5, 39.5, -99.5, 40.5)) + self.assertDictEqual(row[1], {'lid': 1, 'ugid': 1, 'vid': 1, 'cid': 1, 'did': 1, 'year': 2000, + 'time': datetime.datetime(2000, 1, 16, 0, 0), + 'calc_alias': 'my_mean_tmax', 'value': 0.44808476666433006, 'month': 1, + 'alias': 'tmax', 'variable': 'tmax', 'gid': 1, 'calc_key': 'mean', + 'tid': 1, 'level': 50, 
'day': 16}) + self.assertEqual(len(row), 2) + self.assertEqual(len(row[1]), len(constants.calc_headers)) def test_calculation_iteration_two_calculations(self): - field = self.get_field(with_value=True,month_count=2) - field.variables.add_variable(Variable(value=field.variables['tmax'].value+5, - name='tmin',alias='tmin')) + field = self.get_field(with_value=True, month_count=2) + field.variables.add_variable(Variable(value=field.variables['tmax'].value + 5, name='tmin', alias='tmin')) field.temporal.name_uid = 'tid' field.level.name_uid = 'lid' field.spatial.geom.name_uid = 'gid' grouping = ['month'] tgd = field.temporal.get_grouping(grouping) - mu = Mean(field=field,tgd=tgd,alias='my_mean',dtype=np.float64) + mu = Mean(field=field, tgd=tgd, alias='my_mean', dtype=np.float64, add_parents=True) ret = mu.execute() - thresh = Threshold(field=field,vc=ret,tgd=tgd,alias='a_treshold',parms={'operation':'gte','threshold':0.5}) + thresh = Threshold(field=field, vc=ret, tgd=tgd, alias='a_treshold', add_parents=True, + parms={'operation': 'gte', 'threshold': 0.5}) ret = thresh.execute() kwds = copy(field.__dict__) @@ -196,20 +199,24 @@ def test_calculation_iteration_two_calculations(self): sc = ShpCabinet() meta = sc.get_meta('state_boundaries') - sp = SpatialCollection(meta=meta,key='state_boundaries',headers=constants.calc_headers) + sp = SpatialCollection(meta=meta, key='state_boundaries', headers=constants.calc_headers) for row in sc.iter_geoms('state_boundaries'): - sp.add_field(row['properties']['UGID'],row['geom'],cfield,properties=row['properties']) + sp.add_field(row['properties']['UGID'], row['geom'], cfield, properties=row['properties']) cids = set() - for ii,row in enumerate(sp.get_iter_dict()): + for ii, row in enumerate(sp.get_iter_dict()): cids.update([row[1]['cid']]) if ii == 0: - self.assertEqual(row[0].bounds,(-100.5, 39.5, -99.5, 40.5)) - self.assertDictEqual(row[1],{'lid': 1, 'ugid': 1, 'vid': 1, 'cid': 1, 'did': 1, 'year': 2000, 'time': 
datetime.datetime(2000, 1, 16, 0, 0), 'calc_alias': 'my_mean_tmax', 'value': 0.44808476666433006, 'month': 1, 'alias': 'tmax', 'variable': 'tmax', 'gid': 1, 'calc_key': 'mean', 'tid': 1, 'level': 50, 'day': 16}) - self.assertEqual(len(row),2) - self.assertEqual(len(row[1]),len(constants.calc_headers)) - self.assertEqual(ii+1,2*2*2*3*4*51*4) - self.assertEqual(len(cids),4) + self.assertEqual(row[0].bounds, (-100.5, 39.5, -99.5, 40.5)) + self.assertDictEqual(row[1], {'lid': 1, 'ugid': 1, 'vid': 1, 'cid': 1, 'did': 1, 'year': 2000, + 'time': datetime.datetime(2000, 1, 16, 0, 0), + 'calc_alias': 'my_mean_tmax', 'value': 0.44808476666433006, 'month': 1, + 'alias': 'tmax', 'variable': 'tmax', 'gid': 1, 'calc_key': 'mean', + 'tid': 1, 'level': 50, 'day': 16}) + self.assertEqual(len(row), 2) + self.assertEqual(len(row[1]), len(constants.calc_headers)) + self.assertEqual(ii + 1, 2 * 2 * 2 * 3 * 4 * 51 * 4) + self.assertEqual(len(cids), 4) def test_get_iter_melted(self): sp = self.get_collection() diff --git a/src/ocgis/test/test_ocgis/test_calc/test_base.py b/src/ocgis/test/test_ocgis/test_calc/test_base.py index 17bcdfb7f..5f2e89be4 100644 --- a/src/ocgis/test/test_ocgis/test_calc/test_base.py +++ b/src/ocgis/test/test_ocgis/test_calc/test_base.py @@ -4,7 +4,6 @@ import numpy as np from ocgis.interface.base.variable import VariableCollection, DerivedVariable - from ocgis.test.base import TestBase from ocgis.test.test_ocgis.test_interface.test_base.test_field import AbstractTestField from ocgis.calc.base import AbstractUnivariateFunction, AbstractUnivariateSetFunction, AbstractFunction, \ @@ -65,6 +64,19 @@ def test_add_to_collection(self): 'long_name': constants.default_sample_size_long_name} self.assertDictEqual(ss.attrs, attrs) + def test_add_to_collection_parents(self): + """Test adding parents to the output derived variable.""" + + field = self.get_field(with_value=True) + ff = FooNeedsUnits(field=field) + res = ff.execute() + self.assertIsNone(res['fnu'].parents) + + 
ff = FooNeedsUnits(field=field, add_parents=True) + res = ff.execute() + var = res['fnu'] + self.assertIsInstance(var.parents, VariableCollection) + def test_execute_meta_attrs(self): """Test overloaded metadata attributes are appropriately applied.""" diff --git a/src/ocgis/test/test_ocgis/test_calc/test_eval_function.py b/src/ocgis/test/test_ocgis/test_calc/test_eval_function.py index 556152a02..df53e02e4 100644 --- a/src/ocgis/test/test_ocgis/test_calc/test_eval_function.py +++ b/src/ocgis/test/test_ocgis/test_calc/test_eval_function.py @@ -1,6 +1,7 @@ +import numpy as np + from ocgis.test.base import TestBase from ocgis.calc.eval_function import EvalFunction -import numpy as np class TestEvalFunction(TestBase): @@ -78,41 +79,41 @@ def test_get_eval_string_bad_string(self): expr = 'es=6.1078*exp(foo(17.08085)*(tas-273.16)/(234.175+(tas-273.16)))' with self.assertRaises(ValueError): EvalFunction._get_eval_string_(expr,{'tas':'var.value'}) - + def test_calculation_one_variable_exp_only(self): rd = self.test_data.get_rd('cancm4_tas') field = rd.get() - field = field[:,0:10,:,:,:] + field = field[:, 0:10, :, :, :] expr = 'es=6.1078*exp(17.08085*(tas-273.16)/(234.175+(tas-273.16)))' - ef = EvalFunction(expr=expr,field=field) + ef = EvalFunction(expr=expr, field=field, add_parents=True) ret = ef.execute() - self.assertEqual(ret.keys(),['es']) - self.assertEqual(ret['es'].units,None) - self.assertEqual(ret['es'].alias,'es') - self.assertEqual(ret['es'].name,'es') - self.assertEqual(ret['es'].parents.keys(),['tas']) - + self.assertEqual(ret.keys(), ['es']) + self.assertEqual(ret['es'].units, None) + self.assertEqual(ret['es'].alias, 'es') + self.assertEqual(ret['es'].name, 'es') + self.assertEqual(ret['es'].parents.keys(), ['tas']) + var = field.variables['tas'] - actual_value = 6.1078*np.exp(17.08085*(var.value-273.16)/(234.175+(var.value-273.16))) - self.assertNumpyAll(ret['es'].value,actual_value) - + actual_value = 6.1078 * np.exp(17.08085 * (var.value - 273.16) / 
(234.175 + (var.value - 273.16))) + self.assertNumpyAll(ret['es'].value, actual_value) + def test_calculation_two_variables_exp_only(self): rd = self.test_data.get_rd('cancm4_tas') rd2 = self.test_data.get_rd('cancm4_tasmax_2001') field = rd.get() field2 = rd2.get() - field.variables.add_variable(field2.variables['tasmax'],assign_new_uid=True) - field = field[:,0:10,:,:,:] + field.variables.add_variable(field2.variables['tasmax'], assign_new_uid=True) + field = field[:, 0:10, :, :, :] expr = 'foo=log(1000*(tasmax-tas))/3' - ef = EvalFunction(expr=expr,field=field) + ef = EvalFunction(expr=expr, field=field, add_parents=True) ret = ef.execute() - self.assertEqual(ret.keys(),['foo']) - self.assertEqual(set(ret['foo'].parents.keys()),set(['tas','tasmax'])) - + self.assertEqual(ret.keys(), ['foo']) + self.assertEqual(set(ret['foo'].parents.keys()), set(['tas', 'tasmax'])) + tas = field.variables['tas'] tasmax = field.variables['tasmax'] - actual_value = np.log(1000*(tasmax.value-tas.value))/3 - self.assertNumpyAll(ret['foo'].value,actual_value) + actual_value = np.log(1000 * (tasmax.value - tas.value)) / 3 + self.assertNumpyAll(ret['foo'].value, actual_value) def test_calculation_one_variable_exp_and_log(self): rd = self.test_data.get_rd('cancm4_tas') diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_variable.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_variable.py index b4cfb8c96..a5a8dba84 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_variable.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_variable.py @@ -1,11 +1,14 @@ from collections import OrderedDict + from numpy.ma import MaskedArray from cfunits import Units +import numpy as np + from ocgis.exc import VariableInCollectionError, NoUnitsError from ocgis.interface.base.attributes import Attributes from ocgis.test.base import TestBase -from ocgis.interface.base.variable import Variable, VariableCollection, AbstractSourcedVariable, 
AbstractValueVariable -import numpy as np +from ocgis.interface.base.variable import Variable, VariableCollection, AbstractSourcedVariable, AbstractValueVariable, \ + DerivedVariable from ocgis.util.helpers import get_iter from ocgis.util.itester import itr_products_keywords @@ -65,6 +68,17 @@ def test_init(self): self.assertEqual(av.value, k.value) +class TestDerivedVariable(TestBase): + + def test_init(self): + self.assertEqual(DerivedVariable.__bases__, (Variable,)) + + fdef = [{'func': 'mean', 'name': 'mean'}] + dv = DerivedVariable(fdef=fdef) + self.assertEqual(dv.fdef, fdef) + self.assertIsNone(dv.parents) + + class TestVariable(TestBase): def test_init(self): From 4a4f25ffadf7893994785a66dc3e989c1ddf0186 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Mon, 22 Dec 2014 11:26:10 -0700 Subject: [PATCH 34/71] add percentile icclim functions #318 Added a number of new ICCLIM functions. --- src/ocgis/api/parms/definition.py | 3 + .../index/dynamic_kernel_percentile.py | 4 +- src/ocgis/calc/library/register.py | 3 +- src/ocgis/calc/library/statistics.py | 227 +++++++++++++- src/ocgis/contrib/library_icclim.py | 286 +++++++++++++++++- .../test_api/test_parms/test_definition.py | 4 + .../test_calc/test_library/test_statistics.py | 50 ++- .../test_contrib/test_library_icclim.py | 173 ++++++++++- 8 files changed, 726 insertions(+), 24 deletions(-) diff --git a/src/ocgis/api/parms/definition.py b/src/ocgis/api/parms/definition.py index e9f27f1b8..dfb4c3879 100644 --- a/src/ocgis/api/parms/definition.py +++ b/src/ocgis/api/parms/definition.py @@ -127,6 +127,9 @@ def __str__(self): cb = deepcopy(self.value) for ii in cb: ii.pop('ref') + for k, v in ii['kwds'].iteritems(): + if type(v) not in [str, unicode, float, int, basestring]: + ii['kwds'][k] = type(v) ret = '{0}={1}'.format(self.name, cb) return ret diff --git a/src/ocgis/calc/library/index/dynamic_kernel_percentile.py b/src/ocgis/calc/library/index/dynamic_kernel_percentile.py index 9e21adb5e..e2f5f81c7 100644 --- 
a/src/ocgis/calc/library/index/dynamic_kernel_percentile.py +++ b/src/ocgis/calc/library/index/dynamic_kernel_percentile.py @@ -11,7 +11,7 @@ class DynamicDailyKernelPercentileThreshold(AbstractUnivariateSetFunction, Abstr parms_definition = {'operation': str, 'percentile': float, 'daily_percentile': None, 'width': int} dtype = constants.np_int description = 'Implementation of moving window percentile threshold calculations similar to ECA indices: http://eca.knmi.nl/documents/atbd.pdf' - standard_name = 'dynamice_kernel_percentile' + standard_name = 'dynamic_kernel_percentile' long_name = 'Dynamic Kernel Percentile' def __init__(self, *args, **kwargs): @@ -75,7 +75,7 @@ def calculate(self, values, operation=None, percentile=None, daily_percentile=No def get_daily_percentile(arr, dt_arr, percentile, window_width, only_leap_years=False): """ Creates a dictionary with keys=calendar day (month,day) and values=numpy.ndarray (2D) - Example - to get the 2D percentile array corresponding to the 15th Mai: percentile_dict[5,15] + Example - to get the 2D percentile array corresponding to the 15th May: percentile_dict[5,15] :param arr: array of values :type arr: :class:`numpy.ndarray` (3D) of float diff --git a/src/ocgis/calc/library/register.py b/src/ocgis/calc/library/register.py index 5d8159b11..fb18d8a13 100644 --- a/src/ocgis/calc/library/register.py +++ b/src/ocgis/calc/library/register.py @@ -14,7 +14,8 @@ def __init__(self): self.reg += [math.Divide,math.NaturalLogarithm,math.Sum] self.reg += [statistics.FrequencyPercentile,statistics.Mean,statistics.StandardDeviation, - statistics.Max,statistics.Median,statistics.Min, Convolve1D,statistics.MovingWindow] + statistics.Max,statistics.Median,statistics.Min, Convolve1D,statistics.MovingWindow, + statistics.DailyPercentile] self.reg += [thresholds.Between,thresholds.Threshold] self.reg += [dynamic_kernel_percentile.DynamicDailyKernelPercentileThreshold, heat_index.HeatIndex,duration.Duration,duration.FrequencyDuration] diff 
--git a/src/ocgis/calc/library/statistics.py b/src/ocgis/calc/library/statistics.py index 460f57add..960905c9a 100644 --- a/src/ocgis/calc/library/statistics.py +++ b/src/ocgis/calc/library/statistics.py @@ -1,10 +1,13 @@ +import calendar +from collections import OrderedDict, defaultdict import itertools + import numpy as np + +from datetime import datetime from ocgis.calc import base from ocgis import constants from ocgis.calc.base import AbstractUnivariateFunction, AbstractParameterizedFunction -from ocgis.calc.library.math import Convolve1D -from ocgis.util.helpers import iter_array class MovingWindow(AbstractUnivariateFunction, AbstractParameterizedFunction): @@ -123,6 +126,226 @@ def _iter_kernel_values_(values, k, mode='valid'): raise NotImplementedError(mode) +class DailyPercentile(base.AbstractUnivariateFunction, base.AbstractParameterizedFunction): + key = 'daily_perc' + parms_definition = {'percentile': float, 'window_width': int, 'only_leap_years': bool} + description = '' + dtype = constants.np_float + standard_name = 'daily_percentile' + long_name = 'Daily Percentile' + + def __init__(self, *args, **kwargs): + super(DailyPercentile, self).__init__(*args, **kwargs) + + if self.file_only: + self.tgd = self.field.temporal.get_grouping(['month', 'day']) + self.field.temporal = self.tgd + + def calculate(self, values, percentile=None, window_width=None, only_leap_years=False): + assert(values.shape[0] == 1) + assert(values.shape[2] == 1) + # assert(self.tgd is not None) + # dtype = [('month', int), ('day', int), ('value', object)] + arr = values[0, :, 0, :, :] + assert(arr.ndim == 3) + dt_arr = self.field.temporal.value_datetime + dp = self.get_daily_percentile(arr, dt_arr, percentile, window_width, only_leap_years=only_leap_years) + shape_fill = list(values.shape) + shape_fill[1] = len(dp) + fill = np.zeros(shape_fill, dtype=self.dtype) + fill = np.ma.array(fill, mask=False) + tgd = self.field.temporal.get_grouping(['month', 'day']) + month_day_map = 
{(dt.month, dt.day): ii for ii, dt in enumerate(tgd.value_datetime)} + for key, value in dp.iteritems(): + fill[0, month_day_map[key], 0, :, :] = value + self.field.temporal = tgd + for idx in range(fill.shape[1]): + fill.mask[0, idx, 0, :, :] = values.mask[0, 0, 0, :, :] + return fill + + @staticmethod + def get_daily_percentile_from_request_dataset(rd, alias=None): + ret = {} + alias = alias or rd.alias + field = rd.get() + dt = field.temporal.value_datetime + value = field.variables[alias].value + for idx in range(len(dt)): + curr = dt[idx] + ret[(curr.month, curr.day)] = value[0, idx, 0, :, :] + return ret + + def get_daily_percentile(self, arr, dt_arr, percentile, window_width, only_leap_years=False): + """ + Creates a dictionary with keys=calendar day (month,day) and values=numpy.ndarray (2D) + Example - to get the 2D percentile array corresponding to the 15th May: percentile_dict[5,15] + + :param arr: array of values + :type arr: :class:`numpy.ndarray` (3D) of float + :param dt_arr: Corresponding time steps vector (base period: usually 1961-1990). + :type dt_arr: :class:`numpy.ndarray` (1D) of :class:`datetime.datetime` objects + :param percentile: Percentile to compute which must be between 0 and 100 inclusive. + :type percentile: int + :param window_width: Window width - must be odd. + :type window_width: int + :param only_leap_years: Option for February 29th. If ``True``, use only leap years when computing the basis. 
+ :type only_leap_years: bool + :rtype: dict + """ + + # we reduce the number of dimensions + if arr.ndim == 5: + arr = arr[0, :, 0, :, :] + elif arr.ndim == 3: + pass + else: + raise NotImplementedError(arr.ndim) + dt_arr = dt_arr.squeeze() + + # step1: creation of the dictionary with all calendar days: + dic_caldays = self.get_dict_caldays(dt_arr) + + percentile_dict = OrderedDict() + + dt_hour = dt_arr[0].hour # (we get hour of a date only one time, because usually the hour is the same for all dates in input dt_arr) + + for month in dic_caldays.keys(): + for day in dic_caldays[month]: + + # step2: we do a mask for the datetime vector for current calendar day (day/month) + dt_arr_mask = self.get_mask_dt_arr(dt_arr, month, day, dt_hour, window_width, only_leap_years) + + # step3: we are looking for the indices of non-masked dates (i.e. where dt_arr_mask==False) + indices_non_masked = np.where(dt_arr_mask==False)[0] + + # step4: we subset our arr + arr_subset = arr[indices_non_masked, :, :] + + # step5: we compute the percentile for current arr_subset + ############## WARNING: type(arr_subset) = numpy.ndarray. Numpy.percentile does not work with masked arrays, + ############## so if arr_subset has aberrant values like 999999 or 1e+20, the result will be wrong. + ############## Check with numpy.nanpercentile (Numpy version 1.9) !!! + arr_percentille_current_calday = np.percentile(arr_subset, percentile, axis=0) + + # step6: we add to the dictionnary... + percentile_dict[month,day] = arr_percentille_current_calday + + # print 'Creating percentile dictionary: month ', month, '---> OK' + + # print 'Percentile dictionary is created.' + + return percentile_dict + + @staticmethod + def get_dict_caldays(dt_arr): + """ + Create a dictionary of calendar days, where keys=months, values=days. 
+ + :param dt_arr: time steps vector + :type dt_arr: :class:`numpy.core.multiarray.ndarray` (1D) of :class:`datetime.datetime` objects + :rtype: dict + """ + + dic = defaultdict(list) + + for dt in dt_arr: + dic[dt.month].append(dt.day) + + for key in dic.keys(): + dic[key] = list(set(dic[key])) + + return dic + + @staticmethod + def get_masked(current_date, month, day, hour, window_width, only_leap_years): + """ + Returns ``True`` if ``current_date`` is not in the window centered on the given calendar day (month-day). Returns + ``False``, if it enters in the window. + + :param current_date: The date to check for inclusion in a given window. + :type current_date: :class:`datetime.datetime` + :param month: Month of the corresponding calendar day. + :type month: int + :param day: Day of the corresponding calendar day. + :type day: int + :param hour: Hour of the current day. + :type hour: int + :param window_width: Window width - must be odd. + :type window_width: int + :param only_leap_years: Option for February 29th. If ``True``, use only date from other leap years when constructing + the comparison basis. + :type only_leap_years: bool + :rtype: bool (if ``True``, the date will be masked) + """ + + yyyy = current_date.year + + if (day==29 and month==02): + if calendar.isleap(yyyy): + dt1 = datetime(yyyy,month,day,hour) + diff = abs(current_date-dt1).days + toReturn = diff > window_width/2 + else: + if only_leap_years: + toReturn=True + else: + dt1 = datetime(yyyy,02,28,hour) + diff = (current_date-dt1).days + toReturn = (diff < (-(window_width/2) + 1)) or (diff > window_width/2) + else: + d1 = datetime(yyyy,month,day, hour) + + # In the case the current date is in December and calendar day (day-month) is at the beginning of year. 
+ # For example we are looking for dates around January 2nd, and the current date is 31 Dec 1999, + # we will compare it with 02 Jan 2000 (1999 + 1) + d2 = datetime(yyyy+1,month,day, hour) + + # In the case the current date is in January and calendar day (day-month) is at the end of year. + # For example we are looking for dates around December 31st, and the current date is 02 Jan 2003, + # we will compare it with 01 Jan 2002 (2003 - 1) + d3 = datetime(yyyy-1,month,day, hour) + + diff=min(abs(current_date-d1).days,abs(current_date-d2).days,abs(current_date-d3).days) + toReturn = diff > window_width/2 + + return toReturn + + def get_mask_dt_arr(self, dt_arr, month, day, dt_hour, window_width, only_leap_years): + """ + Creates a binary mask for a datetime vector for a given calendar day (month-day). + + :param dt_arr: Time steps vector. + :type dt_arr: :class:`numpy.ndarray` (1D) of :class:`datetime.datetime` objects + :param month: Month of a calendar day. + :type month: int + :param day: Day of a calendar day. + :type day: int + :param window_width: Window width - must be odd. + :type window_width: int + :param only_leap_years: Option for February 29th. If ``True``, use only leap years when constructing the basis. + :type only_leap_years: bool + :rtype: :class:`numpy.ndarray` (1D) + """ + + mask = np.array([self.get_masked(dt, month, day, dt_hour, window_width, only_leap_years) for dt in dt_arr]) + return mask + + @staticmethod + def get_year_list(dt_arr): + """ + Just to get a list of all years conteining in time steps vector (dt_arr). 
+ """ + + year_list = [] + for dt in dt_arr: + year_list.append(dt.year) + + year_list = list(set(year_list)) + + return year_list + + + class FrequencyPercentile(base.AbstractUnivariateSetFunction,base.AbstractParameterizedFunction): key = 'freq_perc' parms_definition = {'percentile':float} diff --git a/src/ocgis/contrib/library_icclim.py b/src/ocgis/contrib/library_icclim.py index 803913c92..91536c4e7 100644 --- a/src/ocgis/contrib/library_icclim.py +++ b/src/ocgis/contrib/library_icclim.py @@ -1,15 +1,16 @@ -from ocgis.interface.base.attributes import Attributes -from ocgis.calc.base import AbstractUnivariateSetFunction,\ - AbstractMultivariateFunction -from ocgis import constants -from icclim import calc_indice -from icclim import set_longname_units as slu -from icclim import set_globattr import abc -import numpy as np from collections import OrderedDict from copy import deepcopy import json + +from icclim.percentile_dict import get_percentile_dict +from icclim import calc_indice, calc_indice_perc +from icclim import set_longname_units as slu +from icclim import set_globattr +import numpy as np + +from ocgis.calc.base import AbstractUnivariateSetFunction, AbstractMultivariateFunction, AbstractParameterizedFunction +from ocgis import constants from ocgis.exc import DefinitionValidationError from ocgis.api.parms.definition import CalcGrouping @@ -46,6 +47,24 @@ 'icclim_SD5cm':{'func':calc_indice.SD5cm_calculation,'meta':slu.SD5cm_setvarattr}, 'icclim_SD50cm':{'func':calc_indice.SD50cm_calculation,'meta':slu.SD50cm_setvarattr}, 'icclim_CDD':{'func':calc_indice.CDD_calculation,'meta':slu.CDD_setvarattr}, + 'icclim_TG10p':{'func':calc_indice_perc.TG10p_calculation,'meta':slu.TG10p_setvarattr}, + 'icclim_TX10p':{'func':calc_indice_perc.TX10p_calculation,'meta':slu.TX10p_setvarattr}, + 'icclim_TN10p':{'func':calc_indice_perc.TN10p_calculation,'meta':slu.TN10p_setvarattr}, + 'icclim_TG90p':{'func':calc_indice_perc.TG90p_calculation,'meta':slu.TG90p_setvarattr}, + 
'icclim_TX90p':{'func':calc_indice_perc.TX90p_calculation,'meta':slu.TX90p_setvarattr}, + 'icclim_TN90p':{'func':calc_indice_perc.TN90p_calculation,'meta':slu.TN90p_setvarattr}, + 'icclim_WSDI':{'func':calc_indice_perc.WSDI_calculation,'meta':slu.WSDI_setvarattr}, + 'icclim_CSDI':{'func':calc_indice_perc.CSDI_calculation,'meta':slu.CSDI_setvarattr}, + 'icclim_R75p':{'func':calc_indice_perc.R75p_calculation,'meta':slu.R75p_setvarattr}, + 'icclim_R75TOT':{'func':calc_indice_perc.R75TOT_calculation,'meta':slu.R75TOT_setvarattr}, + 'icclim_R95p':{'func':calc_indice_perc.R95p_calculation,'meta':slu.R95p_setvarattr}, + 'icclim_R95TOT':{'func':calc_indice_perc.R95TOT_calculation,'meta':slu.R95TOT_setvarattr}, + 'icclim_R99p':{'func':calc_indice_perc.R99p_calculation,'meta':slu.R99p_setvarattr}, + 'icclim_R99TOT':{'func':calc_indice_perc.R99TOT_calculation,'meta':slu.R99TOT_setvarattr}, + 'icclim_CD': {'func': calc_indice_perc.CD_calculation, 'meta': slu.CD_setvarattr}, + 'icclim_CW': {'func': calc_indice_perc.CW_calculation, 'meta': slu.CW_setvarattr}, + 'icclim_WD': {'func': calc_indice_perc.WD_calculation, 'meta': slu.WD_setvarattr}, + 'icclim_WW': {'func': calc_indice_perc.WW_calculation, 'meta': slu.WW_setvarattr}, } @@ -151,12 +170,15 @@ class AbstractIcclimUnivariateSetFunction(AbstractIcclimFunction,AbstractUnivari __metaclass__ = abc.ABCMeta def calculate(self,values): - return(_icclim_function_map[self.key]['func'](values,values.fill_value)) + return self._get_icclim_func_()(values, values.fill_value) @classmethod def validate(cls,ops): - cls.validate_icclim(cls,ops) - super(AbstractIcclimUnivariateSetFunction,cls).validate(ops) + cls.validate_icclim(cls, ops) + super(AbstractIcclimUnivariateSetFunction, cls).validate(ops) + + def _get_icclim_func_(self): + return _icclim_function_map[self.key]['func'] class AbstractIcclimMultivariateFunction(AbstractIcclimFunction,AbstractMultivariateFunction): @@ -166,8 +188,56 @@ class 
AbstractIcclimMultivariateFunction(AbstractIcclimFunction,AbstractMultivar def validate(cls,ops): cls.validate_icclim(cls,ops) super(AbstractIcclimMultivariateFunction,cls).validate(ops) - - + + +class AbstractIcclimPercentileIndice(AbstractIcclimUnivariateSetFunction, AbstractParameterizedFunction): + __metaclass__ = abc.ABCMeta + parms_definition = {'percentile_dict': dict} + window_width = 5 + only_leap_years = False + + def __init__(self, *args, **kwargs): + self._storage_percentile_dict = {} + AbstractIcclimUnivariateSetFunction.__init__(self, *args, **kwargs) + + if self.field is not None: + assert(self.field.shape[0] == 1) + assert(self.field.shape[2] == 1) + + @abc.abstractproperty + def percentile(self): + """ + The percentile value to use for computing the percentile basis. Value is between 0 and 100. + + :type: int + """ + pass + + def calculate(self, values, percentile_dict=None): + + # if the percentile dictionary is not provided compute it + if percentile_dict is None: + try: + percentile_dict = self._storage_percentile_dict[self._curr_variable.alias] + except KeyError: + variable = self.field.variables[self._curr_variable.alias] + value = variable.value[0, :, 0, :, :] + assert(value.ndim == 3) + percentile_dict = get_percentile_dict(value, self.field.temporal.value_datetime, self.percentile, + self.window_width, only_leap_years=self.only_leap_years) + self._storage_percentile_dict[self._curr_variable.alias] = percentile_dict + + dt_arr = self.field.temporal.value_datetime[self._curr_group] + ret = _icclim_function_map[self.key]['func'](values, dt_arr, percentile_dict, fill_val=values.fill_value) + return ret + + @staticmethod + def get_percentile_dict(*args, **kwargs): + """See :func:`icclim.percentile_dict.get_percentile_dict` documentation.""" + + return get_percentile_dict(*args, **kwargs) + + class IcclimTG(AbstractIcclimUnivariateSetFunction): dtype = constants.np_float key = 'icclim_TG' @@ -321,3 +391,193 @@ class IcclimSD50(IcclimCSU): class 
IcclimCDD(IcclimCSU):
     dtype = constants.np_float
     key = 'icclim_CDD'
+
+
+class IcclimTG10p(AbstractIcclimPercentileIndice):
+    key = 'icclim_TG10p'
+    dtype = constants.np_float
+    percentile = 10
+
+
+class IcclimTX10p(AbstractIcclimPercentileIndice):
+    key = 'icclim_TX10p'
+    dtype = constants.np_float
+    percentile = 10
+
+
+class IcclimTN10p(AbstractIcclimPercentileIndice):
+    key = 'icclim_TN10p'
+    dtype = constants.np_float
+    percentile = 10
+
+
+class IcclimTG90p(AbstractIcclimPercentileIndice):
+    key = 'icclim_TG90p'
+    dtype = constants.np_float
+    percentile = 90
+
+
+class IcclimTX90p(AbstractIcclimPercentileIndice):
+    key = 'icclim_TX90p'
+    dtype = constants.np_float
+    percentile = 90
+
+
+class IcclimTN90p(AbstractIcclimPercentileIndice):
+    key = 'icclim_TN90p'
+    dtype = constants.np_float
+    percentile = 90
+
+
+class IcclimWSDI(AbstractIcclimPercentileIndice):
+    key = 'icclim_WSDI'
+    dtype = constants.np_float
+    percentile = 90
+
+
+class IcclimCSDI(AbstractIcclimPercentileIndice):
+    key = 'icclim_CSDI'
+    dtype = constants.np_float
+    percentile = 10
+
+
+class IcclimR75p(AbstractIcclimPercentileIndice):
+    key = 'icclim_R75p'
+    dtype = constants.np_float
+    percentile = 75
+
+
+class IcclimR75TOT(AbstractIcclimPercentileIndice):
+    key = 'icclim_R75TOT'
+    dtype = constants.np_float
+    percentile = 75
+
+
+class IcclimR95p(AbstractIcclimPercentileIndice):
+    key = 'icclim_R95p'
+    dtype = constants.np_float
+    percentile = 95
+
+
+class IcclimR95TOT(AbstractIcclimPercentileIndice):
+    key = 'icclim_R95TOT'
+    dtype = constants.np_float
+    percentile = 95
+
+
+class IcclimR99p(AbstractIcclimPercentileIndice):
+    key = 'icclim_R99p'
+    dtype = constants.np_float
+    percentile = 99
+
+
+class IcclimR99TOT(AbstractIcclimPercentileIndice):
+    key = 'icclim_R99TOT'
+    dtype = constants.np_float
+    percentile = 99
+
+
+class IcclimCD(AbstractIcclimMultivariateFunction, AbstractParameterizedFunction):
+    key = 'icclim_CD'
+    dtype = constants.np_float
+    required_variables = ['tas', 
'pr'] + time_aggregation_external = False + parms_definition = {'tas_25th_percentile_dict': dict, 'pr_25th_percentile_dict': dict} + window_width = 5 + percentile_tas = 25 + percentile_pr = 25 + + def __init__(self, *args, **kwargs): + self._storage_percentile_dict = {} + super(IcclimCD, self).__init__(*args, **kwargs) + + def calculate(self, tas=None, pr=None, tas_25th_percentile_dict=None, pr_25th_percentile_dict=None): + """ + See documentation for :func:`icclim.calc_indice_perc.CD_calculation`. + """ + + return self._calculate_(tas=tas, pr=pr, tas_percentile_dict=tas_25th_percentile_dict, + pr_percentile_dict=pr_25th_percentile_dict) + + def _calculate_(self, tas=None, pr=None, tas_percentile_dict=None, pr_percentile_dict=None): + """ + Allows subclasses to overload parameter definitions for `calculate`. + """ + + assert(tas.ndim == 3) + assert(pr.ndim == 3) + + try: + dt_arr = self.field.temporal.value_datetime[self._curr_group] + except AttributeError: + if not hasattr(self, '_curr_group'): + dt_arr = self.field.temporal.value_datetime + else: + raise + + if tas_percentile_dict is None: + try: + tas_percentile_dict = self._storage_percentile_dict['tas'] + pr_percentile_dict = self._storage_percentile_dict['pr'] + except KeyError: + dt_arr_perc = self.field.temporal.value_datetime + alias_tas = self.parms['tas'] + alias_pr = self.parms['pr'] + t_arr_perc = self.field.variables[alias_tas].value.squeeze() + p_arr_perc = self.field.variables[alias_pr].value.squeeze() + tas_percentile_dict = get_percentile_dict(t_arr_perc, dt_arr_perc, self.percentile_tas, self.window_width) + pr_percentile_dict = get_percentile_dict(p_arr_perc, dt_arr_perc, self.percentile_pr, self.window_width) + self._storage_percentile_dict['tas'] = tas_percentile_dict + self._storage_percentile_dict['pr'] = pr_percentile_dict + + ret = _icclim_function_map[self.key]['func'](tas, tas_percentile_dict, pr, pr_percentile_dict, dt_arr, + fill_val=tas.fill_value) + # convert output to a masked 
array + ret_mask = ret == tas.fill_value + ret = np.ma.array(ret, mask=ret_mask, fill_value=tas.fill_value) + return ret + + +class IcclimCW(IcclimCD): + key = 'icclim_CW' + parms_definition = {'tas_25th_percentile_dict': dict, 'pr_75th_percentile_dict': dict} + percentile_tas = 25 + percentile_pr = 75 + + def calculate(self, tas=None, pr=None, tas_25th_percentile_dict=None, pr_75th_percentile_dict=None): + """ + See documentation for :func:`icclim.calc_indice_perc.CW_calculation`. + """ + + return self._calculate_(tas=tas, pr=pr, tas_percentile_dict=tas_25th_percentile_dict, + pr_percentile_dict=pr_75th_percentile_dict) + + +class IcclimWD(IcclimCD): + key = 'icclim_WD' + parms_definition = {'tas_75th_percentile_dict': dict, 'pr_25th_percentile_dict': dict} + percentile_tas = 75 + percentile_pr = 25 + + def calculate(self, tas=None, pr=None, tas_75th_percentile_dict=None, pr_25th_percentile_dict=None): + """ + See documentation for :func:`icclim.calc_indice_perc.WD_calculation`. + """ + + return self._calculate_(tas=tas, pr=pr, tas_percentile_dict=tas_75th_percentile_dict, + pr_percentile_dict=pr_25th_percentile_dict) + + +class IcclimWW(IcclimCD): + key = 'icclim_WW' + parms_definition = {'tas_75th_percentile_dict': dict, 'pr_75th_percentile_dict': dict} + percentile_tas = 75 + percentile_pr = 75 + + def calculate(self, tas=None, pr=None, tas_75th_percentile_dict=None, pr_75th_percentile_dict=None): + """ + See documentation for :func:`icclim.calc_indice_perc.WW_calculation`. 
+ """ + + return self._calculate_(tas=tas, pr=pr, tas_percentile_dict=tas_75th_percentile_dict, + pr_percentile_dict=pr_75th_percentile_dict) diff --git a/src/ocgis/test/test_ocgis/test_api/test_parms/test_definition.py b/src/ocgis/test/test_ocgis/test_api/test_parms/test_definition.py index 74a8d2a07..9a14b20cf 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_parms/test_definition.py +++ b/src/ocgis/test/test_ocgis/test_api/test_parms/test_definition.py @@ -224,6 +224,10 @@ def test_str(self): cc = Calc(None) self.assertEqual(str(cc), 'calc=None') + calc = [{'func': 'mean', 'name': 'my_mean', 'kwds': {'a': np.zeros(1000)}}] + cc = Calc(calc) + self.assertEqual(str(cc), "calc=[{'meta_attrs': None, 'name': 'my_mean', 'func': 'mean', 'kwds': OrderedDict([('a', )])}]") + def test_get_meta(self): for poss in Calc._possible: cc = Calc(poss) diff --git a/src/ocgis/test/test_ocgis/test_calc/test_library/test_statistics.py b/src/ocgis/test/test_ocgis/test_calc/test_library/test_statistics.py index e4da7512a..8e5a94743 100644 --- a/src/ocgis/test/test_ocgis/test_calc/test_library/test_statistics.py +++ b/src/ocgis/test/test_ocgis/test_calc/test_library/test_statistics.py @@ -4,12 +4,60 @@ import numpy as np from cfunits.cfunits import Units from ocgis.api.parms.definition import Calc -from ocgis.calc.library.statistics import Mean, FrequencyPercentile, MovingWindow +from ocgis.calc.library.statistics import Mean, FrequencyPercentile, MovingWindow, DailyPercentile from ocgis.interface.base.variable import DerivedVariable, Variable from ocgis.test.base import nc_scope from ocgis.test.test_ocgis.test_interface.test_base.test_field import AbstractTestField import ocgis from ocgis.util.itester import itr_products_keywords +from ocgis.util.large_array import compute + + +class TestDailyPercentile(AbstractTestField): + + def test_calculate(self): + field = self.get_field(with_value=True, month_count=2) + field = field[0, :, 0, :, :] + # field = 
self.test_data.get_rd('cancm4_tas').get() + tgd = field.temporal.get_grouping(['month', 'day']) + parms = {'percentile': 90, 'window_width': 5} + dp = DailyPercentile(field=field, tgd=tgd, parms=parms) + vc = dp.execute() + self.assertAlmostEqual(vc['daily_perc'].value.mean(), 0.76756388346354165) + + def test_operations(self): + rd = self.test_data.get_rd('cancm4_tas') + kwds = {'percentile': 90, 'window_width': 5} + calc = [{'func': 'daily_perc', 'name': 'dp', 'kwds': kwds}] + for output_format in ['numpy', 'nc']: + ops = ocgis.OcgOperations(dataset=rd, geom='state_boundaries', select_ugid=[23], calc=calc, + output_format=output_format, time_region={'year': [2002, 2003]}) + ret = ops.execute() + if output_format == 'numpy': + self.assertEqual(ret[23]['tas'].variables['dp'].value.mask.sum(), 730) + + def test_compute(self): + rd = self.test_data.get_rd('cancm4_tas') + kwds = {'percentile': 90, 'window_width': 5} + calc = [{'func': 'daily_perc', 'name': 'dp', 'kwds': kwds}] + ops = ocgis.OcgOperations(dataset=rd, geom='state_boundaries', select_ugid=[23], calc=calc, + output_format='nc', time_region={'year': [2002, 2003]}) + ret = compute(ops, 2, verbose=False) + rd = ocgis.RequestDataset(uri=ret) + self.assertEqual(rd.get().shape, (1, 365, 1, 4, 3)) + + def test_get_daily_percentile_from_request_dataset(self): + rd = self.test_data.get_rd('cancm4_tas') + kwds = {'percentile': 90, 'window_width': 5} + calc = [{'func': 'daily_perc', 'name': 'dp', 'kwds': kwds}] + ops = ocgis.OcgOperations(dataset=rd, geom='state_boundaries', select_ugid=[23], calc=calc, + output_format='nc', time_region={'year': [2002, 2003]}) + ret = ops.execute() + new_rd = ocgis.RequestDataset(ret) + for alias in [None, 'dp']: + dp = DailyPercentile.get_daily_percentile_from_request_dataset(new_rd, alias=alias) + self.assertEqual(len(dp.keys()), 365) + self.assertAlmostEqual(dp[(4, 15)].mean(), 280.73696289062502) class TestMovingWindow(AbstractTestField): diff --git 
a/src/ocgis/test/test_ocgis/test_contrib/test_library_icclim.py b/src/ocgis/test/test_ocgis/test_contrib/test_library_icclim.py index 9672b5537..372093152 100644 --- a/src/ocgis/test/test_ocgis/test_contrib/test_library_icclim.py +++ b/src/ocgis/test/test_ocgis/test_contrib/test_library_icclim.py @@ -1,14 +1,24 @@ +from netCDF4 import date2num import unittest import json from collections import OrderedDict from copy import deepcopy +# noinspection PyUnresolvedReferences +from datetime import datetime +from numpy.ma import MaskedArray import numpy as np +from ocgis.calc.base import AbstractParameterizedFunction +from ocgis.interface.base.variable import Variable, VariableCollection +from ocgis.interface.base.field import Field +from ocgis.interface.base.dimension.spatial import SpatialGridDimension, SpatialDimension +from ocgis.interface.base.dimension.base import VectorDimension +from ocgis.interface.nc.temporal import NcTemporalDimension from ocgis.test.base import TestBase, nc_scope, attr -from ocgis.contrib.library_icclim import IcclimTG, IcclimSU, AbstractIcclimFunction,\ - IcclimDTR, IcclimETR, IcclimTN, IcclimTX,\ - AbstractIcclimUnivariateSetFunction, AbstractIcclimMultivariateFunction +from ocgis.contrib.library_icclim import IcclimTG, IcclimSU, AbstractIcclimFunction, IcclimDTR, IcclimETR, IcclimTN, \ + IcclimTX, AbstractIcclimUnivariateSetFunction, AbstractIcclimMultivariateFunction, IcclimTG10p, \ + AbstractIcclimPercentileIndice, IcclimCD from ocgis.calc.library.statistics import Mean from ocgis.api.parms.definition import Calc, CalcGrouping from ocgis.calc.library.register import FunctionRegistry, register_icclim @@ -18,6 +28,7 @@ import ocgis from ocgis.util.helpers import itersubclasses from ocgis.contrib import library_icclim +from ocgis.util.large_array import compute class TestLibraryIcclim(TestBase): @@ -26,10 +37,15 @@ def test_standard_AbstractIcclimFunction(self): shapes = ([('month',), 12],[('month', 'year'), 24],[('year',),2]) 
ocgis.env.OVERWRITE = True keys = set(library_icclim._icclim_function_map.keys()) + ignore = [AbstractIcclimPercentileIndice] for klass in [ AbstractIcclimUnivariateSetFunction, AbstractIcclimMultivariateFunction]: for subclass in itersubclasses(klass): + + if any([subclass == i for i in ignore]): + continue + keys.remove(subclass.key) self.assertEqual([('month',),('month','year'),('year',)],subclass._allowed_temporal_groupings) for cg in CalcGrouping.iter_possible(): @@ -44,7 +60,11 @@ def test_standard_AbstractIcclimFunction(self): rd = [tasmin,tasmax] for r in rd: r.time_region = {'year':[2001,2002]} - calc[0].update({'kwds':{'tasmin':'tasmin','tasmax':'tasmax'}}) + if subclass == IcclimCD or issubclass(subclass, IcclimCD): + kwds = {'tas': 'tasmax', 'pr': 'tasmin'} + else: + kwds = {'tasmin': 'tasmin', 'tasmax': 'tasmax'} + calc[0].update({'kwds': kwds}) try: ops = ocgis.OcgOperations(dataset=rd, output_format='nc', @@ -84,7 +104,150 @@ def test_bad_icclim_key_to_operations(self): value = [{'func':'icclim_TG_bad','name':'TG'}] with self.assertRaises(DefinitionValidationError): Calc(value) - + + +class TestCD(TestBase): + + def get_field_tdim(self): + np.random.seed(1) + start = datetime(2000, 1, 1) + end = datetime(2001, 12, 31) + time_series = self.get_time_series(start, end) + row = VectorDimension(value=[1, 2]) + col = VectorDimension(value=[3, 4, 5]) + grid = SpatialGridDimension(row=row, col=col) + sdim = SpatialDimension(grid=grid) + calendar = 'standard' + units = 'days since 1500-01-01' + time_series = date2num(time_series, units, calendar) + tdim = NcTemporalDimension(value=time_series, calendar=calendar, units=units) + field = Field(temporal=tdim, spatial=sdim, variables=VariableCollection()) + var_tas = Variable(name='tas', value=np.random.rand(*field.shape)) + value_pr = np.random.lognormal(0.0, 0.5, field.shape) + var_pr = Variable(name='pr', value=value_pr) + field.variables.add_variable(var_tas) + field.variables.add_variable(var_pr) + return 
field, tdim + + def test_init(self): + self.assertEqual(IcclimCD.__bases__, (AbstractIcclimMultivariateFunction, AbstractParameterizedFunction)) + + icd = IcclimCD() + self.assertEqual(icd._storage_percentile_dict, {}) + + def test_calculate(self): + field, tdim = self.get_field_tdim() + tas = field.variables['tas'].value.squeeze() + pr = field.variables['pr'].value.squeeze() + tgd = tdim.get_grouping(['month']) + icd = IcclimCD(field=field, tgd=tgd, parms={'tas': 'tas', 'pr': 'pr'}) + res = icd.calculate(tas=tas, pr=pr) + self.assertEqual(res.shape, tuple(field.shape[-2:])) + self.assertIsInstance(res, MaskedArray) + self.assertSetEqual(set(icd._storage_percentile_dict.keys()), set(['tas', 'pr'])) + icd.calculate(tas=tas, pr=pr) + self.assertSetEqual(set(icd._storage_percentile_dict.keys()), set(['tas', 'pr'])) + + def test_execute(self): + field, tdim = self.get_field_tdim() + field.meta['dataset'] = {} + tgd = tdim.get_grouping(['month']) + icd = IcclimCD(field=field, tgd=tgd, parms={'tas': 'tas', 'pr': 'pr'}) + res = icd.execute() + self.assertIsInstance(res, VariableCollection) + var = res[IcclimCD.key] + self.assertEqual(var.units, 'days') + self.assertEqual(var.name, IcclimCD.key) + self.assertEqual(var.shape, (1, 12, 1, 2, 3)) + + def test_operations(self): + calc_grouping = ['month'] + + calc = [{'func': 'icclim_CD', 'name': 'CD'}] + rd = self.test_data.get_rd('cancm4_tas') + with self.assertRaises(DefinitionValidationError): + OcgOperations(dataset=rd, calc=calc, calc_grouping=calc_grouping) + + # test overloaded parameters make their way into the function + rd2 = self.test_data.get_rd('cancm4_tas') + rd2.alias = 'pr' + calc = [{'func': 'icclim_CD', 'name': 'CD', + 'kwds': {'tas': 'tas', 'pr': 'pr', 'tas_25th_percentile_dict': {}, 'pr_25th_percentile_dict': {}}}] + ops = OcgOperations(dataset=[rd, rd2], calc=calc, calc_grouping=calc_grouping, + slice=[None, None, None, [40, 43], [40, 43]]) + try: + ops.execute() + except KeyError as e: + # the empty 
dictionary contains no keys + self.assertEqual(e.message, (1, 1)) + + # test with different aliases for the multivariate variables + rd.alias = 'tas1' + rd2.alias = 'pr1' + calc = [{'func': 'icclim_CD', 'name': 'CD', 'kwds': {'tas': 'tas1', 'pr': 'pr1'}}] + ops = OcgOperations(dataset=[rd, rd2], calc=calc, calc_grouping=calc_grouping, + slice=[None, [0, 366], None, [40, 43], [40, 43]]) + ops.execute() + + +class TestTG10p(TestBase): + + def test_init(self): + tg = IcclimTG10p() + + def test_execute(self): + for pd in [False, True]: + + if pd: + percentile_dict = {} + parms = {'percentile_dict': percentile_dict} + else: + parms = None + + tas = self.test_data.get_rd('cancm4_tas').get() + tas = tas[:, :, :, 10:12, 20:22] + tgd = tas.temporal.get_grouping(['month']) + tg = IcclimTG10p(field=tas, tgd=tgd, parms=parms) + try: + ret = tg.execute() + except KeyError: + self.assertTrue(pd) + continue + self.assertEqual(ret['icclim_TG10p'].shape, (1, 12, 1, 2, 2)) + self.assertEqual(ret['icclim_TG10p'].value.mean(), 30.0625) + + def test_large_array_compute_local(self): + """Test tiling works for percentile-based indice on a local dataset.""" + + calc = [{'func': 'icclim_TG10p', 'name': 'itg'}] + calc_grouping = ['month'] + rd = self.test_data.get_rd('cancm4_tas') + ops = ocgis.OcgOperations(dataset=rd, calc=calc, calc_grouping=calc_grouping, output_format='nc', geom='state_boundaries', + select_ugid=[24]) + ret = compute(ops, 5, verbose=False) + + with nc_scope(ret) as ds: + try: + self.assertAlmostEqual(ds.variables['itg'][:].mean(), np.float32(29.518518)) + except Exception as e: + import ipdb;ipdb.set_trace() + pass + + @attr('remote') + def test_large_array_compute_remote(self): + """Test tiling works for percentile-based indice getting data from a remote URL.""" + + calc = [{'func': 'icclim_TG10p', 'name': 'itg'}] + calc_grouping = ['month'] + uri = 'http://opendap.knmi.nl/knmi/thredds/dodsC/IS-ENES/TESTSETS/tasmax_day_EC-EARTH_rcp26_r8i1p1_20760101-21001231.nc' + rd 
= ocgis.RequestDataset(uri=uri, variable='tasmax') + ops = ocgis.OcgOperations(dataset=rd, calc=calc, calc_grouping=calc_grouping, output_format='nc', geom='state_boundaries', + select_ugid=[24]) + ret = compute(ops, 5, verbose=False) + + with nc_scope(ret) as ds: + self.assertAlmostEqual(ds.variables['itg'][:].mean(), 78.113095238095241) + class TestDTR(TestBase): From dd7208bf311b8e4e73aa2a3502d566b9a7f87ee0 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Tue, 23 Dec 2014 17:55:04 -0700 Subject: [PATCH 35/71] add documentation for ShpProcess #248 Added new function for adding a unique attribute to a shapefile. Documentation was updated to reflect this should be used in place of ShpProcess which was unnecessarily complicated. The ShpProcess object is still used in the new function. closes #248 --- doc/api.rst | 4 +- doc/utility.rst | 8 +- src/ocgis/api/operations.py | 10 +- src/ocgis/constants.py | 3 + .../test/test_ocgis/test_util/test_helpers.py | 56 ++- .../test_ocgis/test_util/test_shp_process.py | 23 +- src/ocgis/util/helpers.py | 456 ++++++------------ src/ocgis/util/shp_process.py | 150 +++--- 8 files changed, 289 insertions(+), 421 deletions(-) diff --git a/doc/api.rst b/doc/api.rst index 4683ed9ca..fc2b2f6a0 100644 --- a/doc/api.rst +++ b/doc/api.rst @@ -86,9 +86,9 @@ geom .. warning:: Subsetting with multiple geometries to netCDF will result in :ref:`agg_selection` being set to ``True``. Indexing multiple geometries using netCDF-CF convention is currently not possible. -If a geometry(s) is provided, it is used to subset `every` :class:`ocgis.RequestDataset` object. Again, supplying a value of `None` (the default) results in the return of the entire spatial domain. Any shapefiles used for subsetting must include a unique integer attribute called `UGID` and have a WGS84 latitude/longitude geographic coordinate system. +If a geometry(s) is provided, it is used to subset `every` :class:`ocgis.RequestDataset` object. 
Supplying a value of ``None`` (the default) results in the return of the entire spatial domain. Any shapefiles used for subsetting must include a unique integer attribute matching the value of :attr:`ocgis.constants.ocgis_unique_geometry_identifier` and have a WGS84 latitude/longitude geographic coordinate system. If an ESRI Shapefile is being accessed and the file does not contain the unique identifier, the function :func:`~ocgis.util.helpers.add_shapefile_unique_identifier` may be used to add the appropriate unique identifier attribute. -There are a number of ways to parameterize the `geom` keyword argument: +There are a number of ways to parameterize the ``geom`` keyword argument: 1. Bounding Box diff --git a/doc/utility.rst b/doc/utility.rst index b3fd6c94a..aea600229 100644 --- a/doc/utility.rst +++ b/doc/utility.rst @@ -1,12 +1,12 @@ -================= -Utility Functions -================= +======================== +Utility/Helper Functions +======================== .. automodule:: ocgis :members: format_return .. automodule:: ocgis.util.helpers - :members: get_bounds_from_1d, get_sorted_uris_by_time_dimension + :members: add_shapefile_unique_identifier, get_bounds_from_1d, get_sorted_uris_by_time_dimension .. automodule:: ocgis.util.large_array :members: compute diff --git a/src/ocgis/api/operations.py b/src/ocgis/api/operations.py index 9c8412467..85964f629 100644 --- a/src/ocgis/api/operations.py +++ b/src/ocgis/api/operations.py @@ -33,22 +33,22 @@ class OcgOperations(object): sequence of :class:`~ocgis.RequestDataset`/:class:`~ocgis.Field` objects :param spatial_operation: The geometric operation to be performed. :type spatial_operation: str - :param geom: The selection geometry(s) used for the spatial subset. If `None`, selection defaults to entire spatial + :param geom: The selection geometry(s) used for the spatial subset. If ``None``, selection defaults to entire spatial domain. 
:type geom: list of dict, list of float, str - :param aggregate: If `True`, dataset geometries are aggregated to coincident + :param aggregate: If ``True``, dataset geometries are aggregated to coincident selection geometries. :type aggregate: bool :param calc: Calculations to be performed on the dataset subset. :type calc: list of dictionaries or string-based function :param calc_grouping: Temporal grouping to apply for calculations. :type calc_grouping: list of str or int - :param calc_raw: If `True`, perform calculations on the "raw" data regardless of `aggregation` flag. + :param calc_raw: If ``True``, perform calculations on the "raw" data regardless of ``aggregation`` flag. :type calc_raw: bool :param abstraction: The geometric abstraction to use for the dataset geometries. If `None` (the default), use the highest order geometry available. :type abstraction: str - :param snippet: If `True`, return a data "snippet" composed of the first time point, first level (if applicable), + :param snippet: If ``True``, return a data "snippet" composed of the first time point, first level (if applicable), and the entire spatial domain. :type snippet: bool :param backend: The processing backend to use. @@ -57,7 +57,7 @@ class OcgOperations(object): :type prefix: str :param output_format: The desired output format. :type output_format: str - :param agg_selection: If `True`, the selection geometry will be aggregated prior to any spatial operations. + :param agg_selection: If ``True``, the selection geometry will be aggregated prior to any spatial operations. :type agg_selection: bool :param select_ugid: The unique identifiers of specific geometries contained in canned geometry datasets. These unique identifiers will be selected and used for spatial operations. 
diff --git a/src/ocgis/constants.py b/src/ocgis/constants.py index 1c43d0186..46fd914c9 100644 --- a/src/ocgis/constants.py +++ b/src/ocgis/constants.py @@ -3,6 +3,9 @@ #: Standard bounds name used when none is available from the input data. ocgis_bounds = 'bounds' +#: Standard name for the unique identifier in GIS files. +ocgis_unique_geometry_identifier = 'UGID' + #: Default netCDF4 output file type netCDF_default_data_model = 'NETCDF4' diff --git a/src/ocgis/test/test_ocgis/test_util/test_helpers.py b/src/ocgis/test/test_ocgis/test_util/test_helpers.py index 6be1406b8..0e2584d14 100644 --- a/src/ocgis/test/test_ocgis/test_util/test_helpers.py +++ b/src/ocgis/test/test_ocgis/test_util/test_helpers.py @@ -1,19 +1,24 @@ from collections import OrderedDict import os import itertools -from datetime import datetime as dt, datetime +import fiona import numpy as np -from shapely.geometry import Point +from shapely.geometry import Point, mapping +from datetime import datetime as dt, datetime +from ocgis.constants import ocgis_unique_geometry_identifier +from ocgis.interface.base.crs import Spherical, CoordinateReferenceSystem from ocgis.exc import SingleElementError, ShapeError from ocgis.test.test_ocgis.test_interface.test_base.test_dimension.test_spatial import AbstractTestSpatialDimension from ocgis.util.helpers import format_bool, iter_array, validate_time_subset,\ get_formatted_slice, get_is_date_between, get_trimmed_array_by_mask,\ get_added_slice, get_iter, get_ordered_dicts_from_records_array, get_sorted_uris_by_time_dimension, \ get_bounds_from_1d, get_date_list, get_bounds_vector_from_centroids, get_extrapolated_corners_esmf, get_is_increasing, \ - get_extrapolated_corners_esmf_vector, set_name_attributes, get_ocgis_corners_from_esmf_corners + get_extrapolated_corners_esmf_vector, set_name_attributes, get_ocgis_corners_from_esmf_corners, \ + add_shapefile_unique_identifier from ocgis.test.base import TestBase +from ocgis.util.shp_cabinet import 
ShpCabinetIterator class Test1(AbstractTestSpatialDimension): @@ -213,6 +218,51 @@ def test_get_ocgis_corners_from_esmf_corners(self): class Test2(TestBase): + def test_add_shapefile_unique_identifier(self): + in_path = os.path.join(self.current_dir_output, 'foo_in.shp') + + # create a shapefile without a ugid and another integer attribute + data = [{'geom': Point(1, 2), 'fid': 6}, {'geom': Point(2, 3), 'fid': 60}] + crs = Spherical() + driver = 'ESRI Shapefile' + schema = {'properties': {'fid': 'int'}, 'geometry': 'Point'} + with fiona.open(in_path, 'w', driver=driver, crs=crs.value, schema=schema) as source: + for xx in data: + record = {'properties': {'fid': xx['fid']}, 'geometry': mapping(xx['geom'])} + source.write(record) + + out_path = os.path.join(self.current_dir_output, 'foo_out.shp') + add_shapefile_unique_identifier(in_path, out_path) + + sci = ShpCabinetIterator(path=out_path) + records = list(sci) + self.assertAsSetEqual([1, 2], [xx['properties'][ocgis_unique_geometry_identifier] for xx in records]) + self.assertAsSetEqual([6, 60], [xx['properties']['fid'] for xx in records]) + self.assertEqual(CoordinateReferenceSystem(records[0]['meta']['crs']), crs) + + # test it works for the current working directory + cwd = os.getcwd() + os.chdir(self.current_dir_output) + try: + add_shapefile_unique_identifier(in_path, 'foo3.shp') + self.assertTrue(os.path.exists(os.path.join(self.current_dir_output, 'foo3.shp'))) + finally: + os.chdir(cwd) + + # test using a template attribute + out_path = os.path.join(self.current_dir_output, 'template.shp') + add_shapefile_unique_identifier(in_path, out_path, template='fid') + sci = ShpCabinetIterator(path=out_path) + records = list(sci) + self.assertAsSetEqual([6, 60], [xx['properties'][ocgis_unique_geometry_identifier] for xx in records]) + + # test with a different name attribute + out_path = os.path.join(self.current_dir_output, 'name.shp') + add_shapefile_unique_identifier(in_path, out_path, template='fid', 
name='new_id') + with fiona.open(out_path) as sci: + records = list(sci) + self.assertAsSetEqual([6, 60], [xx['properties']['new_id'] for xx in records]) + def test_get_iter(self): element = 'hi' ret = list(get_iter(element)) diff --git a/src/ocgis/test/test_ocgis/test_util/test_shp_process.py b/src/ocgis/test/test_ocgis/test_util/test_shp_process.py index aa369ba5f..68348f06f 100644 --- a/src/ocgis/test/test_ocgis/test_util/test_shp_process.py +++ b/src/ocgis/test/test_ocgis/test_util/test_shp_process.py @@ -1,10 +1,12 @@ -from ocgis.test.base import TestBase import os import shutil -from ocgis.util.shp_process import ShpProcess import tempfile + +import fiona + +from ocgis.test.base import TestBase +from ocgis.util.shp_process import ShpProcess from ocgis.util.shp_cabinet import ShpCabinet -import subprocess class TestShpProcess(TestBase): @@ -26,3 +28,18 @@ def test_shp_process(self): self.assertEqual(len(geoms), 3) names = [item['properties']['NAME'] for item in geoms] self.assertEqual(set(names), set(['Canada', 'Mexico', 'United States'])) + + def test_process_name(self): + copy_path = os.path.join(self.current_dir_output, 'test_shp_process') + sc = ShpCabinet() + test_path = os.path.split(sc.get_shp_path('wc_4326'))[0] + shutil.copytree(test_path, copy_path) + + shp_path = os.path.join(copy_path, 'wc_4326.shp') + out_folder = tempfile.mkdtemp(dir=self.current_dir_output) + sp = ShpProcess(shp_path, out_folder) + sp.process(key='world_countries', ugid=None, name='new_id') + path = os.path.join(out_folder, 'world_countries.shp') + with fiona.open(path, 'r') as sci: + uids = [record['properties']['new_id'] for record in sci] + self.assertEqual(uids, range(1, 212)) diff --git a/src/ocgis/util/helpers.py b/src/ocgis/util/helpers.py index 0dcc90529..0cd856dc4 100644 --- a/src/ocgis/util/helpers.py +++ b/src/ocgis/util/helpers.py @@ -3,19 +3,20 @@ import os import tempfile import sys -import datetime from copy import deepcopy from tempfile import mkdtemp import 
numpy as np +from shapely.geometry import Point from shapely.geometry.polygon import Polygon -from osgeo import ogr from osgeo.ogr import CreateGeometryFromWkb from shapely.wkb import loads as wkb_loads import fiona from shapely.geometry.geo import mapping from fiona.crs import from_epsg +from ocgis.util.shp_process import ShpProcess +import datetime from ocgis.exc import SingleElementError, ShapeError @@ -45,6 +46,28 @@ def endProgress(self): sys.stdout.flush() +def add_shapefile_unique_identifier(in_path, out_path, name=None, template=None): + """ + >>> add_shapefile_unique_identifier('/path/to/foo.shp', '/path/to/new_foo.shp') + '/path/to/new_foo.shp' + + :param str in_path: Full path to the input shapefile. + :param str out_path: Full path to the output shapefile. + :param str name: The name of the unique identifer. If ``None``, defaults to + :attr:`ocgis.constants.ocgis_unique_geometry_identifier`. + :param str template: The integer attribute to copy as the unique identifier. + :returns: Path to the copied shapefile with the addition of a unique integer attribute called ``name``. + :rtype: str + """ + + out_folder, key = os.path.split(out_path) + sp = ShpProcess(in_path, out_folder) + key = os.path.splitext(key)[0] + sp.process(key=key, ugid=template, name=name) + + return out_path + + def format_bool(value): """ Format a string to boolean. @@ -412,6 +435,26 @@ def get_ocgis_corners_from_esmf_corners(ecorners): return grid_corners +def get_ordered_dicts_from_records_array(arr): + """ + Convert a NumPy records array to an ordered dictionary. + + :param arr: The records array to convert with shape (m,). 
+ :type arr: :class:`numpy.core.multiarray.ndarray` + :rtype: list[:class:`collections.OrderedDict`] + """ + + ret = [] + _names = arr.dtype.names + for ii in range(arr.shape[0]): + fill = OrderedDict() + row = arr[ii] + for name in _names: + fill[name] = row[name] + ret.append(fill) + return ret + + def get_reduced_slice(arr): arr_min, arr_max = arr.min(), arr.max() assert (arr_max - arr_min + 1 == arr.shape[0]) @@ -517,6 +560,99 @@ def get_trimmed_array_by_mask(arr,return_adjustments=False): return(ret) +def itersubclasses(cls, _seen=None): + """ + itersubclasses(cls) + + Generator over all subclasses of a given class, in depth first order. + + >>> list(itersubclasses(int)) == [bool] + True + >>> class A(object): pass + >>> class B(A): pass + >>> class C(A): pass + >>> class D(B,C): pass + >>> class E(D): pass + >>> + >>> for cls in itersubclasses(A): + ... print(cls.__name__) + B + D + E + C + >>> # get ALL (new-style) classes currently defined + >>> [cls.__name__ for cls in itersubclasses(object)] #doctest: +ELLIPSIS + ['type', ...'tuple', ...] + """ + + if not isinstance(cls, type): + raise TypeError('itersubclasses must be called with ' + 'new-style classes, not %.100r' % cls) + if _seen is None: _seen = set() + try: + subs = cls.__subclasses__() + except TypeError: # fails only when cls is type + subs = cls.__subclasses__(cls) + for sub in subs: + if sub not in _seen: + _seen.add(sub) + yield sub + for sub in itersubclasses(sub, _seen): + yield sub + + +def iter_array(arr, use_mask=True, return_value=False): + try: + shp = arr.shape + # assume array is not a numpy array + except AttributeError: + arr = np.array(arr, ndmin=1) + shp = arr.shape + iter_args = [range(0, ii) for ii in shp] + if use_mask and not np.ma.isMaskedArray(arr): + use_mask = False + else: + try: + mask = arr.mask + # if the mask is not being used, to skip some objects, set the arr to the underlying data value after + # referencing the mask. 
+ if not use_mask: + arr = arr.data + # array is not masked + except AttributeError: + pass + + for ii in itertools.product(*iter_args): + if use_mask: + try: + if mask[ii]: + continue + else: + idx = ii + # occurs with singleton dimension of masked array + except IndexError: + if mask: + continue + else: + idx = ii + else: + idx = ii + if return_value: + ret = (idx, arr[ii]) + else: + ret = idx + yield ret + +def locate(pattern, root=os.curdir, followlinks=True): + """ + Locate all files matching supplied filename pattern in and below supplied root directory. + """ + + for path, dirs, files in os.walk(os.path.abspath(root), followlinks=followlinks): + for filename in filter(lambda x: x == pattern, files): + yield os.path.join(path, filename) + + def project_shapely_geometry(geom, from_sr, to_sr): if from_sr.IsSame(to_sr) == 1: ret = geom @@ -617,6 +753,7 @@ def _check_years_(targets,min_range_year,max_range_year): ret = True return(ret) + def write_geom_dict(dct, path=None, filename=None, epsg=4326, crs=None): """ :param dct: @@ -642,172 +779,6 @@ def write_geom_dict(dct, path=None, filename=None, epsg=4326, crs=None): source.write(rec) return path - -def locate(pattern, root=os.curdir, followlinks=True): - """ - Locate all files matching supplied filename pattern in and below supplied root directory. - """ - - for path, dirs, files in os.walk(os.path.abspath(root), followlinks=followlinks): - for filename in filter(lambda x: x == pattern, files): - yield os.path.join(path, filename) - - -def get_ordered_dicts_from_records_array(arr): - """ - Convert a NumPy records array to an ordered dictionary. - - :param arr: The records array to convert with shape (m,). 
- :type arr: :class:`numpy.core.multiarray.ndarray` - :rtype: list[:class:`collections.OrderedDict`] - """ - - ret = [] - _names = arr.dtype.names - for ii in range(arr.shape[0]): - fill = OrderedDict() - row = arr[ii] - for name in _names: - fill[name] = row[name] - ret.append(fill) - return ret - - -def iter_array(arr, use_mask=True, return_value=False): - try: - shp = arr.shape - # assume array is not a numpy array - except AttributeError: - arr = np.array(arr, ndmin=1) - shp = arr.shape - iter_args = [range(0, ii) for ii in shp] - if use_mask and not np.ma.isMaskedArray(arr): - use_mask = False - else: - try: - mask = arr.mask - # if the mask is not being used, to skip some objects, set the arr to the underlying data value after - # referencing the mask. - if not use_mask: - arr = arr.data - # array is not masked - except AttributeError: - pass - - for ii in itertools.product(*iter_args): - if use_mask: - try: - if mask[ii]: - continue - else: - idx = ii - # occurs with singleton dimension of masked array - except IndexError: - if mask: - continue - else: - idx = ii - else: - idx = ii - if return_value: - ret = (idx, arr[ii]) - else: - ret = idx - yield ret - - -def itersubclasses(cls, _seen=None): - """ - itersubclasses(cls) - - Generator over all subclasses of a given class, in depth first order. - - >>> list(itersubclasses(int)) == [bool] - True - >>> class A(object): pass - >>> class B(A): pass - >>> class C(A): pass - >>> class D(B,C): pass - >>> class E(D): pass - >>> - >>> for cls in itersubclasses(A): - ... print(cls.__name__) - B - D - E - C - >>> # get ALL (new-style) classes currently defined - >>> [cls.__name__ for cls in itersubclasses(object)] #doctest: +ELLIPSIS - ['type', ...'tuple', ...] 
- """ - - if not isinstance(cls, type): - raise TypeError('itersubclasses must be called with ' - 'new-style classes, not %.100r' % cls) - if _seen is None: _seen = set() - try: - subs = cls.__subclasses__() - except TypeError: # fails only when cls is type - subs = cls.__subclasses__(cls) - for sub in subs: - if sub not in _seen: - _seen.add(sub) - yield sub - for sub in itersubclasses(sub, _seen): - yield sub - -def approx_resolution(vec): - """ - >>> vec = [1,2,3,4,5] - >>> approx_resolution(vec) - 1.0 - """ - diff = [] - for i in range(len(vec)): - curr = vec[i] - try: - nxt = vec[i+1] - diff.append(abs(curr-nxt)) - except IndexError: - break - return(np.mean(diff)) - -def keep(prep_igeom=None,igeom=None,target=None): - test_geom = prep_igeom or igeom - if test_geom.intersects(target) and not target.touches(igeom): - ret = True - else: - ret = False - return(ret) - -def prep_keep(prep_igeom,igeom,target): - if prep_igeom.intersects(target) and not target.touches(igeom): - ret = True - else: - ret = False - return(ret) - -def contains(grid,lower,upper,res=0.0): - - ## small ranges on coordinates requires snapping to closest coordinate - ## to ensure values are selected through logical comparison. 
- ugrid = np.unique(grid) - lower = ugrid[np.argmin(np.abs(ugrid-(lower-0.5*res)))] - upper = ugrid[np.argmin(np.abs(ugrid-(upper+0.5*res)))] - - s1 = grid >= lower - s2 = grid <= upper - ret = s1*s2 - - return(ret) - -#def itr_array(a): -# "a -- 2-d ndarray" -# assert(len(a.shape) == 2) -# ix = a.shape[0] -# jx = a.shape[1] -# for ii,jj in itertools.product(range(ix),range(jx)): -# yield ii,jj def make_poly(rtup,ctup): """ @@ -853,152 +824,3 @@ def _get_wd_(): f.close() ret = f.name return(str(ret)) - -#def get_wkt_from_shp(path,objectid,layer_idx=0): -# """ -# >>> path = '/home/bkoziol/git/OpenClimateGIS/bin/shp/state_boundaries.shp' -# >>> objectid = 10 -# >>> wkt = get_wkt_from_shp(path,objectid) -# >>> assert(wkt.startswith('POLYGON ((-91.730366281818348 43.499571367976877,')) -# """ -# ds = ogr.Open(path) -# try: -# lyr = ds.GetLayerByIndex(layer_idx) -# lyr_name = lyr.GetName() -# if objectid is None: -# sql = 'SELECT * FROM {0}'.format(lyr_name) -# else: -# sql = 'SELECT * FROM {0} WHERE ObjectID = {1}'.format(lyr_name,objectid) -# data = ds.ExecuteSQL(sql) -# #import pdb; pdb.set_trace() -# feat = data.GetNextFeature() -# geom = feat.GetGeometryRef() -# wkt = geom.ExportToWkt() -# return(wkt) -# finally: -# ds.Destroy() -# -# -#class ShpIterator(object): -# -# def __init__(self,path): -# assert(os.path.exists(path)) -# self.path = path -# -# def get_fields(self): -# ds = ogr.Open(self.path) -# try: -# lyr = ds.GetLayerByIndex(0) -# lyr.ResetReading() -# feat = lyr.GetNextFeature() -# return(feat.keys()) -# finally: -# ds.Destroy() -# -# def iter_features(self,fields,lyridx=0,geom='geom',skiperrors=False, -# to_shapely=False): -# -# ds = ogr.Open(self.path) -# try: -# lyr = ds.GetLayerByIndex(lyridx) -# lyr.ResetReading() -# for feat in lyr: -# ## get the values -# values = [] -# for field in fields: -# try: -# values.append(feat.GetField(field)) -# except: -# try: -# if skiperrors is True: -# warnings.warn('Error in GetField("{0}")'.format(field)) -# else: 
-# raise -# except ValueError: -# msg = 'Illegal field requested in GetField("{0}")'.format(field) -# raise ValueError(msg) -## values = [feat.GetField(field) for field in fields] -# attrs = dict(zip(fields,values)) -# ## get the geometry -# -# wkt_str = feat.GetGeometryRef().ExportToWkt() -## geom_obj = feat.GetGeometryRef() -## geom_obj.TransformTo(to_sr) -## wkt_str = geom_obj.ExportToWkt() -# -# if to_shapely: -# ## additional load to clean geometries -# geom_data = wkt.loads(wkt_str) -# geom_data = wkb.loads(geom_data.wkb) -# else: -# geom_data = wkt_str -# attrs.update({geom:geom_data}) -# yield attrs -# finally: -# ds.Destroy() -# -# -#def get_shp_as_multi(path,uid_field=None,attr_fields=[],make_id=False,id_name='ugid'): -# """ -# >>> path = '/home/bkoziol/git/OpenClimateGIS/bin/shp/state_boundaries.shp' -# >>> uid_field = 'objectid' -# >>> ret = get_shp_as_multi(path,uid_field) -# """ -# ## the iterator object instantiated here to make sure the shapefile exists -# ## and there is access to the field acquisition. 
-# shpitr = ShpIterator(path) -# -# if uid_field is None or uid_field == '': -# uid_field = [] -# else: -# uid_field = [str(uid_field)] -# try: -# fields = uid_field + attr_fields -# except TypeError: -# if attr_fields.lower() == 'all': -# fields = shpitr.get_fields() -# fields = [f.lower() for f in fields] -# try: -# if uid_field[0].lower() in fields: -# fields.pop(uid_field[0].lower()) -# except IndexError: -# if len(uid_field) == 0: -# pass -# else: -# raise -# fields = uid_field + fields -# else: -# raise -# data = [feat for feat in shpitr.iter_features(fields,to_shapely=True)] -# ## add unique identifier if requested and the passed uid field is none -# for ii,gd in enumerate(data,start=1): -# if len(uid_field) == 0 and make_id is True: -# gd[id_name] = ii -# else: -# geom_id = gd.pop(uid_field[0]) -# gd[id_name] = int(geom_id) -# -# ## check the WKT is a polygon and the unique identifier is a unique integer -# uids = [] -# for feat in data: -# if len(uid_field) > 0: -# feat[uid_field[0]] = int(feat[uid_field[0]]) -# uids.append(feat[uid_field[0]]) -# assert(len(uids) == len(set(uids))) -# return(data) -# -#def get_sr(srid): -# sr = osr.SpatialReference() -# sr.ImportFromEPSG(srid) -# return(sr) - -def get_area(geom,sr_orig,sr_dest): - geom = ogr.CreateGeometryFromWkb(geom.wkb) - geom.AssignSpatialReference(sr_orig) - geom.TransformTo(sr_dest) - return(geom.GetArea()) - -#def get_area_srid(geom,srid_orig,srid_dest): -# sr = get_sr(srid_orig) -# sr2 = get_sr(srid_dest) -# return(get_area(geom,sr,sr2)) \ No newline at end of file diff --git a/src/ocgis/util/shp_process.py b/src/ocgis/util/shp_process.py index c8808e38c..694d57fd9 100644 --- a/src/ocgis/util/shp_process.py +++ b/src/ocgis/util/shp_process.py @@ -1,129 +1,105 @@ -import ocgis -import fiona -from shapely.geometry.geo import shape, mapping import os from collections import OrderedDict -import shutil from warnings import warn import argparse +import fiona +from shapely.geometry.geo import shape, 
mapping + +from ocgis.constants import ocgis_unique_geometry_identifier + class ShpProcess(object): - ''' + """ :param str path: Path to shapefile to process. :param out_folder: Path to the folder to write processed shapefiles to. - ''' - - def __init__(self,path,out_folder): + """ + + def __init__(self, path, out_folder): self.path = path self.out_folder = out_folder - - def process(self,key=None,ugid=None): - ''' + + def process(self, key=None, ugid=None, name=None): + """ :param str key: The name of the new output shapefile. :param str ugid: The integer attribute to copy as the unique identifier. - ''' - ## get the original shapefile file name + :param str name: The name of the unique identifer. If ``None``, defaults to + :attr:`ocgis.constants.ocgis_unique_geometry_identifier`. + """ + + # get the original shapefile file name original_name = os.path.split(self.path)[1] - ## get the new name if a key is passed - if key == None: + # get the new name if a key is passed + if key is None: new_name = original_name else: - new_name = key+'.shp' - ## the name of the new shapefile - new_shp = os.path.join(self.out_folder,new_name) - ## update the schema to include UGID + new_name = key + '.shp' + # the name of the new shapefile + new_shp = os.path.join(self.out_folder, new_name) + # update the schema to include UGID meta = self._get_meta_() - if 'UGID' in meta['schema']['properties']: - meta['schema']['properties'].pop('UGID') - new_properties = OrderedDict({'UGID':'int'}) + + identifier = name or ocgis_unique_geometry_identifier + if identifier in meta['schema']['properties']: + meta['schema']['properties'].pop(identifier) + new_properties = OrderedDict({identifier: 'int'}) new_properties.update(meta['schema']['properties']) meta['schema']['properties'] = new_properties ctr = 1 - with fiona.open(new_shp, 'w',**meta) as sink: + with fiona.open(new_shp, 'w', **meta) as sink: for feature in self._iter_source_(): if ugid is None: - 
feature['properties'].update({'UGID':ctr}) + feature['properties'].update({identifier: ctr}) ctr += 1 else: - feature['properties'].update({'UGID':int(feature['properties'][ugid])}) + feature['properties'].update({identifier: int(feature['properties'][ugid])}) sink.write(feature) - ## remove the cpg file. this raises many, many warnings on occasion - os.remove(new_shp.replace('.shp','.cpg')) - ## try to copy the cfg file - try: - shutil.copy2(self.path.replace('.shp','.cfg'),new_shp.replace('.shp','.cfg')) - except: - warn('unable to copy configuration file - if it exists') - - return(new_shp) - + + # remove the cpg file. this raises many, many warnings on occasion + # os.remove(new_shp.replace('.shp', '.cpg')) + + return new_shp + def _get_meta_(self): - with fiona.open(self.path,'r') as source: - return(source.meta) - + with fiona.open(self.path, 'r') as source: + return source.meta + def _iter_source_(self): - with fiona.open(self.path,'r') as source: + with fiona.open(self.path, 'r') as source: for feature in source: - ## ensure the feature is valid - ## https://github.com/Toblerity/Fiona/blob/master/examples/with-shapely.py + # ensure the feature is valid + # https://github.com/Toblerity/Fiona/blob/master/examples/with-shapely.py + geom = shape(feature['geometry']) try: - geom = shape(feature['geometry']) if not geom.is_valid: clean = geom.buffer(0.0) geom = clean feature['geometry'] = mapping(geom) - assert(clean.is_valid) - assert(clean.geom_type == 'Polygon') - except (AssertionError,AttributeError) as e: + assert clean.is_valid + assert (clean.geom_type == 'Polygon') + except (AssertionError, AttributeError) as e: warn('{2}. 
Invalid geometry found with id={0} and properties: {1}'.format(feature['id'], - feature['properties'], - e)) + feature['properties'], + e)) feature['shapely'] = geom - yield(feature) - - -def main(pargs): - sp = ShpProcess(pargs.in_shp,pargs.folder) - if pargs.folder is None: - pargs.folder = os.getcwd() + yield feature + + +def main(cargs): + sp = ShpProcess(cargs.in_shp, cargs.folder) + if cargs.folder is None: + cargs.folder = os.getcwd() print(sp.process()) - + + if __name__ == '__main__': parser = argparse.ArgumentParser(description='add ugid to shapefile') - - parser.add_argument('--ugid',help='name of ugid variable, default is None',default=None) - parser.add_argument('--folder',help='path to the output folder',nargs='?') - parser.add_argument('--key',help='optional new name for the shapefile',nargs=1,type=str) - parser.add_argument('in_shp',help='path to input shapefile') + + parser.add_argument('--ugid', help='name of ugid variable, default is None', default=None) + parser.add_argument('--folder', help='path to the output folder', nargs='?') + parser.add_argument('--key', help='optional new name for the shapefile', nargs=1, type=str) + parser.add_argument('in_shp', help='path to input shapefile') parser.set_defaults(func=main) pargs = parser.parse_args() pargs.func(pargs) - - -################################################################################# -# -#config = ConfigParser.ConfigParser() -#config.read('setup.cfg') -# -#parser = argparse.ArgumentParser(description='install/uninstall OpenClimateGIS. 
use "setup.cfg" to find or set default values.') -#parser.add_argument('-v','--verbose',action='store_true',help='print potentially useful information') -#subparsers = parser.add_subparsers() -# -#pinstall = subparsers.add_parser('install',help='install the OpenClimateGIS Python package') -#pinstall.set_defaults(func=install) -# -#pubuntu = subparsers.add_parser('install_dependencies_ubuntu',help='attempt to install OpenClimateGIS dependencies using standard Ubuntu Linux operations') -#pubuntu.set_defaults(func=install_dependencies_ubuntu) -# -#puninstall = subparsers.add_parser('uninstall',help='instructions on how to uninstall the OpenClimateGIS Python package') -#puninstall.set_defaults(func=uninstall) -# -#ppackage = subparsers.add_parser('package',help='utilities for packaging shapefile and NetCDF test datasets') -#ppackage.set_defaults(func=package) -#ppackage.add_argument('target',type=str,choices=['shp','nc','all'],help='Select the files to package.') -#ppackage.add_argument('-d','--directory',dest='d',type=str,metavar='dir',help='the destination directory. 
if not specified, it defaults to the current working directory.') -# -#pargs = parser.parse_args() -#pargs.func(pargs) \ No newline at end of file From 7a65f81d5dd904f8f0af50a16b0bd4367dab918b Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Fri, 26 Dec 2014 10:01:12 -0700 Subject: [PATCH 36/71] added Variable import to ocgis.__init__ --- src/ocgis/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/ocgis/__init__.py b/src/ocgis/__init__.py index 825ea51ce..c8ed70514 100644 --- a/src/ocgis/__init__.py +++ b/src/ocgis/__init__.py @@ -1,7 +1,6 @@ from osgeo import ogr, osr from ocgis.util.environment import env - from ocgis.api.collection import SpatialCollection from ocgis.api.operations import OcgOperations from ocgis.api.request.base import RequestDataset, RequestDatasetCollection @@ -14,6 +13,7 @@ from ocgis.util.shp_cabinet import ShpCabinet, ShpCabinetIterator from ocgis.util.zipper import format_return from ocgis.interface.base.dimension.temporal import TemporalDimension +from ocgis.interface.base.variable import Variable __version__ = '1.0.1' From 26c6d43471fe79f3527f57921e2881e69b9b4882 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Fri, 26 Dec 2014 10:04:42 -0700 Subject: [PATCH 37/71] added imports to ocgis.__init__ --- src/ocgis/__init__.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/ocgis/__init__.py b/src/ocgis/__init__.py index c8ed70514..9bf96d6ff 100644 --- a/src/ocgis/__init__.py +++ b/src/ocgis/__init__.py @@ -1,19 +1,22 @@ from osgeo import ogr, osr from ocgis.util.environment import env + from ocgis.api.collection import SpatialCollection from ocgis.api.operations import OcgOperations from ocgis.api.request.base import RequestDataset, RequestDatasetCollection from ocgis.calc.library.register import FunctionRegistry from ocgis.interface.base import crs from ocgis.interface.base.crs import CoordinateReferenceSystem -from ocgis.interface.base.dimension.spatial import SpatialDimension +from 
ocgis.interface.base.dimension.spatial import SpatialDimension, SpatialGridDimension, SpatialGeometryDimension, \ + SpatialGeometryPolygonDimension, SpatialGeometryPointDimension from ocgis.interface.base.field import Field from ocgis.util.inspect import Inspect from ocgis.util.shp_cabinet import ShpCabinet, ShpCabinetIterator from ocgis.util.zipper import format_return from ocgis.interface.base.dimension.temporal import TemporalDimension from ocgis.interface.base.variable import Variable +from ocgis.interface.base.dimension.base import VectorDimension __version__ = '1.0.1' From 12211db65cd86883e8402bc97234c9441c7b8d22 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Sat, 27 Dec 2014 15:26:26 -0700 Subject: [PATCH 38/71] migrate to setuptools #346 setup.py now uses setuptools instead of distutils. closes #346 --- setup.py | 44 ++++++-------------------------------------- 1 file changed, 6 insertions(+), 38 deletions(-) diff --git a/setup.py b/setup.py index d009519f2..ea8cbe95f 100644 --- a/setup.py +++ b/setup.py @@ -1,4 +1,4 @@ -from distutils.core import setup, Command +from setuptools import setup, Command, find_packages import sys from subprocess import check_call import os @@ -126,49 +126,17 @@ def nc4p_make(): os.chdir(cwd) print('dependencies installed.') + ######################################################################################################################## # check python version ######################################################################################################################## + python_version = float(sys.version_info[0]) + float(sys.version_info[1]) / 10 if python_version != 2.7: raise (ImportError( 'This software requires Python version 2.7.x. 
You have {0}.x'.format(python_version))) -######################################################################################################################## -# attempt package imports -######################################################################################################################## - -pkgs = ['numpy', 'netCDF4', 'osgeo', 'shapely', 'fiona'] -for pkg in pkgs: - __import__(pkg) - - -######################################################################################################################## -# get package structure -######################################################################################################################## - - -def _get_dot_(path, root='src'): - ret = [] - path_parse = path - while True: - path_parse, tail = os.path.split(path_parse) - if tail == root: - break - else: - ret.append(tail) - ret.reverse() - return '.'.join(ret) - - -package_dir = {'': 'src'} -src_path = os.path.join(package_dir.keys()[0], package_dir.values()[0], 'ocgis') -packages = [] -for dirpath, dirnames, filenames in os.walk(src_path): - if '__init__.py' in filenames: - package = _get_dot_(dirpath) - packages.append(package) ######################################################################################################################## # set up data files for installation @@ -193,11 +161,11 @@ def _get_dot_(path, root='src'): url='http://ncpp.github.io/ocgis/install.html#installing-openclimategis', license='NCSA License', platforms=['all'], - packages=packages, - package_dir=package_dir, + packages=find_packages(where='./src'), + package_dir={'': 'src'}, package_data=package_data, cmdclass={'uninstall': UninstallCommand, 'install_dependencies_ubuntu': InstallDependenciesUbuntu, 'test': SimpleTestCommand}, - requires=['numpy', 'netCDF4', 'fiona', 'shapely'], + install_requires=['numpy', 'netCDF4', 'fiona', 'shapely'], ) From 7f60d10e9a5fa2caa50d95ae4932b67e22e5c099 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Sat, 27 
Dec 2014 16:16:14 -0700 Subject: [PATCH 39/71] write polygons to ugrid Added files from the "ugrid" package. Added a method to the polygon dimension to write to a netCDF file using the UGRID convention. --- src/ocgis/interface/base/dimension/spatial.py | 19 +- .../test_base/test_dimension/test_spatial.py | 27 +- src/ocgis/util/ugrid/__init__.py | 2 + src/ocgis/util/ugrid/constants.py | 2 + src/ocgis/util/ugrid/convert.py | 41 +++ src/ocgis/util/ugrid/helpers.py | 297 ++++++++++++++++++ 6 files changed, 386 insertions(+), 2 deletions(-) create mode 100644 src/ocgis/util/ugrid/__init__.py create mode 100644 src/ocgis/util/ugrid/constants.py create mode 100644 src/ocgis/util/ugrid/convert.py create mode 100644 src/ocgis/util/ugrid/helpers.py diff --git a/src/ocgis/interface/base/dimension/spatial.py b/src/ocgis/interface/base/dimension/spatial.py index f9aa6e186..3d443294d 100644 --- a/src/ocgis/interface/base/dimension/spatial.py +++ b/src/ocgis/interface/base/dimension/spatial.py @@ -1,8 +1,8 @@ from collections import deque import itertools from copy import copy - import numpy as np + from shapely.geometry.point import Point from shapely.geometry.polygon import Polygon from shapely.prepared import prep @@ -21,6 +21,7 @@ get_added_slice, make_poly, set_name_attributes, get_extrapolated_corners_esmf, get_ocgis_corners_from_esmf_corners from ocgis import constants, env from ocgis.exc import EmptySubsetError, SpatialWrappingError, MultipleElementsFound, BoundsAlreadyAvailableError +from ocgis.util.ugrid.helpers import get_update_feature, write_to_netcdf_dataset class GeomMapping(object): @@ -1247,6 +1248,22 @@ def area(self): def weights(self): return(self.area/self.area.max()) + def write_to_netcdf_dataset_ugrid(self, dataset): + """ + Write a UGRID formatted netCDF4 file following conventions: https://github.com/ugrid-conventions/ugrid-conventions/tree/v0.9.0 + + :param dataset: An open netCDF4 dataset object. 
+ :type dataset: :class:`netCDF4.Dataset` + """ + + def _iter_features_(): + for ctr, geom in enumerate(self.value.data.flat): + yld = {'geometry': {'type': geom.geom_type, 'coordinates': [np.array(geom.exterior.coords).tolist()]}} + yld = get_update_feature(ctr, yld) + yield yld + + write_to_netcdf_dataset(dataset, list(_iter_features_())) + def _get_value_(self): fill = self._get_geometry_fill_() r_data = fill.data diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py index 6c4b3e49c..26c963eb7 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py @@ -1,8 +1,8 @@ from copy import deepcopy, copy import os import itertools - import numpy as np + from shapely import wkt import fiona from fiona.crs import from_epsg @@ -20,6 +20,7 @@ WrappableCoordinateReferenceSystem from ocgis.interface.base.dimension.base import VectorDimension from ocgis.util.itester import itr_products_keywords +from ocgis.util.ugrid.convert import mesh2_nc_to_shapefile class AbstractTestSpatialDimension(TestBase): @@ -1158,6 +1159,30 @@ def test_get_value(self): poly = sdim.geom.polygon.value self.assertGeometriesAlmostEquals(poly, actual) + def test_write_to_netcdf_dataset_ugrid(self): + ugrid_polygons = [ + 'POLYGON((-1.5019011406844105 0.18377693282636276,-1.25475285171102646 0.02534854245880869,-1.35614702154626099 -0.28517110266159684,-1.68567807351077303 -0.50697084917617241,-1.99619771863117879 -0.41191381495564006,-2.08491761723700897 -0.24714828897338403,-1.9264892268694549 -0.03802281368821281,-1.88212927756653992 0.13307984790874539,-1.5019011406844105 0.18377693282636276))', + 'POLYGON((-2.25602027883396694 0.63371356147021585,-1.76172370088719887 0.51330798479087481,-1.88212927756653992 0.13307984790874539,-1.9264892268694549 
-0.03802281368821281,-2.30671736375158432 0.01901140684410674,-2.51584283903675532 0.27249683143219272,-2.52217997465145771 0.48795944233206612,-2.25602027883396694 0.63371356147021585))', + 'POLYGON((-1.55893536121673004 0.86818757921419554,-1.03929024081115307 0.65906210392902409,-1.07097591888466415 0.46261089987325743,-1.5019011406844105 0.18377693282636276,-1.88212927756653992 0.13307984790874539,-1.76172370088719887 0.51330798479087481,-1.55893536121673004 0.86818757921419554))', + 'POLYGON((-2.13561470215462634 0.87452471482889749,-1.83143219264892276 0.98225602027883419,-1.83143219264892276 0.98225602027883419,-1.55893536121673004 0.86818757921419554,-1.58428390367553851 0.66539923954372648,-1.76172370088719887 0.51330798479087481,-2.12294043092522156 0.44993662864385309,-2.25602027883396694 0.63371356147021585,-2.13561470215462634 0.87452471482889749))' + ] + + polygons = [wkt.loads(xx) for xx in ugrid_polygons] + polygons = np.atleast_2d(np.array(polygons)) + spoly = SpatialGeometryPolygonDimension(value=polygons) + + path = os.path.join(self.current_dir_output, 'foo.nc') + with self.nc_scope(path, 'w') as ds: + spoly.write_to_netcdf_dataset_ugrid(ds) + + shp_path = os.path.join(self.current_dir_output, 'ugrid.shp') + mesh2_nc_to_shapefile(path, shp_path) + with fiona.open(shp_path) as source: + for record in source: + geom = shape(record['geometry']) + check = [geom.almost_equals(xx) for xx in polygons.flat] + self.assertEqual(sum(check), 1) + class TestSpatialGridDimension(AbstractTestSpatialDimension): diff --git a/src/ocgis/util/ugrid/__init__.py b/src/ocgis/util/ugrid/__init__.py new file mode 100644 index 000000000..34b05063a --- /dev/null +++ b/src/ocgis/util/ugrid/__init__.py @@ -0,0 +1,2 @@ +# https://github.com/NESII/ugrid +__version__ = '0.01.0-master' \ No newline at end of file diff --git a/src/ocgis/util/ugrid/constants.py b/src/ocgis/util/ugrid/constants.py new file mode 100644 index 000000000..e44771095 --- /dev/null +++ 
b/src/ocgis/util/ugrid/constants.py @@ -0,0 +1,2 @@ +#: The UGRID conventions version. See https://github.com/ugrid-conventions/ugrid-conventions. +CONVENTIONS_VERSION = 'UGRID=0.9.0' \ No newline at end of file diff --git a/src/ocgis/util/ugrid/convert.py b/src/ocgis/util/ugrid/convert.py new file mode 100644 index 000000000..78c38f730 --- /dev/null +++ b/src/ocgis/util/ugrid/convert.py @@ -0,0 +1,41 @@ +import netCDF4 as nc + +from helpers import get_features_from_shapefile, mesh2_to_shapefile, write_to_netcdf_dataset + + +def mesh2_nc_to_shapefile(in_nc, out_path): + """ + :param str in_nc: Path to the input UGRID netCDF file. + :param str out_path: Path to the output shapefile. + :returns: Path to the output shapefile. + :rtype str: + """ + + ds = nc.Dataset(in_nc, 'r') + try: + Mesh2_face_nodes = ds.variables['Mesh2_face_nodes'][:] + Mesh2_node_x = ds.variables['Mesh2_node_x'][:] + Mesh2_node_y = ds.variables['Mesh2_node_y'][:] + mesh2_to_shapefile(out_path, Mesh2_face_nodes, Mesh2_node_x, Mesh2_node_y) + finally: + ds.close() + + return out_path + + +def shapefile_to_mesh2_nc(out_nc_path, shp_path, frmt='NETCDF4'): + """ + :param str out_nc_path: Full path to the output netCDF file. + :param str shp_path: Full path to the input shapefile. + :param str frmt: The netCDF file format to write. See http://unidata.github.io/netcdf4-python/netCDF4.Dataset-class.html. + :returns: Path to the output netCDF file. 
+ :rtype: str + """ + + features = get_features_from_shapefile(shp_path) + ds = nc.Dataset(out_nc_path, 'w', format=frmt) + try: + write_to_netcdf_dataset(ds, features) + finally: + ds.close() + return out_nc_path diff --git a/src/ocgis/util/ugrid/helpers.py b/src/ocgis/util/ugrid/helpers.py new file mode 100644 index 000000000..29aedc588 --- /dev/null +++ b/src/ocgis/util/ugrid/helpers.py @@ -0,0 +1,297 @@ +from collections import deque +import numpy as np + +import fiona +from fiona.crs import from_epsg +from shapely.geometry import shape, Polygon, mapping +from shapely.geometry.point import Point + +from constants import CONVENTIONS_VERSION + + +def get_update_feature(fid, feature): + #todo: doc + + # create the geometry object + obj = shape(feature['geometry']) + # add this to feature dictionary + feature['geometry'].update({'object': obj}) + # coordinates read from shapefile are clockwise, update to anti-clockwise + feature['geometry']['coordinates'][0].reverse() + # only polygons are acceptable geometry types + assert (feature['geometry']['type'] == 'Polygon') + # add a custom feature identifier + feature['fid'] = fid + return feature + + +def iter_edge_nodes(idx_nodes): + #todo: doc + + for ii in range(len(idx_nodes)): + try: + yld = (idx_nodes[ii], idx_nodes[ii + 1]) + # the last node pair requires linking back to the first node + except IndexError: + yld = (idx_nodes[-1], idx_nodes[0]) + yield yld + + +def get_features_from_shapefile(shp_path): + """ + :param str shp_path: Full path to the target shapefile to extract features from. + :returns: A ``deque`` containing feature dictionaries. This is the standard record collection as returned by + ``fiona`` with the addition of a ``fid`` key and ``object`` key in the ``geometry`` dictionary. 
+ :rtype: :class:`collections.deque` + """ + + # load features from disk constructing geometry objects + features = deque() + idx_fid = 0 + with fiona.open(shp_path, 'r') as source: + for feature in source: + features.append(get_update_feature(idx_fid, feature)) + idx_fid += 1 + return features + + +def get_mesh2_variables(features): + """ + :param features: A features sequence as returned from :func:`ugrid.helpers.get_features_from_shapefile`. + :type features: :class:`collections.deque` + :returns: A tuple of arrays with index locations corresponding to: + + ===== ================ ============================= + Index Name Type + ===== ================ ============================= + 0 Mesh2_face_nodes :class:`numpy.ma.MaskedArray` + 1 Mesh2_face_edges :class:`numpy.ma.MaskedArray` + 2 Mesh2_edge_nodes :class:`numpy.ndarray` + 3 Mesh2_node_x :class:`numpy.ndarray` + 4 Mesh2_node_y :class:`numpy.ndarray` + 5 Mesh2_face_links :class:`numpy.ndarray` + ===== ================ ============================= + + Information on individual variables may be found here: https://github.com/ugrid-conventions/ugrid-conventions/blob/9b6540405b940f0a9299af9dfb5e7c04b5074bf7/ugrid-conventions.md#2d-flexible-mesh-mixed-triangles-quadrilaterals-etc-topology + + :rtype: tuple (see table for array types) + """ + + # construct the links between faces (e.g. 
neighbors) + Mesh2_face_links = deque() + for idx_source in range(len(features)): + ref_object = features[idx_source]['geometry']['object'] + for idx_target in range(len(features)): + # skip if it is checking against itself + if idx_source == idx_target: + continue + else: + # if the objects only touch they are neighbors and share nodes + if ref_object.touches(features[idx_target]['geometry']['object']): + Mesh2_face_links.append([features[idx_source]['fid'], features[idx_target]['fid']]) + else: + continue + # convert to numpy array for faster comparisons + Mesh2_face_links = np.array(Mesh2_face_links, dtype=np.int32) + + # the number of faces + nMesh2_face = len(features) + # for polygon geometries the first coordinate is repeated at the end of the sequence. + nMaxMesh2_face_nodes = max([len(feature['geometry']['coordinates'][0]) - 1 for feature in features]) + Mesh2_face_nodes = np.ma.array(np.zeros((nMesh2_face, nMaxMesh2_face_nodes), dtype=np.int32), mask=True) + # the edge mapping has the same shape as the node mapping + Mesh2_face_edges = np.zeros_like(Mesh2_face_nodes) + + # holds the start and end nodes for each edge + Mesh2_edge_nodes = deque() + # holds the raw coordinates of the nodes + Mesh2_node_x = deque() + Mesh2_node_y = deque() + + # flag to indicate if this is the first face encountered + first = True + # global point index counter + idx_point = 0 + # global edge index counter + idx_edge = 0 + # holds point geometry objects + points_obj = deque() + # loop through each polygon + for feature in features: + # reference the face index + fid = feature['fid'] + # just load everything if this is the first polygon + if first: + # store the point values. remember to ignore the last coordinate. 
+ for ii in range(len(feature['geometry']['coordinates'][0]) - 1): + coords = feature['geometry']['coordinates'][0][ii] + # create and store the point geometry object + points_obj.append(Point(coords[0], coords[1])) + # store the x and y coordinates + Mesh2_node_x.append(coords[0]) + Mesh2_node_y.append(coords[1]) + # increment the point index + idx_point += 1 + # map the node indices for the face + Mesh2_face_nodes[fid, 0:idx_point] = range(0, idx_point) + # construct the edges. compress the node slice to remove any masked values at the tail. + for start_node_idx, end_node_idx in iter_edge_nodes(Mesh2_face_nodes[fid, :].compressed()): + Mesh2_edge_nodes.append((start_node_idx, end_node_idx)) + idx_edge += 1 + # map the edges to faces + Mesh2_face_edges[fid, 0:idx_edge] = range(0, idx_edge) + # switch the loop flag to indicate the first face has been dealt with + first = False + else: + # holds new node coordinates for the face + new_Mesh2_face_nodes = deque() + # only search neighboring faces + neighbor_face_indices = Mesh2_face_links[Mesh2_face_links[:, 0] == fid, 1] + for ii in range(len(feature['geometry']['coordinates'][0]) - 1): + # logic flag to indicate if the point has been found + found = False + coords = feature['geometry']['coordinates'][0][ii] + pt = Point(coords[0], coords[1]) + # search the neighboring faces for matching nodes + for neighbor_face_index in neighbor_face_indices.flat: + # break out of loop if the point has been found + if found: + break + # search over the neighboring face's nodes + for neighbor_face_node_index in Mesh2_face_nodes[neighbor_face_index, :].compressed(): + if pt.almost_equals(points_obj[neighbor_face_node_index]): + new_Mesh2_face_nodes.append(neighbor_face_node_index) + # point is found, no need to continue with loop + found = True + break + # add the new node if it has not been found + if not found: + # add the point object to the collection + points_obj.append(pt) + # add the coordinates of the new point + 
Mesh2_node_x.append(coords[0]) + Mesh2_node_y.append(coords[1]) + # append the index of this new point + new_Mesh2_face_nodes.append(idx_point) + # increment the point index + idx_point += 1 + # map the node indices for the face + Mesh2_face_nodes[fid, 0:len(new_Mesh2_face_nodes)] = new_Mesh2_face_nodes + # find and map the edges + new_Mesh2_face_edges = deque() + for start_node_idx, end_node_idx in iter_edge_nodes(Mesh2_face_nodes[fid, :].compressed()): + # flag to indicate if edge has been found + found_edge = False + # search existing edge-node combinations accounting for ordering + for idx_edge_nodes, edge_nodes in enumerate(Mesh2_edge_nodes): + # swap the node ordering + if edge_nodes == (start_node_idx, end_node_idx) or edge_nodes == (end_node_idx, start_node_idx): + new_Mesh2_face_edges.append(idx_edge_nodes) + found_edge = True + break + if not found_edge: + Mesh2_edge_nodes.append((start_node_idx, end_node_idx)) + new_Mesh2_face_edges.append(idx_edge) + idx_edge += 1 + # update the face-edge mapping + Mesh2_face_edges[fid, 0:len(new_Mesh2_face_edges)] = new_Mesh2_face_edges + + return Mesh2_face_nodes, \ + Mesh2_face_edges, \ + np.array(Mesh2_edge_nodes, dtype=np.int32), \ + np.array(Mesh2_node_x, dtype=np.float32), \ + np.array(Mesh2_node_y, dtype=np.float32), \ + np.array(Mesh2_face_links, dtype=np.int32) + + +def mesh2_to_shapefile(out_path, Mesh2_face_nodes, Mesh2_node_x, Mesh2_node_y): + #todo: doc + + crs = from_epsg(4326) + driver = 'ESRI Shapefile' + schema = {'geometry': 'Polygon', + 'properties': {}} + + with fiona.open(out_path, 'w', driver=driver, crs=crs, schema=schema) as f: + for feature_idx in range(Mesh2_face_nodes.shape[0]): + coordinates = deque() + for node_idx in Mesh2_face_nodes[feature_idx, :].compressed(): + coordinates.append((Mesh2_node_x[node_idx], Mesh2_node_y[node_idx])) + polygon = Polygon(coordinates) + feature = {'id': feature_idx, 'properties': {}, 'geometry': mapping(polygon)} + f.write(feature) + + return out_path + + 
+def write_to_netcdf_dataset(ds, features): + """ + Write to an open dataset object. + + :param ds: :class:`netCDF4.Dataset` + :param features: A feature sequence as returned from :func:`ugrid.helpers.get_features_from_shapefile`. + """ + + start_index = 0 + + Mesh2_face_nodes, Mesh2_face_edges, Mesh2_edge_nodes, Mesh2_node_x, \ + Mesh2_node_y, Mesh2_face_links = get_mesh2_variables(features) + nMesh2_node = ds.createDimension('nMesh2_node', size=Mesh2_node_x.shape[0]) + nMesh2_edge = ds.createDimension('nMesh2_edge', size=Mesh2_edge_nodes.shape[0]) + nMesh2_face = ds.createDimension('nMesh2_face', size=Mesh2_face_nodes.shape[0]) + nMesh2_face_links = ds.createDimension('nMesh2_face_links', size=Mesh2_face_links.shape[0]) + nMaxMesh2_face_nodes = ds.createDimension('nMaxMesh2_face_nodes', size=Mesh2_face_nodes.shape[1]) + Two = ds.createDimension('Two', size=2) + vMesh2 = ds.createVariable('Mesh2', np.int32) + vMesh2.cf_role = "mesh_topology" + vMesh2.long_name = "Topology data of 2D unstructured mesh" + vMesh2.topology_dimension = 2 + vMesh2.node_coordinates = "Mesh2_node_x Mesh2_node_y" + vMesh2.face_node_connectivity = "Mesh2_face_nodes" + vMesh2.edge_node_connectivity = "Mesh2_edge_nodes" + vMesh2.edge_coordinates = "Mesh2_edge_x Mesh2_edge_y" + vMesh2.face_coordinates = "Mesh2_face_x Mesh2_face_y" + vMesh2.face_edge_connectivity = "Mesh2_face_edges" + vMesh2.face_face_connectivity = "Mesh2_face_links" + vMesh2_face_nodes = ds.createVariable('Mesh2_face_nodes', + Mesh2_face_nodes.dtype, + dimensions=(nMesh2_face._name, nMaxMesh2_face_nodes._name), + fill_value=Mesh2_face_nodes.fill_value) + vMesh2_face_nodes[:] = Mesh2_face_nodes + vMesh2_face_nodes.cf_role = "face_node_connectivity" + vMesh2_face_nodes.long_name = "Maps every face to its corner nodes." 
+ vMesh2_face_nodes.start_index = start_index + vMesh2_edge_nodes = ds.createVariable('Mesh2_edge_nodes', Mesh2_edge_nodes.dtype, + dimensions=(nMesh2_edge._name, Two._name)) + vMesh2_edge_nodes[:] = Mesh2_edge_nodes + vMesh2_edge_nodes.cf_role = "edge_node_connectivity" + vMesh2_edge_nodes.long_name = "Maps every edge to the two nodes that it connects." + vMesh2_edge_nodes.start_index = start_index + vMesh2_face_edges = ds.createVariable('Mesh2_face_edges', Mesh2_face_edges.dtype, + dimensions=(nMesh2_face._name, nMaxMesh2_face_nodes._name), + fill_value=Mesh2_face_edges.fill_value) + vMesh2_face_edges[:] = Mesh2_face_edges + vMesh2_face_edges.cf_role = "face_edge_connectivity" + vMesh2_face_edges.long_name = "Maps every face to its edges." + vMesh2_face_edges.start_index = start_index + vMesh2_face_links = ds.createVariable('Mesh2_face_links', Mesh2_face_links.dtype, + dimensions=(nMesh2_face_links._name, Two._name)) + vMesh2_face_links[:] = Mesh2_face_links + vMesh2_face_links.cf_role = "face_face_connectivity" + vMesh2_face_links.long_name = "Indicates which faces are neighbors." + vMesh2_face_links.start_index = start_index + vMesh2_node_x = ds.createVariable('Mesh2_node_x', Mesh2_node_x.dtype, + dimensions=(nMesh2_node._name,)) + vMesh2_node_x[:] = Mesh2_node_x + vMesh2_node_x.standard_name = "longitude" + vMesh2_node_x.long_name = "Longitude of 2D mesh nodes." + vMesh2_node_x.units = "degrees_east" + vMesh2_node_y = ds.createVariable('Mesh2_node_y', Mesh2_node_y.dtype, + dimensions=(nMesh2_node._name,)) + vMesh2_node_y[:] = Mesh2_node_y + vMesh2_node_y.standard_name = "latitude" + vMesh2_node_y.long_name = "Latitude of 2D mesh nodes." 
+ vMesh2_node_y.units = "degrees_north" + + # add global variables + ds.Conventions = CONVENTIONS_VERSION \ No newline at end of file From 87cc7469e4ba7b2342dbf4012e58f5c2a9859b17 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Mon, 29 Dec 2014 14:16:35 -0700 Subject: [PATCH 40/71] autodiscover appropriate driver The default value for a request dataset's driver is now None. There is an autodiscover feature if the value is None checking for netCDF first. --- src/ocgis/api/request/base.py | 60 ++++++++++++++----- src/ocgis/api/request/driver/base.py | 9 ++- src/ocgis/api/request/driver/nc.py | 2 +- src/ocgis/api/request/driver/vector.py | 1 + .../test_api/test_request/test_base.py | 50 ++++++++++++---- .../test_request/test_driver/test_nc.py | 9 ++- .../test_request/test_driver/test_vector.py | 3 + 7 files changed, 101 insertions(+), 33 deletions(-) diff --git a/src/ocgis/api/request/base.py b/src/ocgis/api/request/base.py index c79d54e31..96a10d7c9 100644 --- a/src/ocgis/api/request/base.py +++ b/src/ocgis/api/request/base.py @@ -1,3 +1,4 @@ +from collections import OrderedDict from copy import deepcopy import inspect import logging @@ -93,7 +94,15 @@ class RequestDataset(object): :param conform_units_to: Destination units for conversion. If this parameter is set, then the :mod:`cfunits` module must be installed. :type conform_units_to: str or :class:`cfunits.Units` or sequence - :param str driver: Only valid for ``'netCDF'``. Additional drivers may be added in the future. + :param str driver: If ``None``, autodiscover the appropriate driver. Other accepted values are listed below. + + ============ ================= ============================================= + Value File Extension(s) Description + ============ ================= ============================================= + ``'netCDF'`` ``'nc'`` A netCDF file using a CF metadata convention. + ``'vector'`` ``'shp'`` An ESRI Shapefile. 
+ ============ ================= ============================================= + :param str name: Name of the requested data in the output collection. If ``None``, defaults to ``alias``. If this is a multivariate request (i.e. more than one variable) and this is ``None``, then the aliases will be joined by ``'_'`` to create the name. @@ -106,11 +115,15 @@ class RequestDataset(object): .. _time units: http://netcdf4-python.googlecode.com/svn/trunk/docs/netCDF4-module.html#num2date .. _time calendar: http://netcdf4-python.googlecode.com/svn/trunk/docs/netCDF4-module.html#num2date """ - _Drivers = {d.key: d for d in [DriverNetcdf, DriverVector]} + + # contains key-value links to drivers. as new drivers are added, this dictionary must be updated. + _Drivers = OrderedDict() + _Drivers[DriverNetcdf.key] = DriverNetcdf + _Drivers[DriverVector.key] = DriverVector def __init__(self, uri=None, variable=None, alias=None, units=None, time_range=None, time_region=None, level_range=None, conform_units_to=None, crs=None, t_units=None, t_calendar=None, did=None, - meta=None, s_abstraction=None, dimension_map=None, name=None, driver='netCDF', regrid_source=True, + meta=None, s_abstraction=None, dimension_map=None, name=None, driver=None, regrid_source=True, regrid_destination=False): self._is_init = True @@ -124,10 +137,14 @@ def __init__(self, uri=None, variable=None, alias=None, units=None, time_range=N else: self._uri = self._get_uri_(uri) - try: - self.driver = self._Drivers[driver](self) - except KeyError: - raise RequestValidationError('driver', 'Driver not found: {0}'.format(driver)) + if driver is None: + klass = self._get_autodiscovered_driver_(self.uri) + else: + try: + klass = self._Drivers[driver] + except KeyError: + raise RequestValidationError('driver', 'Driver not found: {0}'.format(driver)) + self.driver = klass(self) self.variable = variable @@ -370,6 +387,19 @@ def inspect_as_dct(self): ret = ip._as_dct_() return ret + @classmethod + def 
_get_autodiscovered_driver_(cls, uri): + #todo: doc + + for element in get_iter(uri): + extension = os.path.splitext(element)[1][1:] + for driver in cls._Drivers.itervalues(): + if extension in driver.extensions: + return driver + + msg = 'Driver not found for URI: {0}'.format(uri) + raise RequestValidationError('driver/uri', msg) + def _get_meta_rows_(self): if self.time_range is None: tr = None @@ -562,14 +592,6 @@ def _set_unique_id_(target, uid): target.uid = uid -def get_tuple(value): - if isinstance(value, basestring) or value is None: - ret = (value,) - else: - ret = tuple(value) - return ret - - def get_first_or_sequence(value): if len(value) > 1: ret = value @@ -582,6 +604,14 @@ def get_is_none(value): return all([v is None for v in get_iter(value)]) +def get_tuple(value): + if isinstance(value, basestring) or value is None: + ret = (value,) + else: + ret = tuple(value) + return ret + + def validate_units(keyword, sequence): from cfunits import Units try: diff --git a/src/ocgis/api/request/driver/base.py b/src/ocgis/api/request/driver/base.py index a3e5345b9..38ce053f5 100644 --- a/src/ocgis/api/request/driver/base.py +++ b/src/ocgis/api/request/driver/base.py @@ -13,6 +13,13 @@ def __eq__(self, other): def __str__(self): return '"{0}"'.format(self.key) + @abc.abstractproperty + def extensions(self): + """ + :returns: A list of acceptable extensions for this driver. 
+ :rtype: (str,) + """ + @abc.abstractproperty def key(self): str @@ -53,4 +60,4 @@ def open(self): @abc.abstractmethod def inspect(self): - pass \ No newline at end of file + pass diff --git a/src/ocgis/api/request/driver/nc.py b/src/ocgis/api/request/driver/nc.py index c6bb155e6..ecef322ae 100644 --- a/src/ocgis/api/request/driver/nc.py +++ b/src/ocgis/api/request/driver/nc.py @@ -2,7 +2,6 @@ import logging import netCDF4 as nc from warnings import warn - import numpy as np from ocgis.interface.nc.spatial import NcSpatialGridDimension @@ -21,6 +20,7 @@ class DriverNetcdf(AbstractDriver): + extensions = ('nc',) key = 'netCDF' def __init__(self, *args, **kwargs): diff --git a/src/ocgis/api/request/driver/vector.py b/src/ocgis/api/request/driver/vector.py index bac90e2bc..ba4d70146 100644 --- a/src/ocgis/api/request/driver/vector.py +++ b/src/ocgis/api/request/driver/vector.py @@ -6,6 +6,7 @@ class DriverVector(AbstractDriver): + extensions = ('shp',) key = 'vector' def close(self, obj): diff --git a/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py b/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py index 785a6b2dd..8102eea74 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py +++ b/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py @@ -1,14 +1,17 @@ +from collections import OrderedDict from copy import deepcopy import itertools import os import pickle -from datetime import datetime as dt import shutil -import datetime - import numpy as np + from cfunits.cfunits import Units +from datetime import datetime as dt +import datetime +from ocgis.api.request.driver.nc import DriverNetcdf +from ocgis.api.request.driver.vector import DriverVector from ocgis.util.shp_cabinet import ShpCabinet from ocgis.interface.base.field import Field from ocgis.exc import DefinitionValidationError, NoUnitsError, VariableNotFoundError, RequestValidationError @@ -24,6 +27,16 @@ class Test(TestBase): + def test_get_is_none_false(self): + 
possible_false = ['a', ['a', 'b'], ['b', None]] + for p in possible_false: + self.assertFalse(get_is_none(p)) + + def test_get_is_none_true(self): + possible_true = [None, [None, None]] + for p in possible_true: + self.assertTrue(get_is_none(p)) + def test_get_tuple(self): value = [4, 5] ret = get_tuple(value) @@ -32,16 +45,6 @@ def test_get_tuple(self): self.assertEqual(value, [4, 10]) self.assertEqual(ret, (4, 5)) - def test_get_is_none_true(self): - possible_true = [None, [None, None]] - for p in possible_true: - self.assertTrue(get_is_none(p)) - - def test_get_is_none_false(self): - possible_false = ['a', ['a', 'b'], ['b', None]] - for p in possible_false: - self.assertFalse(get_is_none(p)) - class TestRequestDataset(TestBase): @@ -81,6 +84,27 @@ def test_crs_overload(self): field = rd.get() self.assertDictEqual(kwds['crs'].value, field.spatial.crs.value) + def test_Drivers(self): + # always test for netcdf first + self.assertIsInstance(RequestDataset._Drivers, OrderedDict) + self.assertEqual(RequestDataset._Drivers.values()[0], DriverNetcdf) + + def test_get_autodiscovered_driver(self): + uri_shp = '/path/to/shapefile.shp' + uri_nc = '/path/to/netcdf/file/foo.nc' + + driver = RequestDataset._get_autodiscovered_driver_(uri_shp) + self.assertEqual(driver, DriverVector) + + for poss in [uri_nc, [uri_nc, uri_nc]]: + driver = RequestDataset._get_autodiscovered_driver_(poss) + self.assertEqual(driver, DriverNetcdf) + + with self.assertRaises(RequestValidationError): + RequestDataset._get_autodiscovered_driver_('something/meaninglyess.foobuar') + with self.assertRaises(RequestValidationError): + RequestDataset._get_autodiscovered_driver_('something/meaninglyess') + def test_name(self): path = ShpCabinet().get_shp_path('state_boundaries') rd = RequestDataset(uri=path, driver='vector') diff --git a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py index 02d7b92a0..fc60a2b39 
100644 --- a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py +++ b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py @@ -3,14 +3,14 @@ import shutil import tempfile import netCDF4 as nc -from datetime import datetime as dt -import datetime from collections import OrderedDict - import numpy as np + import fiona from shapely.geometry.geo import shape +from datetime import datetime as dt +import datetime from ocgis.interface.nc.spatial import NcSpatialGridDimension from ocgis.interface.base.dimension.base import VectorDimension from ocgis import constants @@ -60,6 +60,9 @@ def get_2d_state_boundaries_sdim(self): sdim = SpatialDimension(geom=geom,properties=attrs,crs=WGS84()) return(sdim) + def test_extensions(self): + self.assertEqual(DriverNetcdf.extensions, ('nc',)) + def test_get_dimensioned_variables_one_variable_in_target_dataset(self): uri = self.test_data.get_uri('cancm4_tas') rd = RequestDataset(uri=uri) diff --git a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_vector.py b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_vector.py index 3cfa852c7..aa869a0f0 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_vector.py +++ b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_vector.py @@ -26,6 +26,9 @@ def test_close(self): sci = driver.open() driver.close(sci) + def test_extensions(self): + self.assertEqual(DriverVector.extensions, ('shp',)) + def test_get_crs(self): driver = self.get_driver() self.assertEqual(WGS84(), driver.get_crs()) From 41aa4007d9887d93d1b02ad0d6ba23087293c6c0 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Mon, 29 Dec 2014 15:56:24 -0700 Subject: [PATCH 41/71] autodiscover appropriate driver The autodiscover would have failed for remote datasets. The extension testing was changed to more flexible regular expressions. 
--- src/ocgis/api/request/base.py | 14 ++++++++++---- src/ocgis/api/request/driver/base.py | 4 +++- src/ocgis/api/request/driver/nc.py | 2 +- src/ocgis/api/request/driver/vector.py | 2 +- .../test_ocgis/test_api/test_request/test_base.py | 3 ++- .../test_api/test_request/test_driver/test_nc.py | 3 --- .../test_request/test_driver/test_vector.py | 3 --- 7 files changed, 17 insertions(+), 14 deletions(-) diff --git a/src/ocgis/api/request/base.py b/src/ocgis/api/request/base.py index 96a10d7c9..5c0ee6f08 100644 --- a/src/ocgis/api/request/base.py +++ b/src/ocgis/api/request/base.py @@ -4,6 +4,7 @@ import logging import os import itertools +import re from ocgis.api.request.driver.vector import DriverVector from ocgis.interface.base.field import Field @@ -389,13 +390,18 @@ def inspect_as_dct(self): @classmethod def _get_autodiscovered_driver_(cls, uri): - #todo: doc + """ + :param str uri: The target URI containing data for which to choose a driver. + :returns: The correct driver for opening the ``uri``. + :rtype: :class:`ocgis.api.request.driver.base.AbstractDriver` + :raises: RequestValidationError + """ for element in get_iter(uri): - extension = os.path.splitext(element)[1][1:] for driver in cls._Drivers.itervalues(): - if extension in driver.extensions: - return driver + for pattern in driver.extensions: + if re.match(pattern, element) is not None: + return driver msg = 'Driver not found for URI: {0}'.format(uri) raise RequestValidationError('driver/uri', msg) diff --git a/src/ocgis/api/request/driver/base.py b/src/ocgis/api/request/driver/base.py index 38ce053f5..6c17ecaf8 100644 --- a/src/ocgis/api/request/driver/base.py +++ b/src/ocgis/api/request/driver/base.py @@ -16,8 +16,10 @@ def __str__(self): @abc.abstractproperty def extensions(self): """ - :returns: A list of acceptable extensions for this driver. + :returns: A sequence of regular expressions used to match appropriate URIs. 
:rtype: (str,) + + >>> ('.*\.shp',) """ @abc.abstractproperty diff --git a/src/ocgis/api/request/driver/nc.py b/src/ocgis/api/request/driver/nc.py index ecef322ae..2039922ba 100644 --- a/src/ocgis/api/request/driver/nc.py +++ b/src/ocgis/api/request/driver/nc.py @@ -20,7 +20,7 @@ class DriverNetcdf(AbstractDriver): - extensions = ('nc',) + extensions = ('.*\.nc', 'http.*') key = 'netCDF' def __init__(self, *args, **kwargs): diff --git a/src/ocgis/api/request/driver/vector.py b/src/ocgis/api/request/driver/vector.py index ba4d70146..9d06a3ae1 100644 --- a/src/ocgis/api/request/driver/vector.py +++ b/src/ocgis/api/request/driver/vector.py @@ -6,7 +6,7 @@ class DriverVector(AbstractDriver): - extensions = ('shp',) + extensions = ('.*\.shp',) key = 'vector' def close(self, obj): diff --git a/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py b/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py index 8102eea74..784d82538 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py +++ b/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py @@ -92,11 +92,12 @@ def test_Drivers(self): def test_get_autodiscovered_driver(self): uri_shp = '/path/to/shapefile.shp' uri_nc = '/path/to/netcdf/file/foo.nc' + uri_nc_opendap = 'http://cida.usgs.gov/thredds/dodsC/maurer/maurer_brekke_w_meta.ncml' driver = RequestDataset._get_autodiscovered_driver_(uri_shp) self.assertEqual(driver, DriverVector) - for poss in [uri_nc, [uri_nc, uri_nc]]: + for poss in [uri_nc, [uri_nc, uri_nc], uri_nc_opendap, [uri_nc_opendap, uri_nc_opendap]]: driver = RequestDataset._get_autodiscovered_driver_(poss) self.assertEqual(driver, DriverNetcdf) diff --git a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py index fc60a2b39..a21decac6 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py +++ 
b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py @@ -60,9 +60,6 @@ def get_2d_state_boundaries_sdim(self): sdim = SpatialDimension(geom=geom,properties=attrs,crs=WGS84()) return(sdim) - def test_extensions(self): - self.assertEqual(DriverNetcdf.extensions, ('nc',)) - def test_get_dimensioned_variables_one_variable_in_target_dataset(self): uri = self.test_data.get_uri('cancm4_tas') rd = RequestDataset(uri=uri) diff --git a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_vector.py b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_vector.py index aa869a0f0..3cfa852c7 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_vector.py +++ b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_vector.py @@ -26,9 +26,6 @@ def test_close(self): sci = driver.open() driver.close(sci) - def test_extensions(self): - self.assertEqual(DriverVector.extensions, ('shp',)) - def test_get_crs(self): driver = self.get_driver() self.assertEqual(WGS84(), driver.get_crs()) From 3a77c0ee41d88649a0231a8fea41f9d2115b18d9 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Mon, 29 Dec 2014 16:17:43 -0700 Subject: [PATCH 42/71] autodiscover appropriate driver Added explicit test on the request dataset for driver overload. 
--- .../test/test_ocgis/test_api/test_request/test_base.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py b/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py index 784d82538..2b4e814df 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py +++ b/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py @@ -84,6 +84,14 @@ def test_crs_overload(self): field = rd.get() self.assertDictEqual(kwds['crs'].value, field.spatial.crs.value) + def test_driver(self): + uri_nc = self.test_data.get_uri('cancm4_tas') + rd = RequestDataset(uri_nc) + self.assertIsInstance(rd.driver, DriverNetcdf) + + rd = RequestDataset(uri_nc, driver='vector') + self.assertIsInstance(rd.driver, DriverVector) + def test_Drivers(self): # always test for netcdf first self.assertIsInstance(RequestDataset._Drivers, OrderedDict) From 6063a737267ce2a97cca83f95464952e7857a535 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Mon, 29 Dec 2014 16:19:21 -0700 Subject: [PATCH 43/71] autodiscover appropriate driver Moved explicit driver test. 
--- .../test_ocgis/test_api/test_request/test_base.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py b/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py index 2b4e814df..b6f4481fc 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py +++ b/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py @@ -71,6 +71,13 @@ def test_init_driver(self): self.assertIsNone(rd.variable) self.assertIsInstance(rd.get(), Field) + uri_nc = self.test_data.get_uri('cancm4_tas') + rd = RequestDataset(uri_nc) + self.assertIsInstance(rd.driver, DriverNetcdf) + + rd = RequestDataset(uri_nc, driver='vector') + self.assertIsInstance(rd.driver, DriverVector) + def test_str(self): rd = self.test_data.get_rd('cancm4_tas') ss = str(rd) @@ -84,14 +91,6 @@ def test_crs_overload(self): field = rd.get() self.assertDictEqual(kwds['crs'].value, field.spatial.crs.value) - def test_driver(self): - uri_nc = self.test_data.get_uri('cancm4_tas') - rd = RequestDataset(uri_nc) - self.assertIsInstance(rd.driver, DriverNetcdf) - - rd = RequestDataset(uri_nc, driver='vector') - self.assertIsInstance(rd.driver, DriverVector) - def test_Drivers(self): # always test for netcdf first self.assertIsInstance(RequestDataset._Drivers, OrderedDict) From 31e5c0b804a251252cd322c75cd3bbd23443d159 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Mon, 29 Dec 2014 16:44:32 -0700 Subject: [PATCH 44/71] capitalize all constants #347 Constant attributes have been renamed to use all caps. 
closes #347 --- src/ocgis/api/collection.py | 2 +- src/ocgis/api/parms/definition.py | 14 ++-- src/ocgis/api/request/driver/nc.py | 6 +- src/ocgis/api/subset.py | 8 +-- src/ocgis/calc/base.py | 9 ++- src/ocgis/calc/eval_function.py | 17 ++--- src/ocgis/calc/library/index/duration.py | 9 +-- .../index/dynamic_kernel_percentile.py | 7 +- src/ocgis/calc/library/index/heat_index.py | 5 +- src/ocgis/calc/library/math.py | 11 +-- src/ocgis/calc/library/statistics.py | 17 +++-- src/ocgis/calc/library/thresholds.py | 7 +- src/ocgis/constants.py | 60 ++++++++-------- src/ocgis/contrib/library_icclim.py | 70 +++++++++---------- src/ocgis/conv/nc.py | 6 +- src/ocgis/interface/base/crs.py | 10 +-- src/ocgis/interface/base/dimension/base.py | 13 ++-- src/ocgis/interface/base/dimension/spatial.py | 14 ++-- .../interface/base/dimension/temporal.py | 20 +++--- .../test_ocgis/test_api/test_collection.py | 14 ++-- .../test_ocgis/test_api/test_operations.py | 12 ++-- .../test_api/test_parms/test_definition.py | 4 +- .../test_api/test_request/test_base.py | 2 +- .../test_request/test_driver/test_nc.py | 6 +- .../test/test_ocgis/test_calc/test_base.py | 10 +-- .../test/test_ocgis/test_conv/test_nc.py | 8 +-- .../test_interface/test_base/test_crs.py | 6 +- .../test_base/test_dimension/test_base.py | 10 +-- .../test_base/test_dimension/test_spatial.py | 24 +++---- .../test_base/test_dimension/test_temporal.py | 22 +++--- .../test_interface/test_base/test_field.py | 14 ++-- .../test/test_ocgis/test_util/test_helpers.py | 8 +-- src/ocgis/test/test_simple/test_simple.py | 16 ++--- src/ocgis/util/helpers.py | 4 +- src/ocgis/util/shp_process.py | 6 +- 35 files changed, 239 insertions(+), 232 deletions(-) diff --git a/src/ocgis/api/collection.py b/src/ocgis/api/collection.py index 40bd56da3..d9848bde4 100644 --- a/src/ocgis/api/collection.py +++ b/src/ocgis/api/collection.py @@ -93,7 +93,7 @@ def values(self): class SpatialCollection(AbstractCollection): - _default_headers = 
constants.raw_headers + _default_headers = constants.HEADERS_RAW _multi_cast = {'Point': MultiPoint, 'Polygon': MultiPolygon} def __init__(self, meta=None, key=None, crs=None, headers=None, value_keys=None): diff --git a/src/ocgis/api/parms/definition.py b/src/ocgis/api/parms/definition.py index dfb4c3879..e30c347d7 100644 --- a/src/ocgis/api/parms/definition.py +++ b/src/ocgis/api/parms/definition.py @@ -6,15 +6,15 @@ from copy import deepcopy from types import FunctionType import itertools -import datetime +import numpy as np from shapely.geometry import MultiPoint from shapely.geometry.base import BaseGeometry from shapely.geometry.polygon import Polygon from shapely.geometry.multipolygon import MultiPolygon from shapely.geometry.point import Point -import numpy as np +import datetime from ocgis.api.parms import base from ocgis.exc import DefinitionValidationError from ocgis.api.request.base import RequestDataset, RequestDatasetCollection @@ -215,7 +215,7 @@ def _parse_(self,value): ## library is an optional dependency. except KeyError: ## this will register the icclim indices. 
- if function_key.startswith('{0}_'.format(constants.prefix_icclim_function_key)): + if function_key.startswith('{0}_'.format(constants.ICCLIM_PREFIX_FUNCTION_KEY)): register.register_icclim(fr) else: raise(DefinitionValidationError(self,dve_msg)) @@ -661,7 +661,7 @@ class Headers(base.IterableParameter,base.OcgParameter): name = 'headers' default = None return_type = tuple - valid = set(constants.raw_headers+constants.calc_headers+constants.multi_headers) + valid = set(constants.HEADERS_RAW+constants.HEADERS_CALC+constants.HEADERS_MULTI) input_types = [list,tuple] nullable = True element_type = str @@ -679,10 +679,10 @@ def __repr__(self): return(msg) def parse_all(self,value): - for header in constants.required_headers: + for header in constants.HEADERS_REQUIRED: if header in value: value.remove(header) - return(constants.required_headers+value) + return(constants.HEADERS_REQUIRED+value) def validate_all(self,values): if len(values) == 0: @@ -772,7 +772,7 @@ def _get_meta_(self): class OutputFormat(base.StringOptionParameter): name = 'output_format' default = 'numpy' - valid = constants.output_formats + valid = constants.OUTPUT_FORMATS @classmethod def iter_possible(cls): diff --git a/src/ocgis/api/request/driver/nc.py b/src/ocgis/api/request/driver/nc.py index 2039922ba..759704ae8 100644 --- a/src/ocgis/api/request/driver/nc.py +++ b/src/ocgis/api/request/driver/nc.py @@ -163,7 +163,7 @@ def _get_vector_dimension_(self, k, v, source_metadata): # extract the data length to use when creating the source index arrays. 
length = source_metadata['dimensions'][ref_axis['dimension']]['len'] - src_idx = np.arange(0, length, dtype=constants.np_int) + src_idx = np.arange(0, length, dtype=constants.NP_INT) # get the target data type for the dimension try: @@ -248,8 +248,8 @@ def _get_temporal_adds_(ref_attrs): else: shape_src_idx = [source_metadata['dimensions'][xx]['len'] for xx in kwds_grid['row']['dimensions']] src_idx = {} - src_idx['row'] = np.arange(0, shape_src_idx[0], dtype=constants.np_int) - src_idx['col'] = np.arange(0, shape_src_idx[1], dtype=constants.np_int) + src_idx['row'] = np.arange(0, shape_src_idx[0], dtype=constants.NP_INT) + src_idx['col'] = np.arange(0, shape_src_idx[1], dtype=constants.NP_INT) name_row = kwds_grid['row']['name'] name_col = kwds_grid['col']['name'] kwds_grid = {'name_row': name_row, 'name_col': name_col, 'data': self.rd, 'src_idx': src_idx} diff --git a/src/ocgis/api/subset.py b/src/ocgis/api/subset.py index 63282b2e7..c9bc1bfc4 100644 --- a/src/ocgis/api/subset.py +++ b/src/ocgis/api/subset.py @@ -1,7 +1,7 @@ import logging from copy import deepcopy, copy - import numpy as np + from shapely.geometry import Point, MultiPoint from ocgis.calc.engine import OcgCalculationEngine @@ -167,11 +167,11 @@ def _process_subsettables_(self, rds): else: if self.cengine is not None: if self._has_multivariate_calculations: - headers = constants.multi_headers + headers = constants.HEADERS_MULTI else: - headers = constants.calc_headers + headers = constants.HEADERS_CALC else: - headers = constants.raw_headers + headers = constants.HEADERS_RAW ## keyed output functions require appending headers regardless. there is ## only one keyed output function allowed in a request. 
diff --git a/src/ocgis/calc/base.py b/src/ocgis/calc/base.py index c75ee9035..25e7d6d1b 100644 --- a/src/ocgis/calc/base.py +++ b/src/ocgis/calc/base.py @@ -3,7 +3,6 @@ import abc import itertools import logging - import numpy as np from ocgis.interface.base.variable import DerivedVariable, VariableCollection @@ -319,10 +318,10 @@ def _add_to_collection_(self, units=None, value=None, parent_variables=None, ali if sample_size is not None: # meta = {'attrs': {'standard_name': 'sample_size', 'long_name': 'Statistical Sample Size'}} attrs = OrderedDict() - attrs['standard_name'] = constants.default_sample_size_standard_name - attrs['long_name'] = constants.default_sample_size_long_name + attrs['standard_name'] = constants.DEFAULT_SAMPLE_SIZE_STANDARD_NAME + attrs['long_name'] = constants.DEFAULT_SAMPLE_SIZE_LONG_NAME dv = DerivedVariable(name=None, alias='n_' + dv.alias, units=None, value=sample_size, fdef=None, - parents=parents, dtype=constants.np_int, fill_value=fill_value, attrs=attrs) + parents=parents, dtype=constants.NP_INT, fill_value=fill_value, attrs=attrs) self.vc.add_variable(dv) @abc.abstractmethod @@ -357,7 +356,7 @@ def _get_temporal_agg_fill_(self, value=None, f=None, parms=None, shp_fill=None) ## this array holds output from the sample size computations if self.calc_sample_size: - fill_sample_size = np.ma.zeros(fill.shape, dtype=constants.np_int) + fill_sample_size = np.ma.zeros(fill.shape, dtype=constants.NP_INT) else: fill_sample_size = None diff --git a/src/ocgis/calc/eval_function.py b/src/ocgis/calc/eval_function.py index 88b59b41d..60a3d09c0 100644 --- a/src/ocgis/calc/eval_function.py +++ b/src/ocgis/calc/eval_function.py @@ -1,9 +1,10 @@ -from ocgis.calc.base import AbstractUnivariateFunction -from ocgis import constants import numpy as np import re from copy import deepcopy +from ocgis.calc.base import AbstractUnivariateFunction +from ocgis import constants + class EvalFunction(AbstractUnivariateFunction): ''' @@ -12,7 +13,7 @@ class 
EvalFunction(AbstractUnivariateFunction): :param str expr: The string function to evaluate. The function must have an equals sign. The function may contain multiple variables aliases. Mathematical operators include standard arithmetic symbols and NumPy functions. The list of - enabled functions is contained in :attr:~`ocgis.constants.enabled_numpy_ufuncs`. + enabled functions is contained in :attr:~`ocgis.constants.ENABLED_NUMPY_UFUNCS`. ''' description = None dtype = None @@ -76,7 +77,7 @@ def is_multivariate(expr): :param str expr: The string function to evaluate. The function must have an equals sign. The function may contain multiple variables aliases. Mathematical operators include standard arithmetic symbols and NumPy functions. The list of - enabled functions is contained in :attr:~`ocgis.constants.enabled_numpy_ufuncs`. + enabled functions is contained in :attr:~`ocgis.constants.ENABLED_NUMPY_UFUNCS`. :returns bool: ''' ## do not count the output variable name @@ -87,7 +88,7 @@ def is_multivariate(expr): strings = set([s for s in strings if re.search('[A-Za-z]',s) is not None]) strings_left = deepcopy(strings) for s in strings: - if s in constants.enabled_numpy_ufuncs: + if s in constants.ENABLED_NUMPY_UFUNCS: strings_left.remove(s) ## if there are more than one variable alias in the equation, the expression ## is multivariate @@ -103,7 +104,7 @@ def _get_eval_string_(expr,map_vars): :param str expr: The string function to evaluate. The function must have an equals sign. The function may contain multiple variables aliases. Mathematical operators include standard arithmetic symbols and NumPy functions. The list of - enabled functions is contained in :attr:~`ocgis.constants.enabled_numpy_ufuncs`. + enabled functions is contained in :attr:~`ocgis.constants.ENABLED_NUMPY_UFUNCS`. :param dict map_vars: Maps variable aliases to their output representation in ``expr``. 
@@ -130,12 +131,12 @@ def _get_eval_string_(expr,map_vars): ## "strings" must be entirely composed of enabled numpy functions and the ## variable aliases originating from the keys in "map_vars" for s in strings: - if s not in constants.enabled_numpy_ufuncs and s not in map_vars.keys(): + if s not in constants.ENABLED_NUMPY_UFUNCS and s not in map_vars.keys(): raise(ValueError('Unable to parse expression string: "{0}". ' 'Ensure the NumPy functions are enabled and appropriate ' 'variables have been requested. The problem string value is "{1}".'.format(expr,s))) ## update the names of the numpy functions to use the module prefix "np" - for np_func in constants.enabled_numpy_ufuncs: + for np_func in constants.ENABLED_NUMPY_UFUNCS: expr = expr.replace(np_func,'np.'+np_func) ## update the variable aliases to match the key-value relationship in ## "map_vars" diff --git a/src/ocgis/calc/library/index/duration.py b/src/ocgis/calc/library/index/duration.py index ad339ce1a..fdda1e438 100644 --- a/src/ocgis/calc/library/index/duration.py +++ b/src/ocgis/calc/library/index/duration.py @@ -1,9 +1,10 @@ -from ocgis.calc import base import numpy as np +from collections import OrderedDict + +from ocgis.calc import base from ocgis.util.helpers import iter_array from ocgis.exc import DefinitionValidationError from ocgis import constants -from collections import OrderedDict class Duration(base.AbstractUnivariateSetFunction,base.AbstractParameterizedFunction): @@ -11,7 +12,7 @@ class Duration(base.AbstractUnivariateSetFunction,base.AbstractParameterizedFunc parms_definition = {'threshold':float,'operation':str,'summary':str} ## output data type will vary by the summary operation (e.g. float for mean, ## int for max) - dtype = constants.np_float + dtype = constants.NP_FLOAT description = 'Summarizes consecutive occurrences in a sequence where the logical operation returns TRUE. The summary operation is applied to the sequences within a temporal aggregation.' 
standard_name = 'duration' long_name = 'Duration' @@ -98,7 +99,7 @@ class FrequencyDuration(base.AbstractKeyedOutputFunction,Duration): key = 'freq_duration' description = 'Count the frequency of spell durations within the temporal aggregation.' dtype = object - structure_dtype = OrderedDict([['names',['duration','count']],['formats',[constants.np_int,constants.np_int]]]) + structure_dtype = OrderedDict([['names',['duration','count']],['formats',[constants.NP_INT,constants.NP_INT]]]) parms_definition = {'threshold':float,'operation':str} standard_name = 'frequency_duration' long_name = 'Frequency Duration' diff --git a/src/ocgis/calc/library/index/dynamic_kernel_percentile.py b/src/ocgis/calc/library/index/dynamic_kernel_percentile.py index e2f5f81c7..a64a43a67 100644 --- a/src/ocgis/calc/library/index/dynamic_kernel_percentile.py +++ b/src/ocgis/calc/library/index/dynamic_kernel_percentile.py @@ -1,15 +1,16 @@ import numpy as np +from collections import OrderedDict, defaultdict +import calendar + from ocgis.calc.base import AbstractParameterizedFunction, AbstractUnivariateSetFunction from ocgis import constants -from collections import OrderedDict, defaultdict from datetime import datetime -import calendar class DynamicDailyKernelPercentileThreshold(AbstractUnivariateSetFunction, AbstractParameterizedFunction): key = 'dynamic_kernel_percentile_threshold' parms_definition = {'operation': str, 'percentile': float, 'daily_percentile': None, 'width': int} - dtype = constants.np_int + dtype = constants.NP_INT description = 'Implementation of moving window percentile threshold calculations similar to ECA indices: http://eca.knmi.nl/documents/atbd.pdf' standard_name = 'dynamic_kernel_percentile' long_name = 'Dynamic Kernel Percentile' diff --git a/src/ocgis/calc/library/index/heat_index.py b/src/ocgis/calc/library/index/heat_index.py index 52f7672b2..99a90b6c5 100644 --- a/src/ocgis/calc/library/index/heat_index.py +++ b/src/ocgis/calc/library/index/heat_index.py @@ 
-1,11 +1,12 @@ -from ocgis.calc import base import numpy as np + +from ocgis.calc import base from ocgis import constants class HeatIndex(base.AbstractMultivariateFunction): description = 'Heat Index following: http://en.wikipedia.org/wiki/Heat_index. If temperature is < 80F or relative humidity is < 40%, the value is masked during calculation. Output units are Fahrenheit.' - dtype = constants.np_float + dtype = constants.NP_FLOAT required_variables = ['tas','rhs'] required_units = {'tas':'fahrenheit','rhs':'percent'} key = 'heat_index' diff --git a/src/ocgis/calc/library/math.py b/src/ocgis/calc/library/math.py index 18d3ffb26..e9adc92e9 100644 --- a/src/ocgis/calc/library/math.py +++ b/src/ocgis/calc/library/math.py @@ -1,5 +1,6 @@ -from ocgis.calc import base import numpy as np + +from ocgis.calc import base from ocgis import constants from ocgis.util.helpers import iter_array @@ -8,7 +9,7 @@ class Divide(base.AbstractMultivariateFunction): key = 'divide' description = 'Divide arr1 by arr2.' required_variables = ['arr1','arr2'] - dtype = constants.np_float + dtype = constants.NP_FLOAT standard_name = 'divide' long_name = 'Divide' @@ -19,7 +20,7 @@ def calculate(self,arr1=None,arr2=None): class NaturalLogarithm(base.AbstractUnivariateFunction): key = 'ln' description = 'Compute the natural logarithm.' - dtype = constants.np_float + dtype = constants.NP_FLOAT standard_name = 'natural_logarithm' long_name = 'Natural Logarithm' @@ -33,7 +34,7 @@ def get_output_units(self,*args,**kwds): class Sum(base.AbstractUnivariateSetFunction): key = 'sum' description = 'Compute the algebraic sum of a series.' - dtype = constants.np_float + dtype = constants.NP_FLOAT standard_name = 'sum' long_name = 'Sum' @@ -45,7 +46,7 @@ class Convolve1D(base.AbstractUnivariateFunction, base.AbstractParameterizedFunc key = 'convolve_1d' parms_definition = {'v': np.ndarray, 'mode': str} description = 'Perform a one-dimensional convolution for each grid element along the time axis. 
See: http://docs.scipy.org/doc/numpy/reference/generated/numpy.convolve.html' - dtype = constants.np_float + dtype = constants.NP_FLOAT standard_name = 'convolve_1d' long_name = 'Convolution along the Time Dimension' diff --git a/src/ocgis/calc/library/statistics.py b/src/ocgis/calc/library/statistics.py index 960905c9a..9cba384be 100644 --- a/src/ocgis/calc/library/statistics.py +++ b/src/ocgis/calc/library/statistics.py @@ -1,7 +1,6 @@ import calendar from collections import OrderedDict, defaultdict import itertools - import numpy as np from datetime import datetime @@ -14,7 +13,7 @@ class MovingWindow(AbstractUnivariateFunction, AbstractParameterizedFunction): key = 'moving_window' parms_definition = {'k': int, 'mode': str, 'operation': str} description = () - dtype = constants.np_float + dtype = constants.NP_FLOAT standard_name = 'moving_window' long_name = 'Moving Window Operation' @@ -130,7 +129,7 @@ class DailyPercentile(base.AbstractUnivariateFunction, base.AbstractParameterize key = 'daily_perc' parms_definition = {'percentile': float, 'window_width': int, 'only_leap_years': bool} description = '' - dtype = constants.np_float + dtype = constants.NP_FLOAT standard_name = 'daily_percentile' long_name = 'Daily Percentile' @@ -350,7 +349,7 @@ class FrequencyPercentile(base.AbstractUnivariateSetFunction,base.AbstractParame key = 'freq_perc' parms_definition = {'percentile':float} description = 'The percentile value along the time axis. See: http://docs.scipy.org/doc/numpy-dev/reference/generated/numpy.percentile.html.' - dtype = constants.np_float + dtype = constants.NP_FLOAT standard_name = 'frequency_percentile' long_name = 'Frequency Percentile' @@ -366,7 +365,7 @@ def calculate(self,values,percentile=None): class Max(base.AbstractUnivariateSetFunction): description = 'Max value for the series.' 
key = 'max' - dtype = constants.np_float + dtype = constants.NP_FLOAT standard_name = 'max' long_name = 'max' @@ -377,7 +376,7 @@ def calculate(self,values): class Min(base.AbstractUnivariateSetFunction): description = 'Min value for the series.' key = 'min' - dtype = constants.np_float + dtype = constants.NP_FLOAT standard_name = 'min' long_name = 'Min' @@ -388,7 +387,7 @@ def calculate(self,values): class Mean(base.AbstractUnivariateSetFunction): description = 'Compute mean value of the set.' key = 'mean' - dtype = constants.np_float + dtype = constants.NP_FLOAT standard_name = 'mean' long_name = 'Mean' @@ -399,7 +398,7 @@ def calculate(self,values): class Median(base.AbstractUnivariateSetFunction): description = 'Compute median value of the set.' key = 'median' - dtype = constants.np_float + dtype = constants.NP_FLOAT standard_name = 'median' long_name = 'median' @@ -410,7 +409,7 @@ def calculate(self,values): class StandardDeviation(base.AbstractUnivariateSetFunction): description = 'Compute standard deviation of the set.' key = 'std' - dtype = constants.np_float + dtype = constants.NP_FLOAT standard_name = 'standard_deviation' long_name = 'Standard Deviation' diff --git a/src/ocgis/calc/library/thresholds.py b/src/ocgis/calc/library/thresholds.py index 931a51f4c..57627499f 100644 --- a/src/ocgis/calc/library/thresholds.py +++ b/src/ocgis/calc/library/thresholds.py @@ -1,12 +1,13 @@ -from ocgis.calc import base import numpy as np + +from ocgis.calc import base from ocgis import constants class Between(base.AbstractUnivariateSetFunction,base.AbstractParameterizedFunction): description = 'Count of values falling within the limits lower and upper (inclusive).' 
parms_definition = {'lower':float,'upper':float} - dtype = constants.np_int + dtype = constants.NP_INT key = 'between' standard_name = 'between' long_name = 'between' @@ -26,7 +27,7 @@ def calculate(self,values,lower=None,upper=None): class Threshold(base.AbstractUnivariateSetFunction,base.AbstractParameterizedFunction): description = 'Count of values where the logical operation returns TRUE.' parms_definition = {'threshold':float,'operation':str} - dtype = constants.np_int + dtype = constants.NP_INT key = 'threshold' standard_name = 'threshold' long_name = 'threshold' diff --git a/src/ocgis/constants.py b/src/ocgis/constants.py index 46fd914c9..5d37f40fc 100644 --- a/src/ocgis/constants.py +++ b/src/ocgis/constants.py @@ -1,75 +1,79 @@ import numpy as np -#: Standard bounds name used when none is available from the input data. -ocgis_bounds = 'bounds' + +# : Standard bounds name used when none is available from the input data. +OCGIS_BOUNDS = 'bounds' #: Standard name for the unique identifier in GIS files. -ocgis_unique_geometry_identifier = 'UGID' +OCGIS_UNIQUE_GEOMETRY_IDENTIFIER = 'UGID' #: Default netCDF4 output file type -netCDF_default_data_model = 'NETCDF4' +NETCDF_DEFAULT_DATA_MODEL = 'NETCDF4' #: Default temporal calendar. -default_temporal_calendar = 'standard' +DEFAULT_TEMPORAL_CALENDAR = 'standard' #: Default temporal units. -default_temporal_units = 'days since 0001-01-01 00:00:00' +DEFAULT_TEMPORAL_UNITS = 'days since 0001-01-01 00:00:00' #: Default name for coordinate systems in netCDF file if none is provided. -default_coordinate_system_name = 'coordinate_system' +DEFAULT_COORDINATE_SYSTEM_NAME = 'coordinate_system' #: Default sample size variable standard name. -default_sample_size_standard_name = 'sample_size' +DEFAULT_SAMPLE_SIZE_STANDARD_NAME = 'sample_size' #: Default sample size variable long name. 
-default_sample_size_long_name = 'Statistical Sample Size' +DEFAULT_SAMPLE_SIZE_LONG_NAME = 'Statistical Sample Size' #: Default row coordinate name. -default_name_row_coordinates = 'yc' +DEFAULT_NAME_ROW_COORDINATES = 'yc' #: Default column coordinate name. -default_name_col_coordinates = 'xc' +DEFAULT_NAME_COL_COORDINATES = 'xc' #: Default corners dimension name. -default_name_corners_dimension = 'ncorners' +DEFAULT_NAME_CORNERS_DIMENSION = 'ncorners' #: Standard headers for subset operations. -raw_headers = ['did','vid','ugid','tid','lid','gid','variable','alias','time','year','month','day','level','value'] +HEADERS_RAW = ['did', 'vid', 'ugid', 'tid', 'lid', 'gid', 'variable', 'alias', 'time', 'year', 'month', 'day', 'level', + 'value'] #: Standard headers for computation. -calc_headers = ['did','vid','cid','ugid','tid','lid','gid','variable','alias','calc_key','calc_alias','time','year','month','day','level','value'] +HEADERS_CALC = ['did', 'vid', 'cid', 'ugid', 'tid', 'lid', 'gid', 'variable', 'alias', 'calc_key', 'calc_alias', 'time', + 'year', 'month', 'day', 'level', 'value'] #: Standard headers for multivariate calculations. -multi_headers = ['did','cid','ugid','tid','lid','gid','calc_key','calc_alias','time','year','month','day','level','value'] +HEADERS_MULTI = ['did', 'cid', 'ugid', 'tid', 'lid', 'gid', 'calc_key', 'calc_alias', 'time', 'year', 'month', 'day', + 'level', 'value'] #: Required headers for every request. -required_headers = ['did','ugid','gid'] +HEADERS_REQUIRED = ['did', 'ugid', 'gid'] #: Key identifiers for output formats. -output_formats = ['numpy','nc','csv','csv+','shp','geojson','meta'] +OUTPUT_FORMATS = ['numpy', 'nc', 'csv', 'csv+', 'shp', 'geojson', 'meta'] # Download URL for test datasets. -test_data_download_url_prefix = None +TEST_DATA_DOWNLOAD_PREFIX = None #: The day value to use for month centroids. -calc_month_centroid = 16 +CALC_MONTH_CENTROID = 16 #: The month value to use for year centroids. 
-calc_year_centroid_month = 7 +CALC_YEAR_CENTROID_MONTH = 7 #: The default day value for year centroids. -calc_year_centroid_day = 1 +CALC_YEAR_CENTROID_DAY = 1 #: The number of values to use when calculating data resolution. -resolution_limit = 100 +RESOLUTION_LIMIT = 100 #: The data type to use for NumPy integers. -np_int = np.int32 +NP_INT = np.int32 #: The data type to use for NumPy floats. -np_float = np.float32 +NP_FLOAT = np.float32 #: Function key prefix for the `icclim` indices library. -prefix_icclim_function_key = 'icclim' +ICCLIM_PREFIX_FUNCTION_KEY = 'icclim' #: NumPy functions enabled for functions evaluated from string representations. -enabled_numpy_ufuncs = ['exp','log','abs'] +ENABLED_NUMPY_UFUNCS = ['exp', 'log', 'abs'] #: The value for the 180th meridian to use when wrapping. -meridian_180th = 180. -# meridian_180th = 179.9999999999999 +MERIDIAN_180TH = 180. +# MERIDIAN_180TH = 179.9999999999999 diff --git a/src/ocgis/contrib/library_icclim.py b/src/ocgis/contrib/library_icclim.py index 91536c4e7..d5731ee3a 100644 --- a/src/ocgis/contrib/library_icclim.py +++ b/src/ocgis/contrib/library_icclim.py @@ -2,12 +2,12 @@ from collections import OrderedDict from copy import deepcopy import json +import numpy as np from icclim.percentile_dict import get_percentile_dict from icclim import calc_indice, calc_indice_perc from icclim import set_longname_units as slu from icclim import set_globattr -import numpy as np from ocgis.calc.base import AbstractUnivariateSetFunction, AbstractMultivariateFunction, AbstractParameterizedFunction from ocgis import constants @@ -239,7 +239,7 @@ def get_percentile_dict(*args, **kwargs): class IcclimTG(AbstractIcclimUnivariateSetFunction): - dtype = constants.np_float + dtype = constants.NP_FLOAT key = 'icclim_TG' @@ -268,7 +268,7 @@ class IcclimTNn(IcclimTG): class IcclimCSU(AbstractIcclimUnivariateSetFunction): - dtype = constants.np_int + dtype = constants.NP_INT key = 'icclim_CSU' @@ -289,26 +289,26 @@ class 
IcclimID(IcclimCSU): class IcclimHD17(IcclimTG): - dtype = constants.np_float + dtype = constants.NP_FLOAT key = 'icclim_HD17' required_units = ['K','kelvin'] class IcclimGD4(IcclimTG): - dtype = constants.np_float + dtype = constants.NP_FLOAT key = 'icclim_GD4' required_units = ['K','kelvin'] class IcclimSU(IcclimCSU): - dtype = constants.np_int + dtype = constants.NP_INT key = 'icclim_SU' required_units = ['K','kelvin'] class IcclimDTR(AbstractIcclimMultivariateFunction): key = 'icclim_DTR' - dtype = constants.np_float + dtype = constants.NP_FLOAT required_variables = ['tasmin','tasmax'] time_aggregation_external = False @@ -329,157 +329,157 @@ class IcclimvDTR(IcclimDTR): class IcclimRR(IcclimCSU): - dtype = constants.np_float + dtype = constants.NP_FLOAT key = 'icclim_RR' class IcclimRR1(IcclimCSU): - dtype = constants.np_float + dtype = constants.NP_FLOAT key = 'icclim_RR1' class IcclimCWD(IcclimCSU): - dtype = constants.np_float + dtype = constants.NP_FLOAT key = 'icclim_CWD' class IcclimSDII(IcclimCSU): - dtype = constants.np_float + dtype = constants.NP_FLOAT key = 'icclim_SDII' class IcclimR10mm(IcclimCSU): - dtype = constants.np_float + dtype = constants.NP_FLOAT key = 'icclim_R10mm' class IcclimR20mm(IcclimCSU): - dtype = constants.np_float + dtype = constants.NP_FLOAT key = 'icclim_R20mm' class IcclimRX1day(IcclimCSU): - dtype = constants.np_float + dtype = constants.NP_FLOAT key = 'icclim_RX1day' class IcclimRX5day(IcclimCSU): - dtype = constants.np_float + dtype = constants.NP_FLOAT key = 'icclim_RX5day' class IcclimSD(IcclimCSU): - dtype = constants.np_float + dtype = constants.NP_FLOAT key = 'icclim_SD' class IcclimSD1(IcclimCSU): - dtype = constants.np_float + dtype = constants.NP_FLOAT key = 'icclim_SD1' class IcclimSD5(IcclimCSU): - dtype = constants.np_float + dtype = constants.NP_FLOAT key = 'icclim_SD5cm' class IcclimSD50(IcclimCSU): - dtype = constants.np_float + dtype = constants.NP_FLOAT key = 'icclim_SD50cm' class IcclimCDD(IcclimCSU): - 
dtype = constants.np_float + dtype = constants.NP_FLOAT key = 'icclim_CDD' class IcclimTG10p(AbstractIcclimPercentileIndice): key = 'icclim_TG10p' - dtype = constants.np_float + dtype = constants.NP_FLOAT percentile = 10 class IcclimTX10p(AbstractIcclimPercentileIndice): key = 'icclim_TX10p' - dtype = constants.np_float + dtype = constants.NP_FLOAT percentile = 10 class IcclimTN10p(AbstractIcclimPercentileIndice): key = 'icclim_TN10p' - dtype = constants.np_float + dtype = constants.NP_FLOAT percentile = 10 class IcclimTG90p(AbstractIcclimPercentileIndice): key = 'icclim_TG90p' - dtype = constants.np_float + dtype = constants.NP_FLOAT percentile = 90 class IcclimTX90p(AbstractIcclimPercentileIndice): key = 'icclim_TX90p' - dtype = constants.np_float + dtype = constants.NP_FLOAT percentile = 10 class IcclimTN90p(AbstractIcclimPercentileIndice): key = 'icclim_TN90p' - dtype = constants.np_float + dtype = constants.NP_FLOAT percentile = 10 class IcclimWSDI(AbstractIcclimPercentileIndice): key = 'icclim_WSDI' - dtype = constants.np_float + dtype = constants.NP_FLOAT percentile = 90 class IcclimCSDI(AbstractIcclimPercentileIndice): key = 'icclim_CSDI' - dtype = constants.np_float + dtype = constants.NP_FLOAT percentile = 10 class IcclimR75p(AbstractIcclimPercentileIndice): key = 'icclim_R75p' - dtype = constants.np_float + dtype = constants.NP_FLOAT percentile = 75 class IcclimR75TOT(AbstractIcclimPercentileIndice): key = 'icclim_R75TOT' - dtype = constants.np_float + dtype = constants.NP_FLOAT percentile = 75 class IcclimR95p(AbstractIcclimPercentileIndice): key = 'icclim_R95p' - dtype = constants.np_float + dtype = constants.NP_FLOAT percentile = 95 class IcclimR95TOT(AbstractIcclimPercentileIndice): key = 'icclim_R95TOT' - dtype = constants.np_float + dtype = constants.NP_FLOAT percentile = 95 class IcclimR99p(AbstractIcclimPercentileIndice): key = 'icclim_R99p' - dtype = constants.np_float + dtype = constants.NP_FLOAT percentile = 99 class 
IcclimR99TOT(AbstractIcclimPercentileIndice): key = 'icclim_R99TOT' - dtype = constants.np_float + dtype = constants.NP_FLOAT percentile = 99 class IcclimCD(AbstractIcclimMultivariateFunction, AbstractParameterizedFunction): key = 'icclim_CD' - dtype = constants.np_float + dtype = constants.NP_FLOAT required_variables = ['tas', 'pr'] time_aggregation_external = False parms_definition = {'tas_25th_percentile_dict': dict, 'pr_25th_percentile_dict': dict} diff --git a/src/ocgis/conv/nc.py b/src/ocgis/conv/nc.py index a9d0c40ee..1053d8765 100644 --- a/src/ocgis/conv/nc.py +++ b/src/ocgis/conv/nc.py @@ -1,6 +1,6 @@ -import datetime import netCDF4 as nc +import datetime import ocgis from ocgis.conv.base import AbstractConverter from ocgis import constants @@ -20,7 +20,7 @@ def _get_file_format_(self): file_format = set() # if no operations are present, use the default data model if self.ops is None: - ret = constants.netCDF_default_data_model + ret = constants.NETCDF_DEFAULT_DATA_MODEL else: for rd in self.ops.dataset.iter_request_datasets(): rr = rd.source_metadata['file_format'] @@ -36,7 +36,7 @@ def _get_file_format_(self): ret = list(file_format)[0] except IndexError: # likely all field objects in the dataset. 
use the default netcdf data model - ret = constants.netCDF_default_data_model + ret = constants.NETCDF_DEFAULT_DATA_MODEL return ret def _write_coll_(self, ds, coll): diff --git a/src/ocgis/interface/base/crs.py b/src/ocgis/interface/base/crs.py index 3183e987b..de797be27 100644 --- a/src/ocgis/interface/base/crs.py +++ b/src/ocgis/interface/base/crs.py @@ -3,10 +3,10 @@ import itertools import abc import logging +import numpy as np from osgeo.osr import SpatialReference from fiona.crs import from_string, to_string -import numpy as np from shapely.geometry import Point, Polygon from shapely.geometry.base import BaseMultipartGeometry @@ -20,7 +20,7 @@ class CoordinateReferenceSystem(object): def __init__(self, value=None, proj4=None, epsg=None, name=None): - self.name = name or constants.default_coordinate_system_name + self.name = name or constants.DEFAULT_COORDINATE_SYSTEM_NAME if value is None: if proj4 is not None: @@ -290,7 +290,7 @@ def _get_wrapped_state_from_array_(cls, arr): :rtype: str """ - gt_m180 = arr > constants.meridian_180th + gt_m180 = arr > constants.MERIDIAN_180TH lt_pm = arr < 0 if np.any(lt_pm): @@ -333,7 +333,7 @@ def _get_wrapped_state_from_geometry_(cls, geom): @staticmethod def _place_prime_meridian_array_(arr): """ - Replace any 180 degree values with the value of :attribute:`ocgis.constants.meridian_180th`. + Replace any 180 degree values with the value of :attribute:`ocgis.constants.MERIDIAN_180TH`. :param arr: The target array to modify inplace. 
:type arr: :class:`numpy.array` @@ -344,7 +344,7 @@ def _place_prime_meridian_array_(arr): # find the values that are 180 select = arr == 180 # replace the values that are 180 with the constant value - np.place(arr, select, constants.meridian_180th) + np.place(arr, select, constants.MERIDIAN_180TH) # return the mask used for the replacement return select diff --git a/src/ocgis/interface/base/dimension/base.py b/src/ocgis/interface/base/dimension/base.py index d4817a0fb..6ae26155a 100644 --- a/src/ocgis/interface/base/dimension/base.py +++ b/src/ocgis/interface/base/dimension/base.py @@ -1,7 +1,6 @@ import abc from copy import copy, deepcopy from operator import mul - import numpy as np from ocgis import constants @@ -130,7 +129,7 @@ def _get_uid_(self): ret = None else: n = reduce(mul, self.value.shape) - ret = np.arange(1, n + 1, dtype=constants.np_int).reshape(self.value.shape) + ret = np.arange(1, n + 1, dtype=constants.NP_INT).reshape(self.value.shape) ret = np.ma.array(ret, mask=False) return ret @@ -166,7 +165,7 @@ def __init__(self, *args, **kwargs): bounds = kwargs.pop('bounds', None) # used for creating name_bounds as well as the name of the bounds dimension in netCDF - self.name_bounds_suffix = kwargs.pop('name_bounds_suffix', None) or constants.ocgis_bounds + self.name_bounds_suffix = kwargs.pop('name_bounds_suffix', None) or constants.OCGIS_BOUNDS self._name_bounds = kwargs.pop('name_bounds', None) self.axis = kwargs.pop('axis', None) # if True, bounds were interpolated. if False, they were loaded from source data. used in conforming units. 
@@ -225,9 +224,9 @@ def resolution(self): if self.bounds is None and self.value.shape[0] < 2: raise(ResolutionError('With no bounds and a single coordinate, approximate resolution may not be determined.')) elif self.bounds is None: - res_array = np.diff(self.value[0:constants.resolution_limit]) + res_array = np.diff(self.value[0:constants.RESOLUTION_LIMIT]) else: - res_bounds = self.bounds[0:constants.resolution_limit] + res_bounds = self.bounds[0:constants.RESOLUTION_LIMIT] res_array = res_bounds[:,1] - res_bounds[:,0] ret = np.abs(res_array).mean() return(ret) @@ -370,7 +369,7 @@ def write_to_netcdf_dataset(self, dataset, unlimited=False, bounds_dimension_nam :type dataset: :class:`netCDF4.Dataset` :param bool unlimited: If ``True``, create the dimension on the netCDF object with ``size=None``. See http://unidata.github.io/netcdf4-python/netCDF4.Dataset-class.html#createDimension. - :param str bounds_dimension_name: If ``None``, default to :attrs:`ocgis.constants.ocgis_bounds`. + :param str bounds_dimension_name: If ``None``, default to :attrs:`ocgis.constants.OCGIS_BOUNDS`. :param kwargs: Extra keyword arguments in addition to ``dimensions`` to pass to ``createVariable``. 
See http://unidata.github.io/netcdf4-python/netCDF4.Dataset-class.html#createVariable """ @@ -423,7 +422,7 @@ def _get_uid_(self): shp = self._value.shape[0] else: shp = self._src_idx.shape[0] - ret = np.arange(1,shp+1,dtype=constants.np_int) + ret = np.arange(1,shp+1,dtype=constants.NP_INT) ret = np.atleast_1d(ret) return(ret) diff --git a/src/ocgis/interface/base/dimension/spatial.py b/src/ocgis/interface/base/dimension/spatial.py index 3d443294d..44cc12329 100644 --- a/src/ocgis/interface/base/dimension/spatial.py +++ b/src/ocgis/interface/base/dimension/spatial.py @@ -762,7 +762,7 @@ def resolution(self): try: ret = np.mean([self.row.resolution,self.col.resolution]) except AttributeError: - resolution_limit = int(constants.resolution_limit)/2 + resolution_limit = int(constants.RESOLUTION_LIMIT)/2 r_value = self.value[:,0:resolution_limit,0:resolution_limit] rows = np.mean(np.diff(r_value[0,:,:],axis=0)) cols = np.mean(np.diff(r_value[1,:,:],axis=1)) @@ -883,8 +883,8 @@ def write_to_netcdf_dataset(self, dataset, **kwargs): self.col.write_to_netcdf_dataset(dataset, **kwargs) except AttributeError: # likely no row and column. write the grid value. 
- name_yc = constants.default_name_row_coordinates - name_xc = constants.default_name_col_coordinates + name_yc = constants.DEFAULT_NAME_ROW_COORDINATES + name_xc = constants.DEFAULT_NAME_COL_COORDINATES dataset.createDimension(name_yc, size=self.shape[0]) dataset.createDimension(name_xc, size=self.shape[1]) value = self.value @@ -898,7 +898,7 @@ def write_to_netcdf_dataset(self, dataset, **kwargs): if self.corners is not None: corners = self.corners - ncorners = constants.default_name_corners_dimension + ncorners = constants.DEFAULT_NAME_CORNERS_DIMENSION dataset.createDimension(ncorners, size=4) name_yc_corner = '{0}_corners'.format(name_yc) name_xc_corner = '{0}_corners'.format(name_xc) @@ -931,7 +931,7 @@ def _get_uid_(self, shp=None): shp = len(self.row), len(self.col) else: shp = self._value.shape[1], self._value.shape[2] - ret = np.arange(1, (shp[0] * shp[1]) + 1, dtype=constants.np_int).reshape(shp) + ret = np.arange(1, (shp[0] * shp[1]) + 1, dtype=constants.NP_INT).reshape(shp) ret = np.ma.array(ret, mask=False) return ret @@ -1066,7 +1066,7 @@ def __init__(self, *args, **kwargs): @property def weights(self): - ret = np.ones(self.value.shape,dtype=constants.np_float) + ret = np.ones(self.value.shape,dtype=constants.NP_FLOAT) ret = np.ma.array(ret,mask=self.value.mask) return(ret) @@ -1238,7 +1238,7 @@ def __init__(self, *args, **kwargs): @property def area(self): r_value = self.value - fill = np.ones(r_value.shape,dtype=constants.np_float) + fill = np.ones(r_value.shape,dtype=constants.NP_FLOAT) fill = np.ma.array(fill,mask=r_value.mask) for (ii,jj),geom in iter_array(r_value,return_value=True): fill[ii,jj] = geom.area diff --git a/src/ocgis/interface/base/dimension/temporal.py b/src/ocgis/interface/base/dimension/temporal.py index 3d97a0169..fcc9ec996 100644 --- a/src/ocgis/interface/base/dimension/temporal.py +++ b/src/ocgis/interface/base/dimension/temporal.py @@ -1,12 +1,12 @@ from collections import deque import itertools -import datetime from copy 
import deepcopy import netCDF4 as nc +import numpy as np import netcdftime -import numpy as np +import datetime import base from ocgis import constants from ocgis.util.logging_ocgis import ocgis_lh @@ -31,7 +31,7 @@ class TemporalDimension(base.VectorDimension): _date_parts = ('year', 'month', 'day', 'hour', 'minute', 'second') def __init__(self, *args, **kwargs): - self.calendar = kwargs.pop('calendar', constants.default_temporal_calendar) + self.calendar = kwargs.pop('calendar', constants.DEFAULT_TEMPORAL_CALENDAR) self.format_time = kwargs.pop('format_time', True) kwargs['axis'] = kwargs.get('axis') or 'T' @@ -40,7 +40,7 @@ def __init__(self, *args, **kwargs): super(TemporalDimension, self).__init__(*args, **kwargs) - self.units = self.units or constants.default_temporal_units + self.units = self.units or constants.DEFAULT_TEMPORAL_UNITS # test if the units are the special case with months in the time units if self.units.startswith('months'): self._has_months_units = True @@ -136,7 +136,7 @@ def get_datetime(self, arr): except ValueError: arr[idx] = arr[idx] else: - arr = get_datetime_from_months_time_units(arr, self.units, month_centroid=constants.calc_month_centroid) + arr = get_datetime_from_months_time_units(arr, self.units, month_centroid=constants.CALC_MONTH_CENTROID) return arr def get_grouping(self, grouping): @@ -445,7 +445,7 @@ def _get_grouping_representative_datetime_(self,grouping,bounds,value): try: set_grouping = set(grouping) if set_grouping == set(['month']): - ref_calc_month_centroid = constants.calc_month_centroid + ref_calc_month_centroid = constants.CALC_MONTH_CENTROID for idx in range(ret.shape[0]): month = ref_value[idx]['month'] ## get the start year from the bounds data @@ -453,14 +453,14 @@ def _get_grouping_representative_datetime_(self,grouping,bounds,value): ## create the datetime object ret[idx] = datetime.datetime(start_year,month,ref_calc_month_centroid) elif set_grouping == set(['year']): - ref_calc_year_centroid_month = 
constants.calc_year_centroid_month - ref_calc_year_centroid_day = constants.calc_year_centroid_day + ref_calc_year_centroid_month = constants.CALC_YEAR_CENTROID_MONTH + ref_calc_year_centroid_day = constants.CALC_YEAR_CENTROID_DAY for idx in range(ret.shape[0]): year = ref_value[idx]['year'] ## create the datetime object ret[idx] = datetime.datetime(year,ref_calc_year_centroid_month,ref_calc_year_centroid_day) elif set_grouping == set(['month','year']): - ref_calc_month_centroid = constants.calc_month_centroid + ref_calc_month_centroid = constants.CALC_MONTH_CENTROID for idx in range(ret.shape[0]): year,month = ref_value[idx]['year'],ref_value[idx]['month'] ret[idx] = datetime.datetime(year,month,ref_calc_month_centroid) @@ -497,7 +497,7 @@ def _get_grouping_representative_datetime_(self,grouping,bounds,value): else: center_month = int(np.floor(np.mean([r_bounds[0].month,r_bounds[1].month]))) center_year = int(np.floor(np.mean([r_bounds[0].year,r_bounds[1].year]))) - fill = datetime.datetime(center_year,center_month,constants.calc_month_centroid) + fill = datetime.datetime(center_year,center_month,constants.CALC_MONTH_CENTROID) ret[idx] = fill return(ret) diff --git a/src/ocgis/test/test_ocgis/test_api/test_collection.py b/src/ocgis/test/test_ocgis/test_api/test_collection.py index ceda7c259..800a5cd78 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_collection.py +++ b/src/ocgis/test/test_ocgis/test_api/test_collection.py @@ -1,10 +1,10 @@ import os from copy import copy, deepcopy +import numpy as np import fiona from shapely.geometry import Point, shape, MultiPoint from shapely.geometry.multipolygon import MultiPolygon -import numpy as np import datetime from ocgis.api.collection import SpatialCollection, AbstractCollection @@ -154,7 +154,7 @@ def test_calculation_iteration(self): sc = ShpCabinet() meta = sc.get_meta('state_boundaries') - sp = SpatialCollection(meta=meta, key='state_boundaries', headers=constants.calc_headers) + sp = 
SpatialCollection(meta=meta, key='state_boundaries', headers=constants.HEADERS_CALC) for row in sc.iter_geoms('state_boundaries'): sp.add_field(row['properties']['UGID'], row['geom'], cfield, properties=row['properties']) for ii, row in enumerate(sp.get_iter_dict()): @@ -166,7 +166,7 @@ def test_calculation_iteration(self): 'alias': 'tmax', 'variable': 'tmax', 'gid': 1, 'calc_key': 'mean', 'tid': 1, 'level': 50, 'day': 16}) self.assertEqual(len(row), 2) - self.assertEqual(len(row[1]), len(constants.calc_headers)) + self.assertEqual(len(row[1]), len(constants.HEADERS_CALC)) def test_calculation_iteration_two_calculations(self): field = self.get_field(with_value=True, month_count=2) @@ -199,7 +199,7 @@ def test_calculation_iteration_two_calculations(self): sc = ShpCabinet() meta = sc.get_meta('state_boundaries') - sp = SpatialCollection(meta=meta, key='state_boundaries', headers=constants.calc_headers) + sp = SpatialCollection(meta=meta, key='state_boundaries', headers=constants.HEADERS_CALC) for row in sc.iter_geoms('state_boundaries'): sp.add_field(row['properties']['UGID'], row['geom'], cfield, properties=row['properties']) @@ -214,7 +214,7 @@ def test_calculation_iteration_two_calculations(self): 'alias': 'tmax', 'variable': 'tmax', 'gid': 1, 'calc_key': 'mean', 'tid': 1, 'level': 50, 'day': 16}) self.assertEqual(len(row), 2) - self.assertEqual(len(row[1]), len(constants.calc_headers)) + self.assertEqual(len(row[1]), len(constants.HEADERS_CALC)) self.assertEqual(ii + 1, 2 * 2 * 2 * 3 * 4 * 51 * 4) self.assertEqual(len(cids), 4) @@ -246,7 +246,7 @@ def test_iteration_methods(self): self.assertDictEqual(row[1],{'lid': 1, 'ugid': 1, 'vid': 1, 'alias': 'tmax', 'did': 1, 'year': 2000, 'value': 0.7203244934421581, 'month': 1, 'variable': 'tmax', 'gid': 2, 'time': datetime.datetime(2000, 1, 1, 12, 0), 'tid': 1, 'level': 50, 'day': 1}) self.assertIsInstance(row[0],MultiPolygon) self.assertEqual(len(row),2) - self.assertEqual(len(row[1]),len(constants.raw_headers)) + 
self.assertEqual(len(row[1]),len(constants.HEADERS_RAW)) def test_multivariate_iteration(self): field = self.get_field(with_value=True,month_count=1) @@ -266,7 +266,7 @@ def test_multivariate_iteration(self): sc = ShpCabinet() meta = sc.get_meta('state_boundaries') - sp = SpatialCollection(meta=meta,key='state_boundaries',headers=constants.multi_headers) + sp = SpatialCollection(meta=meta,key='state_boundaries',headers=constants.HEADERS_MULTI) for row in sc.iter_geoms('state_boundaries'): sp.add_field(row['properties']['UGID'],row['geom'],cfield,properties=row['properties']) diff --git a/src/ocgis/test/test_ocgis/test_api/test_operations.py b/src/ocgis/test/test_ocgis/test_api/test_operations.py index 83e93f43d..98eff5fed 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_operations.py +++ b/src/ocgis/test/test_ocgis/test_api/test_operations.py @@ -1,13 +1,13 @@ import csv -from datetime import datetime as dt import itertools -import datetime import os - -import ESMF from numpy import dtype import numpy as np +import ESMF + +from datetime import datetime as dt +import datetime from ocgis.api.parms.definition import RegridOptions, OutputFormat from ocgis.interface.base.crs import CFWGS84 from ocgis.test.base import TestBase, attr @@ -310,7 +310,7 @@ def test_keyword_headers(self): for htype in [list,tuple]: hvalue = htype(headers) hh = definition.Headers(hvalue) - self.assertEqual(hh.value,tuple(constants.required_headers+['value'])) + self.assertEqual(hh.value,tuple(constants.HEADERS_REQUIRED+['value'])) headers = ['foo'] with self.assertRaises(DefinitionValidationError): @@ -318,7 +318,7 @@ def test_keyword_headers(self): headers = [] hh = definition.Headers(headers) - self.assertEqual(hh.value,tuple(constants.required_headers)) + self.assertEqual(hh.value,tuple(constants.HEADERS_REQUIRED)) def test_keyword_level_range(self): rd = self.test_data.get_rd('cancm4_tas') diff --git a/src/ocgis/test/test_ocgis/test_api/test_parms/test_definition.py 
b/src/ocgis/test/test_ocgis/test_api/test_parms/test_definition.py index 9a14b20cf..0288880b4 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_parms/test_definition.py +++ b/src/ocgis/test/test_ocgis/test_api/test_parms/test_definition.py @@ -355,7 +355,7 @@ def test_init(self): for htype in [list, tuple]: hvalue = htype(headers) hh = Headers(hvalue) - self.assertEqual(hh.value, tuple(constants.required_headers + ['value'])) + self.assertEqual(hh.value, tuple(constants.HEADERS_REQUIRED + ['value'])) headers = ['foo'] with self.assertRaises(DefinitionValidationError): @@ -363,7 +363,7 @@ def test_init(self): headers = [] hh = Headers(headers) - self.assertEqual(hh.value, tuple(constants.required_headers)) + self.assertEqual(hh.value, tuple(constants.HEADERS_REQUIRED)) class TestDataset(TestBase): diff --git a/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py b/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py index b6f4481fc..9eafe4aeb 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py +++ b/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py @@ -312,7 +312,7 @@ def test_source_dictionary_is_deepcopied(self): def test_source_index_matches_constant_value(self): rd = self.test_data.get_rd('cancm4_tas') field = rd.get() - self.assertEqual(field.temporal._src_idx.dtype, constants.np_int) + self.assertEqual(field.temporal._src_idx.dtype, constants.NP_INT) def test_with_units(self): units = 'celsius' diff --git a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py index a21decac6..4532a19ae 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py +++ b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py @@ -256,7 +256,7 @@ def test_get_field_time_region(self): var = ds.variables['time'] real_temporal = nc.num2date(var[:],var.units,var.calendar) select = [True if x.month == 8 else 
False for x in real_temporal] - indices = np.arange(0,var.shape[0],dtype=constants.np_int)[np.array(select)] + indices = np.arange(0,var.shape[0],dtype=constants.NP_INT)[np.array(select)] self.assertNumpyAll(indices,field.temporal._src_idx) self.assertNumpyAll(field.temporal.value_datetime,real_temporal[indices]) self.assertNumpyAll(field.variables['tas'].value.data.squeeze(),ds.variables['tas'][indices,:,:]) @@ -278,7 +278,7 @@ def test_get_field_time_region_with_years(self): var = ds.variables['time'] real_temporal = nc.num2date(var[:],var.units,var.calendar) select = [True if x.month == 8 and x.year in [2008,2010] else False for x in real_temporal] - indices = np.arange(0,var.shape[0],dtype=constants.np_int)[np.array(select)] + indices = np.arange(0,var.shape[0],dtype=constants.NP_INT)[np.array(select)] self.assertNumpyAll(indices,field.temporal._src_idx) self.assertNumpyAll(field.temporal.value_datetime,real_temporal[indices]) self.assertNumpyAll(field.variables['tas'].value.data.squeeze(),ds.variables['tas'][indices,:,:]) @@ -385,7 +385,7 @@ def test_get_field_projection_axes(self): self.assertEqual(field.temporal.value_datetime[0],datetime.datetime(1950, 1, 16, 0, 0)) self.assertEqual(field.temporal.value_datetime[-1],datetime.datetime(2099, 12, 15, 0, 0)) self.assertEqual(field.level,None) - self.assertNumpyAll(field.realization.value,np.arange(1,37,dtype=constants.np_int)) + self.assertNumpyAll(field.realization.value,np.arange(1,37,dtype=constants.NP_INT)) ds = nc.Dataset(uri,'r') to_test = ds.variables['Tavg'] diff --git a/src/ocgis/test/test_ocgis/test_calc/test_base.py b/src/ocgis/test/test_ocgis/test_calc/test_base.py index 5f2e89be4..3b502c128 100644 --- a/src/ocgis/test/test_ocgis/test_calc/test_base.py +++ b/src/ocgis/test/test_ocgis/test_calc/test_base.py @@ -1,7 +1,7 @@ from copy import deepcopy +import numpy as np from cfunits.cfunits import Units -import numpy as np from ocgis.interface.base.variable import VariableCollection, DerivedVariable 
from ocgis.test.base import TestBase @@ -14,7 +14,7 @@ class FooNeedsUnits(AbstractUnivariateFunction): description = 'calculation with units' - dtype = constants.np_float + dtype = constants.NP_FLOAT key = 'fnu' required_units = ['K', 'kelvin'] standard_name = 'foo_needs_units' @@ -26,7 +26,7 @@ def calculate(self, values): class FooNeedsUnitsSet(AbstractUnivariateSetFunction): description = 'calculation with units' - dtype = constants.np_float + dtype = constants.NP_FLOAT key = 'fnu' required_units = ['K', 'kelvin'] standard_name = '' @@ -60,8 +60,8 @@ def test_add_to_collection(self): if k.calc_sample_size: alias = 'n_{0}'.format(variable.alias) ss = res[alias] - attrs = {'standard_name': constants.default_sample_size_standard_name, - 'long_name': constants.default_sample_size_long_name} + attrs = {'standard_name': constants.DEFAULT_SAMPLE_SIZE_STANDARD_NAME, + 'long_name': constants.DEFAULT_SAMPLE_SIZE_LONG_NAME} self.assertDictEqual(ss.attrs, attrs) def test_add_to_collection_parents(self): diff --git a/src/ocgis/test/test_ocgis/test_conv/test_nc.py b/src/ocgis/test/test_ocgis/test_conv/test_nc.py index 25b7ac38d..a8a20bd9b 100644 --- a/src/ocgis/test/test_ocgis/test_conv/test_nc.py +++ b/src/ocgis/test/test_ocgis/test_conv/test_nc.py @@ -1,12 +1,12 @@ +import numpy as np + from ocgis.test.base import nc_scope from ocgis.util.itester import itr_products_keywords from ocgis.api.operations import OcgOperations from ocgis.conv.nc import NcConverter -import numpy as np from ocgis.test.test_ocgis.test_conv.test_base import AbstractTestConverter import ocgis from ocgis import constants -from datetime import datetime as dt class TestNcConverter(AbstractTestConverter): @@ -40,13 +40,13 @@ def test_get_file_format(self): coll = self.get_spatial_collection(field=self.get_field()) conv = NcConverter([coll], self.current_dir_output, 'foo') file_format = conv._get_file_format_() - self.assertEqual(file_format, constants.netCDF_default_data_model) + 
self.assertEqual(file_format, constants.NETCDF_DEFAULT_DATA_MODEL) # add operations with a field as the dataset ops = OcgOperations(dataset=coll[1]['foo'], output_format='nc') conv = NcConverter([coll], self.current_dir_output, 'foo', ops=ops) file_format = conv._get_file_format_() - self.assertEqual(file_format, constants.netCDF_default_data_model) + self.assertEqual(file_format, constants.NETCDF_DEFAULT_DATA_MODEL) # add operations and use a request dataset coll = self.get_spatial_collection() diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_crs.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_crs.py index 76b75c6e7..22511caad 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_crs.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_crs.py @@ -1,9 +1,9 @@ import os from copy import deepcopy import netCDF4 as nc +import numpy as np from shapely.geometry import Point, MultiPoint -import numpy as np from shapely.geometry.multipolygon import MultiPolygon from ocgis.interface.base.crs import CoordinateReferenceSystem, WGS84,\ @@ -45,7 +45,7 @@ def test_init(self): # test with a name parameter crs = CoordinateReferenceSystem(epsg=4326) - self.assertEqual(crs.name, constants.default_coordinate_system_name) + self.assertEqual(crs.name, constants.DEFAULT_COORDINATE_SYSTEM_NAME) crs = CoordinateReferenceSystem(epsg=4326, name='foo') self.assertEqual(crs.name, 'foo') @@ -221,7 +221,7 @@ def test_place_prime_meridian_array(self): arr = np.array([123, 180, 200, 180], dtype=float) ret = Spherical._place_prime_meridian_array_(arr) self.assertNumpyAll(ret, np.array([False, True, False, True])) - self.assertNumpyAll(arr, np.array([123., constants.meridian_180th, 200., constants.meridian_180th])) + self.assertNumpyAll(arr, np.array([123., constants.MERIDIAN_180TH, 200., constants.MERIDIAN_180TH])) def test_wrap_unwrap_with_mask(self): """Test wrapped and unwrapped geometries with a mask ensuring that masked values are 
wrapped and unwrapped.""" diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_base.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_base.py index 4c0010b74..3beb961ef 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_base.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_base.py @@ -1,8 +1,8 @@ from collections import OrderedDict import os from copy import deepcopy - import numpy as np + from cfunits.cfunits import Units from ocgis.interface.base.attributes import Attributes @@ -86,7 +86,7 @@ def test_init(self): self.assertIsNone(vd.name_value) self.assertEqual(vd.name_uid, 'None_uid') self.assertEqual(vd.name_bounds, 'None_bounds') - self.assertEqual(vd.name_bounds_suffix, constants.ocgis_bounds) + self.assertEqual(vd.name_bounds_suffix, constants.OCGIS_BOUNDS) self.assertIsNone(vd.axis) # test passing attributes to the constructor @@ -163,7 +163,7 @@ def test_load_from_source(self): """Test loading from a fake data source.""" vdim = VectorDimension(src_idx=[0, 1, 2, 3], data='foo') - self.assertNumpyAll(vdim.uid, np.array([1, 2, 3, 4], dtype=constants.np_int)) + self.assertNumpyAll(vdim.uid, np.array([1, 2, 3, 4], dtype=constants.NP_INT)) with self.assertRaises(NotImplementedError): vdim.value with self.assertRaises(NotImplementedError): @@ -255,7 +255,7 @@ def test_with_bounds(self): vdim = VectorDimension(value=[4, 5, 6], bounds=[[3, 5], [4, 6], [5, 7]]) self.assertNumpyAll(vdim.bounds, np.array([[3, 5], [4, 6], [5, 7]])) - self.assertNumpyAll(vdim.uid, np.array([1, 2, 3], dtype=constants.np_int)) + self.assertNumpyAll(vdim.uid, np.array([1, 2, 3], dtype=constants.NP_INT)) self.assertEqual(vdim.resolution, 2.0) def test_with_units(self): @@ -319,7 +319,7 @@ def test_write_to_netcdf_dataset(self): self.assertEqual(var.axis, axis_actual) try: - self.assertIn(constants.ocgis_bounds, ds.dimensions) + self.assertIn(constants.OCGIS_BOUNDS, 
ds.dimensions) except AssertionError: try: self.assertFalse(k.with_bounds) diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py index 26c963eb7..c86e6a14f 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py @@ -96,7 +96,7 @@ def polygon_value_alternate_ordering(self): @property def uid_value(self): - return np.ma.array([[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12]], mask=False, dtype=constants.np_int) + return np.ma.array([[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12]], mask=False, dtype=constants.NP_INT) def write_sdim(self): sdim = self.get_sdim(bounds=True) @@ -558,7 +558,7 @@ def test_get_intersects_polygon_small(self): ret = sdim.get_intersects(poly,use_spatial_index=u) to_test = np.ma.array([[[38.]],[[-100.]]],mask=False) self.assertNumpyAll(ret.grid.value,to_test) - self.assertNumpyAll(ret.uid,np.ma.array([[9]],dtype=constants.np_int)) + self.assertNumpyAll(ret.uid,np.ma.array([[9]],dtype=constants.NP_INT)) self.assertEqual(ret.shape,(1,1)) to_test = ret.geom.point.value.compressed()[0] self.assertTrue(to_test.almost_equals(Point(-100,38))) @@ -888,12 +888,12 @@ def test_point_as_value(self): pts = np.array([[pt,pt2]],dtype=object) g = SpatialGeometryPointDimension(value=pts) self.assertEqual(g.value.mask.any(),False) - self.assertNumpyAll(g.uid,np.ma.array([[1,2]],dtype=constants.np_int)) + self.assertNumpyAll(g.uid,np.ma.array([[1,2]],dtype=constants.NP_INT)) sgdim = SpatialGeometryDimension(point=g) sdim = SpatialDimension(geom=sgdim) self.assertEqual(sdim.shape,(1,2)) - self.assertNumpyAll(sdim.uid,np.ma.array([[1,2]],dtype=constants.np_int)) + self.assertNumpyAll(sdim.uid,np.ma.array([[1,2]],dtype=constants.NP_INT)) sdim_slc = sdim[:,1] self.assertEqual(sdim_slc.shape,(1,1)) 
self.assertTrue(sdim_slc.geom.point.value[0,0].almost_equals(pt2)) @@ -904,7 +904,7 @@ def test_grid_get_subset_bbox(self): bg = sdim.grid.get_subset_bbox(-99,39,-98,39,closed=False) self.assertEqual(bg._value,None) self.assertEqual(bg.uid.shape,(1,2)) - self.assertNumpyAll(bg.uid,np.ma.array([[6,7]],dtype=constants.np_int)) + self.assertNumpyAll(bg.uid,np.ma.array([[6,7]],dtype=constants.NP_INT)) with self.assertRaises(EmptySubsetError): sdim.grid.get_subset_bbox(1000,1000,1001,10001) @@ -962,7 +962,7 @@ def test_wrap(self): """Test wrapping a SpatialDimension""" def assertWrapped(arr): - select = arr >= constants.meridian_180th + select = arr >= constants.MERIDIAN_180TH self.assertFalse(select.any()) sdim = self.get_sdim(crs=WGS84()) @@ -1452,18 +1452,18 @@ def test_write_to_netcdf_dataset(self): self.assertNumpyAll(ds.variables[grid.row.name][:], row.value) self.assertNumpyAll(ds.variables[grid.col.name][:], col.value) else: - yc = ds.variables[constants.default_name_row_coordinates] - xc = ds.variables[constants.default_name_col_coordinates] + yc = ds.variables[constants.DEFAULT_NAME_ROW_COORDINATES] + xc = ds.variables[constants.DEFAULT_NAME_COL_COORDINATES] self.assertNumpyAll(yc[:], grid.value[0].data) self.assertNumpyAll(xc[:], grid.value[1].data) self.assertEqual(yc.axis, 'Y') self.assertEqual(xc.axis, 'X') if k.with_corners and not k.with_rc: name_yc_corners, name_xc_corners = ['{0}_corners'.format(xx) for xx in - [constants.default_name_row_coordinates, - constants.default_name_col_coordinates]] + [constants.DEFAULT_NAME_ROW_COORDINATES, + constants.DEFAULT_NAME_COL_COORDINATES]] for idx, name in zip([0, 1], [name_yc_corners, name_xc_corners]): var = ds.variables[name] self.assertNumpyAll(var[:], grid.corners[idx].data) - self.assertEqual(ds.variables[constants.default_name_row_coordinates].corners, name_yc_corners) - self.assertEqual(ds.variables[constants.default_name_col_coordinates].corners, name_xc_corners) + 
self.assertEqual(ds.variables[constants.DEFAULT_NAME_ROW_COORDINATES].corners, name_yc_corners) + self.assertEqual(ds.variables[constants.DEFAULT_NAME_COL_COORDINATES].corners, name_xc_corners) diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_temporal.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_temporal.py index 4312c6c31..2a95b451b 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_temporal.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_temporal.py @@ -1,14 +1,14 @@ from copy import deepcopy from netCDF4 import num2date, date2num import os -from datetime import datetime as dt -import datetime from collections import deque import itertools +import numpy as np import netcdftime -import numpy as np +from datetime import datetime as dt +import datetime from ocgis.util.itester import itr_products_keywords from ocgis import constants from ocgis.test.base import TestBase, nc_scope @@ -136,8 +136,8 @@ def test_init(self): self.assertEqual(td.axis, 'T') self.assertEqual(td.name, 'time') self.assertEqual(td.name_uid, 'tid') - self.assertEqual(td.calendar, constants.default_temporal_calendar) - self.assertEqual(td.units, constants.default_temporal_units) + self.assertEqual(td.calendar, constants.DEFAULT_TEMPORAL_CALENDAR) + self.assertEqual(td.units, constants.DEFAULT_TEMPORAL_UNITS) self.assertIsInstance(td, VectorDimension) self.assertFalse(td._has_months_units) self.assertTrue(td.format_time) @@ -169,8 +169,8 @@ def test_bounds_datetime_and_bounds_numtime(self): value_datetime = np.array([dt(2000, 1, 15), dt(2000, 2, 15)]) bounds_datetime = np.array([[dt(2000, 1, 1), dt(2000, 2, 1)], [dt(2000, 2, 1), dt(2000, 3, 1)]]) - value = date2num(value_datetime, constants.default_temporal_units, calendar=constants.default_temporal_calendar) - bounds_num = date2num(bounds_datetime, constants.default_temporal_units, 
calendar=constants.default_temporal_calendar) + value = date2num(value_datetime, constants.DEFAULT_TEMPORAL_UNITS, calendar=constants.DEFAULT_TEMPORAL_CALENDAR) + bounds_num = date2num(bounds_datetime, constants.DEFAULT_TEMPORAL_UNITS, calendar=constants.DEFAULT_TEMPORAL_CALENDAR) bounds_options = [None, bounds_num, bounds_datetime] value_options = [value, value, value_datetime] for format_time in [True, False]: @@ -280,7 +280,7 @@ def test_getiter(self): self.assertIsInstance(values['time'], float) def test_get_numtime(self): - units_options = [constants.default_temporal_units, 'months since 1960-5'] + units_options = [constants.DEFAULT_TEMPORAL_UNITS, 'months since 1960-5'] value_options = [np.array([5000., 5001]), np.array([5, 6, 7])] for units, value in zip(units_options, value_options): td = TemporalDimension(value=value, units=units) @@ -377,7 +377,7 @@ def test_get_grouping_seasonal_real_data_all_seasons(self): tgd = field.temporal.get_grouping(calc_grouping) self.assertEqual(tgd.shape, (4,)) self.assertEqual([xx[1] for xx in calc_grouping], [xx.month for xx in tgd.value.flat]) - self.assertEqual(set([xx.day for xx in tgd.value.flat]), {constants.calc_month_centroid}) + self.assertEqual(set([xx.day for xx in tgd.value.flat]), {constants.CALC_MONTH_CENTROID}) self.assertEqual([2006, 2005, 2005, 2005], [xx.year for xx in tgd.value.flat]) self.assertNumpyAll(tgd.bounds_numtime, np.array([[55152.0, 58804.0], [55211.0, 58590.0], [55303.0, 58682.0], [55395.0, 58773.0]])) @@ -592,7 +592,7 @@ def test_time_range_subset(self): def test_value_datetime_and_value_numtime(self): value_datetime = np.array([dt(2000, 1, 15), dt(2000, 2, 15)]) - value = date2num(value_datetime, constants.default_temporal_units, calendar=constants.default_temporal_calendar) + value = date2num(value_datetime, constants.DEFAULT_TEMPORAL_UNITS, calendar=constants.DEFAULT_TEMPORAL_CALENDAR) keywords = dict(value=[value, value_datetime], format_time=[True, False]) for k in 
itr_products_keywords(keywords, as_namedtuple=True): @@ -660,7 +660,7 @@ def test_return_from_get_grouping(self): tgd = td.get_grouping(['month']) self.assertEqual(tuple(tgd.date_parts[0]), (None, 1, None, None, None, None)) self.assertTrue(tgd.dgroups[0].all()) - self.assertNumpyAll(tgd.uid, np.array([1], dtype=constants.np_int)) + self.assertNumpyAll(tgd.uid, np.array([1], dtype=constants.NP_INT)) def test_write_to_netcdf_dataset(self): tgd = self.get_tgd() diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py index 80ca65118..2d5eefa51 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py @@ -1,14 +1,14 @@ import os -from datetime import datetime as dt -import datetime import itertools from copy import deepcopy from collections import OrderedDict - import numpy as np + from shapely import wkt from shapely.ops import cascaded_union +from datetime import datetime as dt +import datetime from ocgis import constants from ocgis import RequestDataset from ocgis.interface.base.attributes import Attributes @@ -586,12 +586,12 @@ def test_write_to_netcdf_dataset_without_row_column_on_grid(self): with nc_scope(path, 'w') as ds: field.write_to_netcdf_dataset(ds) self.assertAsSetEqual(ds.variables.keys(), ['time', 'time_bounds', 'level', 'level_bounds', - constants.default_name_row_coordinates, - constants.default_name_col_coordinates, 'yc_corners', + constants.DEFAULT_NAME_ROW_COORDINATES, + constants.DEFAULT_NAME_COL_COORDINATES, 'yc_corners', 'xc_corners', 'tmax']) self.assertAsSetEqual(ds.dimensions.keys(), - ['time', 'bounds', 'level', constants.default_name_row_coordinates, - constants.default_name_col_coordinates, constants.default_name_corners_dimension]) + ['time', 'bounds', 'level', constants.DEFAULT_NAME_ROW_COORDINATES, + constants.DEFAULT_NAME_COL_COORDINATES, 
constants.DEFAULT_NAME_CORNERS_DIMENSION]) def test_write_to_netcdf_dataset_without_temporal(self): """Test without a temporal dimensions.""" diff --git a/src/ocgis/test/test_ocgis/test_util/test_helpers.py b/src/ocgis/test/test_ocgis/test_util/test_helpers.py index 0e2584d14..4bd12bbdc 100644 --- a/src/ocgis/test/test_ocgis/test_util/test_helpers.py +++ b/src/ocgis/test/test_ocgis/test_util/test_helpers.py @@ -1,13 +1,13 @@ from collections import OrderedDict import os import itertools +import numpy as np import fiona -import numpy as np from shapely.geometry import Point, mapping from datetime import datetime as dt, datetime -from ocgis.constants import ocgis_unique_geometry_identifier +from ocgis.constants import OCGIS_UNIQUE_GEOMETRY_IDENTIFIER from ocgis.interface.base.crs import Spherical, CoordinateReferenceSystem from ocgis.exc import SingleElementError, ShapeError from ocgis.test.test_ocgis.test_interface.test_base.test_dimension.test_spatial import AbstractTestSpatialDimension @@ -236,7 +236,7 @@ def test_add_shapefile_unique_identifier(self): sci = ShpCabinetIterator(path=out_path) records = list(sci) - self.assertAsSetEqual([1, 2], [xx['properties'][ocgis_unique_geometry_identifier] for xx in records]) + self.assertAsSetEqual([1, 2], [xx['properties'][OCGIS_UNIQUE_GEOMETRY_IDENTIFIER] for xx in records]) self.assertAsSetEqual([6, 60], [xx['properties']['fid'] for xx in records]) self.assertEqual(CoordinateReferenceSystem(records[0]['meta']['crs']), crs) @@ -254,7 +254,7 @@ def test_add_shapefile_unique_identifier(self): add_shapefile_unique_identifier(in_path, out_path, template='fid') sci = ShpCabinetIterator(path=out_path) records = list(sci) - self.assertAsSetEqual([6, 60], [xx['properties'][ocgis_unique_geometry_identifier] for xx in records]) + self.assertAsSetEqual([6, 60], [xx['properties'][OCGIS_UNIQUE_GEOMETRY_IDENTIFIER] for xx in records]) # test with a different name attribute out_path = os.path.join(self.current_dir_output, 'name.shp') diff 
--git a/src/ocgis/test/test_simple/test_simple.py b/src/ocgis/test/test_simple/test_simple.py index b13dae016..626e0a4d4 100644 --- a/src/ocgis/test/test_simple/test_simple.py +++ b/src/ocgis/test/test_simple/test_simple.py @@ -1,7 +1,6 @@ import re import unittest import itertools -import datetime import os.path from abc import ABCMeta, abstractproperty import netCDF4 as nc @@ -10,28 +9,29 @@ from copy import deepcopy from csv import DictReader import tempfile +import numpy as np from fiona.crs import from_string from osgeo.osr import SpatialReference -import numpy as np from shapely.geometry.point import Point from shapely.geometry.polygon import Polygon import fiona from shapely.geometry.geo import mapping from shapely import wkt +import datetime from ocgis.api.operations import OcgOperations from ocgis.api.interpreter import OcgInterpreter from ocgis.api.parms.definition import SpatialOperation from ocgis.util.helpers import make_poly, project_shapely_geometry from ocgis import exc, env, constants -from ocgis.test.base import TestBase, nc_scope, attr +from ocgis.test.base import TestBase, nc_scope import ocgis from ocgis.exc import ExtentError, DefinitionValidationError from ocgis.interface.base import crs from ocgis.interface.base.crs import CoordinateReferenceSystem, WGS84, CFWGS84, WrappableCoordinateReferenceSystem -from ocgis.api.request.base import RequestDataset, RequestDatasetCollection -from ocgis.test.test_simple.make_test_data import SimpleNcNoLevel, SimpleNc, SimpleNcNoBounds, SimpleMaskNc, \ +from ocgis.api.request.base import RequestDataset +from ocgis.test.test_simple.make_test_data import SimpleNcNoLevel, SimpleNc, SimpleMaskNc, \ SimpleNc360, SimpleNcProjection, SimpleNcNoSpatialBounds, SimpleNcMultivariate from ocgis.api.parms.definition import OutputFormat from ocgis.interface.base.field import DerivedMultivariateField @@ -661,7 +661,7 @@ def test_nc_conversion(self): var = ds.variables[k] self.assertEqual(var.axis, v) with self.nc_scope(ret) 
as ds: - self.assertEqual(ds.file_format, constants.netCDF_default_data_model) + self.assertEqual(ds.file_format, constants.NETCDF_DEFAULT_DATA_MODEL) def test_nc_conversion_calc(self): calc_grouping = ['month'] @@ -744,7 +744,7 @@ def test_limiting_headers(self): ret = ops.execute() with open(ret) as f: reader = DictReader(f) - self.assertEqual(reader.fieldnames,[c.upper() for c in constants.required_headers]+['VALUE']) + self.assertEqual(reader.fieldnames,[c.upper() for c in constants.HEADERS_REQUIRED]+['VALUE']) with self.assertRaises(DefinitionValidationError): OcgOperations(dataset=self.get_dataset(),headers=['foo'],output_format='csv') @@ -955,7 +955,7 @@ def test_calc_multivariate_conversion(self): rd2['alias'] = 'var2' calc = [{'name': 'divide', 'func': 'divide', 'kwds': {'arr1': 'var1', 'arr2': 'var2'}}] - for o in constants.output_formats: + for o in constants.OUTPUT_FORMATS: calc_grouping = ['month'] try: diff --git a/src/ocgis/util/helpers.py b/src/ocgis/util/helpers.py index 0cd856dc4..6ab883b47 100644 --- a/src/ocgis/util/helpers.py +++ b/src/ocgis/util/helpers.py @@ -5,8 +5,8 @@ import sys from copy import deepcopy from tempfile import mkdtemp - import numpy as np + from shapely.geometry import Point from shapely.geometry.polygon import Polygon from osgeo.ogr import CreateGeometryFromWkb @@ -54,7 +54,7 @@ def add_shapefile_unique_identifier(in_path, out_path, name=None, template=None) :param str in_path: Full path to the input shapefile. :param str out_path: Full path to the output shapefile. :param str name: The name of the unique identifer. If ``None``, defaults to - :attr:`ocgis.constants.ocgis_unique_geometry_identifier`. + :attr:`ocgis.constants.OCGIS_UNIQUE_GEOMETRY_IDENTIFIER`. :param str template: The integer attribute to copy as the unique identifier. :returns: Path to the copied shapefile with the addition of a unique integer attribute called ``name``. 
:rtype: str diff --git a/src/ocgis/util/shp_process.py b/src/ocgis/util/shp_process.py index 694d57fd9..8ca1b62cc 100644 --- a/src/ocgis/util/shp_process.py +++ b/src/ocgis/util/shp_process.py @@ -6,7 +6,7 @@ import fiona from shapely.geometry.geo import shape, mapping -from ocgis.constants import ocgis_unique_geometry_identifier +from ocgis.constants import OCGIS_UNIQUE_GEOMETRY_IDENTIFIER class ShpProcess(object): @@ -24,7 +24,7 @@ def process(self, key=None, ugid=None, name=None): :param str key: The name of the new output shapefile. :param str ugid: The integer attribute to copy as the unique identifier. :param str name: The name of the unique identifer. If ``None``, defaults to - :attr:`ocgis.constants.ocgis_unique_geometry_identifier`. + :attr:`ocgis.constants.OCGIS_UNIQUE_GEOMETRY_IDENTIFIER`. """ # get the original shapefile file name @@ -39,7 +39,7 @@ def process(self, key=None, ugid=None, name=None): # update the schema to include UGID meta = self._get_meta_() - identifier = name or ocgis_unique_geometry_identifier + identifier = name or OCGIS_UNIQUE_GEOMETRY_IDENTIFIER if identifier in meta['schema']['properties']: meta['schema']['properties'].pop(identifier) new_properties = OrderedDict({identifier: 'int'}) From 48ccccafe09e14936527fc1e847864fb78f2a528 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Mon, 29 Dec 2014 18:19:36 -0700 Subject: [PATCH 45/71] change "csv+" to "csv-shp" #228 The "csv+" output format was renamed to "csv-shp". The old format name will still work. 
closes #228 --- doc/api.rst | 28 +- doc/examples.rst | 30 +- doc/sphinx_examples/advanced_subset.py | 58 +- doc/sphinx_examples/simple_subset.py | 41 +- .../colorado_noaa_climate_divisions.py | 12 +- examples/esgf_f2f_2013.py | 26 +- src/ocgis/api/parms/definition.py | 19 +- src/ocgis/constants.py | 11 +- src/ocgis/conv/base.py | 24 +- src/ocgis/conv/csv_.py | 8 +- src/ocgis/test/test_misc/test_conversion.py | 34 +- .../test_api/test_parms/test_definition.py | 11 +- .../test_api/test_request/test_base.py | 3 +- .../test/test_ocgis/test_api/test_subset.py | 27 +- .../test_ocgis/test_calc/test_calc_general.py | 54 +- .../test_library/test_index/test_duration.py | 9 +- .../test/test_ocgis/test_conv/test_csv_shp.py | 24 +- .../test/test_ocgis/test_util/test_zipper.py | 65 +- .../test/test_real_data/test_combinatorial.py | 133 +- .../test_real_data/test_multiple_datasets.py | 196 ++- src/ocgis/test/test_real_data/test_package.py | 26 +- .../test_real_data/test_random_datasets.py | 404 +++--- src/ocgis/test/test_simple/test_simple.py | 1124 +++++++++-------- 23 files changed, 1250 insertions(+), 1117 deletions(-) diff --git a/doc/api.rst b/doc/api.rst index fc2b2f6a0..0d98a1af4 100644 --- a/doc/api.rst +++ b/doc/api.rst @@ -244,8 +244,8 @@ snippet ====================== =========================================================================== Value Description ====================== =========================================================================== -`True` Return only the first time point and the first level slice (if applicable). -`False` (default) Return all data. +``True`` Return only the first time point and the first level slice (if applicable). +``False`` (default) Return all data. ====================== =========================================================================== .. 
_output_format_headline: @@ -256,12 +256,12 @@ output_format ====================== =============================================================================================== Value Description ====================== =============================================================================================== -`numpy` (default) Return a `ocgis.SpatialCollection` with keys matching `ugid` (see `geom`_). -`shp` A shapefile representation of the data. -`csv` A CSV file representation of the data. -`csv+` In addition to a CSV representation, shapefiles with primary key links to the CSV are provided. -`nc` A NetCDF4 file. -`geojson` A GeoJSON representation of the data. +``'numpy'`` (default) Return a :class:`~ocgis.SpatialCollection` with keys matching `ugid` (see `geom`_). +``'shp'`` A shapefile representation of the data. +``'csv'`` A CSV file representation of the data. +``'csv-shp'`` In addition to a CSV representation, shapefiles with primary key links to the CSV are provided. +``'nc'`` A NetCDF4 file. +``'geojson'`` A GeoJSON representation of the data. ====================== =============================================================================================== .. _agg_selection: @@ -269,12 +269,12 @@ Value Description agg_selection ~~~~~~~~~~~~~ -================= =============================================== -Value Description -================= =============================================== -`True` Aggregate (union) `geom`_ to a single geometry. -`False` (default) Leave `geom`_ as is. -================= =============================================== +=================== =============================================== +Value Description +=================== =============================================== +``True`` Aggregate (union) `geom`_ to a single geometry. +``False`` (default) Leave `geom`_ as is. 
+=================== =============================================== The purpose of this data manipulation is to ease the method required to aggregate (union) geometries into arbitrary regions. A simple example would be unioning the U.S. state boundaries of Utah, Nevada, Arizona, and New Mexico into a single polygon representing a "Southwestern Region". diff --git a/doc/examples.rst b/doc/examples.rst index d9129750a..61703fdbd 100644 --- a/doc/examples.rst +++ b/doc/examples.rst @@ -89,21 +89,21 @@ Now, the directory structure for `/tmp/foo` will look like: /tmp/foo/nc_output/nc_output_did.csv /tmp/foo/nc_output/nc_output.nc /tmp/foo/nc_output/nc_output_meta.txt - /tmp/foo/csv+_output - /tmp/foo/csv+_output/csv+_output_meta.txt - /tmp/foo/csv+_output/csv+_output_did.csv - /tmp/foo/csv+_output/csv+_output.csv - /tmp/foo/csv+_output/shp - /tmp/foo/csv+_output/shp/csv+_output_gid.csv - /tmp/foo/csv+_output/shp/csv+_output_gid.shp - /tmp/foo/csv+_output/shp/csv+_output_ugid.prj - /tmp/foo/csv+_output/shp/csv+_output_gid.dbf - /tmp/foo/csv+_output/shp/csv+_output_ugid.shp - /tmp/foo/csv+_output/shp/csv+_output_gid.prj - /tmp/foo/csv+_output/shp/csv+_output_ugid.shx - /tmp/foo/csv+_output/shp/csv+_output_gid.shx - /tmp/foo/csv+_output/shp/csv+_output_ugid.csv - /tmp/foo/csv+_output/shp/csv+_output_ugid.dbf + /tmp/foo/csv-shp_output + /tmp/foo/csv-shp_output/csv-shp_output_meta.txt + /tmp/foo/csv-shp_output/csv-shp_output_did.csv + /tmp/foo/csv-shp_output/csv-shp_output.csv + /tmp/foo/csv-shp_output/shp + /tmp/foo/csv-shp_output/shp/csv-shp_output_gid.csv + /tmp/foo/csv-shp_output/shp/csv-shp_output_gid.shp + /tmp/foo/csv-shp_output/shp/csv-shp_output_ugid.prj + /tmp/foo/csv-shp_output/shp/csv-shp_output_gid.dbf + /tmp/foo/csv-shp_output/shp/csv-shp_output_ugid.shp + /tmp/foo/csv-shp_output/shp/csv-shp_output_gid.prj + /tmp/foo/csv-shp_output/shp/csv-shp_output_ugid.shx + /tmp/foo/csv-shp_output/shp/csv-shp_output_gid.shx + 
/tmp/foo/csv-shp_output/shp/csv-shp_output_ugid.csv + /tmp/foo/csv-shp_output/shp/csv-shp_output_ugid.dbf /tmp/foo/shp_output /tmp/foo/shp_output/shp_output_ugid.shx /tmp/foo/shp_output/shp_output_ugid.prj diff --git a/doc/sphinx_examples/advanced_subset.py b/doc/sphinx_examples/advanced_subset.py index e56933add..caee733c8 100644 --- a/doc/sphinx_examples/advanced_subset.py +++ b/doc/sphinx_examples/advanced_subset.py @@ -1,55 +1,53 @@ -from ocgis import OcgOperations, RequestDataset, RequestDatasetCollection, env import os.path +from ocgis import OcgOperations, RequestDataset, RequestDatasetCollection, env + -## Directory holding climate data. +# Directory holding climate data. DATA_DIR = '/usr/local/climate_data/CanCM4' -## Filename to variable name mapping. -NCS = {'tasmin_day_CanCM4_decadal2000_r2i1p1_20010101-20101231.nc':'tasmin', - 'tas_day_CanCM4_decadal2000_r2i1p1_20010101-20101231.nc':'tas', - 'tasmax_day_CanCM4_decadal2000_r2i1p1_20010101-20101231.nc':'tasmax'} -## Always start with a snippet (if there are no calculations!). +# Filename to variable name mapping. +NCS = {'tasmin_day_CanCM4_decadal2000_r2i1p1_20010101-20101231.nc': 'tasmin', + 'tas_day_CanCM4_decadal2000_r2i1p1_20010101-20101231.nc': 'tas', + 'tasmax_day_CanCM4_decadal2000_r2i1p1_20010101-20101231.nc': 'tasmax'} +# Always start with a snippet (if there are no calculations!). SNIPPET = True -## Data returns will overwrite in this case. Use with caution!! +# Data returns will overwrite in this case. Use with caution!! 
env.OVERWRITE = True -## RequestDatasetCollection #################################################### +# RequestDatasetCollection ############################################################################################# rdc = RequestDatasetCollection([RequestDataset( - os.path.join(DATA_DIR,uri),var) for uri,var in NCS.iteritems()]) + os.path.join(DATA_DIR, uri), var) for uri, var in NCS.iteritems()]) -## Return In-Memory ############################################################ +# Return In-Memory ##################################################################################################### -## Data is returned as a dictionary-like object (SpatialCollection) with 51 keys -## (don't forget Puerto Rico...). A key in the returned dictionary corresponds -## to a geometry "ugid" with the value of type OcgCollection. +# Data is returned as a dictionary-like object (SpatialCollection) with 51 keys (don't forget Puerto Rico...). A key in +# the returned dictionary corresponds to a geometry "ugid" with the value of type OcgCollection. print('returning numpy...') -ops = OcgOperations(dataset=rdc,spatial_operation='clip',aggregate=True, - snippet=SNIPPET,geom='state_boundaries') +ops = OcgOperations(dataset=rdc, spatial_operation='clip', aggregate=True, snippet=SNIPPET, geom='state_boundaries') ret = ops.execute() -## Return a SpatialCollection, but only for a target state in a U.S. state -## boundaries shapefile. In this case, the UGID attribute value of 23 is associated -## with Nebraska. +# Return a SpatialCollection, but only for a target state in a U.S. state boundaries shapefile. In this case, the UGID +# attribute value of 23 is associated with Nebraska. 
print('returning numpy for a state...') -ops = OcgOperations(dataset=rdc,spatial_operation='clip',aggregate=True, - snippet=SNIPPET,geom='state_boundaries',select_ugid=[23]) +ops = OcgOperations(dataset=rdc, spatial_operation='clip', aggregate=True, snippet=SNIPPET, geom='state_boundaries', + select_ugid=[23]) ret = ops.execute() -## Write to Shapefile ########################################################## +# Write to Shapefile ################################################################################################### print('returning shapefile...') -ops = OcgOperations(dataset=rdc,spatial_operation='clip',aggregate=True, - snippet=SNIPPET,geom='state_boundaries',output_format='shp') +ops = OcgOperations(dataset=rdc, spatial_operation='clip', aggregate=True, snippet=SNIPPET, geom='state_boundaries', + output_format='shp') path = ops.execute() -## Write All Data to Keyed Format ############################################## +# Write All Data to Keyed Format ####################################################################################### -## Without the snippet, we are writing all data to the linked CSV-Shapefile -## output format. The operation will take considerably longer. -print('returning csv+...') -ops = OcgOperations(dataset=rdc,spatial_operation='clip',aggregate=True, - snippet=False,geom='state_boundaries',output_format='csv+') +# Without the snippet, we are writing all data to the linked CSV-Shapefile output format. The operation will take +# considerably longer. 
+print('returning csv-shp...') +ops = OcgOperations(dataset=rdc, spatial_operation='clip', aggregate=True, snippet=False, geom='state_boundaries', + output_format='csv-shp') path = ops.execute() \ No newline at end of file diff --git a/doc/sphinx_examples/simple_subset.py b/doc/sphinx_examples/simple_subset.py index bc59b9f79..71be0f289 100644 --- a/doc/sphinx_examples/simple_subset.py +++ b/doc/sphinx_examples/simple_subset.py @@ -1,40 +1,39 @@ -import ocgis import os +import ocgis + + -## Directory holding climate data. +# Directory holding climate data. DATA_DIR = '/usr/local/climate_data/CanCM4' -## Location and variable name for a daily decadal temperature simulation. -URI_TAS = os.path.join(DATA_DIR,'tas_day_CanCM4_decadal2000_r2i1p1_20010101-20101231.nc') +# Location and variable name for a daily decadal temperature simulation. +URI_TAS = os.path.join(DATA_DIR, 'tas_day_CanCM4_decadal2000_r2i1p1_20010101-20101231.nc') VAR_TAS = 'tas' -## Make it easy to switch to non-snippet requests. +# Make it easy to switch to non-snippet requests. SNIPPET = True -## Set output directory for shapefile and keyed formats. (MAKE SURE IT EXISTS!) +# Set output directory for shapefile and keyed formats. (MAKE SURE IT EXISTS!) ocgis.env.DIR_OUTPUT = '/tmp/foo' os.mkdir('/tmp/foo') -## The bounding box coordinates [minx, miny, maxx, maxy] for the state of -## Colorado in WGS84 latitude/longitude coordinates. +# The bounding box coordinates [minx, miny, maxx, maxy] for the state of Colorado in WGS84 latitude/longitude +# coordinates. BBOX = [-109.1, 36.9, -102.0, 41.0] -## Construct RequestDataset Object ############################################# +# Construct RequestDataset Object ###################################################################################### -## This object will be reused so just build it once. -rd = ocgis.RequestDataset(URI_TAS,VAR_TAS) +# This object will be reused so just build it once. 
+rd = ocgis.RequestDataset(URI_TAS, VAR_TAS) -## Returning NumPy Data Objects ################################################ +# Returning NumPy Data Objects ######################################################################################### -## The NumPy data type return is the default. Only the geometry and -## RequestDataset are required (except "snippet" of course...). See the -## documentation for the OcgCollection object to understand the return -## structure. -ret = ocgis.OcgOperations(dataset=rd,geom=BBOX,snippet=SNIPPET).execute() +# The NumPy data type return is the default. Only the geometry and RequestDataset are required (except "snippet" of +# course...). See the documentation for the OcgCollection object to understand the return structure. +ret = ocgis.OcgOperations(dataset=rd, geom=BBOX, snippet=SNIPPET).execute() -## Returning Converted Files ################################################### +# Returning Converted Files ############################################################################################ -output_formats = ['shp','csv','csv+','nc'] +output_formats = ['shp', 'csv', 'csv-shp', 'nc'] for output_format in output_formats: prefix = output_format + '_output' - ops = ocgis.OcgOperations(dataset=rd,geom=BBOX,snippet=SNIPPET, - output_format=output_format,prefix=prefix) + ops = ocgis.OcgOperations(dataset=rd, geom=BBOX, snippet=SNIPPET, output_format=output_format, prefix=prefix) ret = ops.execute() \ No newline at end of file diff --git a/examples/colorado_climate_divisions/colorado_noaa_climate_divisions.py b/examples/colorado_climate_divisions/colorado_noaa_climate_divisions.py index 545433619..8bade4674 100644 --- a/examples/colorado_climate_divisions/colorado_noaa_climate_divisions.py +++ b/examples/colorado_climate_divisions/colorado_noaa_climate_divisions.py @@ -12,19 +12,17 @@ select = lambda x: x['properties']['STATE'] == 'Colorado' -rows = filter(select,ocgis.ShpCabinetIterator(path=SHP_PATH)) -select_ugid = 
map(lambda x: x['properties']['UGID'],rows) +rows = filter(select, ocgis.ShpCabinetIterator(path=SHP_PATH)) +select_ugid = map(lambda x: x['properties']['UGID'], rows) select_ugid.sort() rds = [] for fn in FILENAMES: variable = fn.split('_')[0] - rd = ocgis.RequestDataset(uri=fn,variable=variable,conform_units_to='Celsius') + rd = ocgis.RequestDataset(uri=fn, variable=variable, conform_units_to='Celsius') rds.append(rd) -ops = ocgis.OcgOperations(dataset=rds,select_ugid=select_ugid,spatial_operation='clip', - aggregate=True,output_format='csv+',geom=SHP_PATH) +ops = ocgis.OcgOperations(dataset=rds, select_ugid=select_ugid, spatial_operation='clip', aggregate=True, + output_format='csv-shp', geom=SHP_PATH) ret = ops.execute() print(ret) - -import ipdb;ipdb.set_trace() \ No newline at end of file diff --git a/examples/esgf_f2f_2013.py b/examples/esgf_f2f_2013.py index b12e03f33..a284630b6 100644 --- a/examples/esgf_f2f_2013.py +++ b/examples/esgf_f2f_2013.py @@ -4,7 +4,6 @@ ocgis.env.DIR_OUTPUT = '/home/local/WX/ben.koziol/links/project/ocg/presentation/2013-ESGF-F2F/ocgis_output' ocgis.env.DIR_DATA = '/usr/local/climate_data' - rd_cmip5 = ocgis.RequestDataset('tas_day_CanCM4_decadal2000_r2i1p1_20010101-20101231.nc', 'tas', alias='cmip5_tas') @@ -16,25 +15,24 @@ alias='maurer_tasmax') rds = [ - rd_cmip5, - rd_narccap, - rd_maurer - ] + rd_cmip5, + rd_narccap, + rd_maurer +] for rd in rds: print rd.alias - ops = ocgis.OcgOperations(dataset=rd,output_format='shp',snippet=True,prefix=rd.alias, + ops = ocgis.OcgOperations(dataset=rd, output_format='shp', snippet=True, prefix=rd.alias, output_crs=ocgis.crs.WGS84()) ops.execute() - -################################################################################ + +# ####################################################################################################################### for rd in rds: - rd.time_region = {'month':[1]} + rd.time_region = {'month': [1]} calc_grouping = ['month'] -calc = 
[{'func':'mean','name':'mean'},{'func':'std','name':'stdev'}] -ops = ocgis.OcgOperations(dataset=rds,geom='state_boundaries',select_ugid=[16], - snippet=False,prefix='nebraska',abstraction='point', - aggregate=True,output_format='csv+',output_crs=ocgis.crs.WGS84(), - calc=calc,calc_grouping=calc_grouping,spatial_operation='clip') +calc = [{'func': 'mean', 'name': 'mean'}, {'func': 'std', 'name': 'stdev'}] +ops = ocgis.OcgOperations(dataset=rds, geom='state_boundaries', select_ugid=[16], snippet=False, prefix='nebraska', + abstraction='point', aggregate=True, output_format='csv-shp', output_crs=ocgis.crs.WGS84(), + calc=calc, calc_grouping=calc_grouping, spatial_operation='clip') ops.execute() \ No newline at end of file diff --git a/src/ocgis/api/parms/definition.py b/src/ocgis/api/parms/definition.py index e30c347d7..65a84e358 100644 --- a/src/ocgis/api/parms/definition.py +++ b/src/ocgis/api/parms/definition.py @@ -771,19 +771,24 @@ def _get_meta_(self): class OutputFormat(base.StringOptionParameter): name = 'output_format' - default = 'numpy' - valid = constants.OUTPUT_FORMATS + default = constants.OUTPUT_FORMAT_NUMPY + valid = [constants.OUTPUT_FORMAT_CSV, constants.OUTPUT_FORMAT_CSV_SHAPEFILE, constants.OUTPUT_FORMAT_GEOJSON, + constants.OUTPUT_FORMAT_METADATA, constants.OUTPUT_FORMAT_NETCDF, constants.OUTPUT_FORMAT_NUMPY, + constants.OUTPUT_FORMAT_SHAPEFILE] + + def __init__(self, init_value=None): + if init_value == constants.OUTPUT_FORMAT_CSV_SHAPEFILE_OLD: + init_value = constants.OUTPUT_FORMAT_CSV_SHAPEFILE + super(OutputFormat, self).__init__(init_value=init_value) @classmethod def iter_possible(cls): - from ocgis.conv.base import AbstractConverter - mmap = AbstractConverter.get_converter_map() - for key in mmap.keys(): - yield key + for element in cls.valid: + yield element def _get_meta_(self): ret = 'The output format is "{0}".'.format(self.value) - return(ret) + return ret class Prefix(base.StringParameter): diff --git a/src/ocgis/constants.py 
b/src/ocgis/constants.py index 5d37f40fc..a7c33e3ee 100644 --- a/src/ocgis/constants.py +++ b/src/ocgis/constants.py @@ -47,8 +47,15 @@ #: Required headers for every request. HEADERS_REQUIRED = ['did', 'ugid', 'gid'] -#: Key identifiers for output formats. -OUTPUT_FORMATS = ['numpy', 'nc', 'csv', 'csv+', 'shp', 'geojson', 'meta'] +OUTPUT_FORMAT_CSV = 'csv' +OUTPUT_FORMAT_CSV_SHAPEFILE = 'csv-shp' +OUTPUT_FORMAT_CSV_SHAPEFILE_OLD = 'csv+' +OUTPUT_FORMAT_ESMPY_GRID = 'esmpy' +OUTPUT_FORMAT_GEOJSON = 'geojson' +OUTPUT_FORMAT_METADATA = 'meta' +OUTPUT_FORMAT_NETCDF = 'nc' +OUTPUT_FORMAT_NUMPY = 'numpy' +OUTPUT_FORMAT_SHAPEFILE = 'shp' # Download URL for test datasets. TEST_DATA_DOWNLOAD_PREFIX = None diff --git a/src/ocgis/conv/base.py b/src/ocgis/conv/base.py index 5e74be9bb..9b1c1cda8 100644 --- a/src/ocgis/conv/base.py +++ b/src/ocgis/conv/base.py @@ -7,6 +7,7 @@ from shapely.geometry.polygon import Polygon import fiona +from ocgis import constants from ocgis.interface.base.field import Field from ocgis.conv.meta import MetaConverter from ocgis.util.inspect import Inspect @@ -265,22 +266,27 @@ def write(self): @classmethod def get_converter_map(cls): + """ + :returns: A dictionary with keys corresponding to an output format's short name. Values correspond to the + converter class. 
+ :rtype: dict + """ + from ocgis.conv.fiona_ import ShpConverter, GeoJsonConverter from ocgis.conv.csv_ import CsvConverter, CsvPlusConverter from ocgis.conv.numpy_ import NumpyConverter -# from ocgis.conv.shpidx import ShpIdxConverter -# from ocgis.conv.keyed import KeyedConverter from ocgis.conv.nc import NcConverter - mmap = {'shp': ShpConverter, - 'csv': CsvConverter, - 'csv+': CsvPlusConverter, - 'numpy': NumpyConverter, - 'geojson': GeoJsonConverter, + mmap = {constants.OUTPUT_FORMAT_SHAPEFILE: ShpConverter, + constants.OUTPUT_FORMAT_CSV: CsvConverter, + constants.OUTPUT_FORMAT_CSV_SHAPEFILE: CsvPlusConverter, + constants.OUTPUT_FORMAT_NUMPY: NumpyConverter, + constants.OUTPUT_FORMAT_GEOJSON: GeoJsonConverter, # 'shpidx':ShpIdxConverter, # 'keyed':KeyedConverter, - 'nc': NcConverter, - 'meta': MetaConverter} + constants.OUTPUT_FORMAT_NETCDF: NcConverter, + constants.OUTPUT_FORMAT_METADATA: MetaConverter} + return mmap @classmethod diff --git a/src/ocgis/conv/csv_.py b/src/ocgis/conv/csv_.py index 2d5f5eb7e..53e277838 100644 --- a/src/ocgis/conv/csv_.py +++ b/src/ocgis/conv/csv_.py @@ -1,13 +1,15 @@ import csv -from ocgis.conv.base import AbstractConverter from csv import excel import os from collections import OrderedDict import logging -from ocgis.util.logging_ocgis import ocgis_lh + import fiona from shapely.geometry.geo import mapping +from ocgis.conv.base import AbstractConverter +from ocgis.util.logging_ocgis import ocgis_lh + class OcgDialect(excel): lineterminator = '\n' @@ -67,7 +69,7 @@ def _build_(self,coll): fiona_object = fiona.open(fiona_path,'w',driver='ESRI Shapefile',crs=fiona_crs,schema=fiona_schema) else: ocgis_lh('creating a UGID-GID shapefile is not necessary for aggregated data. 
use UGID shapefile.', - 'conv.csv+', + 'conv.csv-shp', logging.WARN) fiona_object = None diff --git a/src/ocgis/test/test_misc/test_conversion.py b/src/ocgis/test/test_misc/test_conversion.py index 848943ca7..29e73d04d 100644 --- a/src/ocgis/test/test_misc/test_conversion.py +++ b/src/ocgis/test/test_misc/test_conversion.py @@ -1,10 +1,12 @@ import unittest -from ocgis.test.base import TestBase -import ocgis import netCDF4 as nc import os + import fiona +from ocgis.test.base import TestBase +import ocgis + class Test(TestBase): @@ -15,32 +17,30 @@ def test_nc_projection_writing(self): ds = nc.Dataset(ret) self.assertTrue('lambert_conformal_conic' in ds.variables) - def test_csv_plus(self): + def test_csv_shp(self): rd1 = self.test_data.get_rd('cancm4_tasmax_2011') rd2 = self.test_data.get_rd('maurer_bccr_1950') - ops = ocgis.OcgOperations(dataset=[rd1,rd2],snippet=True,output_format='csv+', - geom='state_boundaries',agg_selection=True, - select_ugid=[32]) + ops = ocgis.OcgOperations(dataset=[rd1, rd2], snippet=True, output_format='csv-shp', geom='state_boundaries', + agg_selection=True, select_ugid=[32]) ret = ops.execute() - meta = os.path.join(os.path.split(ret)[0],'ocgis_output_source_metadata.txt') - - with open(meta,'r') as f: + meta = os.path.join(os.path.split(ret)[0], 'ocgis_output_source_metadata.txt') + + with open(meta, 'r') as f: lines = f.readlines() self.assertTrue(len(lines) > 50) - def test_csv_plus_custom_headers(self): + def test_csv_shp_custom_headers(self): rd1 = self.test_data.get_rd('cancm4_tasmax_2011') rd2 = self.test_data.get_rd('maurer_bccr_1950') - headers = ['did','ugid','gid','alias','value','time'] - ops = ocgis.OcgOperations(dataset=[rd1,rd2],snippet=True,output_format='csv+', - geom='state_boundaries',agg_selection=True, - select_ugid=[32],headers=headers) + headers = ['did', 'ugid', 'gid', 'alias', 'value', 'time'] + ops = ocgis.OcgOperations(dataset=[rd1, rd2], snippet=True, output_format='csv-shp', geom='state_boundaries', + 
agg_selection=True, select_ugid=[32], headers=headers) ret = ops.execute() - - with open(ret,'r') as f: + + with open(ret, 'r') as f: line = f.readline() fheaders = [h.strip() for h in line.split(',')] - self.assertEqual(fheaders,[h.upper() for h in headers]) + self.assertEqual(fheaders, [h.upper() for h in headers]) def test_shp_custom_headers(self): rd1 = self.test_data.get_rd('cancm4_tasmax_2011') diff --git a/src/ocgis/test/test_ocgis/test_api/test_parms/test_definition.py b/src/ocgis/test/test_ocgis/test_api/test_parms/test_definition.py index 0288880b4..3f76793b6 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_parms/test_definition.py +++ b/src/ocgis/test/test_ocgis/test_api/test_parms/test_definition.py @@ -623,10 +623,17 @@ def test_bad_ordination(self): class TestOutputFormat(TestBase): create_dir = False + def test_init(self): + of = OutputFormat(constants.OUTPUT_FORMAT_CSV_SHAPEFILE_OLD) + self.assertEqual(of.value, constants.OUTPUT_FORMAT_CSV_SHAPEFILE) + @attr('esmpy7') def test_init_esmpy(self): - oo = OutputFormat('esmpy') - self.assertEqual(oo.value, 'esmpy') + oo = OutputFormat(constants.OUTPUT_FORMAT_ESMPY_GRID) + self.assertEqual(oo.value, constants.OUTPUT_FORMAT_ESMPY_GRID) + + def test_valid(self): + self.assertAsSetEqual(OutputFormat.valid, ['csv', 'csv-shp', 'geojson', 'meta', 'nc', 'numpy', 'shp']) class TestRegridDestination(TestBase): diff --git a/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py b/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py index 9eafe4aeb..f5f9a427f 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py +++ b/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py @@ -678,5 +678,6 @@ def test_with_overloads_real_data(self): 'variable': u'tas', 't_calendar': u'365_day'}] rdc = RequestDatasetCollection(dataset) - ops = OcgOperations(dataset=rdc, geom='state_boundaries', select_ugid=[25], output_format='csv+') + ops = OcgOperations(dataset=rdc, 
geom='state_boundaries', select_ugid=[25], + output_format=constants.OUTPUT_FORMAT_SHAPEFILE) ops.execute() diff --git a/src/ocgis/test/test_ocgis/test_api/test_subset.py b/src/ocgis/test/test_ocgis/test_api/test_subset.py index 7b03d2302..13314a7ea 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_subset.py +++ b/src/ocgis/test/test_ocgis/test_api/test_subset.py @@ -3,9 +3,9 @@ import os import pickle import itertools +import numpy as np import ESMF -import numpy as np from ocgis.api.parms.definition import OutputFormat from ocgis.interface.base.field import Field @@ -20,7 +20,7 @@ from ocgis.test.test_ocgis.test_api.test_parms.test_definition import TestGeom from ocgis.util.itester import itr_products_keywords from ocgis.util.logging_ocgis import ProgressOcgOperations -from ocgis import env +from ocgis import env, constants class TestSubsetOperation(TestBase): @@ -92,17 +92,18 @@ def test_dataset_as_field(self): ret = ops.execute() except ValueError as ve: self.assertIsNone(k.crs) - self.assertIn(k.output_format, ['csv', 'csv+', 'geojson', 'shp']) + self.assertIn(k.output_format, [constants.OUTPUT_FORMAT_CSV, constants.OUTPUT_FORMAT_CSV_SHAPEFILE, + constants.OUTPUT_FORMAT_GEOJSON, constants.OUTPUT_FORMAT_SHAPEFILE]) continue - if k.output_format == 'numpy': + if k.output_format == constants.OUTPUT_FORMAT_NUMPY: self.assertIsInstance(ret[1]['foo'], Field) continue - if k.output_format == 'meta': + if k.output_format == constants.OUTPUT_FORMAT_METADATA: self.assertIsInstance(ret, basestring) self.assertTrue(len(ret) > 50) continue - if k.output_format == 'esmpy': + if k.output_format == constants.OUTPUT_FORMAT_ESMPY_GRID: self.assertIsInstance(ret, ESMF.Field) continue @@ -111,7 +112,9 @@ def test_dataset_as_field(self): path_did = os.path.join(folder, '{0}_did.csv'.format(ops.prefix)) with open(path_did, 'r') as f: rows = list(csv.DictReader(f)) - self.assertEqual(rows, [{'ALIAS': 'foo', 'DID': '1', 'URI': '', 'UNITS': '', 'STANDARD_NAME': '', 'VARIABLE': 
'foo', 'LONG_NAME': ''}]) + self.assertEqual(rows, [ + {'ALIAS': 'foo', 'DID': '1', 'URI': '', 'UNITS': '', 'STANDARD_NAME': '', 'VARIABLE': 'foo', + 'LONG_NAME': ''}]) path_source_metadata = os.path.join(folder, '{0}_source_metadata.txt'.format(ops.prefix)) with open(path_source_metadata, 'r') as f: @@ -134,16 +137,18 @@ def test_dataset_as_field(self): contents = os.listdir(folder) - expected_contents = [xx.format(ops.prefix) for xx in '{0}_source_metadata.txt', '{0}_did.csv', '{0}.log', '{0}_metadata.txt'] + expected_contents = [xx.format(ops.prefix) for xx in + '{0}_source_metadata.txt', '{0}_did.csv', '{0}.log', '{0}_metadata.txt'] if k.output_format == 'nc': expected_contents.append('{0}.nc'.format(ops.prefix)) self.assertAsSetEqual(contents, expected_contents) - elif k.output_format == 'csv+': + elif k.output_format == constants.OUTPUT_FORMAT_CSV_SHAPEFILE: expected_contents.append('{0}.csv'.format(ops.prefix)) expected_contents.append('shp') self.assertAsSetEqual(contents, expected_contents) - elif k.output_format == 'shp': - expected_contents = ['{0}.shp', '{0}.dbf', '{0}.shx', '{0}.cpg', '{0}.log', '{0}_metadata.txt', '{0}_source_metadata.txt', '{0}_did.csv', '{0}.prj'] + elif k.output_format == constants.OUTPUT_FORMAT_SHAPEFILE: + expected_contents = ['{0}.shp', '{0}.dbf', '{0}.shx', '{0}.cpg', '{0}.log', '{0}_metadata.txt', + '{0}_source_metadata.txt', '{0}_did.csv', '{0}.prj'] expected_contents = [xx.format(ops.prefix) for xx in expected_contents] self.assertAsSetEqual(contents, expected_contents) diff --git a/src/ocgis/test/test_ocgis/test_calc/test_calc_general.py b/src/ocgis/test/test_ocgis/test_calc/test_calc_general.py index dc858cfba..9294c456d 100644 --- a/src/ocgis/test/test_ocgis/test_calc/test_calc_general.py +++ b/src/ocgis/test/test_ocgis/test_calc/test_calc_general.py @@ -1,14 +1,15 @@ import unittest import numpy as np +import itertools + from ocgis.api.operations import OcgOperations from datetime import datetime as dt import ocgis 
import datetime from ocgis.test.base import TestBase -import netCDF4 as nc -import itertools from ocgis.calc.engine import OcgCalculationEngine from ocgis.calc.library.thresholds import Threshold +from ocgis import constants class AbstractCalcBase(TestBase): @@ -18,54 +19,55 @@ def get_reshaped(self,arr): ret = np.ma.array(ret,mask=False) assert(len(ret.shape) == 3) return(ret) - - def run_standard_operations(self,calc,capture=False,output_format=None): - _aggregate = [False,True] - _calc_grouping = [['month'],['month','year'],'all'] - _output_format = output_format or ['numpy','csv+','nc'] + + def run_standard_operations(self, calc, capture=False, output_format=None): + _aggregate = [False, True] + _calc_grouping = [['month'], ['month', 'year'], 'all'] + _output_format = output_format or [constants.OUTPUT_FORMAT_NUMPY, constants.OUTPUT_FORMAT_CSV_SHAPEFILE, + constants.OUTPUT_FORMAT_NETCDF] captured = [] - for ii,tup in enumerate(itertools.product(_aggregate,_calc_grouping,_output_format)): - aggregate,calc_grouping,output_format = tup - if aggregate is True and output_format == 'nc': + for ii, tup in enumerate(itertools.product(_aggregate, _calc_grouping, _output_format)): + aggregate, calc_grouping, output_format = tup + if aggregate is True and output_format == constants.OUTPUT_FORMAT_NETCDF: continue - rd = self.test_data.get_rd('cancm4_tas',kwds={'time_region':{'year':[2001,2002]}}) + rd = self.test_data.get_rd('cancm4_tas', kwds={'time_region': {'year': [2001, 2002]}}) try: - ops = OcgOperations(dataset=rd,geom='state_boundaries',select_ugid=[25], - calc=calc,calc_grouping=calc_grouping,output_format=output_format, - aggregate=aggregate,prefix=('standard_ops_'+str(ii))) + ops = OcgOperations(dataset=rd, geom='state_boundaries', select_ugid=[25], calc=calc, + calc_grouping=calc_grouping, output_format=output_format, aggregate=aggregate, + prefix=('standard_ops_' + str(ii))) ret = ops.execute() - if output_format == 'numpy': + if output_format == 
constants.OUTPUT_FORMAT_NUMPY: ref = ret[25]['tas'].variables[calc[0]['name']].value if aggregate: - space_shape = [1,1] + space_shape = [1, 1] else: - space_shape = [5,4] + space_shape = [5, 4] if calc_grouping == ['month']: shp1 = [12] elif calc_grouping == 'all': - raise(NotImplementedError('calc_grouping all')) + raise NotImplementedError('calc_grouping all') else: shp1 = [24] test_shape = [1] + shp1 + [1] + space_shape - self.assertEqual(ref.shape,tuple(test_shape)) + self.assertEqual(ref.shape, tuple(test_shape)) if not aggregate: - ## ensure the geometry mask is appropriately update by the function + # ensure the geometry mask is appropriately update by the function try: - self.assertTrue(np.ma.is_masked(ref[0,0,0,0,0])) - ## likely a structure array with multiple masked elements per index + self.assertTrue(np.ma.is_masked(ref[0, 0, 0, 0, 0])) + # likely a structure array with multiple masked elements per index except TypeError: - self.assertTrue(np.all([np.ma.is_masked(element) for element in ref[0,0,0,0,0]])) + self.assertTrue(np.all([np.ma.is_masked(element) for element in ref[0, 0, 0, 0, 0]])) except ValueError: raise except AssertionError: raise except Exception as e: if capture: - parms = dict(aggregate=aggregate,calc_grouping=calc_grouping,output_format=output_format) - captured.append({'exception':e,'parms':parms}) + parms = dict(aggregate=aggregate, calc_grouping=calc_grouping, output_format=output_format) + captured.append({'exception': e, 'parms': parms}) else: raise - return(captured) + return captured class Test(AbstractCalcBase): diff --git a/src/ocgis/test/test_ocgis/test_calc/test_library/test_index/test_duration.py b/src/ocgis/test/test_ocgis/test_calc/test_library/test_index/test_duration.py index 57f235eee..ae33dbcd9 100644 --- a/src/ocgis/test/test_ocgis/test_calc/test_library/test_index/test_duration.py +++ b/src/ocgis/test/test_ocgis/test_calc/test_library/test_index/test_duration.py @@ -1,7 +1,7 @@ import csv - import numpy as np +from 
ocgis import constants from ocgis.test.base import attr from ocgis.calc.library.index.duration import Duration, FrequencyDuration from ocgis.exc import DefinitionValidationError @@ -99,7 +99,7 @@ def test_real_data_multiple_datasets(self): rd_tasmin = self.test_data.get_rd('maurer_2010_concatenated_tasmin', kwds=kwds) ops = OcgOperations(dataset=[rd_tasmax, rd_tasmin], - output_format='csv+', + output_format=constants.OUTPUT_FORMAT_CSV_SHAPEFILE, calc=[{'name': 'Frequency Duration', 'func': 'freq_duration', 'kwds': {'threshold': 25.0, 'operation': 'gte'}}], calc_grouping=['month', 'year'], @@ -118,7 +118,8 @@ def test_real_data(self): rd = self.test_data.get_rd('maurer_2010_concatenated_tasmax', kwds={'time_region': {'year': [1991], 'month': [7]}}) - for output_format in ['numpy', 'csv+', 'shp', 'csv']: + for output_format in [constants.OUTPUT_FORMAT_NUMPY, constants.OUTPUT_FORMAT_CSV_SHAPEFILE, + constants.OUTPUT_FORMAT_SHAPEFILE, constants.OUTPUT_FORMAT_CSV]: ops = OcgOperations(dataset=rd, output_format=output_format, prefix=output_format, calc=[{'name': 'Frequency Duration', @@ -135,7 +136,7 @@ def test_real_data(self): ref = ret[2778]['tasmax'].variables['Frequency Duration'].value self.assertEqual(ref.compressed()[0].shape, (2,)) - if output_format == 'csv+': + if output_format == constants.OUTPUT_FORMAT_CSV_SHAPEFILE: real = [{'COUNT': '1', 'UGID': '2778', 'DID': '1', 'CALC_KEY': 'freq_duration', 'MONTH': '7', 'DURATION': '7', 'GID': '2778', 'YEAR': '1991', 'VARIABLE': 'tasmax', 'DAY': '16'}, {'COUNT': '1', 'UGID': '2778', 'DID': '1', 'CALC_KEY': 'freq_duration', 'MONTH': '7', diff --git a/src/ocgis/test/test_ocgis/test_conv/test_csv_shp.py b/src/ocgis/test/test_ocgis/test_conv/test_csv_shp.py index c13b9d4d1..00070180e 100644 --- a/src/ocgis/test/test_ocgis/test_conv/test_csv_shp.py +++ b/src/ocgis/test/test_ocgis/test_conv/test_csv_shp.py @@ -3,25 +3,24 @@ import fiona +from ocgis import constants from ocgis.test.base import TestBase from 
ocgis.api.operations import OcgOperations from ocgis.util.shp_cabinet import ShpCabinetIterator class Test(TestBase): - def test_geometries_not_duplicated_with_equivalent_ugid(self): - ## if geometries are equivalent, they should not have duplicates in the - ## output shapefile. + # if geometries are equivalent, they should not have duplicates in the output shapefile. rd = self.test_data.get_rd('cancm4_tas') rd2 = self.test_data.get_rd('cancm4_tasmax_2011') - ops = OcgOperations(dataset=[rd,rd2],geom='state_boundaries',select_ugid=[16], - output_format='csv+',snippet=True) + ops = OcgOperations(dataset=[rd, rd2], geom='state_boundaries', select_ugid=[16], + output_format=constants.OUTPUT_FORMAT_CSV_SHAPEFILE, snippet=True) ops.execute() - - path_shp = os.path.join(self.current_dir_output,ops.prefix,'shp',ops.prefix+'_ugid.shp') + + path_shp = os.path.join(self.current_dir_output, ops.prefix, 'shp', ops.prefix + '_ugid.shp') with fiona.open(path_shp) as source: - self.assertEqual(len(list(source)),1) + self.assertEqual(len(list(source)), 1) def test_geometries_different_ugid(self): # equivalent geometries with different ugid values should be included @@ -30,9 +29,10 @@ def test_geometries_different_ugid(self): row[1]['properties']['UGID'] = 17 rd = self.test_data.get_rd('cancm4_tas') rd2 = self.test_data.get_rd('cancm4_tasmax_2011') - ops = OcgOperations(dataset=[rd, rd2], geom=row, output_format='csv+', snippet=True) + ops = OcgOperations(dataset=[rd, rd2], geom=row, output_format=constants.OUTPUT_FORMAT_CSV_SHAPEFILE, + snippet=True) ops.execute() - - path_shp = os.path.join(self.current_dir_output,ops.prefix,'shp',ops.prefix+'_ugid.shp') + + path_shp = os.path.join(self.current_dir_output, ops.prefix, 'shp', ops.prefix + '_ugid.shp') with fiona.open(path_shp) as source: - self.assertEqual(len(list(source)),2) + self.assertEqual(len(list(source)), 2) diff --git a/src/ocgis/test/test_ocgis/test_util/test_zipper.py 
b/src/ocgis/test/test_ocgis/test_util/test_zipper.py index 18a047cba..642724bd3 100644 --- a/src/ocgis/test/test_ocgis/test_util/test_zipper.py +++ b/src/ocgis/test/test_ocgis/test_util/test_zipper.py @@ -1,57 +1,58 @@ -import ocgis import tempfile import itertools -from ocgis.util.zipper import format_return import os from zipfile import is_zipfile, ZipFile import re import shutil + +import ocgis +from ocgis.util.zipper import format_return from ocgis.test.base import TestBase +from ocgis import constants -def test(): +def test(): tdata = TestBase.get_tst_data() rd = tdata.get_rd('cancm4_tas') - - output_formats = [ - 'csv+', - 'csv', - 'nc', - 'shp', - 'numpy'] - _with_auxiliary_files = [True,False] - for output_format,with_auxiliary_files in itertools.product(output_formats,_with_auxiliary_files): + + output_formats = [constants.OUTPUT_FORMAT_CSV_SHAPEFILE, + constants.OUTPUT_FORMAT_CSV, + constants.OUTPUT_FORMAT_NETCDF, + constants.OUTPUT_FORMAT_SHAPEFILE, + constants.OUTPUT_FORMAT_NUMPY] + _with_auxiliary_files = [True, False] + for output_format, with_auxiliary_files in itertools.product(output_formats, _with_auxiliary_files): dir_output = tempfile.mkdtemp() try: ocgis.env.DIR_OUTPUT = dir_output - - ops = ocgis.OcgOperations(dataset=rd,snippet=True,output_format=output_format, - geom='state_boundaries',select_ugid=[23], - prefix=output_format+'_data') + + ops = ocgis.OcgOperations(dataset=rd, snippet=True, output_format=output_format, geom='state_boundaries', + select_ugid=[23], prefix=output_format + '_data') ret_path = ops.execute() - - fmtd_ret = format_return(ret_path,ops,with_auxiliary_files=with_auxiliary_files) - assert(os.path.exists(fmtd_ret)) - if output_format in ['csv','nc'] and with_auxiliary_files is False: - assert(fmtd_ret.endswith(output_format)) + fmtd_ret = format_return(ret_path, ops, with_auxiliary_files=with_auxiliary_files) + + assert (os.path.exists(fmtd_ret)) + if output_format in [constants.OUTPUT_FORMAT_CSV, + 
constants.OUTPUT_FORMAT_NETCDF] and with_auxiliary_files is False: + assert (fmtd_ret.endswith(output_format)) else: - assert(is_zipfile(fmtd_ret)) - zipf = ZipFile(fmtd_ret,'r') + assert (is_zipfile(fmtd_ret)) + zipf = ZipFile(fmtd_ret, 'r') try: namelist = zipf.namelist() - assert(len(namelist) > 0) - if output_format in ['csv+']: - test = [re.match('shp/.+'.format(output_format),name) != None for name in namelist] - assert(any(test)) + assert (len(namelist) > 0) + if output_format in [constants.OUTPUT_FORMAT_CSV_SHAPEFILE]: + test = [re.match('shp/.+'.format(output_format), name) != None for name in namelist] + assert (any(test)) else: - test = [re.match('shp/.+'.format(output_format),name) == None for name in namelist] - assert(all(test)) + test = [re.match('shp/.+'.format(output_format), name) == None for name in namelist] + assert (all(test)) finally: zipf.close() - ## numpy formats are not implemented + # numpy formats are not implemented except NotImplementedError: - assert(output_format == 'numpy') + assert (output_format == constants.OUTPUT_FORMAT_NUMPY) finally: - ocgis.env.reset() + ocgis.env.reset() shutil.rmtree(dir_output) \ No newline at end of file diff --git a/src/ocgis/test/test_real_data/test_combinatorial.py b/src/ocgis/test/test_real_data/test_combinatorial.py index 020faf58b..e6c45f359 100644 --- a/src/ocgis/test/test_real_data/test_combinatorial.py +++ b/src/ocgis/test/test_real_data/test_combinatorial.py @@ -3,19 +3,20 @@ import os import shutil import itertools +import numpy as np + import fiona +from shapely import wkt + from ocgis.api.request.base import RequestDataset, RequestDatasetCollection import ocgis from ocgis.exc import DefinitionValidationError, ExtentError from ocgis.interface.base.crs import CFWGS84, CoordinateReferenceSystem from ocgis.util.spatial.fiona_maker import FionaMaker -from shapely import wkt - -from ocgis import OcgOperations, env +from ocgis import OcgOperations, env, constants from ocgis.test.base import 
TestBase, attr from ocgis.test.test_simple.make_test_data import SimpleNc, SimpleNcNoBounds, SimpleNcNoLevel from ocgis.test.test_simple.test_simple import TestSimpleBase -import numpy as np class TestCombinatorial(TestBase): @@ -187,13 +188,13 @@ def test_combinatorial_projection_with_geometries(self): # self.get_ret(kwds={'output_format':'shp','prefix':'as_point','abstraction':'point'}) features = [ - {'NAME':'a','wkt':'POLYGON((-105.020430 40.073118,-105.810753 39.327957,-105.660215 38.831183,-104.907527 38.763441,-104.004301 38.816129,-103.643011 39.802151,-103.643011 39.802151,-103.643011 39.802151,-103.643011 39.802151,-103.959140 40.118280,-103.959140 40.118280,-103.959140 40.118280,-103.959140 40.118280,-104.327957 40.201075,-104.327957 40.201075,-105.020430 40.073118))'}, - {'NAME':'b','wkt':'POLYGON((-102.212903 39.004301,-102.905376 38.906452,-103.311828 37.694624,-103.326882 37.295699,-103.898925 37.220430,-103.846237 36.746237,-102.619355 37.107527,-102.634409 37.724731,-101.874194 37.882796,-102.212903 39.004301))'}, - {'NAME':'c','wkt':'POLYGON((-105.336559 37.175269,-104.945161 37.303226,-104.726882 37.175269,-104.696774 36.844086,-105.043011 36.693548,-105.283871 36.640860,-105.336559 37.175269))'}, - {'NAME':'d','wkt':'POLYGON((-102.318280 39.741935,-103.650538 39.779570,-103.620430 39.448387,-103.349462 39.433333,-103.078495 39.606452,-102.325806 39.613978,-102.325806 39.613978,-102.333333 39.741935,-102.318280 39.741935))'}, - ] + {'NAME': 'a', 'wkt': 'POLYGON((-105.020430 40.073118,-105.810753 39.327957,-105.660215 38.831183,-104.907527 38.763441,-104.004301 38.816129,-103.643011 39.802151,-103.643011 39.802151,-103.643011 39.802151,-103.643011 39.802151,-103.959140 40.118280,-103.959140 40.118280,-103.959140 40.118280,-103.959140 40.118280,-104.327957 40.201075,-104.327957 40.201075,-105.020430 40.073118))'}, + {'NAME': 'b', 'wkt': 'POLYGON((-102.212903 39.004301,-102.905376 38.906452,-103.311828 37.694624,-103.326882 37.295699,-103.898925 
37.220430,-103.846237 36.746237,-102.619355 37.107527,-102.634409 37.724731,-101.874194 37.882796,-102.212903 39.004301))'}, + {'NAME': 'c', 'wkt': 'POLYGON((-105.336559 37.175269,-104.945161 37.303226,-104.726882 37.175269,-104.696774 36.844086,-105.043011 36.693548,-105.283871 36.640860,-105.336559 37.175269))'}, + {'NAME': 'd', 'wkt': 'POLYGON((-102.318280 39.741935,-103.650538 39.779570,-103.620430 39.448387,-103.349462 39.433333,-103.078495 39.606452,-102.325806 39.613978,-102.325806 39.613978,-102.333333 39.741935,-102.318280 39.741935))'}, + ] - for filename in ['polygon','point']: + for filename in ['polygon', 'point']: if filename == 'point': geometry = 'Point' to_write = deepcopy(features) @@ -204,84 +205,83 @@ def test_combinatorial_projection_with_geometries(self): to_write = features geometry = 'Polygon' - path = os.path.join(self.current_dir_output,'ab_{0}.shp'.format(filename)) - with FionaMaker(path,geometry=geometry) as fm: + path = os.path.join(self.current_dir_output, 'ab_{0}.shp'.format(filename)) + with FionaMaker(path, geometry=geometry) as fm: fm.write(to_write) no_bounds_nc = SimpleNcNoBounds() no_bounds_nc.write() - no_bounds_uri = os.path.join(env.DIR_OUTPUT,no_bounds_nc.filename) + no_bounds_uri = os.path.join(env.DIR_OUTPUT, no_bounds_nc.filename) no_level_nc = SimpleNcNoLevel() no_level_nc.write() - no_level_uri = os.path.join(env.DIR_OUTPUT,no_level_nc.filename) + no_level_uri = os.path.join(env.DIR_OUTPUT, no_level_nc.filename) ocgis.env.DIR_SHPCABINET = self.current_dir_output -# ocgis.env.DEBUG = True -# ocgis.env.VERBOSE = True + # ocgis.env.DEBUG = True + # ocgis.env.VERBOSE = True aggregate = [ - False, - True - ] + False, + True + ] spatial_operation = [ - 'intersects', - 'clip' - ] + 'intersects', + 'clip' + ] epsg = [ - 2163, - 4326, - None - ] + 2163, + 4326, + None + ] output_format = [ - 'nc', - 'shp', - 'csv+' - ] + constants.OUTPUT_FORMAT_NETCDF, + constants.OUTPUT_FORMAT_SHAPEFILE, + constants.OUTPUT_FORMAT_CSV_SHAPEFILE 
+ ] abstraction = [ - 'polygon', - 'point', - None - ] + 'polygon', + 'point', + None + ] dataset = [ - self.get_dataset(), - {'uri':no_bounds_uri,'variable':'foo'}, - {'uri':no_level_uri,'variable':'foo'} - ] + self.get_dataset(), + {'uri': no_bounds_uri, 'variable': 'foo'}, + {'uri': no_level_uri, 'variable': 'foo'} + ] geom = [ - 'ab_polygon', - 'ab_point' - ] + 'ab_polygon', + 'ab_point' + ] calc = [ - None, - [{'func':'mean','name':'my_mean'}] - ] + None, + [{'func': 'mean', 'name': 'my_mean'}] + ] calc_grouping = ['month'] - args = (aggregate,spatial_operation,epsg,output_format,abstraction,geom,calc,dataset) - for ii,tup in enumerate(itertools.product(*args)): - a,s,e,o,ab,g,c,d = tup + args = (aggregate, spatial_operation, epsg, output_format, abstraction, geom, calc, dataset) + for ii, tup in enumerate(itertools.product(*args)): + a, s, e, o, ab, g, c, d = tup if os.path.split(d['uri'])[1] == 'test_simple_spatial_no_bounds_01.nc': unbounded = True else: unbounded = False - if o == 'nc' and e == 4326: + if o == constants.OUTPUT_FORMAT_NETCDF and e == 4326: output_crs = CFWGS84() else: output_crs = CoordinateReferenceSystem(epsg=e) if e is not None else None - kwds = dict(aggregate=a,spatial_operation=s,output_format=o,output_crs=output_crs, - geom=g,abstraction=ab,dataset=d,prefix=str(ii),calc=c, - calc_grouping=calc_grouping) + kwds = dict(aggregate=a, spatial_operation=s, output_format=o, output_crs=output_crs, geom=g, + abstraction=ab, dataset=d, prefix=str(ii), calc=c, calc_grouping=calc_grouping) try: ops = OcgOperations(**kwds) ret = ops.execute() except DefinitionValidationError: - if o == 'nc': - if e not in [4326,None]: + if o == constants.OUTPUT_FORMAT_NETCDF: + if e not in [4326, None]: continue if s == 'clip': continue @@ -293,32 +293,33 @@ def test_combinatorial_projection_with_geometries(self): else: raise - if o == 'shp': - ugid_path = os.path.join(self.current_dir_output,ops.prefix,ops.prefix+'_ugid.shp') + if o == 
constants.OUTPUT_FORMAT_SHAPEFILE: + ugid_path = os.path.join(self.current_dir_output, ops.prefix, ops.prefix + '_ugid.shp') else: - ugid_path = os.path.join(self.current_dir_output,ops.prefix,'shp',ops.prefix+'_ugid.shp') + ugid_path = os.path.join(self.current_dir_output, ops.prefix, constants.OUTPUT_FORMAT_SHAPEFILE, + ops.prefix + '_ugid.shp') - if o != 'nc': - with fiona.open(ugid_path,'r') as f: + if o != constants.OUTPUT_FORMAT_NETCDF: + with fiona.open(ugid_path, 'r') as f: if e: second = output_crs else: second = CoordinateReferenceSystem(epsg=4326) - self.assertEqual(CoordinateReferenceSystem(value=f.meta['crs']),second) + self.assertEqual(CoordinateReferenceSystem(value=f.meta['crs']), second) - if o == 'shp': - with fiona.open(ret,'r') as f: + if o == constants.OUTPUT_FORMAT_SHAPEFILE: + with fiona.open(ret, 'r') as f: if a and ab == 'point': second = 'MultiPoint' elif ab is None: - field = RequestDataset(uri=d['uri'],variable='foo').get() + field = RequestDataset(uri=d['uri'], variable='foo').get() second = field.spatial.geom.get_highest_order_abstraction()._geom_type else: second = ab.title() - if second in ['Polygon','MultiPolygon']: - second = ['Polygon','MultiPolygon'] - elif second in ['Point','MultiPoint']: - second = ['Point','MultiPoint'] + if second in ['Polygon', 'MultiPolygon']: + second = ['Polygon', 'MultiPolygon'] + elif second in ['Point', 'MultiPoint']: + second = ['Point', 'MultiPoint'] self.assertTrue(f.meta['schema']['geometry'] in second) diff --git a/src/ocgis/test/test_real_data/test_multiple_datasets.py b/src/ocgis/test/test_real_data/test_multiple_datasets.py index cbc4623cf..e166cb413 100644 --- a/src/ocgis/test/test_real_data/test_multiple_datasets.py +++ b/src/ocgis/test/test_real_data/test_multiple_datasets.py @@ -1,8 +1,8 @@ from itertools import izip import os from copy import deepcopy - import numpy as np + import fiona from ocgis.api.operations import OcgOperations @@ -11,192 +11,166 @@ from ocgis.exc import 
DefinitionValidationError from ocgis.util.shp_cabinet import ShpCabinetIterator from ocgis.interface.base.crs import CFWGS84, CoordinateReferenceSystem +from ocgis import constants class Test(TestBase): - def setUp(self): TestBase.setUp(self) self.maurer = self.test_data.get_rd('maurer_bccr_1950') self.cancm4 = self.test_data.get_rd('cancm4_tasmax_2001') self.tasmin = self.test_data.get_rd('cancm4_tasmin_2001') -# self.albisccp = self.test_data.get_rd('ccsm4') - + @property def california(self): - ret = list(ShpCabinetIterator('state_boundaries',select_ugid=[25])) - return(ret) - + ret = list(ShpCabinetIterator('state_boundaries', select_ugid=[25])) + return ret + @property def dataset(self): - dataset = [ - deepcopy(self.maurer), - deepcopy(self.cancm4) - ] - return(dataset) + dataset = [deepcopy(self.maurer), deepcopy(self.cancm4)] + return dataset - def get_ops(self,kwds={}): + def get_ops(self, kwds={}): geom = self.california - ops = OcgOperations(dataset=self.dataset, - snippet=True, - geom=geom, - output_format='numpy') - for k,v in kwds.iteritems(): - setattr(ops,k,v) - return(ops) - - def get_ref(self,kwds={}): + ops = OcgOperations(dataset=self.dataset, snippet=True, geom=geom, output_format='numpy') + for k, v in kwds.iteritems(): + setattr(ops, k, v) + return ops + + def get_ref(self, kwds={}): ops = self.get_ops(kwds=kwds) ret = ops.execute() - return(ret[25]) - + return ret[25] + def test_default(self): - ops = self.get_ops() + ops = self.get_ops() ret = ops.execute() - - self.assertEqual(set(['Prcp','tasmax']),set(ret[25].keys())) - - shapes = {'Prcp':(1,1,1,77,83),'tasmax':(1,1,1,5,4)} - for (ugid,field_alias,var_alias,variable),shape in izip(ret.get_iter_elements(),shapes): - self.assertEqual(variable.value.shape,shapes[var_alias]) - + + self.assertEqual(set(['Prcp', 'tasmax']), set(ret[25].keys())) + + shapes = {'Prcp': (1, 1, 1, 77, 83), 'tasmax': (1, 1, 1, 5, 4)} + for (ugid, field_alias, var_alias, variable), shape in 
izip(ret.get_iter_elements(), shapes): + self.assertEqual(variable.value.shape, shapes[var_alias]) + def test_vector_wrap(self): geom = self.california - keys = [ - ['maurer_bccr_1950',(1,12,1,77,83)], - ['cancm4_tasmax_2011',(1,3650,1,5,4)] - ] + keys = [['maurer_bccr_1950', (1, 12, 1, 77, 83)], ['cancm4_tasmax_2011', (1, 3650, 1, 5, 4)]] for key in keys: prev_value = None - for vector_wrap in [True,False]: + for vector_wrap in [True, False]: rd = self.test_data.get_rd(key[0]) - prefix = 'vw_{0}_{1}'.format(vector_wrap,rd.variable) - ops = ocgis.OcgOperations(dataset=rd,geom=geom,snippet=False, - vector_wrap=vector_wrap,prefix=prefix) + prefix = 'vw_{0}_{1}'.format(vector_wrap, rd.variable) + ops = ocgis.OcgOperations(dataset=rd, geom=geom, snippet=False, vector_wrap=vector_wrap, prefix=prefix) ret = ops.execute() - ref = ret.gvu(25,rd.variable) - self.assertEqual(ref.shape,key[1]) + ref = ret.gvu(25, rd.variable) + self.assertEqual(ref.shape, key[1]) if prev_value is None: prev_value = ref else: self.assertTrue(np.all(ref == prev_value)) - + def test_aggregate_clip(self): - kwds = {'aggregate':True,'spatial_operation':'clip'} - ops = self.get_ops(kwds=kwds) - # for k, v in ops.dataset.iteritems(): - # v.get()[0, 0, 0, :, :].spatial.write_fiona('/tmp/{0}.shp'.format(k)) - # import ipdb;ipdb.set_trace() + kwds = {'aggregate': True, 'spatial_operation': 'clip'} + ref = self.get_ref(kwds) for field in ref.values(): for variable in field.variables.values(): - self.assertEqual(field.spatial.geom.shape,(1,1)) - self.assertEqual(variable.value.shape,(1,1,1,1,1)) - + self.assertEqual(field.spatial.geom.shape, (1, 1)) + self.assertEqual(variable.value.shape, (1, 1, 1, 1, 1)) + def test_calculation(self): - calc = [{'func':'mean','name':'mean'},{'func':'std','name':'std'}] + calc = [{'func': 'mean', 'name': 'mean'}, {'func': 'std', 'name': 'std'}] calc_grouping = ['year'] - kwds = {'aggregate':True, - 'spatial_operation':'clip', - 'calc':calc, - 
'calc_grouping':calc_grouping, - 'output_format':'numpy', - 'geom':self.california, - 'dataset':self.dataset, - 'snippet':False} + kwds = {'aggregate': True, + 'spatial_operation': 'clip', + 'calc': calc, + 'calc_grouping': calc_grouping, + 'output_format': constants.OUTPUT_FORMAT_NUMPY, + 'geom': self.california, + 'dataset': self.dataset, + 'snippet': False} ops = OcgOperations(**kwds) ret = ops.execute() - + ref = ret[25]['Prcp'] - self.assertEquals(set(ref.variables.keys()),set(['mean', 'std'])) + self.assertEquals(set(ref.variables.keys()), set(['mean', 'std'])) for value in ref.variables.itervalues(): - self.assertEqual(value.value.shape,(1,1,1,1,1)) + self.assertEqual(value.value.shape, (1, 1, 1, 1, 1)) ref = ret[25]['tasmax'] - self.assertEquals(set(ref.variables.keys()),set(['mean', 'std'])) + self.assertEquals(set(ref.variables.keys()), set(['mean', 'std'])) for value in ref.variables.itervalues(): - self.assertEqual(value.value.shape,(1,10,1,1,1)) - + self.assertEqual(value.value.shape, (1, 10, 1, 1, 1)) + def test_same_variable_name(self): - ds = deepcopy([self.cancm4,self.cancm4]) - + ds = deepcopy([self.cancm4, self.cancm4]) + with self.assertRaises(KeyError): OcgOperations(dataset=ds) ds[0].alias = 'foo' ds[1].alias = 'foo' with self.assertRaises(KeyError): OcgOperations(dataset=ds) - - ds = [deepcopy(self.cancm4),deepcopy(self.cancm4)] + + ds = [deepcopy(self.cancm4), deepcopy(self.cancm4)] ds[0].alias = 'foo_var' - ops = OcgOperations(dataset=ds,snippet=True) + ops = OcgOperations(dataset=ds, snippet=True) ret = ops.execute() - self.assertEqual(set(ret[1].keys()),set(['foo_var','tasmax'])) - values = [v.variables[k] for k,v in ret[1].iteritems()] + self.assertEqual(set(ret[1].keys()), set(['foo_var', 'tasmax'])) + values = [v.variables[k] for k, v in ret[1].iteritems()] self.assertTrue(np.all(values[0].value == values[1].value)) @attr('slow') def test_consolidating_projections(self): - - def assert_projection(path,check_ugid=True): + + def 
assert_projection(path, check_ugid=True): try: - source = [fiona.open(path,'r')] + source = [fiona.open(path, 'r')] except fiona.errors.DriverError: shp_path = os.path.split(path)[0] prefix = os.path.split(shp_path)[1] - shp_path = os.path.join(shp_path,'shp') + shp_path = os.path.join(shp_path, 'shp') if check_ugid: - ids = ['gid','ugid'] + ids = ['gid', 'ugid'] else: ids = ['gid'] - source = [fiona.open(os.path.join(shp_path,prefix+'_'+suffix+'.shp')) for suffix in ids] - + source = [fiona.open(os.path.join(shp_path, prefix + '_' + suffix + '.shp')) for suffix in ids] + try: for src in source: - self.assertEqual(CoordinateReferenceSystem(value=src.meta['crs']),CFWGS84()) + self.assertEqual(CoordinateReferenceSystem(value=src.meta['crs']), CFWGS84()) finally: for src in source: src.close() - + rd1 = self.test_data.get_rd('narccap_rcm3') rd1.alias = 'rcm3' rd2 = self.test_data.get_rd('narccap_crcm') rd2.alias = 'crcm' - rd = [ - rd1, - rd2 - ] - - for output_format in [ - 'csv+', - 'shp', - 'nc' - ]: - + rd = [rd1, rd2] + + for output_format in [constants.OUTPUT_FORMAT_CSV_SHAPEFILE, constants.OUTPUT_FORMAT_SHAPEFILE, + constants.OUTPUT_FORMAT_NETCDF]: + try: - ops = ocgis.OcgOperations(dataset=rd,snippet=True,output_format=output_format, - geom='state_boundaries',agg_selection=False, - select_ugid=[25], - prefix='ca'+output_format, - output_crs=CFWGS84()) + ops = ocgis.OcgOperations(dataset=rd, snippet=True, output_format=output_format, + geom='state_boundaries', agg_selection=False, select_ugid=[25], + prefix='ca' + output_format, output_crs=CFWGS84()) ret = ops.execute() - ## writing to a reference projection is currently not supported for - ## netCDF data. + # writing to a reference projection is currently not supported for netCDF data. 
except DefinitionValidationError: - if output_format == 'nc': + if output_format == constants.OUTPUT_FORMAT_NETCDF: continue else: raise assert_projection(ret) - - ops = ocgis.OcgOperations(dataset=rd,snippet=True,output_format=output_format, - geom='state_boundaries',agg_selection=True, - prefix='states'+output_format, - output_crs=CFWGS84()) + + ops = ocgis.OcgOperations(dataset=rd, snippet=True, output_format=output_format, geom='state_boundaries', + agg_selection=True, prefix='states' + output_format, output_crs=CFWGS84()) ret = ops.execute() assert_projection(ret) - - ops = ocgis.OcgOperations(dataset=rd,snippet=True,output_format=output_format, - prefix='rcm3_crcm_domain'+output_format, - output_crs=CFWGS84()) + + ops = ocgis.OcgOperations(dataset=rd, snippet=True, output_format=output_format, + prefix='rcm3_crcm_domain' + output_format, output_crs=CFWGS84()) ret = ops.execute() - assert_projection(ret,check_ugid=False) + assert_projection(ret, check_ugid=False) diff --git a/src/ocgis/test/test_real_data/test_package.py b/src/ocgis/test/test_real_data/test_package.py index 6515f9d60..fbaeb6886 100644 --- a/src/ocgis/test/test_real_data/test_package.py +++ b/src/ocgis/test/test_real_data/test_package.py @@ -1,7 +1,7 @@ -import unittest from ocgis.test.base import TestBase import ocgis from ocgis.exc import DefinitionValidationError +from ocgis import constants class Test(TestBase): @@ -9,20 +9,14 @@ class Test(TestBase): def test_nc(self): rd1 = self.test_data.get_rd('cancm4_tas') rd2 = self.test_data.get_rd('cancm4_rhsmax') - rd = [rd1,rd2] - for output_format in ['shp','csv','csv+','nc']: - if output_format == 'nc': + rd = [rd1, rd2] + for output_format in [constants.OUTPUT_FORMAT_SHAPEFILE, constants.OUTPUT_FORMAT_CSV, + constants.OUTPUT_FORMAT_CSV_SHAPEFILE, constants.OUTPUT_FORMAT_NETCDF]: + if output_format == constants.OUTPUT_FORMAT_NETCDF: with self.assertRaises(DefinitionValidationError): - ops = 
ocgis.OcgOperations(dataset=rd,output_format=output_format, - geom='state_boundaries',select_ugid=[25], - snippet=True,prefix=output_format) + ocgis.OcgOperations(dataset=rd, output_format=output_format, geom='state_boundaries', + select_ugid=[25], snippet=True, prefix=output_format) else: - ops = ocgis.OcgOperations(dataset=rd,output_format=output_format, - geom='state_boundaries',select_ugid=[25], - snippet=True,prefix=output_format) - ret = ops.execute() - - -if __name__ == "__main__": - #import sys;sys.argv = ['', 'Test.testName'] - unittest.main() \ No newline at end of file + ops = ocgis.OcgOperations(dataset=rd, output_format=output_format, geom='state_boundaries', + select_ugid=[25], snippet=True, prefix=output_format) + ops.execute() diff --git a/src/ocgis/test/test_real_data/test_random_datasets.py b/src/ocgis/test/test_real_data/test_random_datasets.py index a1088e1b8..f76e92ec2 100644 --- a/src/ocgis/test/test_real_data/test_random_datasets.py +++ b/src/ocgis/test/test_real_data/test_random_datasets.py @@ -1,14 +1,14 @@ import itertools -from datetime import datetime as dt import unittest from csv import DictReader from copy import deepcopy import os - import numpy as np + import fiona from shapely.geometry.point import Point +from datetime import datetime as dt import ocgis from ocgis.calc.library.index.dynamic_kernel_percentile import DynamicDailyKernelPercentileThreshold from ocgis.test.base import TestBase, nc_scope, attr @@ -16,42 +16,41 @@ from ocgis.exc import MaskedDataError, ExtentError, RequestValidationError from ocgis.interface.base.crs import CFWGS84 from ocgis.api.request.base import RequestDataset +from ocgis import constants class TestCMIP3Masking(TestBase): - @attr('slow') def test_many_request_datasets(self): rd_base = self.test_data.get_rd('subset_test_Prcp') geom = [-74.0, 40.0, -72.0, 42.0] rds = [deepcopy(rd_base) for ii in range(500)] for rd in rds: - ret = OcgOperations(dataset=rd,geom=geom).execute() - 
self.assertEqual(ret[1]['Prcp'].variables['Prcp'].value.shape,(1,1800,1,1,1)) - + ret = OcgOperations(dataset=rd, geom=geom).execute() + self.assertEqual(ret[1]['Prcp'].variables['Prcp'].value.shape, (1, 1800, 1, 1, 1)) + def test(self): - for key in ['subset_test_Prcp','subset_test_Tavg_sresa2','subset_test_Tavg']: - ## test method to return a RequestDataset + for key in ['subset_test_Prcp', 'subset_test_Tavg_sresa2', 'subset_test_Tavg']: + # test method to return a RequestDataset rd = self.test_data.get_rd(key) geoms = [[-74.0, 40.0, -72.0, 42.0], [-74.0, 38.0, -72.0, 40.0]] for geom in geoms: try: - ## this will raise the exception from the 38/40 bounding box - OcgOperations(dataset=rd,output_format='shp',geom=geom, - prefix=str(geom[1])+'_'+key,allow_empty=False).execute() + # # this will raise the exception from the 38/40 bounding box + OcgOperations(dataset=rd, output_format='shp', geom=geom, + prefix=str(geom[1]) + '_' + key, allow_empty=False).execute() except MaskedDataError: if geom[1] == 38.0: - ## note all returned data is masked! - ret = OcgOperations(dataset=rd,output_format='numpy',geom=geom, - prefix=str(geom[1])+'_'+key,allow_empty=True).execute() + # note all returned data is masked! 
+ ret = OcgOperations(dataset=rd, output_format=constants.OUTPUT_FORMAT_NUMPY, geom=geom, + prefix=str(geom[1]) + '_' + key, allow_empty=True).execute() self.assertTrue(ret[1][rd.alias].variables[rd.alias].value.mask.all()) else: raise class TestCnrmCerfacs(TestBase): - @property def rd(self): return self.test_data.get_rd('rotated_pole_cnrm_cerfacs') @@ -59,7 +58,8 @@ def rd(self): def test_subset(self): """Test data may be subsetted and that coordinate transformations return the same value arrays.""" - ops = OcgOperations(dataset=self.rd, output_format='numpy', snippet=True, geom='world_countries', select_ugid=[69]) + ops = OcgOperations(dataset=self.rd, output_format='numpy', snippet=True, geom='world_countries', + select_ugid=[69]) ret = ops.execute() # assert some of the geometry values are masked @@ -67,8 +67,8 @@ def test_subset(self): # perform the operations but change the output coordinate system. the value arrays should be equivalent # regardless of coordinate transformation - ops2 = OcgOperations(dataset=self.rd, output_format='numpy', snippet=True, geom='world_countries', select_ugid=[69], - output_crs=CFWGS84()) + ops2 = OcgOperations(dataset=self.rd, output_format='numpy', snippet=True, geom='world_countries', + select_ugid=[69], output_crs=CFWGS84()) ret2 = ops2.execute() # value arrays should be the same @@ -80,8 +80,8 @@ def test_subset_shp(self): """Test conversion to shapefile.""" for ii, output_crs in enumerate([None, CFWGS84()]): - ops = OcgOperations(dataset=self.rd, output_format='shp', snippet=True, geom='world_countries', - select_ugid=[69], output_crs=output_crs, prefix=str(ii)) + ops = OcgOperations(dataset=self.rd, output_format=constants.OUTPUT_FORMAT_SHAPEFILE, snippet=True, + geom='world_countries', select_ugid=[69], output_crs=output_crs, prefix=str(ii)) ret = ops.execute() with fiona.open(ret) as source: @@ -91,108 +91,105 @@ def test_subset_shp(self): class Test(TestBase): - def test_cccma_rotated_pole(self): - ## with rotated 
pole, the uid mask was not being updated correctly following - ## a transformation back to rotated pole. this needed to be updated explicitly - ## in subset.py + # with rotated pole, the uid mask was not being updated correctly following a transformation back to rotated + # pole. this needed to be updated explicitly in subset.py rd = self.test_data.get_rd('rotated_pole_cccma') geom = (5.87161922454834, 47.26985931396479, 15.03811264038086, 55.05652618408209) - ops = ocgis.OcgOperations(dataset=rd,output_format='shp',geom=geom, - select_ugid=[1],snippet=True) + ops = ocgis.OcgOperations(dataset=rd, output_format='shp', geom=geom, + select_ugid=[1], snippet=True) ret = ops.execute() with fiona.open(ret) as source: - self.assertEqual(len(source),228) + self.assertEqual(len(source), 228) gid = [row['properties']['GID'] for row in source] for element in gid: self.assertTrue(element > 4000) - + def test_ichec_rotated_pole(self): - ## this point is far outside the domain + # this point is far outside the domain ocgis.env.OVERWRITE = True rd = self.test_data.get_rd('rotated_pole_ichec') - for geom in [[-100.,45.],[-100,45,-99,46]]: - ops = ocgis.OcgOperations(dataset=rd,output_format='nc', - calc=[{'func':'mean','name':'mean'}], - calc_grouping=['month'], - geom=geom) + for geom in [[-100., 45.], [-100, 45, -99, 46]]: + ops = ocgis.OcgOperations(dataset=rd, output_format='nc', calc=[{'func': 'mean', 'name': 'mean'}], + calc_grouping=['month'], geom=geom) with self.assertRaises(ExtentError): ops.execute() - + def test_narccap_cancm4_point_subset_no_abstraction(self): rd = self.test_data.get_rd('cancm4_tas') rd2 = self.test_data.get_rd('narccap_tas_rcm3_gfdl') rd.alias = 'tas_narccap' - rds = [rd,rd2] - geom = [-105.2751,39.9782] - ops = ocgis.OcgOperations(dataset=rds,geom=geom,output_format='csv+', - prefix='ncar_point',add_auxiliary_files=True,output_crs=ocgis.crs.CFWGS84(), + rds = [rd, rd2] + geom = [-105.2751, 39.9782] + ops = ocgis.OcgOperations(dataset=rds, 
geom=geom, output_format=constants.OUTPUT_FORMAT_CSV_SHAPEFILE, + prefix='ncar_point', add_auxiliary_files=True, output_crs=ocgis.crs.CFWGS84(), snippet=True) with self.assertRaises(ValueError): ops.execute() - + def test_narccap_cancm4_point_subset_with_abstraction(self): rd = self.test_data.get_rd('cancm4_tas') rd2 = self.test_data.get_rd('narccap_tas_rcm3_gfdl') rd2.alias = 'tas_narccap' rds = [ - rd, - rd2 - ] - geom = [-105.2751,39.9782] - ops = ocgis.OcgOperations(dataset=rds,geom=geom,output_format='numpy', - prefix='ncar_point',add_auxiliary_files=True,output_crs=ocgis.crs.CFWGS84(), - snippet=True,abstraction='point') + rd, + rd2 + ] + geom = [-105.2751, 39.9782] + ops = ocgis.OcgOperations(dataset=rds, geom=geom, output_format='numpy', + prefix='ncar_point', add_auxiliary_files=True, output_crs=ocgis.crs.CFWGS84(), + snippet=True, abstraction='point') ret = ops.execute() - - ## ensure array is trimmed and masked tgeometries removed - self.assertEqual(ret[2]['tas_narccap'].spatial.shape,(4,4)) - ## only two geometries returned - self.assertEqual(ret[1]['tas'].spatial.shape,(4,4)) - ## different buffer radii should have unique identifiers - self.assertEqual(ret.keys(),[1,2]) - ## the first buffer radius is larger + + # # ensure array is trimmed and masked tgeometries removed + self.assertEqual(ret[2]['tas_narccap'].spatial.shape, (4, 4)) + # # only two geometries returned + self.assertEqual(ret[1]['tas'].spatial.shape, (4, 4)) + # # different buffer radii should have unique identifiers + self.assertEqual(ret.keys(), [1, 2]) + # # the first buffer radius is larger self.assertTrue(ret.geoms[1].area > ret.geoms[2].area) - + def test_narccap_cancm4_point_subset_with_abstraction_to_csv_shp(self): rd = self.test_data.get_rd('cancm4_tas') rd2 = self.test_data.get_rd('narccap_tas_rcm3_gfdl') rd.alias = 'tas_narccap' rds = [ - rd, - rd2 - ] - geom = [-105.2751,39.9782] - ops = ocgis.OcgOperations(dataset=rds,geom=geom,output_format='csv+', - 
prefix='ncar_point',add_auxiliary_files=True,output_crs=ocgis.crs.CFWGS84(), - snippet=True,abstraction='point') + rd, + rd2 + ] + geom = [-105.2751, 39.9782] + ops = ocgis.OcgOperations(dataset=rds, geom=geom, output_format=constants.OUTPUT_FORMAT_CSV_SHAPEFILE, + prefix='ncar_point', add_auxiliary_files=True, output_crs=ocgis.crs.CFWGS84(), + snippet=True, abstraction='point') ret = ops.execute() - ugid_shp_path = os.path.join(os.path.split(ret)[0],'shp',ops.prefix+'_ugid.shp') + ugid_shp_path = os.path.join(os.path.split(ret)[0], 'shp', ops.prefix + '_ugid.shp') with fiona.open(ugid_shp_path) as ds: rows = list(ds) - self.assertEqual(set([row['properties']['UGID'] for row in rows]),set([1,2])) - + self.assertEqual(set([row['properties']['UGID'] for row in rows]), set([1, 2])) + def test_collection_field_geometries_equivalent(self): - rd = self.test_data.get_rd('cancm4_tas',kwds=dict(time_region={'month':[6,7,8]})) - geom = ['state_boundaries',[{'properties':{'UGID':16},'geom':Point([-99.80780059778753,41.52315831343389])}]] - for vw,g in itertools.product([True,False],geom): - ops = ocgis.OcgOperations(dataset=rd,select_ugid=[16,32],geom=g, - aggregate=True,vector_wrap=vw,spatial_operation='clip') + rd = self.test_data.get_rd('cancm4_tas', kwds=dict(time_region={'month': [6, 7, 8]})) + geom = ['state_boundaries', + [{'properties': {'UGID': 16}, 'geom': Point([-99.80780059778753, 41.52315831343389])}]] + for vw, g in itertools.product([True, False], geom): + ops = ocgis.OcgOperations(dataset=rd, select_ugid=[16, 32], geom=g, + aggregate=True, vector_wrap=vw, spatial_operation='clip') coll = ops.execute() coll_geom = coll.geoms[16] - field_geom = coll[16]['tas'].spatial.geom.polygon.value[0,0] - self.assertTrue(coll_geom.bounds,field_geom.bounds) - self.assertTrue(coll_geom.area,field_geom.area) - + field_geom = coll[16]['tas'].spatial.geom.polygon.value[0, 0] + self.assertTrue(coll_geom.bounds, field_geom.bounds) + self.assertTrue(coll_geom.area, field_geom.area) 
+ def test_empty_subset_multi_geometry_wrapping(self): - ## adjacent state boundaries were causing an error with wrapping where - ## a reference to the source field was being updated. + # # adjacent state boundaries were causing an error with wrapping where + # # a reference to the source field was being updated. rd = self.test_data.get_rd('cancm4_tas') - ops = ocgis.OcgOperations(dataset=rd,geom='state_boundaries',select_ugid=[5,6,7]) + ops = ocgis.OcgOperations(dataset=rd, geom='state_boundaries', select_ugid=[5, 6, 7]) ret = ops.execute() - self.assertEqual(set(ret.keys()),set([5,6,7])) - + self.assertEqual(set(ret.keys()), set([5, 6, 7])) + def test_seasonal_calc(self): """Test some calculations using a seasonal grouping.""" @@ -214,7 +211,7 @@ def test_seasonal_calc(self): self.assertEqual(ret[23]['tas'].variables['my_std'].value.shape, (1, 4, 1, 4, 3)) temporal = ret[23]['tas'].temporal numtime = temporal.value_numtime - numtime_actual = np.array([56993., 56718., 56809., 56901.]) + numtime_actual = np.array([56993., 56718., 56809., 56901.]) self.assertNumpyAll(numtime, numtime_actual) calc = [{'func': 'mean', 'name': 'my_mean'}, {'func': 'std', 'name': 'my_std'}] @@ -229,21 +226,22 @@ def test_seasonal_calc(self): bounds_numtime_actual = np.array([[55152.0, 58804.0], [55183.0, 58529.0]]) self.assertNumpyAll(bounds_numtime, bounds_numtime_actual) - def test_seasonal_calc_dkp(self): + def test_seasonal_calc_dkp(self): key = 'dynamic_kernel_percentile_threshold' - calc = [{'func':key,'name':'dkp','kwds':{'operation':'lt','percentile':90,'width':5}}] - calc_grouping = [[3,4,5]] + calc = [{'func': key, 'name': 'dkp', 'kwds': {'operation': 'lt', 'percentile': 90, 'width': 5}}] + calc_grouping = [[3, 4, 5]] rd = self.test_data.get_rd('cancm4_tas') - ops = ocgis.OcgOperations(dataset=rd,calc=calc,calc_grouping=calc_grouping, - calc_sample_size=False,geom='state_boundaries', + ops = ocgis.OcgOperations(dataset=rd, calc=calc, calc_grouping=calc_grouping, + 
calc_sample_size=False, geom='state_boundaries', select_ugid=[23]) ret = ops.execute() to_test = ret[23]['tas'].variables['dkp'].value - reference = np.ma.array(data=[[[[[0,0,838],[831,829,834],[831,830,834],[831,835,830]]]]], - mask=[[[[[True,True,False],[False,False,False],[False,False,False],[False,False,False]]]]], + reference = np.ma.array(data=[[[[[0, 0, 838], [831, 829, 834], [831, 830, 834], [831, 835, 830]]]]], + mask=[[[[[True, True, False], [False, False, False], [False, False, False], + [False, False, False]]]]], dtype=DynamicDailyKernelPercentileThreshold.dtype) - self.assertNumpyAll(to_test,reference) - + self.assertNumpyAll(to_test, reference) + def test_selecting_single_value(self): rd = self.test_data.get_rd('cancm4_tas') lat_index = 32 @@ -251,58 +249,58 @@ def test_selecting_single_value(self): with nc_scope(rd.uri) as ds: lat_value = ds.variables['lat'][lat_index] lon_value = ds.variables['lon'][lon_index] - data_values = ds.variables['tas'][:,lat_index,lon_index] - - ops = ocgis.OcgOperations(dataset=rd,geom=[lon_value,lat_value],search_radius_mult=0.1) + data_values = ds.variables['tas'][:, lat_index, lon_index] + + ops = ocgis.OcgOperations(dataset=rd, geom=[lon_value, lat_value], search_radius_mult=0.1) ret = ops.execute() values = np.squeeze(ret[1]['tas'].variables['tas'].value) - self.assertNumpyAll(data_values,values.data) + self.assertNumpyAll(data_values, values.data) self.assertFalse(np.any(values.mask)) - - geom = Point(lon_value,lat_value).buffer(0.001) - ops = ocgis.OcgOperations(dataset=rd,geom=geom) + + geom = Point(lon_value, lat_value).buffer(0.001) + ops = ocgis.OcgOperations(dataset=rd, geom=geom) ret = ops.execute() values = np.squeeze(ret[1]['tas'].variables['tas'].value) - self.assertNumpyAll(data_values,values.data) + self.assertNumpyAll(data_values, values.data) self.assertFalse(np.any(values.mask)) - - geom = Point(lon_value-360.,lat_value).buffer(0.001) - ops = ocgis.OcgOperations(dataset=rd,geom=geom) + + geom = 
Point(lon_value - 360., lat_value).buffer(0.001) + ops = ocgis.OcgOperations(dataset=rd, geom=geom) ret = ops.execute() values = np.squeeze(ret[1]['tas'].variables['tas'].value) - self.assertNumpyAll(data_values,values.data) + self.assertNumpyAll(data_values, values.data) self.assertFalse(np.any(values.mask)) - - geom = Point(lon_value-360.,lat_value).buffer(0.001) - ops = ocgis.OcgOperations(dataset=rd,geom=geom,aggregate=True,spatial_operation='clip') + + geom = Point(lon_value - 360., lat_value).buffer(0.001) + ops = ocgis.OcgOperations(dataset=rd, geom=geom, aggregate=True, spatial_operation='clip') ret = ops.execute() values = np.squeeze(ret[1]['tas'].variables['tas'].value) - self.assertNumpyAll(data_values,values.data) + self.assertNumpyAll(data_values, values.data) self.assertFalse(np.any(values.mask)) - - ops = ocgis.OcgOperations(dataset=rd,geom=[lon_value,lat_value], - search_radius_mult=0.1,output_format='nc') + + ops = ocgis.OcgOperations(dataset=rd, geom=[lon_value, lat_value], + search_radius_mult=0.1, output_format='nc') ret = ops.execute() with nc_scope(ret) as ds: values = np.squeeze(ds.variables['tas'][:]) - self.assertNumpyAll(data_values,values) - + self.assertNumpyAll(data_values, values) + @attr('slow') def test_value_conversion(self): - ## confirm value data types are properly converted + # # confirm value data types are properly converted ocgis.env.DIR_DATA = ocgis.env.DIR_TEST_DATA rd_maurer = ocgis.RequestDataset('Maurer02new_OBS_tasmax_daily.1971-2000.nc', - 'tasmax', - alias='maurer_tasmax') + 'tasmax', + alias='maurer_tasmax') - ops = ocgis.OcgOperations(dataset=rd_maurer,output_format='shp',snippet=True, - output_crs=ocgis.crs.WGS84(),geom='state_boundaries', + ops = ocgis.OcgOperations(dataset=rd_maurer, output_format='shp', snippet=True, + output_crs=ocgis.crs.WGS84(), geom='state_boundaries', select_ugid=[25]) ops.execute() - + def test_qed_multifile(self): """Test concatenating three single time slice climatological files.""" - + 
key = ['qed_2013_maurer02v2_median_txxmmedm_january_1971-2000', 'qed_2013_maurer02v2_median_txxmmedm_february_1971-2000', 'qed_2013_maurer02v2_median_txxmmedm_march_1971-2000'] @@ -334,11 +332,11 @@ def test_maurer_concatenated_shp(self): with fiona.open(ret) as f: variables = set([row['properties']['VARIABLE'] for row in f]) self.assertEqual(variables, set([u'pr', u'tasmax', u'tasmin', u'tas'])) - + def test_point_shapefile_subset(self): """Test subsetting using a point shapefile.""" - _output_format = ['numpy', 'nc', 'csv', 'csv+'] + _output_format = ['numpy', 'nc', 'csv', constants.OUTPUT_FORMAT_CSV_SHAPEFILE] for output_format in _output_format: rd = self.test_data.get_rd('cancm4_tas') ops = OcgOperations(dataset=rd, geom='qed_city_centroids', output_format=output_format, @@ -346,7 +344,7 @@ def test_point_shapefile_subset(self): ret = ops.execute() if output_format == 'numpy': self.assertEqual(len(ret), 4) - + @attr('slow') def test_maurer_concatenated_tasmax_region(self): rd = self.test_data.get_rd('maurer_2010_concatenated_tasmax') @@ -363,7 +361,7 @@ def test_maurer_concatenated_tasmax_region(self): time_values = ref.temporal.value[select] kwds = { - 'time_region': {'month': [6, 7, 8], 'year': [1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999]}} + 'time_region': {'month': [6, 7, 8], 'year': [1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999]}} rd = self.test_data.get_rd('maurer_2010_concatenated_tasmax', kwds=kwds) ops = ocgis.OcgOperations(dataset=rd, geom='us_counties', select_ugid=[2778], output_format='numpy') @@ -374,135 +372,139 @@ def test_maurer_concatenated_tasmax_region(self): self.assertEqual(time_subset.shape, ref2.variables['tasmax'].value.shape) self.assertNumpyAll(time_subset, ref2.variables['tasmax'].value) self.assertFalse(np.any(ref2.variables['tasmax'].value < 0)) - + def test_time_region_subset(self): - - _month = [[6,7],[12],None,[1,3,8]] - _year = [[2011],None,[2012],[2011,2013]] - - def run_test(month,year): - rd = 
self.test_data.get_rd('cancm4_rhs',kwds={'time_region':{'month':month,'year':year}}) - - ops = ocgis.OcgOperations(dataset=rd,geom='state_boundaries', + + _month = [[6, 7], [12], None, [1, 3, 8]] + _year = [[2011], None, [2012], [2011, 2013]] + + def run_test(month, year): + rd = self.test_data.get_rd('cancm4_rhs', kwds={'time_region': {'month': month, 'year': year}}) + + ops = ocgis.OcgOperations(dataset=rd, geom='state_boundaries', select_ugid=[25]) ret = ops.execute() - + ret = ret[25]['rhs'].temporal.value_datetime - + years = [dt.year for dt in ret.flat] months = [dt.month for dt in ret.flat] - + if year is not None: - self.assertEqual(set(years),set(year)) + self.assertEqual(set(years), set(year)) if month is not None: - self.assertEqual(set(months),set(month)) - - for month,year in itertools.product(_month,_year): - run_test(month,year) - + self.assertEqual(set(months), set(month)) + + for month, year in itertools.product(_month, _year): + run_test(month, year) + def test_time_range_time_region_subset(self): - time_range = [dt(2013,1,1),dt(2015,12,31)] - time_region = {'month':[6,7,8],'year':[2013,2014]} - kwds = {'time_range':time_range,'time_region':time_region} - rd = self.test_data.get_rd('cancm4_rhs',kwds=kwds) - ops = ocgis.OcgOperations(dataset=rd,geom='state_boundaries',select_ugid=[25]) + time_range = [dt(2013, 1, 1), dt(2015, 12, 31)] + time_region = {'month': [6, 7, 8], 'year': [2013, 2014]} + kwds = {'time_range': time_range, 'time_region': time_region} + rd = self.test_data.get_rd('cancm4_rhs', kwds=kwds) + ops = ocgis.OcgOperations(dataset=rd, geom='state_boundaries', select_ugid=[25]) ret = ops.execute() ref = ret[25]['rhs'] years = set([obj.year for obj in ref.temporal.value_datetime]) self.assertFalse(2015 in years) - + def test_time_range_time_region_do_not_overlap(self): - time_range = [dt(2013,1,1),dt(2015,12,31)] - time_region = {'month':[6,7,8],'year':[2013,2014,2018]} - kwds = {'time_range':time_range,'time_region':time_region} + 
time_range = [dt(2013, 1, 1), dt(2015, 12, 31)] + time_region = {'month': [6, 7, 8], 'year': [2013, 2014, 2018]} + kwds = {'time_range': time_range, 'time_region': time_region} with self.assertRaises(RequestValidationError): - self.test_data.get_rd('cancm4_rhs',kwds=kwds) + self.test_data.get_rd('cancm4_rhs', kwds=kwds) @attr('slow') def test_maurer_2010(self): - ## inspect the multi-file maurer datasets - keys = ['maurer_2010_pr','maurer_2010_tas','maurer_2010_tasmin','maurer_2010_tasmax'] - calc = [{'func':'mean','name':'mean'},{'func':'median','name':'median'}] + # # inspect the multi-file maurer datasets + keys = ['maurer_2010_pr', 'maurer_2010_tas', 'maurer_2010_tasmin', 'maurer_2010_tasmax'] + calc = [{'func': 'mean', 'name': 'mean'}, {'func': 'median', 'name': 'median'}] calc_grouping = ['month'] for key in keys: rd = self.test_data.get_rd(key) - + dct = rd.inspect_as_dct() - self.assertEqual(dct['derived']['Count'],'102564') - - ops = ocgis.OcgOperations(dataset=rd,snippet=True,select_ugid=[10,15], - output_format='numpy',geom='state_boundaries') + self.assertEqual(dct['derived']['Count'], '102564') + + ops = ocgis.OcgOperations(dataset=rd, snippet=True, select_ugid=[10, 15], + output_format='numpy', geom='state_boundaries') ret = ops.execute() - self.assertTrue(ret.gvu(10,rd.variable).sum() != 0) - self.assertTrue(ret.gvu(15,rd.variable).sum() != 0) - - ops = ocgis.OcgOperations(dataset=rd,snippet=False,select_ugid=[10,15], - output_format='numpy',geom='state_boundaries',calc=calc, - calc_grouping=calc_grouping) + self.assertTrue(ret.gvu(10, rd.variable).sum() != 0) + self.assertTrue(ret.gvu(15, rd.variable).sum() != 0) + + ops = ocgis.OcgOperations(dataset=rd, snippet=False, select_ugid=[10, 15], + output_format='numpy', geom='state_boundaries', calc=calc, + calc_grouping=calc_grouping) ret = ops.execute() - for calc_name in ['mean','median']: - self.assertEqual(ret[10][rd.alias].variables[calc_name].value.shape[1],12) - - ops = 
ocgis.OcgOperations(dataset=rd,snippet=False,select_ugid=[10,15], - output_format='csv+',geom='state_boundaries',calc=calc, - calc_grouping=calc_grouping,prefix=key) + for calc_name in ['mean', 'median']: + self.assertEqual(ret[10][rd.alias].variables[calc_name].value.shape[1], 12) + + ops = ocgis.OcgOperations(dataset=rd, snippet=False, select_ugid=[10, 15], + output_format=constants.OUTPUT_FORMAT_CSV_SHAPEFILE, geom='state_boundaries', + calc=calc, + calc_grouping=calc_grouping, prefix=key) ret = ops.execute() - + def test_clip_aggregate(self): - ## this geometry was hanging - rd = self.test_data.get_rd('cancm4_tas',kwds={'time_region':{'year':[2003]}}) - ops = OcgOperations(dataset=rd,geom='state_boundaries',select_ugid=[14,16], - aggregate=False,spatial_operation='clip',output_format='csv+') + # # this geometry was hanging + rd = self.test_data.get_rd('cancm4_tas', kwds={'time_region': {'year': [2003]}}) + ops = OcgOperations(dataset=rd, geom='state_boundaries', select_ugid=[14, 16], + aggregate=False, spatial_operation='clip', + output_format=constants.OUTPUT_FORMAT_CSV_SHAPEFILE) ret = ops.execute() - + @attr('slow') def test_narccap_point_subset_small(self): rd = self.test_data.get_rd('narccap_pr_wrfg_ncep') - geom = [-97.74278,30.26694] -# ocgis.env.VERBOSE = True -# ocgis.env.DEBUG = True - - calc = [{'func':'mean','name':'mean'}, - {'func':'median','name':'median'}, - {'func':'max','name':'max'}, - {'func':'min','name':'min'}] - calc_grouping = ['month','year'] - ops = ocgis.OcgOperations(dataset=rd,calc=calc,calc_grouping=calc_grouping, - output_format='numpy',geom=geom,abstraction='point', - snippet=False,allow_empty=False,output_crs=CFWGS84()) + geom = [-97.74278, 30.26694] + # ocgis.env.VERBOSE = True + # ocgis.env.DEBUG = True + + calc = [{'func': 'mean', 'name': 'mean'}, + {'func': 'median', 'name': 'median'}, + {'func': 'max', 'name': 'max'}, + {'func': 'min', 'name': 'min'}] + calc_grouping = ['month', 'year'] + ops = 
ocgis.OcgOperations(dataset=rd, calc=calc, calc_grouping=calc_grouping, + output_format='numpy', geom=geom, abstraction='point', + snippet=False, allow_empty=False, output_crs=CFWGS84()) ret = ops.execute() ref = ret[1]['pr'] - self.assertEqual(set(ref.variables.keys()),set(['mean', 'median', 'max', 'min'])) - + self.assertEqual(set(ref.variables.keys()), set(['mean', 'median', 'max', 'min'])) + def test_bad_time_dimension(self): """Test not formatting the time dimension.""" - for output_format in ['numpy', 'csv', 'csv+', 'shp', 'nc']: + for output_format in [constants.OUTPUT_FORMAT_NUMPY, constants.OUTPUT_FORMAT_CSV, + constants.OUTPUT_FORMAT_CSV_SHAPEFILE, constants.OUTPUT_FORMAT_SHAPEFILE, + constants.OUTPUT_FORMAT_NETCDF]: dataset = self.test_data.get_rd('snippet_seasonalbias') ops = OcgOperations(dataset=dataset, output_format=output_format, format_time=False, prefix=output_format) ret = ops.execute() - if output_format == 'numpy': + if output_format == constants.OUTPUT_FORMAT_NUMPY: self.assertNumpyAll(ret[1]['bias'].temporal.value, np.array([-712208.5, -712117., -712025., -711933.5])) self.assertNumpyAll(ret[1]['bias'].temporal.bounds, np.array([[-712254., -712163.], [-712163., -712071.], [-712071., -711979.], [-711979., -711888.]])) - if output_format == 'csv': + if output_format == constants.OUTPUT_FORMAT_CSV: with open(ret) as f: reader = DictReader(f) for row in reader: self.assertTrue(all([row[k] == '' for k in ['YEAR', 'MONTH', 'DAY']])) self.assertTrue(float(row['TIME']) < -50000) - if output_format == 'nc': + if output_format == constants.OUTPUT_FORMAT_NETCDF: self.assertNcEqual(ret, dataset.uri, check_types=False, ignore_attributes={'global': ['history'], 'bounds_time': ['calendar', 'units'], 'bias': ['_FillValue', 'grid_mapping', 'units']}, ignore_variables=['latitude_longitude']) - + def test_time_region_climatology(self): """Test for reading metadata from QED 2013 climate data files.""" @@ -533,15 +535,15 @@ def 
test_time_region_climatology(self): ops = ocgis.OcgOperations(dataset=rd, geom='state_boundaries', select_ugid=[16]) ret = ops.execute() ref = ret[16]['climatology_Tas_annual_max_of_annual_means'] - + def test_mfdataset_to_nc(self): rd = self.test_data.get_rd('maurer_2010_pr') - ops = OcgOperations(dataset=rd,output_format='nc',calc=[{'func':'mean','name':'my_mean'}], - calc_grouping=['year'],geom='state_boundaries',select_ugid=[23]) + ops = OcgOperations(dataset=rd, output_format='nc', calc=[{'func': 'mean', 'name': 'my_mean'}], + calc_grouping=['year'], geom='state_boundaries', select_ugid=[23]) ret = ops.execute() - field = RequestDataset(ret,'my_mean').get() - self.assertNumpyAll(field.temporal.value,np.array([ 18444., 18809.])) + field = RequestDataset(ret, 'my_mean').get() + self.assertNumpyAll(field.temporal.value, np.array([18444., 18809.])) + - if __name__ == '__main__': unittest.main() diff --git a/src/ocgis/test/test_simple/test_simple.py b/src/ocgis/test/test_simple/test_simple.py index 626e0a4d4..87c182bd4 100644 --- a/src/ocgis/test/test_simple/test_simple.py +++ b/src/ocgis/test/test_simple/test_simple.py @@ -1,5 +1,4 @@ import re -import unittest import itertools import os.path from abc import ABCMeta, abstractproperty @@ -42,33 +41,36 @@ class TestSimpleBase(TestBase): __metaclass__ = ABCMeta - + base_value = None return_shp = False var = 'foo' - + @abstractproperty - def nc_factory(self): pass + def nc_factory(self): + pass + @abstractproperty - def fn(self): pass - + def fn(self): + pass + def setUp(self): TestBase.setUp(self) self.nc_factory().write() - - def get_dataset(self,time_range=None,level_range=None,time_region=None): - uri = os.path.join(env.DIR_OUTPUT,self.fn) - return({'uri':uri,'variable':self.var, - 'time_range':time_range,'level_range':level_range, - 'time_region':time_region}) - - def get_ops(self,kwds={},time_range=None,level_range=None): - dataset = self.get_dataset(time_range,level_range) + + def get_dataset(self, 
time_range=None, level_range=None, time_region=None): + uri = os.path.join(env.DIR_OUTPUT, self.fn) + return ({'uri': uri, 'variable': self.var, + 'time_range': time_range, 'level_range': level_range, + 'time_region': time_region}) + + def get_ops(self, kwds={}, time_range=None, level_range=None): + dataset = self.get_dataset(time_range, level_range) if 'output_format' not in kwds: - kwds.update({'output_format':'numpy'}) - kwds.update({'dataset':dataset}) + kwds.update({'output_format': 'numpy'}) + kwds.update({'dataset': dataset}) ops = OcgOperations(**kwds) - return(ops) + return (ops) def get_ret(self, ops=None, kwds={}, shp=False, time_range=None, level_range=None): """ @@ -94,27 +96,27 @@ def get_ret(self, ops=None, kwds={}, shp=False, time_range=None, level_range=Non OcgInterpreter(ops2).execute() return ret - + def make_shp(self): ops = OcgOperations(dataset=self.dataset, output_format='shp') OcgInterpreter(ops).execute() - - + + class TestSimpleNoLevel(TestSimpleBase): - base_value = np.array([[1.0,1.0,2.0,2.0], - [1.0,1.0,2.0,2.0], - [3.0,3.0,4.0,4.0], - [3.0,3.0,4.0,4.0]]) + base_value = np.array([[1.0, 1.0, 2.0, 2.0], + [1.0, 1.0, 2.0, 2.0], + [3.0, 3.0, 4.0, 4.0], + [3.0, 3.0, 4.0, 4.0]]) nc_factory = SimpleNcNoLevel fn = 'test_simple_spatial_no_level_01.nc' - + def test_nc_write_no_level(self): ret = self.get_ret(kwds={'output_format': 'nc'}) ret2 = self.get_ret(kwds={'output_format': 'nc', 'dataset': {'uri': ret, 'variable': 'foo'}, 'prefix': 'level_again'}) self.assertNcEqual(ret, ret2, ignore_attributes={'global': ['history']}) - + ds = nc.Dataset(ret) try: self.assertTrue('level' not in ds.dimensions) @@ -134,7 +136,7 @@ class TestSimple(TestSimpleBase): def test_meta_attrs_applied(self): """Test overloaded metadata attributes are applied to output calculation.""" - calc = [{'func': 'mean', 'name':'mean', 'meta_attrs': {'this_is': 'something new', 'a_number': 5}}] + calc = [{'func': 'mean', 'name': 'mean', 'meta_attrs': {'this_is': 'something 
new', 'a_number': 5}}] calc_grouping = ['month'] ret = self.get_ret(kwds={'calc': calc, 'calc_grouping': calc_grouping, 'output_format': 'nc'}) with nc_scope(ret) as ds: @@ -158,18 +160,13 @@ def test_selection_geometry_crs_differs(self): dataset = self.get_dataset() rd = RequestDataset(**dataset) - # field = rd.get() - # coll = SpatialCollection() - # coll.add_field(1, None, field) - # conv = ShpConverter([coll], '/tmp', 'out', overwrite=True) - # conv.write() - ugeom = 'POLYGON((-104.000538 39.004301,-102.833871 39.215054,-102.833871 39.215054,-102.833871 39.215054,-102.879032 37.882796,-104.136022 37.867742,-104.000538 39.004301))' ugeom = wkt.loads(ugeom) from_sr = SpatialReference() from_sr.ImportFromEPSG(4326) to_sr = SpatialReference() - to_sr.ImportFromProj4('+proj=aea +lat_1=20 +lat_2=60 +lat_0=40 +lon_0=-96 +x_0=0 +y_0=0 +ellps=GRS80 +datum=NAD83 +units=m +no_defs') + to_sr.ImportFromProj4( + '+proj=aea +lat_1=20 +lat_2=60 +lat_0=40 +lon_0=-96 +x_0=0 +y_0=0 +ellps=GRS80 +datum=NAD83 +units=m +no_defs') ugeom = project_shapely_geometry(ugeom, from_sr, to_sr) crs = from_string(to_sr.ExportToProj4()) @@ -180,7 +177,8 @@ def test_selection_geometry_crs_differs(self): ret = ops.execute() to_test = ret[1]['foo'].variables['foo'].value[:, 0, 0, :, :] - actual = np.loads('\x80\x02cnumpy.ma.core\n_mareconstruct\nq\x01(cnumpy.ma.core\nMaskedArray\nq\x02cnumpy\nndarray\nq\x03K\x00\x85q\x04U\x01btRq\x05(K\x01K\x01K\x02K\x02\x87cnumpy\ndtype\nq\x06U\x02f8K\x00K\x01\x87Rq\x07(K\x03U\x01 180)) - - ret = self.get_ret(kwds={'vector_wrap':True}) + + ret = self.get_ret(kwds={'vector_wrap': True}) longs_wrap = _get_longs_(ret[1][self.var].spatial.abstraction_geometry.value) self.assertTrue(np.all(np.array(longs_wrap) < 180)) - - self.assertTrue(np.all(longs_unwrap-360 == longs_wrap)) - + + self.assertTrue(np.all(longs_unwrap - 360 == longs_wrap)) + def test_spatial_touch_only(self): - geom = [make_poly((38.2,39.3),(-93,-92))] - geom.append(make_poly((38,39),(-93.1,-92.1))) - 
- for abstraction,g in itertools.product(['polygon','point'],geom): + geom = [make_poly((38.2, 39.3), (-93, -92)), make_poly((38, 39), (-93.1, -92.1))] + + for abstraction, g in itertools.product(['polygon', 'point'], geom): try: - ops = self.get_ops(kwds={'geom':g,'abstraction':abstraction}) + ops = self.get_ops(kwds={'geom': g, 'abstraction': abstraction}) ret = ops.execute() - self.assertEqual(len(ret[1][self.var].spatial.uid.compressed()),4) - self.get_ret(kwds={'vector_wrap':False}) - ret = self.get_ret(kwds={'geom':g,'vector_wrap':False,'abstraction':abstraction}) - self.assertEqual(len(ret[1][self.var].spatial.uid.compressed()),4) + self.assertEqual(len(ret[1][self.var].spatial.uid.compressed()), 4) + self.get_ret(kwds={'vector_wrap': False}) + ret = self.get_ret(kwds={'geom': g, 'vector_wrap': False, 'abstraction': abstraction}) + self.assertEqual(len(ret[1][self.var].spatial.uid.compressed()), 4) except ExtentError: if abstraction == 'point': pass else: - # rd = ops.dataset.first() - # field = rd.get() - # field.spatial.write_fiona('/tmp/touch.shp') - # write_geom_dict({1:g},path='/tmp/should_touch.shp') raise - + def test_spatial(self): - geom = make_poly((38.1,39.1),(-93.1,-92.1)) - - for abstraction in ['polygon','point']: + geom = make_poly((38.1, 39.1), (-93.1, -92.1)) + + for abstraction in ['polygon', 'point']: n = 1 if abstraction == 'point' else 4 - ops = self.get_ops(kwds={'geom':geom,'abstraction':abstraction}) + ops = self.get_ops(kwds={'geom': geom, 'abstraction': abstraction}) ret = ops.execute() - self.assertEqual(len(ret[1][self.var].spatial.uid.compressed()),n) - self.get_ret(kwds={'vector_wrap':False}) - ret = self.get_ret(kwds={'geom':geom,'vector_wrap':False,'abstraction':abstraction}) - self.assertEqual(len(ret[1][self.var].spatial.uid.compressed()),n) + self.assertEqual(len(ret[1][self.var].spatial.uid.compressed()), n) + self.get_ret(kwds={'vector_wrap': False}) + ret = self.get_ret(kwds={'geom': geom, 'vector_wrap': False, 
'abstraction': abstraction}) + self.assertEqual(len(ret[1][self.var].spatial.uid.compressed()), n) class TestSimpleProjected(TestSimpleBase): - base_value = np.array([[1.0,1.0,2.0,2.0], - [1.0,1.0,2.0,2.0], - [3.0,3.0,4.0,4.0], - [3.0,3.0,4.0,4.0]]) + base_value = np.array([[1.0, 1.0, 2.0, 2.0], + [1.0, 1.0, 2.0, 2.0], + [3.0, 3.0, 4.0, 4.0], + [3.0, 3.0, 4.0, 4.0]]) nc_factory = SimpleNcProjection fn = 'test_simple_spatial_projected_01.nc' - + def test_differing_projection_no_output_crs(self): nc_normal = SimpleNc() nc_normal.write() - uri = os.path.join(self.current_dir_output,nc_normal.filename) - + uri = os.path.join(self.current_dir_output, nc_normal.filename) + rd_projected = self.get_dataset() rd_projected['alias'] = 'projected' - rd_normal = {'uri':uri,'variable':'foo','alias':'normal'} - dataset = [rd_projected,rd_normal] - - output_format = ['numpy','shp','nc','csv+'] + rd_normal = {'uri': uri, 'variable': 'foo', 'alias': 'normal'} + dataset = [rd_projected, rd_normal] + + output_format = [constants.OUTPUT_FORMAT_NUMPY, constants.OUTPUT_FORMAT_SHAPEFILE, + constants.OUTPUT_FORMAT_NETCDF, constants.OUTPUT_FORMAT_CSV_SHAPEFILE] for o in output_format: try: - OcgOperations(dataset=dataset,output_format=o) + OcgOperations(dataset=dataset, output_format=o) except DefinitionValidationError: - if o != 'numpy': + if o != constants.OUTPUT_FORMAT_NUMPY: pass - - - def test_differing_projection_with_output_crs(self): + + + def test_differing_projection_with_output_crs(self): nc_normal = SimpleNc() nc_normal.write() - uri = os.path.join(self.current_dir_output,nc_normal.filename) - + uri = os.path.join(self.current_dir_output, nc_normal.filename) + rd_projected = self.get_dataset() rd_projected['alias'] = 'projected' - rd_normal = {'uri':uri,'variable':'foo','alias':'normal'} - dataset = [rd_projected,rd_normal] - - output_format = [ - 'numpy','shp','nc', - 'csv+' - ] - + rd_normal = {'uri': uri, 'variable': 'foo', 'alias': 'normal'} + dataset = [rd_projected, 
rd_normal] + + output_format = [constants.OUTPUT_FORMAT_NUMPY, constants.OUTPUT_FORMAT_SHAPEFILE, + constants.OUTPUT_FORMAT_NETCDF, constants.OUTPUT_FORMAT_CSV_SHAPEFILE] + for o in output_format: try: - ops = OcgOperations(dataset=dataset,output_format=o,output_crs=CFWGS84(), + ops = OcgOperations(dataset=dataset, output_format=o, output_crs=CFWGS84(), prefix=o) ret = ops.execute() - - if o == 'numpy': + + if o == constants.OUTPUT_FORMAT_NUMPY: uids = [] for field in ret[1].itervalues(): uids.append(field.uid) - self.assertIsInstance(field.spatial.crs,CFWGS84) - self.assertEqual(set(uids),set([1,2])) - if o == 'shp': + self.assertIsInstance(field.spatial.crs, CFWGS84) + self.assertEqual(set(uids), set([1, 2])) + if o == constants.OUTPUT_FORMAT_SHAPEFILE: with fiona.open(ret) as f: - self.assertEqual(CoordinateReferenceSystem(value=f.meta['crs']),CFWGS84()) + self.assertEqual(CoordinateReferenceSystem(value=f.meta['crs']), CFWGS84()) aliases = set([row['properties']['ALIAS'] for row in f]) - self.assertEqual(set(['projected','normal']),aliases) - if o == 'csv+': - with open(ret,'r') as f: + self.assertEqual(set(['projected', 'normal']), aliases) + if o == constants.OUTPUT_FORMAT_CSV_SHAPEFILE: + with open(ret, 'r') as f: reader = csv.DictReader(f) - collect = {'dids':[],'aliases':[]} + collect = {'dids': [], 'aliases': []} for row in reader: collect['dids'].append(int(row['DID'])) collect['aliases'].append(row['ALIAS']) - self.assertEqual(set(['projected','normal']),set(collect['aliases'])) - self.assertEqual(set([1,2]),set(collect['dids']),msg='did missing in csv file') - - gid_shp = os.path.join(ops.dir_output,ops.prefix,'shp',ops.prefix+'_gid.shp') + self.assertEqual(set(['projected', 'normal']), set(collect['aliases'])) + self.assertEqual(set([1, 2]), set(collect['dids']), msg='did missing in csv file') + + gid_shp = os.path.join(ops.dir_output, ops.prefix, 'shp', ops.prefix + '_gid.shp') with fiona.open(gid_shp) as f: dids = set([row['properties']['DID'] for 
row in f]) - self.assertEqual(dids,set([1,2]),msg='did missing in overview file') - + self.assertEqual(dids, set([1, 2]), msg='did missing in overview file') + except DefinitionValidationError: - if o == 'nc': + if o == constants.OUTPUT_FORMAT_NETCDF: pass else: raise - + def test_nc_projection(self): dataset = self.get_dataset() ret = self.get_ret(kwds={'output_format': 'nc'}) self.assertNcEqual(dataset['uri'], ret, ignore_attributes={'global': ['history'], 'time_bnds': ['calendar', 'units'], 'crs': ['proj4', 'units']}) - + def test_nc_projection_to_shp(self): - ret = self.get_ret(kwds={'output_format':'shp'}) + ret = self.get_ret(kwds={'output_format': constants.OUTPUT_FORMAT_SHAPEFILE}) with fiona.open(ret) as f: - self.assertEqual(f.meta['crs']['proj'],'lcc') - + self.assertEqual(f.meta['crs']['proj'], 'lcc') + def test_with_geometry(self): - self.get_ret(kwds={'output_format': 'shp', 'prefix': 'as_polygon'}) + self.get_ret(kwds={'output_format': constants.OUTPUT_FORMAT_SHAPEFILE, 'prefix': 'as_polygon'}) features = [ {'NAME': 'a', @@ -1402,7 +1539,7 @@ def test_with_geometry(self): fm.write(features) ocgis.env.DIR_SHPCABINET = self.current_dir_output - ops = OcgOperations(dataset=self.get_dataset(), output_format='shp', + ops = OcgOperations(dataset=self.get_dataset(), output_format=constants.OUTPUT_FORMAT_SHAPEFILE, geom='ab_polygon') ret = ops.execute() ugid_shp = os.path.join(os.path.split(ret)[0], ops.prefix + '_ugid.shp') @@ -1410,7 +1547,7 @@ def test_with_geometry(self): with fiona.open(ugid_shp) as f: self.assertEqual(CoordinateReferenceSystem(value=f.meta['crs']), from_crs) - ops = OcgOperations(dataset=self.get_dataset(), output_format='shp', + ops = OcgOperations(dataset=self.get_dataset(), output_format=constants.OUTPUT_FORMAT_SHAPEFILE, geom='ab_polygon', output_crs=CFWGS84(), prefix='xx') ret = ops.execute() ugid_shp = os.path.join(os.path.split(ret)[0], ops.prefix + '_ugid.shp') @@ -1419,8 +1556,3 @@ def test_with_geometry(self): 
self.assertEqual(CoordinateReferenceSystem(value=f.meta['crs']), WGS84()) with fiona.open(ret) as f: self.assertEqual(CoordinateReferenceSystem(value=f.meta['crs']), WGS84()) - - -if __name__ == "__main__": -# import sys;sys.argv = ['', 'TestSimple.test_time_level_subset'] - unittest.main() From 8abb89dd53b504c67ea4c738477f89576c5c5f34 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Tue, 30 Dec 2014 15:18:06 -0700 Subject: [PATCH 46/71] add iteration methods to variables Added iteration method to variable objects. --- src/ocgis/conv/base.py | 4 +- src/ocgis/conv/csv_.py | 84 +++++++++--------- src/ocgis/interface/base/field.py | 14 ++- src/ocgis/interface/base/variable.py | 85 ++++++++++++++----- .../test/test_ocgis/test_conv/test_base.py | 21 ++--- .../test/test_ocgis/test_conv/test_csv_.py | 5 ++ .../test/test_ocgis/test_conv/test_fiona_.py | 2 + .../test_interface/test_base/test_field.py | 16 +++- .../test_interface/test_base/test_variable.py | 83 +++++++++++++++++- 9 files changed, 236 insertions(+), 78 deletions(-) create mode 100644 src/ocgis/test/test_ocgis/test_conv/test_csv_.py diff --git a/src/ocgis/conv/base.py b/src/ocgis/conv/base.py index 9b1c1cda8..e66a6e811 100644 --- a/src/ocgis/conv/base.py +++ b/src/ocgis/conv/base.py @@ -273,13 +273,13 @@ def get_converter_map(cls): """ from ocgis.conv.fiona_ import ShpConverter, GeoJsonConverter - from ocgis.conv.csv_ import CsvConverter, CsvPlusConverter + from ocgis.conv.csv_ import CsvConverter, CsvShapefileConverter from ocgis.conv.numpy_ import NumpyConverter from ocgis.conv.nc import NcConverter mmap = {constants.OUTPUT_FORMAT_SHAPEFILE: ShpConverter, constants.OUTPUT_FORMAT_CSV: CsvConverter, - constants.OUTPUT_FORMAT_CSV_SHAPEFILE: CsvPlusConverter, + constants.OUTPUT_FORMAT_CSV_SHAPEFILE: CsvShapefileConverter, constants.OUTPUT_FORMAT_NUMPY: NumpyConverter, constants.OUTPUT_FORMAT_GEOJSON: GeoJsonConverter, # 'shpidx':ShpIdxConverter, diff --git a/src/ocgis/conv/csv_.py b/src/ocgis/conv/csv_.py index 
53e277838..52956508b 100644 --- a/src/ocgis/conv/csv_.py +++ b/src/ocgis/conv/csv_.py @@ -17,43 +17,44 @@ class OcgDialect(excel): class CsvConverter(AbstractConverter): _ext = 'csv' - - def _build_(self,coll): + + def _build_(self, coll): headers = [h.upper() for h in coll.headers] - f = open(self.path,'w') - writer = csv.DictWriter(f,headers,dialect=OcgDialect) + f = open(self.path, 'w') + writer = csv.DictWriter(f, headers, dialect=OcgDialect) writer.writeheader() - ret = {'file_object':f,'csv_writer':writer} - return(ret) - - def _write_coll_(self,f,coll): + ret = {'file_object': f, 'csv_writer': writer} + return ret + + def _write_coll_(self, f, coll): writer = f['csv_writer'] - - for geom,row in coll.get_iter_dict(use_upper_keys=True): + + for geom, row in coll.get_iter_dict(use_upper_keys=True): writer.writerow(row) - def _finalize_(self,f): + def _finalize_(self, f): for fobj in f.itervalues(): try: fobj.close() except: pass -class CsvPlusConverter(CsvConverter): + +class CsvShapefileConverter(CsvConverter): _add_ugeom = True - - def __init__(self,*args,**kwargs): - CsvConverter.__init__(self,*args,**kwargs) + + def __init__(self, *args, **kwargs): + CsvConverter.__init__(self, *args, **kwargs) if self.ops is None: - raise(ValueError('The argument "ops" may not be "None".')) + raise ValueError('The argument "ops" may not be "None".') + + def _build_(self, coll): + ret = CsvConverter._build_(self, coll) - def _build_(self,coll): - ret = CsvConverter._build_(self,coll) - self._ugid_gid_store = {} - + if not self.ops.aggregate: - fiona_path = os.path.join(self._get_or_create_shp_folder_(),self.prefix+'_gid.shp') + fiona_path = os.path.join(self._get_or_create_shp_folder_(), self.prefix + '_gid.shp') archetype_field = coll._archetype_field try: @@ -64,34 +65,34 @@ def _build_(self,coll): else: raise - fiona_schema = {'geometry':archetype_field.spatial.abstraction_geometry._geom_type, - 'properties':OrderedDict([['DID','int'],['UGID','int'],['GID','int']])} - 
fiona_object = fiona.open(fiona_path,'w',driver='ESRI Shapefile',crs=fiona_crs,schema=fiona_schema) + fiona_schema = {'geometry': archetype_field.spatial.abstraction_geometry._geom_type, + 'properties': OrderedDict([['DID', 'int'], ['UGID', 'int'], ['GID', 'int']])} + fiona_object = fiona.open(fiona_path, 'w', driver='ESRI Shapefile', crs=fiona_crs, schema=fiona_schema) else: ocgis_lh('creating a UGID-GID shapefile is not necessary for aggregated data. use UGID shapefile.', 'conv.csv-shp', logging.WARN) fiona_object = None - - ret.update({'fiona_object':fiona_object}) - - return(ret) - - def _write_coll_(self,f,coll): + + ret.update({'fiona_object': fiona_object}) + + return ret + + def _write_coll_(self, f, coll): writer = f['csv_writer'] file_fiona = f['fiona_object'] rstore = self._ugid_gid_store is_aggregated = self.ops.aggregate - - for geom,row in coll.get_iter_dict(use_upper_keys=True): + + for geom, row in coll.get_iter_dict(use_upper_keys=True): writer.writerow(row) if not is_aggregated: - did,gid,ugid = row['DID'],row['GID'],row['UGID'] + did, gid, ugid = row['DID'], row['GID'], row['UGID'] try: if gid in rstore[did][ugid]: continue else: - raise(KeyError) + raise KeyError except KeyError: if did not in rstore: rstore[did] = {} @@ -99,22 +100,21 @@ def _write_coll_(self,f,coll): rstore[did][ugid] = [] if gid not in rstore[did][ugid]: rstore[did][ugid].append(gid) - - ## for multivariate calculation outputs the dataset identifier - ## is None. + + # for multivariate calculation outputs the dataset identifier is None. 
try: converted_did = int(did) except TypeError: converted_did = None - - feature = {'properties':{'GID':int(gid),'UGID':int(ugid),'DID':converted_did}, - 'geometry':mapping(geom)} + + feature = {'properties': {'GID': int(gid), 'UGID': int(ugid), 'DID': converted_did}, + 'geometry': mapping(geom)} try: file_fiona.write(feature) except ValueError as e: if feature['geometry']['type'] != file_fiona.meta['schema']['geometry']: msg = 'Spatial abstractions do not match. You may need to override "abstraction" and/or "s_abstraction"' - msg = '{0}. Original error message from Fiona is "ValueError({1})".'.format(msg,e.message) - raise(ValueError(msg)) + msg = '{0}. Original error message from Fiona is "ValueError({1})".'.format(msg, e.message) + raise ValueError(msg) else: raise diff --git a/src/ocgis/interface/base/field.py b/src/ocgis/interface/base/field.py index a9f6125b1..fa079a5be 100644 --- a/src/ocgis/interface/base/field.py +++ b/src/ocgis/interface/base/field.py @@ -3,8 +3,8 @@ from collections import deque import itertools import logging - import numpy as np + from shapely.ops import cascaded_union from shapely.geometry.multipoint import MultiPoint from shapely.geometry.multipolygon import MultiPolygon @@ -14,7 +14,7 @@ from ocgis.util.helpers import get_default_or_apply, get_none_or_slice, get_formatted_slice, get_reduced_slice, \ set_name_attributes from ocgis.interface.base.variable import Variable, VariableCollection -from ocgis import constants +from ocgis import SpatialCollection from ocgis.util.logging_ocgis import ocgis_lh @@ -154,6 +154,16 @@ def variables(self, value): if v._value is not None: assert v._value.shape == self.shape + def as_spatial_collection(self): + """ + :returns: A spatial collection containing the field. 
+ :rtype: :class:`~ocgis.SpatialCollection` + """ + + coll = SpatialCollection() + coll.add_field(1, None, self, properties=self.spatial.properties, name=self.name) + return coll + def get_between(self, dim, lower, upper): pos = self._axis_map[dim] ref = getattr(self, dim) diff --git a/src/ocgis/interface/base/variable.py b/src/ocgis/interface/base/variable.py index cb107d14c..9f29c40ba 100644 --- a/src/ocgis/interface/base/variable.py +++ b/src/ocgis/interface/base/variable.py @@ -1,11 +1,10 @@ import abc from copy import copy, deepcopy - import numpy as np from ocgis.api.collection import AbstractCollection from ocgis.interface.base.attributes import Attributes -from ocgis.util.helpers import get_iter +from ocgis.util.helpers import get_iter, iter_array from ocgis.exc import NoUnitsError, VariableInCollectionError @@ -204,13 +203,13 @@ def __init__(self, name=None, alias=None, units=None, meta=None, uid=None, value Attributes.__init__(self, attrs=attrs) AbstractValueVariable.__init__(self, value=value, units=units, dtype=dtype, fill_value=fill_value, name=name, conform_units_to=conform_units_to) - - def __getitem__(self,slc): + + def __getitem__(self, slc): ret = copy(self) if ret._value is not None: ret._value = self._value[slc] - return(ret) - + return (ret) + def __str__(self): units = '{0}' if self.units is None else '"{0}"' units = units.format(self.units) @@ -239,30 +238,50 @@ def get_empty_like(self, shape=None): ret = Variable(name=self.name, units=self.units, meta=deepcopy(self.meta), value=value, did=self.did, alias=self.alias, uid=self.uid, attrs=deepcopy(self.attrs)) return ret - - def _format_private_value_(self,value): + + def iter_melted(self, use_mask=True): + """ + :param bool use_mask: If ``True`` (the default), do not yield masked values. If ``False``, yield the underlying + masked data value. + :returns: A dictionary containing variable values for each index location in the array. 
+ :rtype: dict + """ + + units = self.units + uid = self.uid + did = self.did + alias = self.alias + name = self.name + attrs = self.attrs + + for idx, value in iter_array(self.value, use_mask=use_mask, return_value=True): + yld = {'value': value, 'units': units, 'uid': uid, 'did': did, 'alias': alias, 'name': name, 'slice': idx, + 'attrs': attrs} + yield yld + + def _format_private_value_(self, value): # the superclass method does nice things like conform units if appropriate value = AbstractValueVariable._format_private_value_(self, value) if value is None: ret = None else: - assert(isinstance(value,np.ndarray)) - if not isinstance(value,np.ma.MaskedArray): - ret = np.ma.array(value,mask=False) + assert (isinstance(value, np.ndarray)) + if not isinstance(value, np.ma.MaskedArray): + ret = np.ma.array(value, mask=False) else: ret = value - return(ret) - + return ret + def _get_value_(self): if self._value is None: self._set_value_from_source_() - return(self._value) - + return self._value + def _set_value_from_source_(self): - ## load the value from source using the referenced field - self._value = self._field._get_value_from_source_(self._data,self.name) - ## ensure the new value has the geometry masked applied - self._field._set_new_value_mask_(self._field,self._field.spatial.get_mask()) + # load the value from source using the referenced field + self._value = self._field._get_value_from_source_(self._data, self.name) + # ensure the new value has the geometry masked applied + self._field._set_new_value_mask_(self._field, self._field.spatial.get_mask()) class VariableCollection(AbstractCollection): @@ -301,6 +320,16 @@ def get_sliced_variables(self, slc): ret = VariableCollection(variables=variables) return ret + def iter_melted(self, **kwargs): + """ + :returns: Call :meth:`~ocgis.Variable.iter_melted` passing ``kwargs`` for each variable in the collection. 
+ :rtype: see :meth:`~ocgis.Variable.iter_melted` + """ + + for variable in self.itervalues(): + for row in variable.iter_melted(**kwargs): + yield row + class DerivedVariable(Variable): """ @@ -319,3 +348,21 @@ def __init__(self, **kwargs): self.parents = kwargs.pop('parents', None) super(DerivedVariable, self).__init__(**kwargs) + + def iter_melted(self, **kwargs): + calc_key = self.fdef['func'] + calc_alias = self.fdef['name'] + + if self.parents is not None: + first = self.parents.first() + name = first.name + alias = first.alias + else: + name, alias = None, None + + for row in super(DerivedVariable, self).iter_melted(**kwargs): + row['calc_key'] = calc_key + row['calc_alias'] = calc_alias + row['name'] = name + row['alias'] = alias + yield row diff --git a/src/ocgis/test/test_ocgis/test_conv/test_base.py b/src/ocgis/test/test_ocgis/test_conv/test_base.py index 691ed9c4d..f0b7ce6ca 100644 --- a/src/ocgis/test/test_ocgis/test_conv/test_base.py +++ b/src/ocgis/test/test_ocgis/test_conv/test_base.py @@ -1,16 +1,17 @@ from csv import DictReader -from ocgis.test.base import TestBase, nc_scope -from ocgis.api.collection import SpatialCollection -from ocgis.conv.csv_ import CsvConverter, CsvPlusConverter -import ocgis import os -from ocgis.conv.fiona_ import ShpConverter, GeoJsonConverter -from ocgis.conv.nc import NcConverter import itertools from copy import deepcopy import tempfile import numpy as np +from ocgis.test.base import TestBase, nc_scope +from ocgis.api.collection import SpatialCollection +from ocgis.conv.csv_ import CsvConverter, CsvShapefileConverter +import ocgis +from ocgis.conv.fiona_ import ShpConverter, GeoJsonConverter +from ocgis.conv.nc import NcConverter + class AbstractTestConverter(TestBase): @@ -38,9 +39,9 @@ def run_auxiliary_file_tst(self,Converter,file_list,auxiliary_file_list=None): outdir = tempfile.mkdtemp(dir=self.current_dir_output) try: conv = 
Converter([coll],outdir,'ocgis_output',add_auxiliary_files=add_auxiliary_files,ops=ops_arg) - ## CsvPlusConverter requires an operations argument + ## CsvShapefileConverter requires an operations argument except ValueError as e: - if Converter == CsvPlusConverter and ops_arg is None: + if Converter == CsvShapefileConverter and ops_arg is None: continue else: raise(e) @@ -114,7 +115,7 @@ def test_overwrite_true_shp(self): self.run_overwrite_true_tst(ShpConverter) def test_overwrite_true_csv_shp(self): - self.run_overwrite_true_tst(CsvPlusConverter,include_ops=True) + self.run_overwrite_true_tst(CsvShapefileConverter,include_ops=True) def test_add_auxiliary_files_csv(self): self.run_auxiliary_file_tst(CsvConverter,['ocgis_output.csv']) @@ -126,7 +127,7 @@ def test_add_auxiliary_files_nc(self): self.run_auxiliary_file_tst(NcConverter,['ocgis_output.nc']) def test_add_auxiliary_files_csv_shp(self): - self.run_auxiliary_file_tst(CsvPlusConverter,['ocgis_output.csv', 'shp']) + self.run_auxiliary_file_tst(CsvShapefileConverter,['ocgis_output.csv', 'shp']) def test_add_auxiliary_files_shp(self): self.run_auxiliary_file_tst(ShpConverter,['ocgis_output.dbf', 'ocgis_output.shx', 'ocgis_output.shp', 'ocgis_output.cpg', 'ocgis_output.prj']) diff --git a/src/ocgis/test/test_ocgis/test_conv/test_csv_.py b/src/ocgis/test/test_ocgis/test_conv/test_csv_.py new file mode 100644 index 000000000..f61af2e8a --- /dev/null +++ b/src/ocgis/test/test_ocgis/test_conv/test_csv_.py @@ -0,0 +1,5 @@ +from ocgis.test.test_ocgis.test_conv.test_base import AbstractTestConverter + + +class TestCsvConverter(AbstractTestConverter): + pass diff --git a/src/ocgis/test/test_ocgis/test_conv/test_fiona_.py b/src/ocgis/test/test_ocgis/test_conv/test_fiona_.py index 0167351cc..72307b4fc 100644 --- a/src/ocgis/test/test_ocgis/test_conv/test_fiona_.py +++ b/src/ocgis/test/test_ocgis/test_conv/test_fiona_.py @@ -1,6 +1,8 @@ from collections import OrderedDict import os + import fiona + import ocgis from 
ocgis.api.subset import SubsetOperation from ocgis.conv.fiona_ import ShpConverter diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py index 2d5eefa51..0e06e4240 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py @@ -9,7 +9,7 @@ from datetime import datetime as dt import datetime -from ocgis import constants +from ocgis import constants, SpatialCollection from ocgis import RequestDataset from ocgis.interface.base.attributes import Attributes from ocgis.interface.base.crs import WGS84, Spherical @@ -148,6 +148,20 @@ def test_init_empty(self): with self.assertRaises(ValueError): Field() + def test_as_spatial_collection(self): + field = self.get_field(with_value=True) + coll = field.as_spatial_collection() + self.assertIsInstance(coll, SpatialCollection) + self.assertIsInstance(coll[1][field.name], Field) + self.assertIsNone(coll.properties[1]) + + # test with some properties + properties = np.zeros(1, dtype={'names': ['color', 'desc'], 'formats': [object, int]}) + properties[0] = ('blue', 4) + field.spatial.properties = properties + coll = field.as_spatial_collection() + self.assertEqual(coll.properties[1], properties[0]) + def test_crs(self): field = self.get_field(with_value=True) self.assertIsNone(field.spatial.crs) diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_variable.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_variable.py index a5a8dba84..e3cfb9a7c 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_variable.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_variable.py @@ -1,9 +1,9 @@ from collections import OrderedDict - from numpy.ma import MaskedArray -from cfunits import Units import numpy as np +from cfunits import Units + from ocgis.exc import VariableInCollectionError, NoUnitsError from 
ocgis.interface.base.attributes import Attributes from ocgis.test.base import TestBase @@ -78,6 +78,23 @@ def test_init(self): self.assertEqual(dv.fdef, fdef) self.assertIsNone(dv.parents) + def test_iter_melted(self): + fdef = {'func': 'mean', 'name': 'mean_alias'} + tmax = Variable(name='tmax', alias='tmax_alias') + parents = VariableCollection(variables=tmax) + + for p in [None, parents]: + dv = DerivedVariable(fdef=fdef, value=np.array([1, 2]), name='mean', alias='my_mean', parents=p) + for row in dv.iter_melted(): + self.assertEqual(row['calc_key'], 'mean') + self.assertEqual(row['calc_alias'], 'mean_alias') + for key in ['name', 'alias']: + if p is None: + self.assertIsNone(row[key]) + else: + self.assertEqual(row['name'], 'tmax') + self.assertEqual(row['alias'], 'tmax_alias') + class TestVariable(TestBase): @@ -191,6 +208,60 @@ def test_get_empty_like(self): new_var.meta['hi'] = 'there' self.assertDictEqual(var.meta, {'foo': 5}) + def test_iter_melted(self): + + def _assert_key_(attr, key, row, actual_none=None): + key_value = row[key] + if attr is not None: + self.assertEqual(key_value, attr) + else: + if actual_none is None: + self.assertIsNone(key_value) + else: + self.assertEqual(key_value, actual_none) + + keywords = dict(value=[np.ma.array([[4, 5], [6, 7]], mask=[[False, True], [False, False]])], + use_mask=[True, False], + name=[None, 'tmax'], + alias=[None, 'tmax_alias'], + units=[None, 'celsius'], + uid=[None, 3], + did=[None, 7], + name_uid=[None, 'vid'], + attrs=[None, {'foo': 1, 'foo3': 2}]) + + for k in self.iter_product_keywords(keywords): + var = Variable(value=k.value, name=k.name, alias=k.alias, units=k.units, uid=k.uid, did=k.did, + attrs=k.attrs) + rows = [] + for row in var.iter_melted(use_mask=k.use_mask): + self.assertAsSetEqual(row.keys(), ['slice', 'name', 'did', 'value', 'alias', 'units', 'uid', 'attrs']) + self.assertIn('slice', row) + + if k.name is None: + if k.alias is None: + self.assertIsNone(row['alias']) + else: + 
self.assertEqual(row['alias'], k.alias) + else: + if k.alias is None: + self.assertEqual(row['alias'], k.name) + else: + self.assertEqual(row['alias'], k.alias) + + _assert_key_(k.name, 'name', row) + _assert_key_(k.units, 'units', row) + _assert_key_(k.uid, 'uid', row) + _assert_key_(k.did, 'did', row) + _assert_key_(k.attrs, 'attrs', row, actual_none=OrderedDict()) + + rows.append(row) + if k.use_mask: + self.assertEqual(len(rows), 3) + else: + self.assertEqual(len(rows), 4) + + class TestVariableCollection(TestBase): create_dir = False @@ -248,3 +319,11 @@ def test_get_sliced_variables(self): self.assertNumpyAll(v.value, np.ma.array([4])) for k, v in ret.iteritems(): self.assertTrue(np.may_share_memory(v.value, ret[k].value)) + + def test_iter_melted(self): + variables = [self.get_variable(), self.get_variable('tas_foo2')] + vc = VariableCollection(variables=variables) + test = set() + for row in vc.iter_melted(): + test.update([row['alias']]) + self.assertAsSetEqual(test, [xx.alias for xx in variables]) From 3942d4bed9685398539d2ce15196b77a758b48f6 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Wed, 31 Dec 2014 14:57:09 -0700 Subject: [PATCH 47/71] added method to spatial dimension to return fiona schema There is a new method to create a fiona schema from the spatial dimension object. Also includes some code reformatting. 
--- src/ocgis/conv/csv_.py | 2 +- src/ocgis/conv/fiona_.py | 21 +- src/ocgis/interface/base/dimension/spatial.py | 202 +++++++++++------- src/ocgis/interface/base/field.py | 5 +- src/ocgis/test/base.py | 2 + .../test/test_ocgis/test_conv/test_fiona_.py | 2 + .../test_base/test_dimension/test_base.py | 10 + .../test_base/test_dimension/test_spatial.py | 43 +++- .../test_interface/test_base/test_field.py | 1 + .../test/test_real_data/test_combinatorial.py | 2 +- 10 files changed, 192 insertions(+), 98 deletions(-) diff --git a/src/ocgis/conv/csv_.py b/src/ocgis/conv/csv_.py index 52956508b..8f25c22de 100644 --- a/src/ocgis/conv/csv_.py +++ b/src/ocgis/conv/csv_.py @@ -65,7 +65,7 @@ def _build_(self, coll): else: raise - fiona_schema = {'geometry': archetype_field.spatial.abstraction_geometry._geom_type, + fiona_schema = {'geometry': archetype_field.spatial.abstraction_geometry.geom_type, 'properties': OrderedDict([['DID', 'int'], ['UGID', 'int'], ['GID', 'int']])} fiona_object = fiona.open(fiona_path, 'w', driver='ESRI Shapefile', crs=fiona_crs, schema=fiona_schema) else: diff --git a/src/ocgis/conv/fiona_.py b/src/ocgis/conv/fiona_.py index 9dc3eeb99..a514401c1 100644 --- a/src/ocgis/conv/fiona_.py +++ b/src/ocgis/conv/fiona_.py @@ -1,15 +1,16 @@ -from ocgis.conv.base import AbstractConverter -import datetime import numpy as np from types import NoneType -import fiona from collections import OrderedDict -from shapely.geometry.geo import mapping -from fiona.rfc3339 import FionaTimeType, FionaDateType, FionaDateTimeType import abc + +import fiona +from shapely.geometry.geo import mapping + +from ocgis.conv.base import AbstractConverter +import datetime from ocgis.util.logging_ocgis import ocgis_lh - + class FionaConverter(AbstractConverter): __metaclass__ = abc.ABCMeta @@ -33,7 +34,11 @@ class FionaConverter(AbstractConverter): np.float16: 'float', np.int16: 'int', np.int32: 'int', - str: 'str'} + str: 'str', + np.dtype('int32'): 'int', + np.dtype('int64'): 'int', + 
np.dtype('float32'): 'float', + np.dtype('float64'): 'float'} @classmethod def get_field_type(cls, the_type, key=None, fiona_conversion=None): @@ -124,7 +129,7 @@ def _build_(self, coll): # polygon geometry types are always converted to multipolygons to avoid later collections having multipolygon # geometries. - geometry_type = archetype_field.spatial.abstraction_geometry._geom_type + geometry_type = archetype_field.spatial.abstraction_geometry.geom_type if geometry_type == 'Polygon': geometry_type = 'MultiPolygon' diff --git a/src/ocgis/interface/base/dimension/spatial.py b/src/ocgis/interface/base/dimension/spatial.py index 44cc12329..de531d517 100644 --- a/src/ocgis/interface/base/dimension/spatial.py +++ b/src/ocgis/interface/base/dimension/spatial.py @@ -1,4 +1,4 @@ -from collections import deque +from collections import deque, OrderedDict import itertools from copy import copy import numpy as np @@ -15,7 +15,6 @@ import base from ocgis.interface.base.crs import CFWGS84, CoordinateReferenceSystem, WGS84 -from ocgis.util.logging_ocgis import ocgis_lh from ocgis.util.helpers import iter_array, get_none_or_slice, \ get_formatted_slice, get_reduced_slice, get_trimmed_array_by_mask,\ get_added_slice, make_poly, set_name_attributes, get_extrapolated_corners_esmf, get_ocgis_corners_from_esmf_corners @@ -184,7 +183,6 @@ def assert_uniform_mask(self): :raises: AssertionError """ - #todo: check mask on grid corners to_compare = [] if self._grid is not None: to_compare.append(self._grid.value[0].mask) @@ -323,6 +321,55 @@ def get_clip(self, polygon, return_indices=False, use_spatial_index=True, select return(ret) + def get_fiona_schema(self): + """ + :returns: A :module:`fiona` schema dictionary. 
+ :rtype: dict + """ + + fproperties = OrderedDict() + if self.properties is not None: + from ocgis.conv.fiona_ import FionaConverter + + dtype = self.properties.dtype + for idx, name in enumerate(dtype.names): + fproperties[name] = FionaConverter.get_field_type(dtype[idx]) + schema = {'geometry': self.abstraction_geometry.geom_type, + 'properties': fproperties} + return schema + + def get_geom_iter(self, target=None, as_multipolygon=True): + """ + :param str target: The target geometry. One of "point" or "polygon". If ``None``, return the highest order + abstraction. + :param bool as_multipolygon: If ``True``, convert all polygons to multipolygons. + :returns: An iterator yielding a tuple: (int row index, int column index, Shapely geometry, int unique id) + :rtype: tuple + :raises: AttributeError + """ + + target = target or self.abstraction + if target is None: + value = self.geom.get_highest_order_abstraction().value + else: + try: + value = getattr(self.geom, target).value + except AttributeError: + msg = 'The target abstraction "{0}" is not available.'.format(target) + raise ValueError(msg) + + # no need to attempt and convert to MultiPolygon if we are working with point data. + if as_multipolygon and target == 'point': + as_multipolygon = False + + r_uid = self.uid + for (row_idx, col_idx), geom in iter_array(value, return_value=True): + if as_multipolygon: + if isinstance(geom, Polygon): + geom = MultiPolygon([geom]) + uid = r_uid[row_idx, col_idx] + yield (row_idx, col_idx, geom, uid) + def get_intersects(self, polygon, return_indices=False, use_spatial_index=True, select_nearest=False): """ :param polygon: The subset geometry objec to use for the intersects operation. @@ -331,7 +378,7 @@ def get_intersects(self, polygon, return_indices=False, use_spatial_index=True, :param bool use_spatial_index: If ``True``, use an ``rtree`` spatial index. 
:param bool select_nearest: If ``True``, select the geometry nearest ``polygon`` using :meth:`shapely.geometry.base.BaseGeometry.distance`. - :raises: ValueError, NotImplementedError, ImproperPolygonBoundsError + :raises: ValueError, NotImplementedError :rtype: If ``return_indices`` is ``False``: :class:`ocgis.interface.base.dimension.spatial.SpatialDimension`. If ``return_indices`` is ``True``: (:class:`ocgis.interface.base.dimension.spatial.SpatialDimension`, (:class:`slice`, :class:`slice`)) @@ -418,38 +465,6 @@ def get_intersects(self, polygon, return_indices=False, use_spatial_index=True, return ret - def get_geom_iter(self, target=None, as_multipolygon=True): - """ - :param str target: The target geometry. One of "point" or "polygon". If ``None``, return the highest order - abstraction. - :param bool as_multipolygon: If ``True``, convert all polygons to multipolygons. - :returns: An iterator yielding a tuple: (int row index, int column index, Shapely geometry, int unique id) - :rtype: tuple - :raises: AttributeError - """ - - target = target or self.abstraction - if target is None: - value = self.geom.get_highest_order_abstraction().value - else: - try: - value = getattr(self.geom, target).value - except AttributeError: - msg = 'The target abstraction "{0}" is not available.'.format(target) - raise ValueError(msg) - - # no need to attempt and convert to MultiPolygon if we are working with point data. - if as_multipolygon and target == 'point': - as_multipolygon = False - - r_uid = self.uid - for (row_idx, col_idx), geom in iter_array(value, return_value=True): - if as_multipolygon: - if isinstance(geom, Polygon): - geom = MultiPolygon([geom]) - uid = r_uid[row_idx, col_idx] - yield (row_idx, col_idx, geom, uid) - def get_mask(self): """ :returns: A deepcopy of a the boolean mask used on the spatial dimension. 
@@ -1053,44 +1068,64 @@ def _get_uid_(self): class SpatialGeometryPointDimension(base.AbstractUidValueDimension): + """ + :keyword str geom_type: (``=None``) If ``None``, default to :attrs:`ocgis.interface.base.dimension.spatial.SpatialGeometryPointDimension.__geom_type_default`. + If ``'auto'``, automatically determine the geometry type from the value data. + """ + _ndims = 2 _attrs_slice = ('uid', '_value', 'grid') - _geom_type = 'Point' + _geom_type_default = 'Point' def __init__(self, *args, **kwargs): - self.grid = kwargs.pop('grid', None) + self._geom_type = None - kwargs['name'] = kwargs.get('name') or self._geom_type.lower() + self.grid = kwargs.pop('grid', None) + self.geom_type = kwargs.pop('geom_type', None) or self._geom_type_default super(SpatialGeometryPointDimension, self).__init__(*args, **kwargs) + if self.name is None: + self.name = self.geom_type.lower() + + @property + def geom_type(self): + if self._geom_type == 'auto': + for geom in self.value.data.flat: + if geom.geom_type.startswith('Multi'): + break + self._geom_type = geom.geom_type + return self._geom_type + + @geom_type.setter + def geom_type(self, value): + self._geom_type = value + @property def weights(self): - ret = np.ones(self.value.shape,dtype=constants.NP_FLOAT) - ret = np.ma.array(ret,mask=self.value.mask) - return(ret) + ret = np.ones(self.value.shape, dtype=constants.NP_FLOAT) + ret = np.ma.array(ret, mask=self.value.mask) + return ret - def get_intersects_masked(self,polygon,use_spatial_index=True): + def get_intersects_masked(self, polygon, use_spatial_index=True): """ :param polygon: The Shapely geometry to use for subsetting. :type polygon: :class:`shapely.geometry.Polygon' or :class:`shapely.geometry.MultiPolygon' - :param bool use_spatial_index: If ``False``, do not use the :class:`rtree.index.Index` - for spatial subsetting. If the geometric case is simple, it may marginally - improve execution times to turn this off. 
However, turning this off for - a complex case will negatively impact (significantly) spatial operation - execution times. + :param bool use_spatial_index: If ``False``, do not use the :class:`rtree.index.Index` for spatial subsetting. + If the geometric case is simple, it may marginally improve execution times to turn this off. However, turning + this off for a complex case will negatively impact (significantly) spatial operation execution times. :raises: NotImplementedError, EmptySubsetError :returns: :class:`ocgis.interface.base.dimension.spatial.SpatialGeometryPointDimension` """ # only polygons are acceptable for subsetting. if a point is required, buffer it. - if type(polygon) not in (Polygon,MultiPolygon): - raise(NotImplementedError(type(polygon))) + if type(polygon) not in (Polygon, MultiPolygon): + raise NotImplementedError(type(polygon)) # return a shallow copy of self ret = copy(self) - # create the fill array and reference the mask. this is the outpout geometry value array. - fill = np.ma.array(ret.value,mask=True) + # create the fill array and reference the mask. this is the output geometry value array. + fill = np.ma.array(ret.value, mask=True) ref_fill_mask = fill.mask # this is the path if a spatial index is used. 
@@ -1102,33 +1137,32 @@ def get_intersects_masked(self,polygon,use_spatial_index=True): _add = si.add _value = self.value # add the geometries to the index - for (ii,jj),id_value in iter_array(self.uid,return_value=True): - _add(id_value,_value[ii,jj]) + for (ii, jj), id_value in iter_array(self.uid, return_value=True): + _add(id_value, _value[ii, jj]) # this mapping simulates a dictionary for the item look-ups from two-dimensional arrays - geom_mapping = GeomMapping(self.uid,self.value) + geom_mapping = GeomMapping(self.uid, self.value) _uid = ret.uid # return the identifiers of the objects intersecting the target geometry and update the mask accordingly - for intersect_id in si.iter_intersects(polygon,geom_mapping,keep_touches=False): + for intersect_id in si.iter_intersects(polygon, geom_mapping, keep_touches=False): sel = _uid == intersect_id ref_fill_mask[sel] = False # this is the slower simpler case else: - ## prepare the polygon for faster spatial operations + # prepare the polygon for faster spatial operations prepared = prep(polygon) - ## we are not keeping touches at this point. remember the mask is an - ## inverse. - for (ii,jj),geom in iter_array(self.value,return_value=True): + # we are not keeping touches at this point. remember the mask is an inverse. 
+ for (ii, jj), geom in iter_array(self.value, return_value=True): bool_value = False if prepared.intersects(geom): if polygon.touches(geom): bool_value = True else: bool_value = True - ref_fill_mask[ii,jj] = bool_value + ref_fill_mask[ii, jj] = bool_value # if everything is masked, this is an empty subset if ref_fill_mask.all(): - raise(EmptySubsetError(self.name)) + raise EmptySubsetError(self.name) # set the returned value to the fill array ret._value = fill @@ -1155,7 +1189,7 @@ def update_crs(self, to_crs, from_crs): r_value[idx_row, idx_col] = r_loads(ogr_geom.ExportToWkb()) def write_fiona(self,path,crs,driver='ESRI Shapefile'): - schema = {'geometry':self._geom_type, + schema = {'geometry':self.geom_type, 'properties':{'UGID':'int'}} ref_prep = self._write_fiona_prep_geom_ ref_uid = self.uid @@ -1169,47 +1203,49 @@ def write_fiona(self,path,crs,driver='ESRI Shapefile'): return(path) - def _write_fiona_prep_geom_(self,geom): - return(geom) + @staticmethod + def _write_fiona_prep_geom_(geom): + return geom - def _format_private_value_(self,value): + def _format_private_value_(self, value): if value is not None: try: - assert(len(value.shape) == 2) + assert (len(value.shape) == 2) ret = value - except (AssertionError,AttributeError): - ocgis_lh(exc=ValueError('Geometry values must come in as 2-d NumPy arrays to avoid array interface modifications by shapely.')) + except (AssertionError, AttributeError): + msg = 'Geometry values must come in as 2-d NumPy arrays to avoid array interface modifications by shapely.' 
+ raise ValueError(msg) else: ret = None - ret = self._get_none_or_array_(ret,masked=True) - return(ret) + ret = self._get_none_or_array_(ret, masked=True) + return ret - def _get_geometry_fill_(self,shape=None): + def _get_geometry_fill_(self, shape=None): if shape is None: - shape = (self.grid.shape[0],self.grid.shape[1]) + shape = (self.grid.shape[0], self.grid.shape[1]) mask = self.grid.value[0].mask else: mask = False - fill = np.ma.array(np.zeros(shape),mask=mask,dtype=object) + fill = np.ma.array(np.zeros(shape), mask=mask, dtype=object) - return(fill) + return fill def _get_value_(self): - # we are interested in creating geometries for all the underly coordinates regardless if the data is masked + # we are interested in creating geometries for all the underlying coordinates regardless if the data is masked ref_grid = self.grid.value.data fill = self._get_geometry_fill_() r_data = fill.data - for idx_row,idx_col in iter_array(ref_grid[0],use_mask=False): - y = ref_grid[0,idx_row,idx_col] - x = ref_grid[1,idx_row,idx_col] - pt = Point(x,y) - r_data[idx_row,idx_col] = pt - return(fill) + for idx_row, idx_col in iter_array(ref_grid[0], use_mask=False): + y = ref_grid[0, idx_row, idx_col] + x = ref_grid[1, idx_row, idx_col] + pt = Point(x, y) + r_data[idx_row, idx_col] = pt + return fill class SpatialGeometryPolygonDimension(SpatialGeometryPointDimension): - _geom_type = 'MultiPolygon' + _geom_type_default = 'MultiPolygon' def __init__(self, *args, **kwargs): kwargs['name'] = kwargs.get('name') or 'polygon' diff --git a/src/ocgis/interface/base/field.py b/src/ocgis/interface/base/field.py index fa079a5be..099654d82 100644 --- a/src/ocgis/interface/base/field.py +++ b/src/ocgis/interface/base/field.py @@ -12,7 +12,7 @@ from ocgis.interface.base.attributes import Attributes from ocgis.util.helpers import get_default_or_apply, get_none_or_slice, get_formatted_slice, get_reduced_slice, \ - set_name_attributes + set_name_attributes, iter_array from 
ocgis.interface.base.variable import Variable, VariableCollection from ocgis import SpatialCollection from ocgis.util.logging_ocgis import ocgis_lh @@ -161,6 +161,7 @@ def as_spatial_collection(self): """ coll = SpatialCollection() + # if there are no vector dimensions, there is no need for a melted representation coll.add_field(1, None, self, properties=self.spatial.properties, name=self.name) return coll @@ -316,7 +317,7 @@ def _get_geometry_union_(value): ret.variables = VariableCollection(variables=new_variables) ## the geometry type of the point dimension is now MultiPoint - ret.spatial.geom.point._geom_type = 'MultiPoint' + ret.spatial.geom.point.geom_type = 'MultiPoint' ## we want to keep a copy of the raw data around for later calculations. ret._raw = copy(self) diff --git a/src/ocgis/test/base.py b/src/ocgis/test/base.py index d95120b4b..ab341a09e 100644 --- a/src/ocgis/test/base.py +++ b/src/ocgis/test/base.py @@ -456,6 +456,8 @@ def get_tst_data(): test_data.update(['nc', 'snippets'], 'dtr', 'snippet_Maurer02new_OBS_dtr_daily.1971-2000.nc', key='snippet_maurer_dtr') test_data.update(['nc', 'snippets'], 'bias', 'seasonalbias.nc', key='snippet_seasonalbias') + # test_data.update(['shp', 'state_boundaries'], None, 'state_boundaries.shp', key='state_boundaries') + return test_data def inspect(self, uri, variable=None): diff --git a/src/ocgis/test/test_ocgis/test_conv/test_fiona_.py b/src/ocgis/test/test_ocgis/test_conv/test_fiona_.py index 72307b4fc..f80991772 100644 --- a/src/ocgis/test/test_ocgis/test_conv/test_fiona_.py +++ b/src/ocgis/test/test_ocgis/test_conv/test_fiona_.py @@ -2,6 +2,8 @@ import os import fiona +from ocgis.api.request.base import RequestDataset +from ocgis.util.shp_cabinet import ShpCabinet import ocgis from ocgis.api.subset import SubsetOperation diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_base.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_base.py index 
3beb961ef..f652a80b0 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_base.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_base.py @@ -24,6 +24,14 @@ class FakeAbstractDimension(AbstractDimension): class TestAbstractDimension(TestBase): create_dir = False + @property + def example_properties(self): + properties = np.zeros(3, dtype={'names': ['a', 'b'], 'formats': [int, float]}) + properties[0] = (1, 2.5) + properties[1] = (2, 3.5) + properties[2] = (3, 4.5) + return properties + def test_init(self): ad = FakeAbstractDimension() self.assertEqual(ad.name, None) @@ -33,6 +41,8 @@ def test_init(self): self.assertEqual(ad.meta, {}) + FakeAbstractDimension(properties=self.example_properties) + class FakeAbstractUidDimension(AbstractUidDimension): _attrs_slice = None diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py index c86e6a14f..32c26bd0b 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py @@ -1,3 +1,4 @@ +from collections import OrderedDict from copy import deepcopy, copy import os import itertools @@ -6,7 +7,7 @@ from shapely import wkt import fiona from fiona.crs import from_epsg -from shapely.geometry import shape, mapping, Polygon +from shapely.geometry import shape, mapping, Polygon, MultiPoint from shapely.geometry.point import Point from ocgis import constants, ShpCabinet @@ -17,7 +18,7 @@ from ocgis.test.base import TestBase from ocgis.interface.base.dimension.base import AbstractUidValueDimension from ocgis.interface.base.crs import CoordinateReferenceSystem, WGS84, CFWGS84, CFRotatedPole, \ - WrappableCoordinateReferenceSystem + WrappableCoordinateReferenceSystem, Spherical from ocgis.interface.base.dimension.base import VectorDimension 
from ocgis.util.itester import itr_products_keywords from ocgis.util.ugrid.convert import mesh2_nc_to_shapefile @@ -511,6 +512,16 @@ def test_get_clip(self): ref_poly = ret.geom.polygon.value[0,0] self.assertTrue(ref_poly.intersects(ref_pt)) + def test_get_fiona_schema(self): + sdim = self.get_sdim(crs=Spherical()) + schema = sdim.get_fiona_schema() + self.assertEqual(schema, {'geometry': 'MultiPolygon', 'properties': OrderedDict()}) + + properties = np.zeros(2, dtype={'names': ['a', 'b'], 'formats': [np.int32, np.float64]}) + sdim.properties = properties + schema = sdim.get_fiona_schema() + self.assertEqual(schema, {'geometry': 'MultiPolygon', 'properties': OrderedDict([('a', 'int'), ('b', 'float')])}) + def test_get_geom_iter(self): sdim = self.get_sdim(bounds=True) tt = list(sdim.get_geom_iter()) @@ -1083,15 +1094,40 @@ def test_get_highest_order_abstraction(self): with self.assertRaises(ValueError): gdim2.get_highest_order_abstraction() + class TestSpatialGeometryPointDimension(AbstractTestSpatialDimension): def test_init(self): row = VectorDimension(value=[5]) col = VectorDimension(value=[7]) grid = SpatialGridDimension(row=row, col=col) - sgpd = SpatialGeometryPointDimension(grid=grid) + sgpd = SpatialGeometryPointDimension(grid=grid, geom_type='Wrong') + self.assertEqual(sgpd.name, 'wrong') + self.assertEqual(sgpd.geom_type, 'Wrong') + + value = np.array([[Point(1, 2)]], dtype=object) + sgpd = SpatialGeometryPointDimension(value=value) self.assertEqual(sgpd.name, 'point') + def test_geom_type(self): + row = VectorDimension(value=[5]) + col = VectorDimension(value=[7]) + grid = SpatialGridDimension(row=row, col=col) + sgpd = SpatialGeometryPointDimension(grid=grid) + self.assertEqual(sgpd.geom_type, 'Point') + + mp = MultiPoint([Point(1, 2), Point(3, 4)]) + value = np.array([[None, None]]) + value[0, 1] = Point(3, 4) + value[0, 0] = mp + sgpd = SpatialGeometryPointDimension(value=value) + self.assertEqual(sgpd.geom_type, 'Point') + sgpd = 
SpatialGeometryPointDimension(value=value, geom_type=None) + self.assertEqual(sgpd.geom_type, 'Point') + + sgpd = SpatialGeometryPointDimension(value=value, geom_type='auto') + self.assertEqual(sgpd.geom_type, 'MultiPoint') + def test_get_intersects_masked(self): sdim = self.get_sdim(crs=WGS84()) self.assertIsNotNone(sdim.grid) @@ -1136,6 +1172,7 @@ def test_init(self): gd = SpatialGeometryPolygonDimension(grid=grid) self.assertEqual(gd.name, 'polygon') self.assertIsInstance(gd, SpatialGeometryPointDimension) + self.assertEqual(gd.geom_type, 'MultiPolygon') def test_get_value(self): # the ordering of vertices when creating from corners is slightly different diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py index 0e06e4240..225eae107 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py @@ -5,6 +5,7 @@ import numpy as np from shapely import wkt +from shapely.geometry import Point from shapely.ops import cascaded_union from datetime import datetime as dt diff --git a/src/ocgis/test/test_real_data/test_combinatorial.py b/src/ocgis/test/test_real_data/test_combinatorial.py index e6c45f359..c63851c27 100644 --- a/src/ocgis/test/test_real_data/test_combinatorial.py +++ b/src/ocgis/test/test_real_data/test_combinatorial.py @@ -313,7 +313,7 @@ def test_combinatorial_projection_with_geometries(self): second = 'MultiPoint' elif ab is None: field = RequestDataset(uri=d['uri'], variable='foo').get() - second = field.spatial.geom.get_highest_order_abstraction()._geom_type + second = field.spatial.geom.get_highest_order_abstraction().geom_type else: second = ab.title() From 3a5d5e305f5cd684903bd55617b61bcb70dd4834 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Fri, 2 Jan 2015 13:16:19 -0700 Subject: [PATCH 48/71] added columnar iterator to field This iterator only works in the case of no 
time, level, and/or realization dimension. --- src/ocgis/api/request/driver/vector.py | 2 +- src/ocgis/interface/base/field.py | 51 ++++++++++++++----- .../test_api/test_request/test_base.py | 13 +++-- .../test_request/test_driver/test_vector.py | 6 ++- .../test_interface/test_base/test_field.py | 20 ++++++++ 5 files changed, 72 insertions(+), 20 deletions(-) diff --git a/src/ocgis/api/request/driver/vector.py b/src/ocgis/api/request/driver/vector.py index 9d06a3ae1..2745b75c3 100644 --- a/src/ocgis/api/request/driver/vector.py +++ b/src/ocgis/api/request/driver/vector.py @@ -17,7 +17,7 @@ def get_crs(self): return CoordinateReferenceSystem(self.rd.source_metadata['crs']) def get_dimensioned_variables(self): - return None + return self.rd.source_metadata['schema']['properties'].keys() def get_source_metadata(self): try: diff --git a/src/ocgis/interface/base/field.py b/src/ocgis/interface/base/field.py index 099654d82..161a9a882 100644 --- a/src/ocgis/interface/base/field.py +++ b/src/ocgis/interface/base/field.py @@ -1,6 +1,6 @@ from contextlib import contextmanager from copy import copy, deepcopy -from collections import deque +from collections import deque, OrderedDict import itertools import logging import numpy as np @@ -184,6 +184,13 @@ def get_intersects(self, polygon, use_spatial_index=True, select_nearest=False): select_nearest=select_nearest)) def get_iter(self, add_masked_value=True, value_keys=None): + """ + :param bool add_masked_value: If ``False``, do not yield masked variable values. + :param value_keys: A sequence of keys if the variable is a structure array. + :type value_keys: [str, ...] + :returns: A dictionary for each value for each variable. 
+ :rtype: dict + """ def _get_dimension_iterator_1d_(target): attr = getattr(self, target) @@ -246,7 +253,7 @@ def _get_dimension_iterator_1d_(target): yield to_yld def get_shallow_copy(self): - return(copy(self)) + return copy(self) def get_spatially_aggregated(self,new_spatial_uid=None): @@ -281,14 +288,10 @@ def _get_geometry_union_(value): ret.spatial.geom.point._value = unioned ret.spatial.geom.point.uid = new_spatial_uid - try: - if ret.spatial.geom.polygon is not None: - unioned = _get_geometry_union_(ret.spatial.geom.polygon.value) - ret.spatial.geom.polygon._value = _get_geometry_union_(ret.spatial.geom.polygon.value) - ret.spatial.geom.polygon.uid = new_spatial_uid - except ImproperPolygonBoundsError: - msg = 'No polygon representation to aggregate.' - ocgis_lh(msg=msg,logger='field',level=logging.WARN) + if ret.spatial.geom.polygon is not None: + unioned = _get_geometry_union_(ret.spatial.geom.polygon.value) + ret.spatial.geom.polygon._value = _get_geometry_union_(ret.spatial.geom.polygon.value) + ret.spatial.geom.polygon.uid = new_spatial_uid ## update the spatial uid ret.spatial.uid = new_spatial_uid @@ -324,13 +327,33 @@ def _get_geometry_union_(value): return(ret) - def get_time_region(self,time_region): + def get_time_region(self, time_region): ret = copy(self) - ret.temporal,indices = self.temporal.get_time_region(time_region,return_indices=True) - slc = [slice(None),indices,slice(None),slice(None),slice(None)] + ret.temporal, indices = self.temporal.get_time_region(time_region, return_indices=True) + slc = [slice(None), indices, slice(None), slice(None), slice(None)] variables = self.variables.get_sliced_variables(slc) ret.variables = variables - return(ret) + return ret + + def iter(self): + """ + :returns: An ordered dictionary with variable values as keys with geometry information. 
+ :rtype: :class:`collections.OrderedDict` + :raises: ValueError + """ + + if any([getattr(self, xx) is not None for xx in ['realization', 'temporal', 'level']]): + msg = 'Use "iter_melted" for fields having dimensions in addition to space.' + raise ValueError(msg) + + spatial_name_uid = self.spatial.name_uid + self_uid = self.uid + for (sridx, scidx, geom, gid) in self.spatial.get_geom_iter(): + yld = OrderedDict([['geom', geom], ['did', self_uid], [spatial_name_uid, gid]]) + for variable in self.variables.itervalues(): + value = variable.value.data[0, 0, 0, sridx, scidx] + yld[variable.alias] = value + yield yld def write_to_netcdf_dataset(self, dataset, file_only=False, **kwargs): """ diff --git a/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py b/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py index f5f9a427f..8d58dd9ae 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py +++ b/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py @@ -68,15 +68,15 @@ def test_init(self): def test_init_driver(self): uri = ShpCabinet().get_shp_path('state_boundaries') rd = RequestDataset(uri=uri, driver='vector') - self.assertIsNone(rd.variable) + self.assertIsNotNone(rd.variable) self.assertIsInstance(rd.get(), Field) uri_nc = self.test_data.get_uri('cancm4_tas') rd = RequestDataset(uri_nc) self.assertIsInstance(rd.driver, DriverNetcdf) - rd = RequestDataset(uri_nc, driver='vector') - self.assertIsInstance(rd.driver, DriverVector) + with self.assertRaises(ValueError): + RequestDataset(uri_nc, driver='vector') def test_str(self): rd = self.test_data.get_rd('cancm4_tas') @@ -116,7 +116,12 @@ def test_get_autodiscovered_driver(self): def test_name(self): path = ShpCabinet().get_shp_path('state_boundaries') rd = RequestDataset(uri=path, driver='vector') - self.assertIsNone(rd.name) + self.assertIsNotNone(rd.name) + + rd = RequestDataset(uri=path, driver='vector', name='states') + self.assertEqual(rd.name, 'states') + field = 
rd.get() + self.assertEqual(field.name, 'states') def test_uri_cannot_be_set(self): rd = self.test_data.get_rd('cancm4_tas') diff --git a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_vector.py b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_vector.py index 3cfa852c7..c47a1ca2b 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_vector.py +++ b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_vector.py @@ -32,7 +32,8 @@ def test_get_crs(self): def test_get_dimensioned_variables(self): driver = self.get_driver() - self.assertIsNone(driver.get_dimensioned_variables()) + target = driver.get_dimensioned_variables() + self.assertEqual(target, [u'UGID', u'STATE_FIPS', u'ID', u'STATE_NAME', u'STATE_ABBR']) def test_get_field(self): driver = self.get_driver() @@ -40,6 +41,9 @@ def test_get_field(self): sub = field[:, :, :, :, 25] self.assertEqual(sub.spatial.properties.shape, (1,)) self.assertTrue(len(sub.spatial.properties.dtype.names) > 2) + self.assertEqual(len(field.variables), 5) + for variable in field.variables.itervalues(): + self.assertEqual(variable.shape, (1, 1, 1, 1, 51)) # test with a variable driver = self.get_driver(variable=['ID', 'STATE_NAME']) diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py index 225eae107..fb193f0e5 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py @@ -332,6 +332,26 @@ def test_get_iter_two_variables(self): self.assertTrue(row['value'] > 3) self.assertEqual(set(vids), set([1, 2])) + def test_iter(self): + field = self.get_field(with_value=True) + with self.assertRaises(ValueError): + list(field.iter()) + + field = self.get_field(with_value=True, with_realization=False, with_level=False, with_temporal=False) + other = deepcopy(field.variables.first()) + other.alias = 
'tmax2' + other.uid = 2 + field.variables.add_variable(other) + gids = [] + for row in field.iter(): + self.assertIsInstance(row, OrderedDict) + self.assertEqual(row.keys(), ['geom', 'did', 'gid', 'tmax', 'tmax2']) + for variable in field.variables.itervalues(): + self.assertIn(variable.alias, row) + gids.append(row[field.spatial.name_uid]) + self.assertEqual(len(gids), len(set(gids))) + self.assertEqual(len(gids), 12) + def test_name(self): field = self.get_field(field_name='foo') self.assertEqual(field.name, 'foo') From 784714ec38c9e18e9ced2e2c1f8bfab0a069e2bf Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Fri, 2 Jan 2015 15:10:52 -0700 Subject: [PATCH 49/71] file headers rearranged in constants --- src/ocgis/api/collection.py | 68 ++++++++++++++++++------------------- src/ocgis/constants.py | 58 +++++++++++++++++++++++++------ 2 files changed, 81 insertions(+), 45 deletions(-) diff --git a/src/ocgis/api/collection.py b/src/ocgis/api/collection.py index d9848bde4..23135e58d 100644 --- a/src/ocgis/api/collection.py +++ b/src/ocgis/api/collection.py @@ -8,7 +8,6 @@ from ocgis.interface.base.crs import CFWGS84 from ocgis import constants from ocgis.util.helpers import get_ordered_dicts_from_records_array -from ocgis.util.logging_ocgis import ocgis_lh class AbstractCollection(object): @@ -22,7 +21,7 @@ def __init__(self): def _storage_id_next(self): try: ret = max(self._storage_id) + 1 - ## max of an empty list + # max of an empty list except ValueError: if len(self._storage_id) == 0: ret = 1 @@ -95,7 +94,7 @@ def values(self): class SpatialCollection(AbstractCollection): _default_headers = constants.HEADERS_RAW _multi_cast = {'Point': MultiPoint, 'Polygon': MultiPolygon} - + def __init__(self, meta=None, key=None, crs=None, headers=None, value_keys=None): super(SpatialCollection, self).__init__() @@ -104,19 +103,16 @@ def __init__(self, meta=None, key=None, crs=None, headers=None, value_keys=None) self.crs = crs or CFWGS84() self.headers = headers or 
self._default_headers self.value_keys = value_keys - + self.geoms = OrderedDict() self.properties = OrderedDict() - - # self._uid_ctr_field = 1 - # self._ugid = OrderedDict() @property def _archetype_field(self): ukey = self.keys()[0] fkey = self[ukey].keys()[0] - return(self[ukey][fkey]) - + return self[ukey][fkey] + def add_field(self, ugid, geom, field, properties=None, name=None): """ :param int ugid: @@ -125,61 +121,65 @@ def add_field(self, ugid, geom, field, properties=None, name=None): :param dict properties: :param str name: """ + name = name or field.name - ## add field unique identifier if it does not exist + # add field unique identifier if it does not exist try: if field.uid is None: field.uid = self._storage_id_next self._storage_id.append(field.uid) - ## likely a nonetype from an empty subset - except AttributeError as e: + # likely a nonetype from an empty subset + except AttributeError: if field is None: pass else: - ocgis_lh(exc=e, logger='collection') - - self.geoms.update({ugid:geom}) - self.properties.update({ugid:properties}) + raise + + self.geoms.update({ugid: geom}) + self.properties.update({ugid: properties}) if ugid not in self: - self.update({ugid:{}}) - assert(name not in self[ugid]) - self[ugid].update({name:field}) + self.update({ugid: {}}) + assert (name not in self[ugid]) + self[ugid].update({name: field}) def get_iter_dict(self, use_upper_keys=False, conversion_map=None): r_headers = self.headers + id_selection_geometry = constants.HEADERS.ID_SELECTION_GEOMETRY + default_geometry_key = constants.DEFAULT_GEOMETRY_KEY use_conversion = False if conversion_map is None else True for ugid, field_dict in self.iteritems(): for field in field_dict.itervalues(): for row in field.get_iter(value_keys=self.value_keys): - row['ugid'] = ugid + row[id_selection_geometry] = ugid yld_row = {k: row.get(k) for k in r_headers} if use_conversion: for k, v in conversion_map.iteritems(): yld_row[k] = v(yld_row[k]) if use_upper_keys: yld_row = {k.upper(): 
v for k, v in yld_row.iteritems()} - yield row['geom'], yld_row - + yield row[default_geometry_key], yld_row + def get_iter_elements(self): - for ugid,fields in self.iteritems(): - for field_alias,field in fields.iteritems(): - for var_alias,variable in field.variables.iteritems(): - yield(ugid,field_alias,var_alias,variable) - + for ugid, fields in self.iteritems(): + for field_alias, field in fields.iteritems(): + for var_alias, variable in field.variables.iteritems(): + yield (ugid, field_alias, var_alias, variable) + def get_iter_melted(self): - for ugid,container in self.iteritems(): - for field_alias,field in container.iteritems(): - for variable_alias,variable in field.variables.iteritems(): - yield(dict(ugid=ugid,field_alias=field_alias,field=field,variable_alias=variable_alias,variable=variable)) - - def gvu(self,ugid,alias_variable,alias_field=None): + for ugid, container in self.iteritems(): + for field_alias, field in container.iteritems(): + for variable_alias, variable in field.variables.iteritems(): + yield (dict(ugid=ugid, field_alias=field_alias, field=field, variable_alias=variable_alias, + variable=variable)) + + def gvu(self, ugid, alias_variable, alias_field=None): ref = self[ugid] if alias_field is None: field = ref.values()[0] else: field = ref[alias_field] - return(field.variables[alias_variable].value) + return field.variables[alias_variable].value def write_ugeom(self, path=None, driver='ESRI Shapefile', fobject=None): """ diff --git a/src/ocgis/constants.py b/src/ocgis/constants.py index a7c33e3ee..0e06223e0 100644 --- a/src/ocgis/constants.py +++ b/src/ocgis/constants.py @@ -4,9 +4,6 @@ # : Standard bounds name used when none is available from the input data. OCGIS_BOUNDS = 'bounds' -#: Standard name for the unique identifier in GIS files. -OCGIS_UNIQUE_GEOMETRY_IDENTIFIER = 'UGID' - #: Default netCDF4 output file type NETCDF_DEFAULT_DATA_MODEL = 'NETCDF4' @@ -34,18 +31,54 @@ #: Default corners dimension name. 
DEFAULT_NAME_CORNERS_DIMENSION = 'ncorners' + +class HEADERS(object): + ID_DATASET = 'did' + ID_VARIABLE = 'vid' + ID_SELECTION_GEOMETRY = 'ugid' + ID_TEMPORAL = 'tid' + ID_LEVEL = 'lid' + ID_GEOMETRY = 'gid' + ID_CALCULATION = 'cid' + + VARIABLE = 'variable' + VARIABLE_ALIAS = 'alias' + + TEMPORAL = 'time' + TEMPORAL_YEAR = 'year' + TEMPORAL_MONTH = 'month' + TEMPORAL_DAY = 'day' + + LEVEL = 'level' + + VALUE = 'value' + + CALCULATION_KEY = 'calc_key' + CALCULATION_ALIAS = 'calc_alias' + + #: Standard headers for subset operations. -HEADERS_RAW = ['did', 'vid', 'ugid', 'tid', 'lid', 'gid', 'variable', 'alias', 'time', 'year', 'month', 'day', 'level', - 'value'] +HEADERS_RAW = [HEADERS.ID_DATASET, HEADERS.ID_VARIABLE, HEADERS.ID_SELECTION_GEOMETRY, HEADERS.ID_TEMPORAL, + HEADERS.ID_LEVEL, HEADERS.ID_GEOMETRY, HEADERS.VARIABLE, HEADERS.VARIABLE_ALIAS, HEADERS.TEMPORAL, + HEADERS.TEMPORAL_YEAR, HEADERS.TEMPORAL_MONTH, HEADERS.TEMPORAL_DAY, HEADERS.LEVEL, HEADERS.VALUE] + #: Standard headers for computation. -HEADERS_CALC = ['did', 'vid', 'cid', 'ugid', 'tid', 'lid', 'gid', 'variable', 'alias', 'calc_key', 'calc_alias', 'time', - 'year', 'month', 'day', 'level', 'value'] -#: Standard headers for multivariate calculations. -HEADERS_MULTI = ['did', 'cid', 'ugid', 'tid', 'lid', 'gid', 'calc_key', 'calc_alias', 'time', 'year', 'month', 'day', - 'level', 'value'] +HEADERS_CALC = [HEADERS.ID_DATASET, HEADERS.ID_VARIABLE, HEADERS.ID_CALCULATION, HEADERS.ID_SELECTION_GEOMETRY, + HEADERS.ID_TEMPORAL, HEADERS.ID_LEVEL, HEADERS.ID_GEOMETRY, HEADERS.VARIABLE, HEADERS.VARIABLE_ALIAS, + HEADERS.CALCULATION_KEY, HEADERS.CALCULATION_ALIAS, HEADERS.TEMPORAL, HEADERS.TEMPORAL_YEAR, + HEADERS.TEMPORAL_MONTH, HEADERS.TEMPORAL_DAY, HEADERS.LEVEL, HEADERS.VALUE] + +#: Standard headers for multivariate calculation. 
+HEADERS_MULTI = [HEADERS.ID_DATASET, HEADERS.ID_CALCULATION, HEADERS.ID_SELECTION_GEOMETRY, + HEADERS.ID_TEMPORAL, HEADERS.ID_LEVEL, HEADERS.ID_GEOMETRY, HEADERS.CALCULATION_KEY, + HEADERS.CALCULATION_ALIAS, HEADERS.TEMPORAL, HEADERS.TEMPORAL_YEAR, HEADERS.TEMPORAL_MONTH, + HEADERS.TEMPORAL_DAY, HEADERS.LEVEL, HEADERS.VALUE] #: Required headers for every request. -HEADERS_REQUIRED = ['did', 'ugid', 'gid'] +HEADERS_REQUIRED = [HEADERS.ID_DATASET, HEADERS.ID_SELECTION_GEOMETRY, HEADERS.ID_GEOMETRY] + +#: Standard name for the unique identifier in GIS files. +OCGIS_UNIQUE_GEOMETRY_IDENTIFIER = HEADERS.ID_SELECTION_GEOMETRY.upper() OUTPUT_FORMAT_CSV = 'csv' OUTPUT_FORMAT_CSV_SHAPEFILE = 'csv-shp' @@ -84,3 +117,6 @@ #: The value for the 180th meridian to use when wrapping. MERIDIAN_180TH = 180. # MERIDIAN_180TH = 179.9999999999999 + +# The standard key used to identify geometries in a dictionary. +DEFAULT_GEOMETRY_KEY = 'geom' From 8631af31e8a5ee6fd3dbbb71e1a71ee7e72e8fe6 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Mon, 5 Jan 2015 09:42:12 -0700 Subject: [PATCH 50/71] minor --- src/ocgis/api/request/driver/vector.py | 17 +++++++------ src/ocgis/conv/fiona_.py | 15 ++++++++++++ .../test_request/test_driver/test_vector.py | 4 +--- .../test/test_ocgis/test_conv/test_fiona_.py | 24 +++++++++++++++++++ 4 files changed, 48 insertions(+), 12 deletions(-) diff --git a/src/ocgis/api/request/driver/vector.py b/src/ocgis/api/request/driver/vector.py index 2745b75c3..31c303859 100644 --- a/src/ocgis/api/request/driver/vector.py +++ b/src/ocgis/api/request/driver/vector.py @@ -62,15 +62,14 @@ def _get_field_(self): try: records = list(ds) sdim = SpatialDimension.from_records(records, crs=self.get_crs()) - if self.rd.variable is not None: - vc = VariableCollection() - for xx in self.rd: - value = np.array([yy['properties'][xx['variable']] for yy in records]).reshape(1, 1, 1, 1, -1) - var = Variable(name=xx['variable'], alias=xx['alias'], units=xx['units'], 
conform_units_to=xx['units'], - value=value) - vc.add_variable(var, assign_new_uid=True) - else: - vc = None + # do not load the properties - they are transformed to variables in the case of the values put into fields + sdim.properties = None + vc = VariableCollection() + for xx in self.rd: + value = np.array([yy['properties'][xx['variable']] for yy in records]).reshape(1, 1, 1, 1, -1) + var = Variable(name=xx['variable'], alias=xx['alias'], units=xx['units'], conform_units_to=xx['units'], + value=value) + vc.add_variable(var, assign_new_uid=True) field = Field(spatial=sdim, variables=vc, name=self.rd.name) return field finally: diff --git a/src/ocgis/conv/fiona_.py b/src/ocgis/conv/fiona_.py index a514401c1..1e37c97bd 100644 --- a/src/ocgis/conv/fiona_.py +++ b/src/ocgis/conv/fiona_.py @@ -54,6 +54,21 @@ def get_field_type(cls, the_type, key=None, fiona_conversion=None): :raises: AttributeError """ + # bypass for string types... + try: + the_types_type = the_type.type + except AttributeError: + # likely not a numpy type + pass + else: + if the_types_type == np.string_: + length = the_type.str[2:] + ret = 'str:{0}'.format(length) + if key is not None: + fiona_conversion[key] = unicode + return ret + + # this is for other types... 
ret = None for k, v in fiona.FIELD_TYPES_MAP.iteritems(): if the_type == v: diff --git a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_vector.py b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_vector.py index c47a1ca2b..0d0ed4794 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_vector.py +++ b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_vector.py @@ -38,9 +38,7 @@ def test_get_dimensioned_variables(self): def test_get_field(self): driver = self.get_driver() field = driver.get_field() - sub = field[:, :, :, :, 25] - self.assertEqual(sub.spatial.properties.shape, (1,)) - self.assertTrue(len(sub.spatial.properties.dtype.names) > 2) + self.assertIsNone(field.spatial.properties) self.assertEqual(len(field.variables), 5) for variable in field.variables.itervalues(): self.assertEqual(variable.shape, (1, 1, 1, 1, 51)) diff --git a/src/ocgis/test/test_ocgis/test_conv/test_fiona_.py b/src/ocgis/test/test_ocgis/test_conv/test_fiona_.py index f80991772..e2e2e1455 100644 --- a/src/ocgis/test/test_ocgis/test_conv/test_fiona_.py +++ b/src/ocgis/test/test_ocgis/test_conv/test_fiona_.py @@ -1,5 +1,6 @@ from collections import OrderedDict import os +import datetime import fiona from ocgis.api.request.base import RequestDataset @@ -10,6 +11,7 @@ from ocgis.conv.fiona_ import ShpConverter from ocgis.test.base import TestBase from ocgis.test.test_ocgis.test_api.test_parms.test_definition import TestGeom +import numpy as np class TestShpConverter(TestBase): @@ -33,6 +35,28 @@ def test_attributes_copied(self): with fiona.open(path_ugid) as source: self.assertEqual(source.schema['properties'], OrderedDict([(u'COUNTRY', 'str:80'), (u'UGID', 'int:10')])) + def test_get_field_type(self): + target = ShpConverter.get_field_type(np.int32) + self.assertEqual(target, 'int') + key = 'foo' + fiona_conversion = {} + ShpConverter.get_field_type(np.int32, key=key, fiona_conversion=fiona_conversion) + 
self.assertEqual(fiona_conversion[key], int) + + target = ShpConverter.get_field_type(str) + self.assertEqual(target, 'str') + + target = ShpConverter.get_field_type(datetime.datetime) + self.assertEqual(target, 'str') + + the_type = np.dtype('S20') + target = ShpConverter.get_field_type(the_type) + self.assertEqual(target, 'str:20') + key = 'hey' + fiona_conversion = {} + ShpConverter.get_field_type(the_type, key=key, fiona_conversion=fiona_conversion) + self.assertEqual(fiona_conversion[key], unicode) + def test_none_geom(self): """Test a NoneType geometry will pass through the Fiona converter.""" From 19206a601fa0e10c161012ee6556864fb63a48f0 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Mon, 5 Jan 2015 13:15:39 -0700 Subject: [PATCH 51/71] add class validation method to drivers for operations Drivers now have a validate operations method and a list of acceptable output formats. This method is called from the operations validations. --- src/ocgis/api/operations.py | 4 + src/ocgis/api/request/driver/base.py | 34 +++- src/ocgis/api/request/driver/nc.py | 1 + src/ocgis/api/request/driver/vector.py | 2 + src/ocgis/conv/base.py | 182 ++++++++---------- .../test_ocgis/test_api/test_operations.py | 9 +- .../test_request/test_driver/test_base.py | 47 ++++- 7 files changed, 174 insertions(+), 105 deletions(-) diff --git a/src/ocgis/api/operations.py b/src/ocgis/api/operations.py index 85964f629..849d87363 100644 --- a/src/ocgis/api/operations.py +++ b/src/ocgis/api/operations.py @@ -348,6 +348,10 @@ def _raise_(msg, obj=OutputFormat): e = DefinitionValidationError(obj, msg) ocgis_lh(exc=e, logger='operations') + # assert the driver may be written to the appropriate output format + for rd in self.dataset.iter_request_datasets(): + rd.driver.validate_ops(self) + # no regridding with a spatial operation of clip if self.regrid_destination is not None: if self.spatial_operation == 'clip': diff --git a/src/ocgis/api/request/driver/base.py 
b/src/ocgis/api/request/driver/base.py index 6c17ecaf8..20e63c0b2 100644 --- a/src/ocgis/api/request/driver/base.py +++ b/src/ocgis/api/request/driver/base.py @@ -1,4 +1,5 @@ import abc +from ocgis.exc import DefinitionValidationError class AbstractDriver(object): @@ -26,6 +27,14 @@ def extensions(self): def key(self): str + @abc.abstractproperty + def output_formats(self): + """ + :returns: A list of acceptable output formats for the driver. If this is `'all'`, then the driver's data may be + converted to all output formats. + :rtype: [str, ...] + """ + @abc.abstractmethod def close(self, obj): pass @@ -48,18 +57,31 @@ def get_field(self, **kwargs): field._has_assigned_coordinate_system = True return field - @abc.abstractmethod - def _get_field_(self, **kwargs): - """Return :class:`ocgis.interface.base.field.Field`""" - @abc.abstractmethod def get_source_metadata(self): return dict + @abc.abstractmethod + def inspect(self): + pass + @abc.abstractmethod def open(self): return object + @classmethod + def validate_ops(cls, ops): + """ + :param ops: An operation object to validate. + :type ops: :class:`~ocgis.OcgOperations` + :raises: DefinitionValidationError + """ + + if cls.output_formats != 'all': + if ops.output_format not in cls.output_formats: + msg = 'Output format not supported for driver "{0}". 
Supported output formats are: {1}'.format(cls.key, cls.output_formats) + raise DefinitionValidationError('output_format', msg) + @abc.abstractmethod - def inspect(self): - pass + def _get_field_(self, **kwargs): + """Return :class:`ocgis.interface.base.field.Field`""" diff --git a/src/ocgis/api/request/driver/nc.py b/src/ocgis/api/request/driver/nc.py index 759704ae8..9b2047c52 100644 --- a/src/ocgis/api/request/driver/nc.py +++ b/src/ocgis/api/request/driver/nc.py @@ -22,6 +22,7 @@ class DriverNetcdf(AbstractDriver): extensions = ('.*\.nc', 'http.*') key = 'netCDF' + output_formats = 'all' def __init__(self, *args, **kwargs): AbstractDriver.__init__(self, *args, **kwargs) diff --git a/src/ocgis/api/request/driver/vector.py b/src/ocgis/api/request/driver/vector.py index 31c303859..2f4ca427a 100644 --- a/src/ocgis/api/request/driver/vector.py +++ b/src/ocgis/api/request/driver/vector.py @@ -1,4 +1,5 @@ import numpy as np +from ocgis import constants from ocgis.interface.base.variable import Variable, VariableCollection from ocgis.interface.base.field import Field @@ -8,6 +9,7 @@ class DriverVector(AbstractDriver): extensions = ('.*\.shp',) key = 'vector' + output_formats = [constants.OUTPUT_FORMAT_NUMPY] def close(self, obj): pass diff --git a/src/ocgis/conv/base.py b/src/ocgis/conv/base.py index e66a6e811..4bfee6110 100644 --- a/src/ocgis/conv/base.py +++ b/src/ocgis/conv/base.py @@ -52,56 +52,60 @@ def __init__(self, colls, outdir=None, prefix=None, ops=None, add_meta=True, add self.path = os.path.join(self.outdir, prefix + '.' + self._ext) if os.path.exists(self.path): if not self.overwrite: - msg = 'Output path exists "{0}" and must be removed before proceeding. Set "overwrite" argument or env.OVERWRITE to True to overwrite.'.format(self.path) + msg = 'Output path exists "{0}" and must be removed before proceeding. 
Set "overwrite" argument or env.OVERWRITE to True to overwrite.'.format( + self.path) raise IOError(msg) ocgis_lh('converter initialized', level=logging.DEBUG, logger=self._log) - - def _build_(self,*args,**kwds): raise(NotImplementedError) - + + def _build_(self, *args, **kwargs): + raise NotImplementedError + def _clean_outdir_(self): - ''' + """ Remove previous output file from outdir. - ''' - pass - + """ + def _get_return_(self): - return(self.path) - - def _write_coll_(self,f,coll): raise(NotImplementedError) - - def _finalize_(self,*args,**kwds): raise(NotImplementedError) - + return self.path + + def _write_coll_(self, f, coll): + raise NotImplementedError + + def _finalize_(self, *args, **kwargs): + raise NotImplementedError + def _get_or_create_shp_folder_(self): - path = os.path.join(self.outdir,'shp') + path = os.path.join(self.outdir, 'shp') if not os.path.exists(path): os.mkdir(path) - return(path) - - def _get_should_append_to_unique_geometry_store_(self,store,geom,ugid): - ''' + return path + + def _get_should_append_to_unique_geometry_store_(self, store, geom, ugid): + """ :param sequence store: :param :class:`shapely.Geometry` geom: :param int ugid: - ''' + """ + ret = True test_all = [] for row in store: test_geom = row['geom'].almost_equals(geom) test_ugid = row['ugid'] == ugid - test_all.append(all([test_geom,test_ugid])) + test_all.append(all([test_geom, test_ugid])) if any(test_all): ret = False - return(ret) - + return ret + def write(self): - ocgis_lh('starting write method',self._log,logging.DEBUG) - + ocgis_lh('starting write method', self._log, logging.DEBUG) + unique_geometry_store = [] # indicates if user geometries should be written to file write_ugeom = False - + try: build = True @@ -116,104 +120,84 @@ def write(self): if write_ugeom: ugid_shp_name = self.prefix + '_ugid.shp' ugid_csv_name = self.prefix + '_ugid.csv' - + if self._add_ugeom_nest: - fiona_path = os.path.join(self._get_or_create_shp_folder_(),ugid_shp_name) - # 
csv_path = os.path.join(self._get_or_create_shp_folder_(),ugid_csv_name) + fiona_path = os.path.join(self._get_or_create_shp_folder_(), ugid_shp_name) else: - fiona_path = os.path.join(self.outdir,ugid_shp_name) - # csv_path = os.path.join(self.outdir,ugid_csv_name) + fiona_path = os.path.join(self.outdir, ugid_shp_name) if coll.meta is None: # convert the collection properties to fiona properties from fiona_ import FionaConverter + fiona_properties = {} archetype_properties = coll.properties.values()[0] for name in archetype_properties.dtype.names: - fiona_properties[name] = FionaConverter.get_field_type(type(archetype_properties[name][0])) + fiona_properties[name] = FionaConverter.get_field_type( + type(archetype_properties[name][0])) - fiona_schema = {'geometry':'MultiPolygon', - 'properties':fiona_properties} - fiona_meta = {'schema':fiona_schema,'driver':'ESRI Shapefile'} + fiona_schema = {'geometry': 'MultiPolygon', 'properties': fiona_properties} + fiona_meta = {'schema': fiona_schema, 'driver': 'ESRI Shapefile'} else: fiona_meta = coll.meta - - ## always use the CRS from the collection. shapefile metadata - ## will always be WGS84, but it may be overloaded in the - ## operations. + + # always use the CRS from the collection. shapefile metadata will always be WGS84, but it may be + # overloaded in the operations. fiona_meta['crs'] = coll.crs.value - ## selection geometries will always come out as MultiPolygon - ## regardless if they began as points. points are buffered - ## during the subsetting process. + # selection geometries will always come out as MultiPolygon regardless if they began as points. + # points are buffered during the subsetting process. 
fiona_meta['schema']['geometry'] = 'MultiPolygon' - fiona_object = fiona.open(fiona_path,'w',**fiona_meta) - # csv_file = open(csv_path,'w') - - # from ocgis.conv.csv_ import OcgDialect - # csv_object = DictWriter(csv_file,fiona_meta['schema']['properties'].keys(),dialect=OcgDialect) - # csv_object.writeheader() - + fiona_object = fiona.open(fiona_path, 'w', **fiona_meta) + build = False - self._write_coll_(f,coll) + self._write_coll_(f, coll) if write_ugeom: - ## write the overview geometries to disk + # write the overview geometries to disk r_geom = coll.geoms.values()[0] - if isinstance(r_geom,Polygon): + if isinstance(r_geom, Polygon): r_geom = MultiPolygon([r_geom]) - ## see if this geometry is in the unique geometry store + #see if this geometry is in the unique geometry store should_append = self._get_should_append_to_unique_geometry_store_( - unique_geometry_store, - r_geom, - coll.properties.values()[0]['UGID']) + unique_geometry_store, + r_geom, + coll.properties.values()[0]['UGID']) if should_append: - unique_geometry_store.append({'geom':r_geom, - 'ugid':coll.properties.values()[0]['UGID']}) - - ## if it is unique write the geometry to the output files - coll.write_ugeom(fobject=fiona_object) - - # ## write the geometry attributes to the corresponding shapefile - # csv_object.writerow(properties_to_append) + unique_geometry_store.append({'geom': r_geom, 'ugid': coll.properties.values()[0]['UGID']}) + # if it is unique write the geometry to the output files + coll.write_ugeom(fobject=fiona_object) finally: - - ## errors are masked if the processing failed and file objects, etc. - ## were not properly created. if there are UnboundLocalErrors pass - ## them through to capture the error that lead to the objects not - ## being created. - + + # errors are masked if the processing failed and file objects, etc. were not properly created. if there are + # UnboundLocalErrors pass them through to capture the error that lead to the objects not being created. 
+ try: try: self._finalize_(f) except UnboundLocalError: pass except Exception as e: - ## this the exception we want to log - ocgis_lh(exc=e,logger=self._log) + # this the exception we want to log + ocgis_lh(exc=e, logger=self._log) finally: if write_ugeom: try: fiona_object.close() except UnboundLocalError: pass - # try: - # csv_file.close() - # except UnboundLocalError: - # pass - - ## the metadata and dataset descriptor files may only be written if - ## OCGIS operations are present. + + # the metadata and dataset descriptor files may only be written if OCGIS operations are present. if self.ops is not None and self.add_auxiliary_files == True: - ## added OCGIS metadata output if requested. + # added OCGIS metadata output if requested. if self.add_meta: - ocgis_lh('adding OCGIS metadata file','conv',logging.DEBUG) + ocgis_lh('adding OCGIS metadata file', 'conv', logging.DEBUG) lines = MetaConverter(self.ops).write() - out_path = os.path.join(self.outdir,self.prefix+'_'+MetaConverter._meta_filename) - with open(out_path,'w') as f: + out_path = os.path.join(self.outdir, self.prefix + '_' + MetaConverter._meta_filename) + with open(out_path, 'w') as f: f.write(lines) - + # add the dataset descriptor file if requested if self._add_did_file: ocgis_lh('writing dataset description (DID) file', 'conv', logging.DEBUG) @@ -236,7 +220,9 @@ def write(self): except NotImplementedError: if isinstance(rd, Field): for variable in rd.variables.itervalues(): - row = [rd.uid, variable.name, variable.alias, None, variable.attrs.get('standard_name'), variable.units, variable.attrs.get('long_name')] + row = [rd.uid, variable.name, variable.alias, None, + variable.attrs.get('standard_name'), variable.units, + variable.attrs.get('long_name')] writer.writerow(row) else: raise @@ -259,11 +245,11 @@ def write(self): with open(out_path, 'w') as f: f.writelines('\n'.join(to_write)) - ## return the internal path unless overloaded by subclasses. 
+ # return the internal path unless overloaded by subclasses. ret = self._get_return_() - + return ret - + @classmethod def get_converter_map(cls): """ @@ -288,18 +274,20 @@ def get_converter_map(cls): constants.OUTPUT_FORMAT_METADATA: MetaConverter} return mmap - + @classmethod - def get_converter(cls,output_format): - '''Return the converter based on output extensions or key. - + def get_converter(cls, output_format): + """ + Return the converter based on output extensions or key. + output_format :: str - + returns - - AbstractConverter''' - - return(cls.get_converter_map()[output_format]) + + AbstractConverter + """ + + return cls.get_converter_map()[output_format] @classmethod def validate_ops(cls, ops): diff --git a/src/ocgis/test/test_ocgis/test_api/test_operations.py b/src/ocgis/test/test_ocgis/test_api/test_operations.py index 98eff5fed..615742dd7 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_operations.py +++ b/src/ocgis/test/test_ocgis/test_api/test_operations.py @@ -8,6 +8,7 @@ from datetime import datetime as dt import datetime +from ocgis.api.request.base import RequestDataset from ocgis.api.parms.definition import RegridOptions, OutputFormat from ocgis.interface.base.crs import CFWGS84 from ocgis.test.base import TestBase, attr @@ -17,7 +18,7 @@ from ocgis.api.operations import OcgOperations from ocgis.util.helpers import make_poly import ocgis -from ocgis.util.shp_cabinet import ShpCabinetIterator +from ocgis.util.shp_cabinet import ShpCabinetIterator, ShpCabinet class TestOcgOperations(TestBase): @@ -484,3 +485,9 @@ def test_validate(self): field = self.test_data.get_rd('cancm4_tas').get() ops = OcgOperations(dataset=field, snippet=True) self.assertTrue(ops.snippet) + + # test driver validation is called appropriately + path = ShpCabinet().get_shp_path('state_boundaries') + rd = RequestDataset(path) + with self.assertRaises(DefinitionValidationError): + OcgOperations(dataset=rd, output_format='csv') diff --git 
a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_base.py b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_base.py index 6e864aec6..063e3b57c 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_base.py +++ b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_base.py @@ -1,10 +1,41 @@ from copy import deepcopy +from ocgis import OcgOperations from ocgis.api.request.driver.base import AbstractDriver from ocgis.api.request.driver.nc import DriverNetcdf +from ocgis.exc import DefinitionValidationError from ocgis.interface.base.crs import CFWGS84 from ocgis.test.base import TestBase +class FakeAbstractDriver(AbstractDriver): + output_formats = ['shp'] + key = 'fake_driver' + + def _get_field_(self, **kwargs): + pass + + def inspect(self): + pass + + def get_crs(self): + pass + + def get_source_metadata(self): + pass + + def extensions(self): + pass + + def open(self): + pass + + def get_dimensioned_variables(self): + pass + + def close(self, obj): + pass + + class TestAbstractDriver(TestBase): def test_get_field(self): @@ -35,4 +66,18 @@ def test_eq(self): self.assertEqual(d, deepcopy(d)) d2.key = 'bad' - self.assertNotEqual(d, d2) \ No newline at end of file + self.assertNotEqual(d, d2) + + def test_validate_ops(self): + rd = self.test_data.get_rd('cancm4_tas') + ops = OcgOperations(dataset=rd) + + with self.assertRaises(DefinitionValidationError): + FakeAbstractDriver.validate_ops(ops) + + prev = FakeAbstractDriver.output_formats + FakeAbstractDriver.output_formats = 'all' + try: + FakeAbstractDriver.validate_ops(ops) + finally: + FakeAbstractDriver.output_formats = prev \ No newline at end of file From c182b063a35a3cac453caace614bfb3b5ad06caa Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Tue, 6 Jan 2015 16:33:08 -0700 Subject: [PATCH 52/71] add ugrid netcdf output format #348 The 2d flexible mesh output was added with documentation. 
closes #348 --- doc/api.rst | 21 +-- doc/appendix.rst | 12 ++ doc/examples.rst | 9 +- .../to_ugrid_2d_flexible_mesh.py | 16 +++ src/ocgis/api/operations.py | 60 +-------- src/ocgis/api/parms/definition.py | 2 +- src/ocgis/api/request/driver/vector.py | 56 ++++---- src/ocgis/constants.py | 1 + src/ocgis/conv/base.py | 27 ++-- src/ocgis/conv/nc.py | 123 +++++++++++++++++- src/ocgis/interface/base/dimension/spatial.py | 66 +++++----- .../test_ocgis/test_api/test_operations.py | 13 +- .../test_api/test_parms/test_definition.py | 2 +- .../test_request/test_driver/test_vector.py | 13 +- .../test/test_ocgis/test_api/test_subset.py | 5 +- .../test/test_ocgis/test_conv/test_base.py | 9 +- .../test/test_ocgis/test_conv/test_nc.py | 79 ++++++++++- .../test_base/test_dimension/test_spatial.py | 79 ++++++++++- src/ocgis/test/test_simple/test_simple.py | 2 +- 19 files changed, 451 insertions(+), 144 deletions(-) create mode 100644 doc/sphinx_examples/to_ugrid_2d_flexible_mesh.py diff --git a/doc/api.rst b/doc/api.rst index 0d98a1af4..441190d64 100644 --- a/doc/api.rst +++ b/doc/api.rst @@ -253,16 +253,17 @@ Value Description output_format ~~~~~~~~~~~~~ -====================== =============================================================================================== -Value Description -====================== =============================================================================================== -``'numpy'`` (default) Return a :class:`~ocgis.SpatialCollection` with keys matching `ugid` (see `geom`_). -``'shp'`` A shapefile representation of the data. -``'csv'`` A CSV file representation of the data. -``'csv-shp'`` In addition to a CSV representation, shapefiles with primary key links to the CSV are provided. -``'nc'`` A NetCDF4 file. -``'geojson'`` A GeoJSON representation of the data. 
-====================== =============================================================================================== +=============================== ============================================================================================================================================ +Value Description +=============================== ============================================================================================================================================ +``'numpy'`` (default) Return a :class:`~ocgis.SpatialCollection` with keys matching `ugid` (see `geom`_). +``'shp'`` A shapefile representation of the data. +``'csv'`` A CSV file representation of the data. +``'csv-shp'`` In addition to a CSV representation, shapefiles with primary key links to the CSV are provided. +``'nc'`` A NetCDF4-CF file. +``'geojson'`` A GeoJSON representation of the data. +``'nc-ugrid-2d-flexible-mesh'`` A flexible mesh representation. See :ref:`2d-flexible-mesh-label` for more details and :ref:`2d-flexible-mesh-example-label` for an example. +=============================== ============================================================================================================================================ .. _agg_selection: diff --git a/doc/appendix.rst b/doc/appendix.rst index 059afc2a2..a07657935 100644 --- a/doc/appendix.rst +++ b/doc/appendix.rst @@ -1,6 +1,18 @@ Appendix -------- +Output Formats +~~~~~~~~~~~~~~ + +.. _2d-flexible-mesh-label: + +2D Flexible Mesh +++++++++++++++++ + +The two-dimensional flexible mesh format is a NetCDF4-based format adhering to `UGRID (v0.9.0) `_ convention. It is used to represent a set of arbitrary boundaries (polygons) with no `gaps `_. Documentation for the format is here: https://github.com/ugrid-conventions/ugrid-conventions/blob/v0.9.0/ugrid-conventions.md#2d-flexible-mesh-mixed-triangles-quadrilaterals-etc-topology. 
+ +See :ref:`2d-flexible-mesh-example-label` for example code converting a shapefile to a UGRID NetCDF file. + Spatial Operations ~~~~~~~~~~~~~~~~~~ diff --git a/doc/examples.rst b/doc/examples.rst index 61703fdbd..ce6637bd0 100644 --- a/doc/examples.rst +++ b/doc/examples.rst @@ -153,4 +153,11 @@ For example, the code below will return all data from the year 2000 for the firs Regridding ---------- -.. literalinclude:: sphinx_examples/regridding.py \ No newline at end of file +.. literalinclude:: sphinx_examples/regridding.py + +.. _2d-flexible-mesh-example-label: + +Converting an ESRI Shapefile to UGRID +------------------------------------- + +.. literalinclude:: sphinx_examples/to_ugrid_2d_flexible_mesh.py \ No newline at end of file diff --git a/doc/sphinx_examples/to_ugrid_2d_flexible_mesh.py b/doc/sphinx_examples/to_ugrid_2d_flexible_mesh.py new file mode 100644 index 000000000..fe7de70a6 --- /dev/null +++ b/doc/sphinx_examples/to_ugrid_2d_flexible_mesh.py @@ -0,0 +1,16 @@ +import tempfile + +import ocgis +from ocgis.constants import OUTPUT_FORMAT_NETCDF_UGRID_2D_FLEXIBLE_MESH + + + +# This is the input shapefile with no gaps between the polygons. Multipolygons not allowed! +SHP = '/path/to/no_gaps/shapefile.shp' +# Write the data to a temporary directory. 
+ocgis.env.DIR_OUTPUT = tempfile.gettempdir() + + +rd = ocgis.RequestDataset(uri=SHP) +ops = ocgis.OcgOperations(dataset=rd, output_format=OUTPUT_FORMAT_NETCDF_UGRID_2D_FLEXIBLE_MESH) +ret = ops.execute() \ No newline at end of file diff --git a/src/ocgis/api/operations.py b/src/ocgis/api/operations.py index 849d87363..7be2f9d35 100644 --- a/src/ocgis/api/operations.py +++ b/src/ocgis/api/operations.py @@ -4,10 +4,8 @@ from ocgis import env from ocgis.api.parms.base import OcgParameter from ocgis.conv.meta import MetaConverter -from ocgis.calc.base import AbstractMultivariateFunction, AbstractKeyedOutputFunction from ocgis.interface.base.crs import CFRotatedPole, WGS84 from ocgis.api.subset import SubsetOperation -from ocgis.calc.engine import OcgCalculationEngine class OcgOperations(object): @@ -352,60 +350,16 @@ def _raise_(msg, obj=OutputFormat): for rd in self.dataset.iter_request_datasets(): rd.driver.validate_ops(self) + # validate the converter + converter_klass = AbstractConverter.get_converter(self.output_format) + converter_klass.validate_ops(self) + # no regridding with a spatial operation of clip if self.regrid_destination is not None: if self.spatial_operation == 'clip': msg = 'Regridding not allowed with spatial "clip" operation.' raise DefinitionValidationError(SpatialOperation, msg) - # there are a bunch of constraints on the netCDF format - if self.output_format == 'nc': - # we can only write one requestdataset to netCDF - if len(self.dataset) > 1 and self.calc is None: - msg = ('Data packages (i.e. more than one RequestDataset) may not be written to netCDF. ' - 'There are currently {dcount} RequestDatasets. Note, this is different than a ' - 'multifile dataset.'.format(dcount=len(self.dataset))) - _raise_(msg, OutputFormat) - # we can write multivariate functions to netCDF however - else: - if self.calc is not None and len(self.dataset) > 1: - # count the occurrences of these classes in the calculation list. 
- klasses_to_check = [AbstractMultivariateFunction, MultivariateEvalFunction] - multivariate_checks = [] - for klass in klasses_to_check: - for calc in self.calc: - multivariate_checks.append(issubclass(calc['ref'], klass)) - if sum(multivariate_checks) != 1: - msg = ('Data packages (i.e. more than one RequestDataset) may not be written to netCDF. ' - 'There are currently {dcount} RequestDatasets. Note, this is different than a ' - 'multifile dataset.'.format(dcount=len(self.dataset))) - _raise_(msg, OutputFormat) - else: - # there is a multivariate calculation and this requires multiple request dataset - pass - - # clipped data which creates an arbitrary geometry may not be written to netCDF - if self.spatial_operation != 'intersects': - msg = 'Only "intersects" spatial operation allowed for netCDF output. Arbitrary geometries may not currently be written.' - _raise_(msg, OutputFormat) - # data may not be aggregated either - if self.aggregate: - msg = 'Data may not be aggregated for netCDF output. The aggregate parameter must be False.' - _raise_(msg, OutputFormat) - # either the input data CRS or WGS84 is required for data output - if self.output_crs is not None and not isinstance(self.output_crs, CFWGS84): - msg = 'CFWGS84 is the only acceptable overloaded output CRS at this time for netCDF output.' - _raise_(msg, OutputFormat) - # calculations on raw values are not relevant as not aggregation can occur anyway. - if self.calc is not None: - if self.calc_raw: - msg = 'Calculations must be performed on original values (i.e. calc_raw=False) for netCDF output.' - _raise_(msg) - # no keyed output functions to netCDF - if OcgCalculationEngine._check_calculation_members_(self.calc, AbstractKeyedOutputFunction): - msg = 'Keyed function output may not be written to netCDF.' - _raise_(msg) - # collect projections for the dataset sets. None is returned if one is not parsable. the WGS84 default is # actually done in the RequestDataset object. 
projections = [] @@ -457,7 +411,7 @@ def _raise_(msg, obj=OutputFormat): # file only operations only valid for netCDF and calculations. if self.file_only: if self.output_format != 'nc': - _raise_('Only netCDF may be written with file_only as True.', obj=FileOnly) + _raise_('Only netCDF-CF may be written with file_only as "True".', obj=FileOnly) if self.calc is None: _raise_('File only outputs are only relevant for computations.', obj=FileOnly) @@ -471,7 +425,3 @@ def _raise_(msg, obj=OutputFormat): else: for c in self.calc: c['ref'].validate(self) - - # validate the converter - converter_klass = AbstractConverter.get_converter(self.output_format) - converter_klass.validate_ops(self) diff --git a/src/ocgis/api/parms/definition.py b/src/ocgis/api/parms/definition.py index 65a84e358..7d897b458 100644 --- a/src/ocgis/api/parms/definition.py +++ b/src/ocgis/api/parms/definition.py @@ -774,7 +774,7 @@ class OutputFormat(base.StringOptionParameter): default = constants.OUTPUT_FORMAT_NUMPY valid = [constants.OUTPUT_FORMAT_CSV, constants.OUTPUT_FORMAT_CSV_SHAPEFILE, constants.OUTPUT_FORMAT_GEOJSON, constants.OUTPUT_FORMAT_METADATA, constants.OUTPUT_FORMAT_NETCDF, constants.OUTPUT_FORMAT_NUMPY, - constants.OUTPUT_FORMAT_SHAPEFILE] + constants.OUTPUT_FORMAT_SHAPEFILE, constants.OUTPUT_FORMAT_NETCDF_UGRID_2D_FLEXIBLE_MESH] def __init__(self, init_value=None): if init_value == constants.OUTPUT_FORMAT_CSV_SHAPEFILE_OLD: diff --git a/src/ocgis/api/request/driver/vector.py b/src/ocgis/api/request/driver/vector.py index 2f4ca427a..549e0b4c1 100644 --- a/src/ocgis/api/request/driver/vector.py +++ b/src/ocgis/api/request/driver/vector.py @@ -1,6 +1,6 @@ import numpy as np -from ocgis import constants +from ocgis import constants from ocgis.interface.base.variable import Variable, VariableCollection from ocgis.interface.base.field import Field from ocgis.api.request.driver.base import AbstractDriver @@ -9,7 +9,7 @@ class DriverVector(AbstractDriver): extensions = ('.*\.shp',) key = 
'vector' - output_formats = [constants.OUTPUT_FORMAT_NUMPY] + output_formats = [constants.OUTPUT_FORMAT_NUMPY, constants.OUTPUT_FORMAT_NETCDF_UGRID_2D_FLEXIBLE_MESH] def close(self, obj): pass @@ -29,26 +29,7 @@ def get_source_metadata(self): self.close(data) def inspect(self): - from ocgis import CoordinateReferenceSystem - - meta = self.rd.source_metadata - try: - ds = self.open() - n = len(ds) - finally: - self.close(ds) - - lines = [] - lines.append('') - lines.append('URI = {0}'.format(self.rd.uri)) - lines.append('') - lines.append('Geometry Type: {0}'.format(meta['schema']['geometry'])) - lines.append('Geometry Count: {0}'.format(n)) - lines.append('CRS: {0}'.format(CoordinateReferenceSystem(value=meta['crs']).value)) - lines.append('Properties:') - for k, v in meta['schema']['properties'].iteritems(): - lines.append(' {0} {1}'.format(v, k)) - lines.append('') + lines = self._inspect_get_lines_() for line in lines: print line @@ -56,9 +37,10 @@ def open(self): from ocgis import ShpCabinetIterator return ShpCabinetIterator(path=self.rd.uri) - def _get_field_(self): + def _get_field_(self, format_time=None): #todo: option to pass select_ugid #todo: option for time dimension and time subsetting + #todo: remove format_time option - there for compatibility with the netCDF driver from ocgis import SpatialDimension ds = self.open() try: @@ -76,3 +58,31 @@ def _get_field_(self): return field finally: self.close(ds) + + def _inspect_get_lines_(self): + """ + :returns: A sequence of strings suitable for printing or writing to file. + :rtype: [str, ...] 
+ """ + + from ocgis import CoordinateReferenceSystem + + meta = self.rd.source_metadata + try: + ds = self.open() + n = len(ds) + finally: + self.close(ds) + lines = [] + lines.append('') + lines.append('URI = {0}'.format(self.rd.uri)) + lines.append('') + lines.append('Geometry Type: {0}'.format(meta['schema']['geometry'])) + lines.append('Geometry Count: {0}'.format(n)) + lines.append('CRS: {0}'.format(CoordinateReferenceSystem(value=meta['crs']).value)) + lines.append('Properties:') + for k, v in meta['schema']['properties'].iteritems(): + lines.append(' {0} {1}'.format(v, k)) + lines.append('') + + return lines diff --git a/src/ocgis/constants.py b/src/ocgis/constants.py index 0e06223e0..88e60264b 100644 --- a/src/ocgis/constants.py +++ b/src/ocgis/constants.py @@ -87,6 +87,7 @@ class HEADERS(object): OUTPUT_FORMAT_GEOJSON = 'geojson' OUTPUT_FORMAT_METADATA = 'meta' OUTPUT_FORMAT_NETCDF = 'nc' +OUTPUT_FORMAT_NETCDF_UGRID_2D_FLEXIBLE_MESH = 'nc-ugrid-2d-flexible-mesh' OUTPUT_FORMAT_NUMPY = 'numpy' OUTPUT_FORMAT_SHAPEFILE = 'shp' diff --git a/src/ocgis/conv/base.py b/src/ocgis/conv/base.py index 4bfee6110..2f6173f24 100644 --- a/src/ocgis/conv/base.py +++ b/src/ocgis/conv/base.py @@ -7,6 +7,7 @@ from shapely.geometry.polygon import Polygon import fiona +from ocgis.api.request.driver.vector import DriverVector from ocgis import constants from ocgis.interface.base.field import Field from ocgis.conv.meta import MetaConverter @@ -212,7 +213,14 @@ def write(self): try: for d in rd: row = [rd.did, d['variable'], d['alias'], rd.uri] - ref_variable = rd.source_metadata['variables'][d['variable']]['attrs'] + try: + ref_variable = rd.source_metadata['variables'][d['variable']]['attrs'] + except KeyError: + if isinstance(rd.driver, DriverVector): + # not be present in metadata + ref_variable = {} + else: + raise row.append(ref_variable.get('standard_name', None)) row.append(ref_variable.get('units', None)) row.append(ref_variable.get('long_name', None)) @@ -233,15 +241,17 
@@ def write(self): out_path = os.path.join(self.outdir, self.prefix + '_source_metadata.txt') to_write = [] - for rd in self.ops.dataset.itervalues(): + for rd in self.ops.dataset.iter_request_datasets(): try: metadata = rd.source_metadata - except AttributeError: - # assume field object and do not write anything - continue - else: ip = Inspect(meta=metadata, uri=rd.uri) to_write += ip.get_report_no_variable() + except AttributeError: + if isinstance(rd.driver, DriverVector): + to_write += rd.driver._inspect_get_lines_() + else: + raise + with open(out_path, 'w') as f: f.writelines('\n'.join(to_write)) @@ -261,7 +271,7 @@ def get_converter_map(cls): from ocgis.conv.fiona_ import ShpConverter, GeoJsonConverter from ocgis.conv.csv_ import CsvConverter, CsvShapefileConverter from ocgis.conv.numpy_ import NumpyConverter - from ocgis.conv.nc import NcConverter + from ocgis.conv.nc import NcConverter, NcUgrid2DFlexibleMeshConverter mmap = {constants.OUTPUT_FORMAT_SHAPEFILE: ShpConverter, constants.OUTPUT_FORMAT_CSV: CsvConverter, @@ -271,7 +281,8 @@ def get_converter_map(cls): # 'shpidx':ShpIdxConverter, # 'keyed':KeyedConverter, constants.OUTPUT_FORMAT_NETCDF: NcConverter, - constants.OUTPUT_FORMAT_METADATA: MetaConverter} + constants.OUTPUT_FORMAT_METADATA: MetaConverter, + constants.OUTPUT_FORMAT_NETCDF_UGRID_2D_FLEXIBLE_MESH: NcUgrid2DFlexibleMeshConverter} return mmap diff --git a/src/ocgis/conv/nc.py b/src/ocgis/conv/nc.py index 1053d8765..1cd12b681 100644 --- a/src/ocgis/conv/nc.py +++ b/src/ocgis/conv/nc.py @@ -1,14 +1,73 @@ import netCDF4 as nc import datetime +from ocgis.api.request.driver.vector import DriverVector +from ocgis.calc.base import AbstractMultivariateFunction, AbstractKeyedOutputFunction import ocgis +from ocgis.api.parms.definition import OutputFormat +from ocgis.calc.engine import OcgCalculationEngine +from ocgis.calc.eval_function import MultivariateEvalFunction from ocgis.conv.base import AbstractConverter from ocgis import constants +from 
ocgis.exc import DefinitionValidationError +from ocgis.interface.base.crs import CFWGS84 class NcConverter(AbstractConverter): _ext = 'nc' + @classmethod + def validate_ops(cls, ops): + + def _raise_(msg, ocg_arugument=OutputFormat): + raise DefinitionValidationError(ocg_arugument, msg) + + # we can only write one requestdataset to netCDF + if len(ops.dataset) > 1 and ops.calc is None: + msg = ('Data packages (i.e. more than one RequestDataset) may not be written to netCDF. ' + 'There are currently {dcount} RequestDatasets. Note, this is different than a ' + 'multifile dataset.'.format(dcount=len(ops.dataset))) + _raise_(msg, OutputFormat) + # we can write multivariate functions to netCDF however + else: + if ops.calc is not None and len(ops.dataset) > 1: + # count the occurrences of these classes in the calculation list. + klasses_to_check = [AbstractMultivariateFunction, MultivariateEvalFunction] + multivariate_checks = [] + for klass in klasses_to_check: + for calc in ops.calc: + multivariate_checks.append(issubclass(calc['ref'], klass)) + if sum(multivariate_checks) != 1: + msg = ('Data packages (i.e. more than one RequestDataset) may not be written to netCDF. ' + 'There are currently {dcount} RequestDatasets. Note, this is different than a ' + 'multifile dataset.'.format(dcount=len(ops.dataset))) + _raise_(msg, OutputFormat) + else: + # there is a multivariate calculation and this requires multiple request dataset + pass + + # clipped data which creates an arbitrary geometry may not be written to netCDF + if ops.spatial_operation != 'intersects': + msg = 'Only "intersects" spatial operation allowed for netCDF output. Arbitrary geometries may not currently be written.' + _raise_(msg, OutputFormat) + # data may not be aggregated either + if ops.aggregate: + msg = 'Data may not be aggregated for netCDF output. The aggregate parameter must be False.' 
+ _raise_(msg, OutputFormat) + # either the input data CRS or WGS84 is required for data output + if ops.output_crs is not None and not isinstance(ops.output_crs, CFWGS84): + msg = 'CFWGS84 is the only acceptable overloaded output CRS at this time for netCDF output.' + _raise_(msg, OutputFormat) + # calculations on raw values are not relevant as not aggregation can occur anyway. + if ops.calc is not None: + if ops.calc_raw: + msg = 'Calculations must be performed on original values (i.e. calc_raw=False) for netCDF output.' + _raise_(msg) + # no keyed output functions to netCDF + if OcgCalculationEngine._check_calculation_members_(ops.calc, AbstractKeyedOutputFunction): + msg = 'Keyed function output may not be written to netCDF.' + _raise_(msg) + def _finalize_(self, ds): ds.close() @@ -23,7 +82,14 @@ def _get_file_format_(self): ret = constants.NETCDF_DEFAULT_DATA_MODEL else: for rd in self.ops.dataset.iter_request_datasets(): - rr = rd.source_metadata['file_format'] + try: + rr = rd.source_metadata['file_format'] + except KeyError: + # likely a shapefile request dataset which does not have an origin netcdf data format + if isinstance(rd.driver, DriverVector): + continue + else: + raise if isinstance(rr, basestring): tu = [rr] else: @@ -38,6 +104,20 @@ def _get_file_format_(self): # likely all field objects in the dataset. use the default netcdf data model ret = constants.NETCDF_DEFAULT_DATA_MODEL return ret + + @staticmethod + def _write_archetype_(arch, dataset, is_file_only): + """ + Write a field to a netCDF dataset object. + + :param arch: The field to write. + :type arch: :class:`~ocgis.Field` + :param dataset: An open netCDF4 dataset object. + :type dataset: :class:`netCDF4.Dataset` + :param bool file_only: If ``True``, this is writing the template file only and there is no data fill. 
+ """ + + arch.write_to_netcdf_dataset(dataset, file_only=is_file_only) def _write_coll_(self, ds, coll): """ @@ -60,7 +140,7 @@ def _write_coll_(self, ds, coll): # no operations object available is_file_only = False - arch.write_to_netcdf_dataset(ds, file_only=is_file_only) + self._write_archetype_(arch, ds, is_file_only) # append to the history attribute history_str = '\n{dt} UTC ocgis-{release}'.format(dt=datetime.datetime.utcnow(), release=ocgis.__release__) @@ -68,3 +148,42 @@ def _write_coll_(self, ds, coll): history_str += ': {0}'.format(self.ops) original_history_str = ds.__dict__.get('history', '') setattr(ds, 'history', original_history_str+history_str) + + +class NcUgrid2DFlexibleMeshConverter(NcConverter): + + @classmethod + def validate_ops(cls, ops): + NcConverter.validate_ops(ops) + should_raise = False + if ops.abstraction == 'point': + should_raise = True + else: + for ds in ops.dataset.itervalues(): + try: + abstraction = ds.s_abstraction + except AttributeError: + # might be a field object + abstraction = ds.spatial.abstraction + if abstraction == 'point': + should_raise = True + break + + if should_raise: + msg = 'Only polygons may be written to "{0}"'.\ + format(constants.OUTPUT_FORMAT_NETCDF_UGRID_2D_FLEXIBLE_MESH, ops.abstraction) + raise DefinitionValidationError(OutputFormat, msg) + + @staticmethod + def _write_archetype_(arch, dataset, is_file_only): + poly = arch.spatial.geom.polygon + """:type poly: :class:`ocgis.SpatialGeometryPolygonDimension`""" + + try: + poly.write_to_netcdf_dataset_ugrid(dataset) + except AttributeError: + if poly is None: + msg = 'Field does not have a polygon representation.' 
+ raise ValueError(msg) + else: + raise \ No newline at end of file diff --git a/src/ocgis/interface/base/dimension/spatial.py b/src/ocgis/interface/base/dimension/spatial.py index de531d517..f04969dcf 100644 --- a/src/ocgis/interface/base/dimension/spatial.py +++ b/src/ocgis/interface/base/dimension/spatial.py @@ -15,9 +15,9 @@ import base from ocgis.interface.base.crs import CFWGS84, CoordinateReferenceSystem, WGS84 -from ocgis.util.helpers import iter_array, get_none_or_slice, \ - get_formatted_slice, get_reduced_slice, get_trimmed_array_by_mask,\ - get_added_slice, make_poly, set_name_attributes, get_extrapolated_corners_esmf, get_ocgis_corners_from_esmf_corners +from ocgis.util.helpers import iter_array, get_formatted_slice, get_reduced_slice, get_trimmed_array_by_mask,\ + get_added_slice, make_poly, set_name_attributes, get_extrapolated_corners_esmf, get_ocgis_corners_from_esmf_corners, \ + get_none_or_2d from ocgis import constants, env from ocgis.exc import EmptySubsetError, SpatialWrappingError, MultipleElementsFound, BoundsAlreadyAvailableError from ocgis.util.ugrid.helpers import get_update_feature, write_to_netcdf_dataset @@ -396,10 +396,16 @@ def get_intersects(self, polygon, return_indices=False, use_spatial_index=True, elif type(polygon) in (Polygon, MultiPolygon): # for a polygon subset, first the grid is subsetted by the bounds of the polygon object. the intersects # operations is then performed on the polygon/point representation as appropriate. 
- minx, miny, maxx, maxy = polygon.bounds if self.grid is None: - raise NotImplementedError + if self.geom.polygon is not None: + target_geom = self.geom.polygon + else: + target_geom = self.geom.point + masked_geom = target_geom.get_intersects_masked(polygon, use_spatial_index=use_spatial_index) + ret_slc = np.where(masked_geom.value.mask == False) + ret = ret[ret_slc[0], ret_slc[1]] else: + minx, miny, maxx, maxy = polygon.bounds # subset the grid by its bounding box ret.grid, slc = self.grid.get_subset_bbox(minx, miny, maxx, maxy, return_indices=True, use_bounds=use_bounds) @@ -432,14 +438,23 @@ def get_intersects(self, polygon, return_indices=False, use_spatial_index=True, ref = ret.grid.corners.mask for (ii, jj), mask_value in iter_array(grid_mask, return_value=True): ref[:, ii, jj, :] = mask_value + + # barbed and circular geometries may result in rows and or columns being entirely masked. these rows and + # columns should be trimmed. + _, adjust = get_trimmed_array_by_mask(ret.get_mask(), return_adjustments=True) + # use the adjustments to trim the returned data object + ret = ret[adjust['row'], adjust['col']] + + # adjust the returned slices + if return_indices and not select_nearest: + ret_slc = [None, None] + ret_slc[0] = get_added_slice(slc[0], adjust['row']) + ret_slc[1] = get_added_slice(slc[1], adjust['col']) + else: raise NotImplementedError + assert not self.uid.mask.any() - # barbed and circular geometries may result in rows and or columns being entirely masked. these rows and columns - # should be trimmed. 
- _, adjust = get_trimmed_array_by_mask(ret.get_mask(), return_adjustments=True) - # use the adjustments to trim the returned data object - ret = ret[adjust['row'], adjust['col']] if select_nearest: if self.geom.polygon is not None and self.abstraction in ['polygon', None]: @@ -452,15 +467,9 @@ def get_intersects(self, polygon, return_indices=False, use_spatial_index=True, distances[centroid.distance(geom)] = select_nearest_index select_nearest_index = distances[min(distances.keys())] ret = ret[select_nearest_index[0], select_nearest_index[1]] + ret_slc = np.where(self.uid.data == ret.uid.data) if return_indices: - # adjust the returned slices if necessary - if select_nearest: - ret_slc = select_nearest_index - else: - ret_slc = [None, None] - ret_slc[0] = get_added_slice(slc[0], adjust['row']) - ret_slc[1] = get_added_slice(slc[1], adjust['col']) ret = (ret, tuple(ret_slc)) return ret @@ -1052,11 +1061,6 @@ def get_highest_order_abstraction(self): def get_iter(self): raise NotImplementedError - def _get_slice_(self, state, slc): - state._point = get_none_or_slice(state._point, slc) - state._polygon = get_none_or_slice(state._polygon, slc) - return state - def _get_uid_(self): if self._point is not None: ret = self._point.uid @@ -1220,6 +1224,10 @@ def _format_private_value_(self, value): ret = self._get_none_or_array_(ret, masked=True) return ret + def _format_slice_state_(self, state, slc): + state._value = get_none_or_2d(state._value) + return state + def _get_geometry_fill_(self, shape=None): if shape is None: shape = (self.grid.shape[0], self.grid.shape[1]) @@ -1274,15 +1282,15 @@ def __init__(self, *args, **kwargs): @property def area(self): r_value = self.value - fill = np.ones(r_value.shape,dtype=constants.NP_FLOAT) - fill = np.ma.array(fill,mask=r_value.mask) - for (ii,jj),geom in iter_array(r_value,return_value=True): - fill[ii,jj] = geom.area - return(fill) + fill = np.ones(r_value.shape, dtype=constants.NP_FLOAT) + fill = np.ma.array(fill, 
mask=r_value.mask) + for (ii, jj), geom in iter_array(r_value, return_value=True): + fill[ii, jj] = geom.area + return fill @property def weights(self): - return(self.area/self.area.max()) + return self.area/self.area.max() def write_to_netcdf_dataset_ugrid(self, dataset): """ @@ -1293,7 +1301,7 @@ def write_to_netcdf_dataset_ugrid(self, dataset): """ def _iter_features_(): - for ctr, geom in enumerate(self.value.data.flat): + for ctr, geom in enumerate(self.value.compressed()): yld = {'geometry': {'type': geom.geom_type, 'coordinates': [np.array(geom.exterior.coords).tolist()]}} yld = get_update_feature(ctr, yld) yield yld diff --git a/src/ocgis/test/test_ocgis/test_api/test_operations.py b/src/ocgis/test/test_ocgis/test_api/test_operations.py index 615742dd7..4132b5a61 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_operations.py +++ b/src/ocgis/test/test_ocgis/test_api/test_operations.py @@ -372,13 +372,20 @@ def test_keyword_output_format_esmpy(self): def test_keyword_output_format_nc_package_validation_raised_first(self): rd = self.test_data.get_rd('cancm4_tas') - rd2 = self.test_data.get_rd('rotated_pole_ichec',kwds={'alias':'tas2'}) + rd2 = self.test_data.get_rd('rotated_pole_ichec', kwds={'alias': 'tas2'}) try: - ocgis.OcgOperations(dataset=[rd,rd2],output_format='nc') + ocgis.OcgOperations(dataset=[rd, rd2], output_format=constants.OUTPUT_FORMAT_NETCDF) except DefinitionValidationError as e: self.assertIn('Data packages (i.e. 
more than one RequestDataset) may not be written to netCDF.', e.message) - pass + + def test_keyword_output_format_nc_2d_flexible_mesh_ugrid(self): + rd = self.test_data.get_rd('cancm4_tas') + output = constants.OUTPUT_FORMAT_NETCDF_UGRID_2D_FLEXIBLE_MESH + ops = OcgOperations(dataset=rd, geom='state_boundaries', select_ugid=[25], output_format=output) + ret = ops.execute() + with self.nc_scope(ret) as ds: + self.assertEqual(len(ds.dimensions['nMesh2_face']), 13) def test_keyword_regrid_destination(self): """Test regridding not allowed with clip operation.""" diff --git a/src/ocgis/test/test_ocgis/test_api/test_parms/test_definition.py b/src/ocgis/test/test_ocgis/test_api/test_parms/test_definition.py index 3f76793b6..7c37d3f97 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_parms/test_definition.py +++ b/src/ocgis/test/test_ocgis/test_api/test_parms/test_definition.py @@ -633,7 +633,7 @@ def test_init_esmpy(self): self.assertEqual(oo.value, constants.OUTPUT_FORMAT_ESMPY_GRID) def test_valid(self): - self.assertAsSetEqual(OutputFormat.valid, ['csv', 'csv-shp', 'geojson', 'meta', 'nc', 'numpy', 'shp']) + self.assertAsSetEqual(OutputFormat.valid, ['csv', 'csv-shp', 'geojson', 'meta', 'nc', 'numpy', 'shp', constants.OUTPUT_FORMAT_NETCDF_UGRID_2D_FLEXIBLE_MESH]) class TestRegridDestination(TestBase): diff --git a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_vector.py b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_vector.py index 0d0ed4794..24c0f1bd9 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_vector.py +++ b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_vector.py @@ -64,12 +64,17 @@ def test_get_source_metadata(self): self.assertIsInstance(meta, dict) self.assertTrue(len(meta) > 2) + def test_inspect(self): + driver = self.get_driver() + driver.inspect() + + def test_inspect_get_lines(self): + driver = self.get_driver() + lines = driver._inspect_get_lines_() + 
self.assertTrue(len(lines) > 5) + def test_open(self): driver = self.get_driver() sci = driver.open() self.assertIsInstance(sci, ShpCabinetIterator) self.assertFalse(sci.as_spatial_dimension) - - def test_inspect(self): - driver = self.get_driver() - driver.inspect() diff --git a/src/ocgis/test/test_ocgis/test_api/test_subset.py b/src/ocgis/test/test_ocgis/test_api/test_subset.py index 13314a7ea..5c32c7bec 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_subset.py +++ b/src/ocgis/test/test_ocgis/test_api/test_subset.py @@ -90,7 +90,10 @@ def test_dataset_as_field(self): ops = OcgOperations(dataset=field, output_format=k.output_format, prefix=str(ii)) try: ret = ops.execute() - except ValueError as ve: + except ValueError: + if k.output_format == constants.OUTPUT_FORMAT_NETCDF_UGRID_2D_FLEXIBLE_MESH: + self.assertIsNone(field.spatial.geom.polygon) + continue self.assertIsNone(k.crs) self.assertIn(k.output_format, [constants.OUTPUT_FORMAT_CSV, constants.OUTPUT_FORMAT_CSV_SHAPEFILE, constants.OUTPUT_FORMAT_GEOJSON, constants.OUTPUT_FORMAT_SHAPEFILE]) diff --git a/src/ocgis/test/test_ocgis/test_conv/test_base.py b/src/ocgis/test/test_ocgis/test_conv/test_base.py index f0b7ce6ca..f8f78953f 100644 --- a/src/ocgis/test/test_ocgis/test_conv/test_base.py +++ b/src/ocgis/test/test_ocgis/test_conv/test_base.py @@ -5,12 +5,14 @@ import tempfile import numpy as np +from ocgis import constants +from ocgis.conv.esmpy import AbstractConverter from ocgis.test.base import TestBase, nc_scope from ocgis.api.collection import SpatialCollection from ocgis.conv.csv_ import CsvConverter, CsvShapefileConverter import ocgis from ocgis.conv.fiona_ import ShpConverter, GeoJsonConverter -from ocgis.conv.nc import NcConverter +from ocgis.conv.nc import NcConverter, NcUgrid2DFlexibleMeshConverter class AbstractTestConverter(TestBase): @@ -71,6 +73,10 @@ def run_overwrite_true_tst(self,Converter,include_ops=False): ## if the file is overwritten the modification time will be more recent! 
self.assertTrue(all([m2 > m for m2,m in zip(mtimes2,mtimes)])) + def test_get_converter_map(self): + self.assertEqual(AbstractConverter.get_converter_map()[constants.OUTPUT_FORMAT_NETCDF_UGRID_2D_FLEXIBLE_MESH], + NcUgrid2DFlexibleMeshConverter) + def test_multiple_variables(self): conv_klasses = [CsvConverter, NcConverter] rd = self.test_data.get_rd('cancm4_tas') @@ -131,4 +137,3 @@ def test_add_auxiliary_files_csv_shp(self): def test_add_auxiliary_files_shp(self): self.run_auxiliary_file_tst(ShpConverter,['ocgis_output.dbf', 'ocgis_output.shx', 'ocgis_output.shp', 'ocgis_output.cpg', 'ocgis_output.prj']) - \ No newline at end of file diff --git a/src/ocgis/test/test_ocgis/test_conv/test_nc.py b/src/ocgis/test/test_ocgis/test_conv/test_nc.py index a8a20bd9b..a0da735da 100644 --- a/src/ocgis/test/test_ocgis/test_conv/test_nc.py +++ b/src/ocgis/test/test_ocgis/test_conv/test_nc.py @@ -1,9 +1,14 @@ +from copy import deepcopy import numpy as np +import os +from ocgis.util.shp_cabinet import ShpCabinet +from ocgis.api.request.base import RequestDataset +from ocgis.exc import DefinitionValidationError from ocgis.test.base import nc_scope from ocgis.util.itester import itr_products_keywords from ocgis.api.operations import OcgOperations -from ocgis.conv.nc import NcConverter +from ocgis.conv.nc import NcConverter, NcUgrid2DFlexibleMeshConverter from ocgis.test.test_ocgis.test_conv.test_base import AbstractTestConverter import ocgis from ocgis import constants @@ -57,6 +62,15 @@ def test_get_file_format(self): with nc_scope(rd.uri) as ds: self.assertEqual(file_format, ds.file_format) + # use a shapefile as the input format + path = ShpCabinet().get_shp_path('state_boundaries') + rd = RequestDataset(uri=path) + of = constants.OUTPUT_FORMAT_NETCDF_UGRID_2D_FLEXIBLE_MESH + ops = OcgOperations(dataset=rd, output_format=of) + conv = NcConverter([coll], self.current_dir_output, 'foo', ops=ops) + file_format = conv._get_file_format_() + self.assertEqual(file_format, 
constants.NETCDF_DEFAULT_DATA_MODEL) + def test_write_coll(self): # use a field as the input dataset field = self.get_field() @@ -89,3 +103,66 @@ def test_write_coll(self): self.assertIn('ocgis', ds.history) if k.with_ops: self.assertIn('OcgOperations', ds.history) + + +class TestNcUgrid2DFlexibleMeshConverter(AbstractTestConverter): + + def test_init(self): + self.assertEqual(NcUgrid2DFlexibleMeshConverter.__bases__, (NcConverter,)) + + def test_validate_ops(self): + rd1 = self.test_data.get_rd('cancm4_tas') + rd2 = deepcopy(rd1) + rd2.alias = 'tas2' + output_format = constants.OUTPUT_FORMAT_NETCDF_UGRID_2D_FLEXIBLE_MESH + with self.assertRaises(DefinitionValidationError): + OcgOperations(dataset=[rd1, rd2], output_format=output_format) + with self.assertRaises(DefinitionValidationError): + OcgOperations(dataset=rd1, output_format=output_format, file_only=True) + with self.assertRaises(DefinitionValidationError): + OcgOperations(dataset=rd1, output_format=output_format, abstraction='point') + + uri = self.test_data.get_uri('cancm4_tas') + rd = RequestDataset(uri=uri, s_abstraction='point') + with self.assertRaises(DefinitionValidationError): + OcgOperations(dataset=rd, output_format=output_format) + + rd = RequestDataset(uri=uri, s_abstraction='point').get() + with self.assertRaises(DefinitionValidationError): + OcgOperations(dataset=rd, output_format=output_format) + + def test_write_archetype(self): + rd = self.test_data.get_rd('cancm4_tas') + coll = OcgOperations(dataset=rd, geom='state_boundaries', select_ugid=[25]).execute() + field = coll[25]['tas'] + path = os.path.join(self.current_dir_output, 'foo.nc') + with self.nc_scope(path, 'w') as ds: + NcUgrid2DFlexibleMeshConverter._write_archetype_(field, ds, None) + with self.nc_scope(path) as ds: + self.assertEqual(len(ds.dimensions['nMesh2_face']), 13) + + # test with the polygons as none + field.spatial.geom._polygon = None + field.spatial.geom.grid = None + with self.assertRaises(ValueError): + 
NcUgrid2DFlexibleMeshConverter._write_archetype_(field, None, None) + + def test_write_archetype_from_shapefile(self): + """Test writing from a shapefile.""" + + uri = ShpCabinet().get_shp_path('state_boundaries') + rd = RequestDataset(uri) + field = rd.get() + sub = field[0, 0, 0, 0, [15, 33]] + coll = sub.as_spatial_collection() + conv = NcUgrid2DFlexibleMeshConverter([coll], outdir=self.current_dir_output, prefix='foo') + ret = conv.write() + with self.nc_scope(ret) as ds: + self.assertEqual(len(ds.dimensions['nMesh2_face']), 2) + + of = constants.OUTPUT_FORMAT_NETCDF_UGRID_2D_FLEXIBLE_MESH + for a in [True, False]: + ops = OcgOperations(dataset=rd, slice=[0, 0, 0, 0, 15], output_format=of, add_auxiliary_files=a) + ret = ops.execute() + with self.nc_scope(ret) as ds: + self.assertEqual(len(ds.dimensions['nMesh2_face']), 1) diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py index 32c26bd0b..200f2f607 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py @@ -4,13 +4,13 @@ import itertools import numpy as np -from shapely import wkt +from shapely import wkt, wkb import fiona from fiona.crs import from_epsg from shapely.geometry import shape, mapping, Polygon, MultiPoint from shapely.geometry.point import Point -from ocgis import constants, ShpCabinet +from ocgis import constants, ShpCabinet, RequestDataset from ocgis.interface.base.dimension.spatial import SpatialDimension, SpatialGeometryDimension, \ SpatialGeometryPolygonDimension, SpatialGridDimension, SpatialGeometryPointDimension, SingleElementRetriever from ocgis.util.helpers import iter_array, make_poly @@ -541,6 +541,71 @@ def test_get_geom_iter(self): with self.assertRaises(ValueError): list(sdim.get_geom_iter(target='polygon')) + def 
test_get_intersects_no_grid(self): + """ + Test an intersects operation without a grid. + """ + + #todo: test field get_intersects + #todo: test with point only + #todo: test with point and polygon + + poly = 'POLYGON((-94.32738470163435807 42.05244651522352228,-93.32425350681413079 37.7891389372375528,-90.87912121943982413 37.03679054112238589,-88.6220760310943092 35.97096364662588996,-84.04528995472702491 36.9113991417698486,-80.97320067059007442 38.03992173594261317,-81.03589637026632886 38.60418303302898835,-83.98259425505075626 37.91453033659008298,-86.55311794177760021 37.47566043885623088,-86.74120504080639193 38.79227013205778007,-87.36816203756902155 37.28757333982743916,-90.18946852300092587 37.03679054112238589,-93.13616640778533906 41.67627231716593883,-94.64086320001567287 44.93644870033168104,-98.15182238188647545 45.50070999741805622,-95.83208149386470609 44.4348831029215674,-94.95434169839700189 42.24053361425231401,-94.95434169839700189 42.24053361425231401,-94.32738470163435807 42.05244651522352228))' + poly = wkt.loads(poly) + path = ShpCabinet().get_shp_path('state_boundaries') + field = RequestDataset(path).get() + sdim = field.spatial + """:type sdim: :class:`ocgis.SpatialDimension`""" + actual_ret_slc = [[0, 0, 0, 0, 0, 0, 0, 0, 0], [5, 10, 13, 21, 26, 29, 32, 35, 39]] + actual_ret_slc = [np.array(xx) for xx in actual_ret_slc] + + keywords = dict(ctr=range(2), return_indices=[True, False], select_nearest=[False, True]) + + for k in self.iter_product_keywords(keywords): + ret = sdim.get_intersects(poly, return_indices=k.return_indices, select_nearest=k.select_nearest) + + if k.return_indices: + ret, ret_slc = ret + if k.select_nearest: + self.assertEqual(ret_slc, (np.array([0]), np.array([26]))) + else: + for idx in range(2): + self.assertNumpyAll(ret_slc[idx], actual_ret_slc[idx]) + + if k.select_nearest: + actual_shape = (1, 1) + else: + actual_shape = (1, 9) + + for element in [ret, ret.geom, ret.geom.polygon, ret.geom.polygon.value, 
ret.uid, ret.geom.uid, + ret.geom.polygon.uid]: + self.assertEqual(element.shape, actual_shape) + ret.assert_uniform_mask() + + if k.select_nearest: + actual = 'POLYGON((-88.07159127841086388 37.51103836239961709,-88.08791027842606525 37.47632136236728684,-88.31174227863449744 37.44290336233616756,-88.35921427867872069 37.40936136230492082,-88.41989327873523052 37.42034336231515113,-88.46768627877973756 37.40080836229695649,-88.5113652788204206 37.29690536220019936,-88.50146927881120007 37.25783636216380756,-88.45073927876396169 37.20572436211528355,-88.42255527873770404 37.15696536206986167,-88.45051127876374153 37.09872736201562304,-88.47684127878827098 37.07220036199092306,-88.4907432788012045 37.06823736198722941,-88.51731727882597056 37.06482636198405345,-88.55931927886507538 37.07287136199154531,-88.61426827891625635 37.10910236202528267,-88.68842127898531658 37.1354653620498425,-88.73916627903257393 37.14123636205520995,-88.74656027903945699 37.15216136206538522,-88.8633502791482357 37.20224736211203265,-88.93256727921269089 37.21845936212713468,-88.99324027926920166 37.22008836212864935,-89.06510427933612561 37.18591136209681736,-89.11689327938435667 37.11218836202816362,-89.14641927941185884 37.09323736201051247,-89.16962127943347127 37.06428736198354557,-89.17440427943792258 37.02576236194767034,-89.15031627941549175 36.99849136192227661,-89.12993027939650403 36.98816536191265669,-89.19358427945577716 36.9868223619114076,-89.2101262794711829 37.02902336195070632,-89.23775327949691416 37.0417833619625867,-89.26413027952148127 37.08717436200486617,-89.28431127954027602 37.09129436200870344,-89.30336927955802651 37.08543436200324095,-89.30977727956400258 37.06095936198045138,-89.26431927952165779 37.02778336194955244,-89.26207627951957591 37.00873636193180971,-89.28284327953890909 36.99925736192298587,-89.31105827956518795 37.00973236193274118,-89.38302827963221375 37.04926336196955816,-89.38007027962946438 37.09913336201600487,-89.42388027967025721 
37.13725336205150995,-89.44060627968583788 37.16536736207768854,-89.46830427971163147 37.22431636213258344,-89.46539827970893555 37.25378136216002645,-89.48968327973155112 37.25605136216213964,-89.51397627975417493 37.27645236218114633,-89.5139772797541724 37.30501236220774786,-89.50067227974177797 37.32949136223054154,-89.46883327971214328 37.33945936223982187,-89.43582827968138815 37.3557663622550109,-89.4276652796737892 37.41106836230652277,-89.45371427969804756 37.45323536234579365,-89.49487727973638584 37.49177536238168074,-89.52506827976449699 37.57200636245640624,-89.51346327975369377 37.61597836249735138,-89.51927727975912319 37.6504253625294325,-89.51347127975370199 37.67989036255687552,-89.52161927976131039 37.69484836257080929,-89.58153527981708919 37.7061553625813346,-89.66656127989627123 37.74550536261798328,-89.67596027990502705 37.78402136265385991,-89.69115827991917911 37.80484636267324561,-89.72855127995400437 37.84104336270696223,-89.85182228006881644 37.90511536276663662,-89.86115328007750236 37.90553936276702984,-89.8669212800828916 37.89192836275434928,-89.90065928011429719 37.87595636273947264,-89.93798328014906929 37.87809736274147099,-89.97902328018727758 37.91193736277298854,-89.95833828016802158 37.96368636282117848,-90.01092228021698816 37.96937136282647884,-90.04203528024596892 37.99325836284872082,-90.11945328031808344 38.03232536288510346,-90.134827280332388 38.05400436290529598,-90.20764428040020277 38.08895936293784956,-90.25417728044354249 38.12222336296882474,-90.28975328047667404 38.16687036301041047,-90.3368352805205177 38.18876736303080577,-90.36488928054664882 38.23435336307325372,-90.3694662805509239 38.32361336315639022,-90.35880728054098654 38.36538336319529208,-90.33972528052321138 38.3909003632190533,-90.30195828048803719 38.4274103632530597,-90.26589928045446243 38.51874136333811549,-90.26134428045021707 38.5328203633512274,-90.24105728043132046 38.56285736337920156,-90.18381928037801742 
38.61032236342340696,-90.1836892803778909 38.65882236346857326,-90.20235028039526526 38.70041336350730887,-90.19668128038999555 38.72401536352928986,-90.16350728035909867 38.77314736357504898,-90.13528528033280907 38.78553336358658044,-90.12183428032028587 38.80055936360058411,-90.11322828031227061 38.83051536362847855,-90.13292028033060888 38.853079363649492,-90.24403828043409703 38.91455736370674856,-90.27904328046669491 38.92476536371625429,-90.3198532805047023 38.92495636371643286,-90.41318628059163132 38.96237836375128438,-90.46995728064449338 38.95922736374835438,-90.53054528070092033 38.8916593636854202,-90.57044828073809128 38.8713773636665394,-90.62733528079107259 38.88084536367534838,-90.66899928082986548 38.93530336372607792,-90.70619328086451105 39.03784136382157044,-90.70771228086591975 39.05822736384055105,-90.69052228084991896 39.09374936387363419,-90.71686028087444242 39.14425936392067484,-90.71831728087580871 39.19592136396879312,-90.73246228088899556 39.22479436399568442,-90.73820728089432919 39.2478583640171621,-90.77946928093275858 39.29685036406279153,-90.85062428099902831 39.35049936411274984,-90.94802428108972947 39.40063236415944203,-91.03647528117211607 39.44445836420025842,-91.06452128119823897 39.47403036422780076,-91.0937502812254678 39.52897336427896846,-91.1563292812837318 39.55263936430100813,-91.20338928132755996 39.60006736434517904,-91.31781228143412932 39.68596236442517977,-91.36723728148017187 39.72468536446123721,-91.37356928148605562 39.76131836449535228,-91.38186328149379278 39.80381736453493602,-91.44934028155661565 39.86309436459013966,-91.45114128155829292 39.88528836461081539,-91.43420328154252275 39.90187436462625925,-91.43054028153910906 39.92188236464490103,-91.44739528155480457 39.94611036466746157,-91.48744328159210681 40.00579836472304862,-91.50416028160768178 40.06675736477981786,-91.51628428161896522 40.134589364842995,-91.50670128161003447 40.20050436490437562,-91.49908728160295368 
40.25142236495180015,-91.48684928159154595 40.309668365006047,-91.44874728155606647 40.37194636506404777,-91.41896828152833621 40.3869193650779934,-91.38590928149754689 40.39240536508310697,-91.37290828148543653 40.40303236509299722,-91.3855512814972144 40.44729436513422627,-91.37494628148733966 40.50369736518675268,-91.3822552814941389 40.52853836520988295,-91.41302628152280363 40.54803436522804105,-91.41127128152116654 40.5730123652513015,-91.37576228148809321 40.60348036527968674,-91.26221128138234917 40.63958736531330374,-91.21506028133842392 40.64385936531728305,-91.16264428128961583 40.65635236532892094,-91.12930328125855794 40.68218936535298269,-91.12013228125002229 40.70544336537464147,-91.0928952812246564 40.76158736542693362,-91.08905028122107694 40.83376736549415398,-91.04935328118409643 40.87962336553685816,-90.98341928112269272 40.92396536557815523,-90.96085128110168228 40.95054136560290203,-90.95479428109604214 41.07039736571452693,-90.95793028109895317 41.10439336574619063,-90.99048528112928125 41.14440436578345839,-91.01840228115527509 41.1658573658034328,-91.05646628119072261 41.17629036581315205,-91.10167228123282257 41.23155236586461569,-91.10249628123359855 41.26784836589841632,-91.07342928120652914 41.33492536596089195,-91.05593528119023006 41.40140736602280924,-91.02763728116387654 41.42353636604342171,-91.00084228113892948 41.43111236605047054,-90.94980028109138459 41.42126336604130188,-90.84428428099313635 41.44465236606308167,-90.7800422809332872 41.44985236606792967,-90.70835428086652996 41.45009336606814543,-90.65892928082048741 41.46235036607956204,-90.60083828076639634 41.50961836612358979,-90.54097528071064005 41.5260033661388519,-90.4551262806306795 41.52757936614031564,-90.43509828061203848 41.54361236615524433,-90.42313528060088856 41.5673053661773082,-90.34849428053138354 41.58688236619555312,-90.33947628052297318 41.60283136621039546,-90.34126228052464569 41.64912236625350772,-90.32615728051057147 
41.72276836632210006,-90.30501628049090357 41.75649736635351417,-90.25543828044470729 41.78176936637704841,-90.19596528038931638 41.80616736639976949,-90.15464528035083447 41.93080236651584869,-90.14279628033980885 41.98398936656538183,-90.15066328034713194 42.03345336661144671,-90.16822628036348419 42.06106636663716358,-90.16677628036214287 42.10376736667694075,-90.17621428037092812 42.12052436669253552,-90.19170228038535697 42.12271036669457658,-90.2310632804220063 42.15974136672906525,-90.32373028050831465 42.19733736676407432,-90.36785828054941305 42.21022636677608375,-90.40730128058613957 42.24266136680628847,-90.41811228059621897 42.26393936682610786,-90.42780928060524559 42.34064536689754021,-90.4417252806182006 42.3600833669156458,-90.49117128066426119 42.38879136694238525,-90.56371128073180898 42.42184336697317093,-90.60595528077115546 42.46056436700922632,-90.64847328081076228 42.47564736702327082,-90.6518992808139501 42.49470036704101972,-90.63845628080142092 42.50936336705467511,-90.42010328059807023 42.50836536705374158,-89.92369128013574198 42.50411536704978488,-89.83473928005290077 42.50346836704918019,-89.40061327964859572 42.49750236704362294,-89.35955927961036593 42.49791736704401046,-88.93918727921885647 42.49087936703745783,-88.76505827905668866 42.49092236703749847,-88.7066232790022724 42.48967136703633685,-88.29798927862169933 42.4919883670384877,-88.19479027852558772 42.48963136703629573,-87.79738227815546736 42.48915236703584952,-87.83701527819238208 42.31423536687294984,-87.76030227812093187 42.15648236672602422,-87.67060627803739692 42.05985236663603644,-87.61267627798345359 41.8473653664381402,-87.52990627790636324 41.72362636632289679,-87.53268627790895096 41.46975036608645837,-87.53248427790876462 41.30133836592960961,-87.53176527790809303 41.17379036581082374,-87.53205227790836318 41.00996336565825118,-87.53269627790895413 40.74544836541190307,-87.53719127791315202 40.49464636517832616,-87.53569527791175631 
40.4832823651677387,-87.53535727791144438 40.16623136487245915,-87.53579327791184994 39.88733936461272123,-87.53559727791166267 39.60937636435384945,-87.5385892779144541 39.47748336423101279,-87.54023727791597764 39.35056236411281105,-87.59768827796949608 39.33830536410139445,-87.62526227799517642 39.30744136407265188,-87.61064327798155205 39.29769936406357544,-87.61582427798637696 39.2814563640484522,-87.60692027797809089 39.25820236402679342,-87.58458827795729462 39.24879136401803237,-87.5886172779610348 39.20850536398051389,-87.59423227796627032 39.19816736397088164,-87.60795027797904311 39.196107363968963,-87.64428227801288074 39.16854636394329248,-87.67035227803715713 39.14671936392296914,-87.6594802780270328 39.13069336390803699,-87.66228727802965182 39.11350936389203525,-87.63169327800116548 39.10398336388316665,-87.6308922780004167 39.0890153638692226,-87.61203127798285095 39.08464736386515881,-87.58534327795798902 39.06247736384450775,-87.5817722779546699 38.99578536378240301,-87.59188127796409162 38.9941263637808504,-87.54792727792315077 38.97712036376501032,-87.53349227790970133 38.96374636375256273,-87.53020427790663405 38.93196336372295718,-87.5392222779150444 38.90490536369775754,-87.55908127793352946 38.86985636366512153,-87.55052927792556261 38.85793636365401937,-87.50790927788587226 38.79560536359597478,-87.51904827789624619 38.77674536357839941,-87.50802327788598234 38.76976836357189882,-87.50833627788627211 38.73668036354108324,-87.54391327791940114 38.68602136349390719,-87.58850127796092977 38.67221536348105104,-87.62521527799512455 38.64285836345371195,-87.62867127799835032 38.62296436343518025,-87.6198512779901364 38.59925636341310451,-87.64061927800946705 38.593225363407484,-87.65288027802088777 38.573919363389507,-87.67296927803960216 38.54747136336487756,-87.65141527801952748 38.51541736333501831,-87.65355927802151825 38.50049036332111996,-87.67993527804608789 38.50405336332443795,-87.69284427805810367 
38.4815803633035074,-87.75612427811704208 38.46617236328916078,-87.75868827811943618 38.45714336328074978,-87.73898027810108147 38.4455273632699317,-87.74845627810989868 38.41801136324430388,-87.78404827814304667 38.37817036320720376,-87.83453327819006518 38.35257036318336077,-87.85011227820459112 38.28614436312149394,-87.86303627821661166 38.28540836312080842,-87.87406927822688374 38.31683436315007896,-87.88347627823564778 38.31559836314892209,-87.88849627824032495 38.30070536313505158,-87.91413927826420149 38.28109436311679303,-87.91368127826378043 38.30239236313662587,-87.92595027827520937 38.30481836313888522,-87.98005027832560643 38.24113136307957461,-87.98604027833117414 38.23486036307374292,-87.97796027832364985 38.20076036304197231,-87.93231927828114181 38.17117736301442221,-87.93202127828085679 38.1575733630017524,-87.95059927829815649 38.13695936298255162,-87.97353427831951933 38.13180536297775802,-88.01857927836147155 38.10334836295125172,-88.01236127835568368 38.09239236294104813,-87.96489727831148286 38.09679436294514687,-87.9753262783211909 38.07335236292331615,-88.03476127837654985 38.05413036290541129,-88.0431232783843285 38.04516636289706355,-88.04150627838282617 38.03834936289071322,-88.02172927836440408 38.03357736288628388,-88.02924427837142218 38.00828136286271786,-88.0217372783644123 37.97510136283180771,-88.04254327838378913 37.95631036281430681,-88.0418032783830995 37.93454336279403805,-88.0646542784043902 37.92982836278964953,-88.07897527841772956 37.94404536280288909,-88.0840332784224529 37.92370536278394866,-88.03047327837255409 37.91763636277829619,-88.02662027836896641 37.90580336276727991,-88.0449002783859811 37.89604936275819114,-88.10011627843741167 37.90621536276765369,-88.10149027843868907 37.89535136275753757,-88.07577027841473694 37.86785436273193284,-88.03427227837609337 37.84379136270952415,-88.0421692783834402 37.82756736269440978,-88.08929727842733826 37.83129436269788215,-88.0860622784243219 
37.81765736268518197,-88.03560727837732713 37.80572836267407411,-88.07250427841171359 37.73544636260861296,-88.13367027846865653 37.70079036257634186,-88.15940427849263017 37.66073336253903392,-88.15766427849101206 37.62852636250903515,-88.13420227846916077 37.58362036246721516,-88.07159127841086388 37.51103836239961709))' + self.assertTrue(ret.geom.polygon.value[0, 0].almost_equals(wkt.loads(actual))) + else: + actual = ['\x01\x03\x00\x00\x00\x01\x00\x00\x00\x91\x00\x00\x00\xa1\xd4c\xe3t\xb2Y\xc0i\x85\xca\x19f\x7fE@w\x17\xc3B`\xc0Y\xc0\x8c\xc9\n\xd8\xea\x7fE@\xcef\xb0\xfd\x17\xe0Y\xc0\'\xd2\x06\xba\xd2\x7fE@e\xb1\xeb\xc4\x98\x03Z\xc0W)\x00Yd\x80E@\xe6\x02\xac;\xc9\x03Z\xc0`bdFT\xbdE@?\xec&\xde\xb4\x03Z\xc0\xb3\x88\x99\xa5y\xc0E@\x03\xe1<\x82\xce\x03Z\xc0K6/\t,\xedE@\xa0\xd13\xa3\xd2\x03Z\xc0\x88|\xdag\xaa\x12F@\x86\xe6\xc4\x04\xe8\x03Z\xc0\xed\xe2\xbd\rF\x17F@P~\x84G\xce\x03Z\xc0\xea\x91\xe0`\x84IF@S)\xc5t\xd4\x03Z\xc0\x16\xc8\xd8\xb7\xa8\x7fF@\x95\xc1J\xb2\xc1\x02Z\xc0\n\x9d\x1a\x16\xb8\x7fF@J\xa3\xa7u\xce\x02Z\xc0\x0c8\x07\x80?\x9bF@|\xd1\x94J+\x03Z\xc02S-\xdc\x07\xf1F@0\x90\xdcG!\x03Z\xc0&\xca\xba\x01\xb4\xf8F@\x17qP/\xab\xbfY\xc0\xb4~\xecwv\xf8F@\x06\xd3\xf2\xc5\x91\xbcY\xc0\xa0\x80\xa8}\x88\xf8F@%ojx-\x80Y\xc0\xb8\xc4\x16\x04\xa4\xf8F@\xc9\x19\\\x08\xec 
Y\xc0\xa2\xe1g\xa5^\xf8F@\x00\x98Z\xd5\x0c\xf8X\xc0T\xbb\x04)\xc6\xf8F@\x0cS7\xfc\xe8\xedX\xc04\x95\x92g\xac\xf8F@\x90\xc2\xe3\xf4o\xc0X\xc0=\x00\xb2YC\xf8F@\x1cL\n|\xbf\xaeX\xc0\x8c\xfe\xb8F\x19\xf8F@\xab_9\xff\xf0\x80X\xc0\x9e;\xaeV;\xf7F@CQ\xceb\xa3~X\xc0\x16I\xfb/%\xf7F@\x7f\x0fE\x8e\xeeNX\xc0\xe0jFO\xdf\xf7F@e\n8pH$X\xc0\xd4\x86\xb9\xed\x90\xf7F@p\x965\x0f\xa1%X\xc0\xf2O\xb6s\xaf\xe8F@\xdb\xaeW\xef\xb1&X\xc0\xd9\x1b\xf64u\xe7F@\xc7\xf2\xe6\xb2\x12*X\xc0\xd0\x10\xc2\x94\x96\xdeF@2\xc3\xcb\x88L5X\xc0$\xfeF\xb9I\xd3F@\x97\xd3-)\xb86X\xc0\x9a\x14\xc9\xb8\xf7\xcdF@1\xe2`$\xf55X\xc0\x8bZ&y\xc3\xcaF@\xa2\xc8\xc6T;1X\xc0\xbcu1\xbb<\xc2F@Ku\xc5\xeb;/X\xc0\xa8\xd2\xc6\xedjE@w\xf7\x9d\xbdt&X\xc0\xaes\xb7\xedWfE@\x1fB\xbf\xbf\x82(X\xc0\x12y`WocE@\xde\xd2\x85a\x01)X\xc0\xee\x102<\xd2_E@\xc1\xe7m<\x19(X\xc0%\xa3\x1c[\xabZE@\xa5F\xee\xd5\x08$X\xc0\xa2\x89\x94\xd8\x91UE@\x8d\x98\xb9s\xa2"X\xc0Hg\xf6\xb2\xc9TE@\xbe^\xd7p\xd2 X\xc0\x1e;\xe9\xd2\x9bPE@\r\xe6P\x8eC\x1fX\xc0\x8f`@.MJE@?+E@jp\x95/\xa9\x1fX\xc0\xa8k\'\xd6\x85>E@\x1d\xa3C\x93\x05#X\xc0\x08\x91\'\xb9\x9fBE@\x96C\x0c\x99l%X\xc0\x94\xbb\x8d=WBE@Sm\xdc\xf9\xbf&X\xc0\xa5\xea\x0b\x1f\xed@E@.\xe4\x8a[F(X\xc0\xd4\xb1\xc2\x9d\xe7BE@\x80\x13\x1e=\xbf(X\xc0\xf9E\x8d]~FE@\xfd\xaf\xb8%\xb3-X\xc0e\x16\x89\xec_NE@UC\x9b\t\xc7-X\xc0\xb5\x8b\x1d\xa9\x9aOE@(\x8c\xfbCt,X\xc0\xcb\xb5&\xa4\x11RE@\xe5\r[g\xbd,X\xc0\xbe\x15d\x040TE@\x00\xa4\x8a\x08@.X\xc0\xf8\xbeGo\x94UE@\x8a;\xf0t(3X\xc0\x1e\xea\xd21\xc3UE@\x1cLZ4\xde3X\xc0\xc2\xcc\xa416WE@\xac\xf9\xa4V\xd93X\xc0\xa8s\xc1o\x1fZE@\x8c\x8e\xb3\x82 
:X\xc0Pr*S\xa8]E@\xee\x8cG&!>X\xc0\xc4\xf8\x92\x8eN\\E@l\x8a\x0fi\x95>X\xc0H\xbd\xc9p\x18]E@cX\xaa\x88\x14>X\xc0bv\x8e\xadC`E@or\n\xa8\xb1>X\xc0\xc1N\x94i\x10aE@\xc4[\xc5\n\xf8@X\xc0\xa6\xb6-\xaf8aE@$\t\xda\x9bYHX\xc0\x8a\x15\xc0\xeb\x0fcE@\xb4\xbe$\xbeTJX\xc0\xf6M\xd0(9fE@\xbff\xd0\xa4\x8eMX\xc0\x19(sg\x02hE@ZL`G]NX\xc0\x8e\x92\xe8\x84\xackE@\xb9\x8c\xcci\x90OX\xc0\xbe\xdez\xa5\x08mE@tub\x8e_QX\xc0\x0c\x7fRE\xcdlE@r\x8f06\xeeSX\xc0\xf3\xc4/\x86NnE@\x11\xcb\xead\xeaXX\xc0\x152|\x0e\x08oE@\xa8\xfe;\xcdC]X\xc0m\xfb\x08T\xdblE@\x82K\xee\x14\xec^X\xc0N5&U\xb7mE@\x9d=\xadxd`X\xc0h\x00\xd0\xf2\x18nE@\xf9E\x8f\x99\x85dX\xc0\xb5\x96\xc7\xf2\x8alE@s}jZ\xa3hX\xc0&\xb7R\x99>nE@\xe1\xd8\x8f]\xe3kX\xc0\xb3p\xa2|\x1dkE@ws.\x80jnX\xc0\\\xcb\xda7\xd3mE@\xcd\xbf\x08\x80kqX\xc0\x9cq.\x1dOlE@w*\xef\x82\x02sX\xc0r\xd0E\x9b\xbflE@\x98\xb6\xd0\xa6dtX\xc0\x1a\x07\xb9U\xecnE@\x1dU4\xcb\xdfxX\xc0\xdeG\x87\x18\x8bmE@#ZM\xcc\xf4xX\xc0\xc0q\xa4\x19gjE@3%N\x8e|{X\xc0P\xba\x86\xe2jeE@{Ha\xf0\xaa}X\xc0\xc8d4I\x08cE@N\x91\xaaq\xb0\x7fX\xc0W\xd9[\xe8&bE@E\xf3p\xf8\x1e\x82X\xc0\x9c\xef>\xe5tbE@\xc4\xfdQ\xe7\xcb\x87X\xc0\x08\x8a>ZxgE@h#U\'\xe1\x87X\xc0B\x14M\x14\xfdhE@\xd2P\xd5\x89E\x89X\xc0H\xfePO\xfbjE@idx\xaa\xbd\x8aX\xc0E\xbd\x16\x13wkE@4\x19Q\x99\xdc\x93X\xc0\xae\xe4H\xa3\xdepE@\xef\xc6\xb5}\t\x99X\xc0\x97\x84\xe7\xfe\xc6uE@\xdcCh\xc4F\x9dX\xc0\xc0\xa8\xf4\xde\xf4wE@\xa7\xd2\xc9\x84\xd9\x9fX\xc0\xa6\xa7\xf2\x97\xf2~E@(\xe8\xca\x10A\xd0X\xc0@*c\x9d\x06\x7fE@\xc0\xcfp<\x19\xe2X\xc0E\x0cg\xd8\x04\x7fE@\x16\x8f\xff\\\xae\x0cY\xc0jR\x876\xdc~E@x+\xa5\xc8\xd4NY\xc0\x96\xd7*\xe2P~E@\xd0\xd6\x92\x83\x8c\x85Y\xc0\x9e\x12\x17\xa1\xb4~E@\xa1\xd4c\xe3t\xb2Y\xc0i\x85\xca\x19f\x7fE@', 
'\x01\x03\x00\x00\x00\x01\x00\x00\x002\x01\x00\x00\xb4\xa07R\xbe\xeeV\xc0E\xbd_\xf4\xf1\xbfE@\xb2\x0b\xf6I\xf6\x04W\xc0\xda\x92\xefA\xe4\xbfE@\x95\xf6\xbb\xb9\x00\x1dW\xc0\x80\x05\x04b\xee\xbfE@\x85\'_h\xb6#W\xc0\x94}\xbc\x7f\x08\xc0E@}\xabd\xd4\xbdAW\xc0sT\xba\xe3)\xc0E@3Pp\xf7zCW\xc0:\xe7I\xc1/\xc0E@\xa4\x15v\x9a\r`W\xc0\x13?\xba\x00\x10\xc0E@e!\x855\xd6iW\xc08\xc44\xfb\x18\xc0E@\x1aT\x923U~W\xc0VZ\xe4\xc6\t\xc0E@\xd4\xa2\xe4\\\xcb\x8fW\xc0\xe6lE\x8a\xdd\xbfE@\x94\xcc\xc1\x9f"\x9dW\xc0\xeaA\x82\xd1\xc1\xbfE@\x9b\xefY\x9b\x07\xb7W\xc0\x14\xbc\xbe\xfe\x00\xc0E@\x9b\x13\x08\xe3\xe8\xbaW\xc0d\x02\xa7f\xeb\xbfE@\x16\x82\x006a\xd9W\xc0\xa8\xc3\xe1\xf4\n\xc0E@\x8b\x95\xd6\xe0\xbe\xddW\xc0\xd6:\xb7\xf8\xf0\xbfE@;\xdd\xaa}{\xf7W\xc0\xa4}\xb7h\xdd\xbfE@? R\x11\xe8\x03X\xc0\xec\xb5\xff\xf0\xcf\xbfE@\x88(A\x15x\x1dX\xc0\xfc\xd5\x7f\xc5\xf6\xbfE@\xff\xa5\xcb\xf88\x1dX\xc0Q\xcf\xa1\x8e\xa3\xecE@9fD\xffE\x1dX\xc0\xed\x13\xca\xd7z\x19F@\xd5\xe0\x1dv \x1dX\xc00\xd1}o\xe8DF@<\xb3U\xdf:\x1dX\xc0\xe26\xea\xca|PF@\x878\xafG"\x1dX\xc0\xa4\xb3\xf3\x8c\x92fF@pj^k0\x1dX\xc0\x90\x1f\xbeOj|F@o]\x97w\x16\x1dX\xc0tI\x14\x9a9\xa3F@3y\x19[I\x1dX\xc0\x16\x07\x9a\xba@\xa6F@ \x8e<\xdf\x90\x1eX\xc0`\x9aS\x98\x0c\xaaF@\x1e\x08\x9aI\x15"X\xc0&\xd1eV\x04\xb0F@\xdeNq\xb3\xb9&X\xc0\xcb\x0e\x80O\xc1\xb2F@\x95\xf6\xb5\xe2\\,X\xc0G@\xcc\xd6\x9c\x1b\x140X\xc0\xf6\xcd)\xe1WKG@\x0f\\\x8d\xbdX1X\xc0\xcd\x00NA\xccLG@\x0c\xd4!A22X\xc0v\xfe\x06\xea\xe2OG@\xd6\x1b)e\x932X\xc0\xc7\xef\xefi\x9cPG@\xc4\xa9P\xe7\xcb2X\xc0\xa0\xc7\xc6\x0f\xe3VG@D\xa6\xa8\x04\x052X\xc0H\x0e\x11n\x80ZG@7\xbf\x8b\xc8\xf11X\xc0\x8al\xa7s\x93aG@\xc8\x88\xc4G\x053X\xc0R\xfc\xa1\xb5\xf0gG@\x10\xed?\x03+1X\xc0r?\xdbj$lG@\x9a\xf8\x90\xc1\xcb1X\xc0\xac\x91\xdfM\x05oG@S_\t\xe0\xa0G@\xce\x0eb\x1c\x9d5X\xc0\xc68\xca\x00\x9e\xa5G@\x8b\xf6\x87\x1b16X\xc0\x85{\xf8\xe3D\xa8G@|3\x87Y\xa95X\xc0X\r|\xa1\xce\xabG@\x1b65\xbep6X\xc0\x96\xa0\xba\xc03\xaeG@\xb8\x88\xf2\xba\xbf5X\xc0\x88i\x0e\xc2*\xb1G@3\xd4\x92|f6X\xc0 
\x1f\xeb\x06X\xb4G@\r\x1e\xf5\xfe\x807X\xc0\xb3\xc0\x07\x0c\xe0\xb4G@\xec\xbb\xcf\xdf\xc56X\xc0J\xd0\xd3\x88\xe7\xb7G@d\xba^\xc1w7X\xc0b\x98\x16\xa8\x13\xbbG@\xcb*g\xdd\x806X\xc0\xc2\x9a\xaaK\x14\xc0G@\xe9\x00!\x80\x157X\xc0\xf9\r;\xce\xbb\xc2G@\x04\xc2\xd3\x19Y6X\xc0\xd4\x89\x99j\xb4\xc5G@\xa6J\r[\xf46X\xc0\xee\x88\xdf\xac\x0f\xc8G@\xed\xbe_\xba\x8a6X\xc0\x06\xe84\x87\xf2\xccG@-\x9b\xaf\xb9\xe47X\xc0RN"\xb0\xc0\xceG@\x06\x92\tX\xec8X\xc0*\r\xc4/C\xd6G@\x1c5\xd7;\x1d;X\xc0\xcd\xc9\xc2qg\xdbG@\xf3\xab\xdc\x16\xa6;X\xc0\xef\xe6\xa8\x93\xba\xe1G@\n\x1cg\x17M=X\xc0\x95!\xdc8\xb0\xe5G@eK\x8c\x1b\xf8>X\xc0\x90\xc4\xdc\xfb\xa2\xe7G@\xa0A\x18\xf5\x8a>X\xc0ws\xd0\xdd\xfc\xe9G@\x1d\x17D\x93\x05@X\xc0\x16\x01\xa3\xa0b\xefG@$>B\xf5PAX\xc0\n,=\xa8\x12\xf0G@\xf2\xc9\x130\xfb@X\xc0C\xb9\x84n}\xf5G@X\xc9\x17N\x13CX\xc0\xc07:\xf6:\xfaG@z\xd0]\xe5JDX\xc0h\xbe\t@*\x06H@(\xa0C%\xefEX\xc0\xcd\x19]\x0b\x01\tH@\xae\xcc \x83VFX\xc0>U\xa6\xa9\xec\x0cH@\xd9N\x9d\xc5\xccGX\xc0\x90\x17\x0c1\xe5\x0eH@y\xdd\r \xbdGX\xc0\xcb\xde=nF\x12H@B\xa09\xa2\xbcHX\xc0\xfa\xea\xc5\xb7\xfe\x12H@\xd6\xc9\x18\x9dmGX\xc0\xdcmtna\x14H@\xf4\x85\xed\xde\xcbHX\xc0\xb8\xb3\x7fwy\x15H@\xdc\xf0\x16\xff\xb8HX\xc0\x85\xc3\xbc\xd9m\x16H@\x8b\xe5H\x1e\xc9HX\xc0\xf4q*\xd6\xf7\x18H@\xd3Y\x9b}_HX\xc0"\x0b\xe5:\x14\x1aH@<\x85R\xf9\x18GX\xc0(b\x16\xd0\x92\x1aH@~\xe1\xf6\xbb\xf1HX\xc0\xeb\x07\xe4zb\x1cH@\xa8\x1f\x00\xb6\xfdFX\xc0\xc0\x1f\xca\xb80\x1dH@\xa7\x9dt\xd9)HX\xc0\xc7\x9f\x07\x18\xe4\x1dH@\x82\xef\xed\x14\xecGX\xc0$?\xd3\x97,!H@\x14\xf7\xd0\xf5\xbeHX\xc0\x9amU\x97\xda!H@\x9b\x1b\xaeS&GX\xc0\xdc\xf3\xf5s\x91#H@\xaf\xef\xf536GX\xc0\xc48#z\xa0$H@\x0f\xadfTZHX\xc0l\x05\xd0X\x82%H@\\\xf3\xa55GGX\xc0\x8d\xe7\xa0\xb4\xbe%H@\xc5\x07\xc8\x15XGX\xc0&\x12\x82\xf7\xdc&H@:\x16\x85\x14}HX\xc0\xcda*\xd8\xcd\'H@8\xb0\x15\xb24GX\xc0bo\xedX\xf3(H@q\xc8F\xd7\xc6HX\xc0\xb8\xc5\xd5\x15\xba)H@s0\xedRdHX\xc0\xebXkYE.H@\xeaa\xbf\x17\xa0IX\xc0\xf2(a\xd7}.H@S\xab[\xf4\x8fHX\xc0/\x9a\xba\x95\xac/H@\xa4Q\x134\xa7HX\xc0&\x1f\xed[41H@C\xc6^\x18*JX\xc
0\x9d\x0e\x15\xbf\xb01H@\xf5G\x83\x92CHX\xc01\xce\x7f\x9654H@\x17Qi\xb4\x96IX\xc0Q_\xe8\x98z4H@\xd2g\xd2\x96\xb4IX\xc0\xaa\x91\xb4\xdb\xb55H@J[\x11\xb3\xd8GX\xc0\xa2\xd3\xcc\x1aC5H@\xb9\x9e_\x12\xa8GX\xc0\x84\x9bu\xf8\xf27H@a\x95\xc8\xf50IX\xc0\x9e\xa4\xc9\xf7\x138H@W\x10[1\x9dHX\xc0+$|[7BH@\xeb\x8b\xbb1~IX\xc0\xf29\x07;LBH@\xbc%D\xb0\xebHX\xc0\xfa\xba\x9d[oDH@\xd4\x1a\xaaR\xf4IX\xc0\xfaj\xe5<\xeaDH@L\x90]\x92DJX\xc0,3\xcc \xc5EH@\x83\xc4\xaa1bIX\xc0\x94\xcb\x99=WFH@\x8e/3\x95\xbfJX\xc0\xa2\xb9\x06?\xf8GH@G{\x16t\xbcIX\xc0\x92\x07\x8d[SIH@r\xa6\x8a\xd2\x1fJX\xc0\x10\xb6L\xba\xb4JH@O\xbb\x89\x11\x03IX\xc0\x14\xd4\x1a\xb7\x1fKH@\xd5L}\x10CIX\xc0\xcc\xee\xc3Y\x98NH@)0o\x8c\xdeGX\xc0\x1b\xab\x89VuOH@\xdc\x03\x15\x0c(HX\xc0\xe2\x17\x1d\x1a\x9dPH@\x99~$j\xe3FX\xc0\xac\x9b.\x15\xa2PH@bC\x87\x058FX\xc0\x10-~\xfaVVH@J\xc7\x0e\xa8xGX\xc0\x8c\x80\x1e\xbc\xfdXH@J\x8bD\xe6\x0bGX\xc0\xa4d&\xdc\xb2ZH@\xabT\x8c\xaa\xa0HX\xc0\xe0;\x06a\xf5\\H@z*\xdf\xeazHX\xc0\xfc5\x05\xda\xa4_H@\xbcM`*wIX\xc0s\xcc\xed?\xb9`H@\xa2\x08Bi\xe9HX\xc0@P\xa8\xc1\xbbaH@.\xed&\xe8pIX\xc0~\x18\nd\xfdcH@\xa9\xe07\xe7!KX\xc0\xa6x\xe1\x05\x98fH@eYg\xa5\x8aJX\xc0X\xb5\x9d&\xbagH@\x80k\xeb\t\x8cKX\xc0\xfa\xb1T\x87chH@\xefF\xd3\x02\xf5JX\xc0\xc2\xf3\xa4g\x01kH@3\x8d\xa9\x1d?KX\xc0Y\xcc\x16H\xd7oH@\x9f\xcd\x90\xfe\xd8MX\xc0\x00\xce\xb87FwH@\x01;\x89\x15\xafNX\xc0\x0f\xf0\x13\x96\xff\x7fH@\xf4\x96i\xe6\n\x1aX\xc0\xfe\x84"l\xff\x7fH@\xe5\xf2\xc1\xc0\xb4\xd1W\xc0Q\xdf\xa1\xb7\xff\x7fH@w\xc3\xa6\x94\x18\xcaW\xc0dJ\x93\xe1\xff\x7fH@h\x9b\x8f1\xb8\xc9W\xc0\x90pb\xdc\x94\xafH@r3\xb3!@\xb5W\xc0\x0e\x8c\xfd\xdcW\xaaH@\xaa&\xcb\x9a\x99\xabW\xc0\x90$\xe9\xd2FpH@`\xe3\x1b\x94q\xacW\xc0O\xb8\x89\xe6\x88cH@\xb6\x91\x18\xff\x7f\xa4W\xc0b\x9fT\xbfY[H@\xd4\xef\x19\x83\x8f\x9bW\xc0\xf4c\xdd\x03\xfbZH@\xca\xc3p\xa7\xb5\x92W\xc0|\xa6HI\x96ZH@\xeb\x1b\xcd\xdf\xc5\x8eW\xc0\xdc\xbc}RtSH@_\xf4\x1c\x87\x02vW\xc0vF\xecd\xf7OH@\x7f\xeb"\t\x03tW\xc0\xec\xcd\xe0\x94@CH@\x07\xd45\xa5\xfdqW\xc0\x8b\xba\x17\xcb{AH@\x90\x08t\xa8\xe7`W\xc0\xf4d\x1c
\x01cDH@4\xf9j\x1e\xc8]W\xc0\xbc\x92\xfe\xaeVFH@\xb3[\x89\x17L]W\xc0\xacj\xf9\xee\xddKH@\x90\xbdu\x9bxSW\xc03\x1c\xc4\x91\x8eQH@\xd9\xd4\xbb0\xdaEW\xc0\xa8\x93\xae\xea3PH@\xee?\xb2p\x9aO\x1b\xcd\x18-H@d\xa1\x1a\xc4\x0f\x08W\xc0\xbc\xc3p\xdf\xf1.H@\xae\x7faq@\x02W\xc0\\\xab`L\x81-H@\xe8\xad\x8e\xb0\xb0\xfeV\xc0\xc87\xca\r\r H@\xa2\x8e\x17\xf3{\xf2V\xc0"A\xb8\xf8b\x1aH@\xd1F\x95e\x90\xedV\xc05tv\xef/\x19H@m\x1e\xee\xee\t\xedV\xc0F,\xec\xec\xb2\x0eH@Xbp\xd0f\xe4V\xc0n\xda\xf2\xdb^\rH@\xb6\xd9\xf6y\x94\xe4V\xc0\xdeV)\xbf\x93\x05H@\x13\xe6~\x16S\xcfV\xc0\xbd\x00-\xfcg\nH@T\xef$\xcc\xbc\xc1V\xc0\xba\xeek\xe1\x00\x19H@\x87\xdf\x03\xe4S\xb7V\xc0\x80\x16\x80\x92\x89 H@d\x9c\xf8K\x93\xafV\xc0\xa0\xae\xc7\x1cR\x0bH@\xd7,\xf4/Q\xa4V\xc0P\xf2S\xd8\x93\x0fH@`\xfeq0\xa3\xa3V\xc0?\xc3\x83>\xdf\x0bH@\x9b\xc9\xb7\x1bL\x89V\xc0\x89orBo\x0eH@\xe7\x98\x17u\xb5\x81V\xc0cU\x1c\xa6\x04\x0bH@5\xfb\x1bW+\x7fV\xc0\xa8\x15,\xe5\x03\x03H@\x90\xf0[\xfa\x9fyV\xc0\xb8D\x81j\n\xffG@\xb2\xc2\x1a\xb3\xf4oV\xc086\x04\xd7c\x03H@5^\x0f\x8d\xf6aV\xc0K&\xa8F6\x00H@\xcf\xf8\x7f\x92\nhV\xc0SZD@\x0c\xffG@@E\xfeV\xbahV\xc0\xdd2\xc0M\xcd\xfaG@\xa3Bj\xb6\xfa\x7fV\xc0\x92\x9aIS\x8b\xe9G@\xb9^\xe8\xd4\x9d\xa0V\xc0(\xbd\xb4B\xdf\xdaG@\xc9\x8f\xdb\xd9_\xc1V\xc0H\xed\xf1\xf5\x03\xbbG@KY\x1c{\xfe\xddV\xc0\xb6\t\xdb\xe1\xfd\x8fG@\xbd\xb2\xa9\x14C\xf3V\xc0|^&\xc4\xaavG@\x9br\x88\xdb\xa9\x05W\xc0\x905\x9f\xe4\\eG@\x8eQ|g\xbc\rW\xc0\xfc\x16\x88\xb8\x87UG@\x87\xa2\r\xc8f\x13W\xc0T9}WRUG@\xea\xad\x16\xa7b\x12W\xc0\xf7\x12\x9f\x1cSTG@\xb2\x9e\xd5\xd1y\x12W\xc0\x04\xe3\xd1\xf9>5G@\xa9\xc0(\x10~\x12W\xc0\xb0\xd0\x1e{\x0b\x14G@!\xc5\xee\n\x85\x12W\xc0\xf9eB\xa5_\tG@\x8b\x9a\xdf\xaa\xf3\x14W\xc0\x18EZ\xcaG\x07G@\xfc:\x16\x8e(\x16W\xc0\xd4\xc7\x17p\xe4\x02G@\x19\x8c\xf4\x8e[\x17W\xc0\x05\x925m\x14\x02G@\xc2\xc0003\x1bW\xc0\x1a<*\xbaC\x03G@\x0c\xf3\xaa\x10\x97\x1dW\xc0|\'\x1c\xe0\x97\xfdF@\x99\x1b\x03\xd8\x88!W\xc0>~\xb8J\xc5\xfdF@\xb4\xc6\x90\xfb^#W\xc0\x1fz\xd91\xc3\xf9F@\x90\x9e\x10(\xa3*W\xc0,\xfb\x9d\xc45\xf5F@\x97[q\n3-W\xc
0\xd8\x0f\x83\xec\n\xf2F@v`+s\xfb.W\xc0\\\xcd\x17Q(\xecF@\x8d=\xa3\xb8\xeb/W\xc0eV\\\xb9,\xebF@\x89\xeb\xca\xe4\xdc1W\xc0\xbe\x9d"#\xb5\xe1F@?\xe5\xfeKZ5W\xc0v,\xce\xd0\x8d\xddF@\xe6vQ\x8e\n7W\xc0X\x89.\xb5\xf3\xdaF@\xd2\x7f]Y\xaa8W\xc0\x00\xb6\xc6\xe5\x8d\xd2F@\xbf\x06\xf5\x00\x1e8W\xc0\xb0AgO\x17\xcaF@;17@q5W\xc0<\x9a\xa0\x91\x1d\xc8F@\xd3V\xd6z\xc70W\xc0o\x068\xc89\xc8F@\xf9\xb5k\x14\x9a.W\xc0\x88\xd1\xb8\t\x0c\xc6F@\x98M:\xf1\xdd+W\xc0\xd0[u\xb5*\xbcF@\xa5}\xbf\x86\xe8)W\xc0\xa1\xdc\x9f\xb1D\xbaF@\xfcrhFG)W\xc0[/\xaa\xf9?\xb8F@\x93\x94\x19$\x85)W\xc0([Z\xa4\x9e\xb2F@\xd2\xd0\xf8\xe5\xd4+W\xc0\xbe\xeeVIy\xaeF@n\x01\xba\xc8E-W\xc0\x8b#\x8e\xd2\xba\xa8F@\xd581/\xc8/W\xc0\x88\xea\x01\xde\x17\xa6F@\xc8\x91\x1e\xcaX0W\xc0\xa9\xe0\x1c&/\x9bF@\xf6\x00\x1d*\xce0W\xc0v\x1e\x04\xea\xe2\x97F@\xb2F\t\x05\xad/W\xc0\xb9\xb9\xf0\xa5\x05\x94F@\x80\x8b\xa9\xff\xb4/W\xc0\x1a\x9c[\xedv\x8eF@&\xd7:\'\xfe2W\xc0n\x80\xab\xebe\x88F@\x19\xabc\xd9\xd40W\xc0]\x14\xb8\xce\xd4\x82F@_\x1d\xd6\x98\x181W\xc0\x1eK\xa6\xf1 \x80F@1\x84\x1b4\xfc/W\xc0\x14\xc4\x1d\x8e\xc3wF@U\x06\x06T@0W\xc0F\xf11\xcc\x1euF@\t\x13\xcfVf1W\xc00wp\xb2"sF@\x0fE\x8b\xb0\xe90W\xc0\x04\xb3 \xb2]nF@Dw\xf8\xaf\xb40W\xc0\xcah\xf3r\xedjF@x8a\xb1\x8e3W\xc0Z\x82\xd7.\x82_F@\x9buhc-/W\xc0\x9a\xf6n\x0fW[F@\x9b\xde\x03\xf0W(W\xc0p\xdd\xcdnBRF@\xf8C\x13k\xf9&W\xc0t\xf3r\x0f\x1eNF@\x83I\xb2\xfb\x96 W\xc0#\xe0{\xd2\x9eIF@\xbe=\xfa\xd9\xd0\x15W\xc0x\xdf2O\xc3FF@NvW\xb7\x82\x14W\xc0\x84\xb28\xd2.EF@\x1c\xb4\xa7\xec\xfc\x12W\xc0\xab\x0c\xf5\xd4\xff>F@/zOB\xf1\x0fW\xc0\xf5\x17HLe:F@&smZ1\rW\xc0\xb8{\x82N\x1d8F@\xf6\xf8\x8cg\xd8\x05W\xc0T\xd8M\x0825F@?\x1d\xac\x93;\xfeV\xc0\xdc\\\xa8\x85\xa7.F@\xb7\x9f\xfdj\x16\xfcV\xc0\xa5BW\x00h+F@\x18\xdb\xf9g\x0e\xfbV\xc0\xef\xc5\xe7\x81\xa4(F@\xadjH\xc5\x07\xfbV\xc0\x8eY\xb6^\xe8$F@\xdcK\xfb]\xe0\xf8V\xc0\x93\xae\xb5\x9b\xf5 
F@8c\x89\xd3Q\xf6V\xc0W\xd1\xe3\xd3x\x18F@\xf1\x8d\xa5\xbe4\xf0V\xc0P\x10\x9a\xaa\x90\x11F@q\x9d\xa90\xbe\xe9V\xc0\xef\xfd\x05\x07\x90\x08F@\xed\xc4\x9b\xaa\x83\xe6V\xc0\x17\xc9\xd8\xaa9\x05F@nx\xa2\'m\xe4V\xc0\x006\xe2jy\x04F@J\tM\xa3\xd1\xe1V\xc0XOP+a\x04F@\xf5\x9e\xa4\xfbA\xdbV\xc0\xc2\xda\x84\xc6(\xfeE@"m\xf0\x15\xe5\xd7V\xc0\xff&\x17\x91=\xf9E@\xe41\xd8G\xaf\xd2V\xc0.\xab\xe6\xbbp\xecE@\x1f\xd6\xe2\x1b\x12\xd0V\xc0\xed\xb2V\xa7\xdf\xe4E@B;\x98\x15\x92\xd0V\xc0"\x91\x90sw\xdcE@*J2s\x89\xd0V\xc0\xaf\x02\xd6\x7f\xb2\xd6E@1\xad\x11P\xe9\xceV\xc0\xaa_\x88p\xa8\xccE@\xe2\xce\x8fNe\xcfV\xc0\xb3\xc8\xf54<\xc6E@\xbd\xb7\xf2\xe8N\xceV\xc0\xae\x03\x15}\x1a\xc0E@\xadb)@\x1c\xe7V\xc0v\xfdZ\x86\x14\xc0E@\xb4\xa07R\xbe\xeeV\xc0E\xbd_\xf4\xf1\xbfE@', '\x01\x03\x00\x00\x00\x01\x00\x00\x00\xf8\x00\x00\x00j\xcaN?\xb0\xc7V\xc0m\xbb\xdb\xf7KZD@X\x11E\x81F\xc8V\xc0^\xef*\xfbQWD@MA\x8f\xc3h\xcaV\xc0\xb7\xde\xb3Z\x03TD@\x18\xc12\x8c\xc3\xcdV\xc0j\xa1\xd2\xfbiRD@|\x8b\xd3\x11\xc8\xd0V\xc0Lz\xb0\xff\xddQD@\xf3\x83=}\x0c\xd8V\xc0\xc3S8\xd8>MD@\xb7\xfa\xc7DR\xdaV\xc0\x9ez\x1cxXID@\xda\xe1\xc8\x05o\xdaV\xc0\x89\xdeu\xfd%FD@f/\xdb\xdev\xd8V\xc0\xfe\xa0(%\xa7CD@\xad/\xb0\x1e\xff\xd7V\xc0|G\xbf\'y@D@\x88=H\xdf\xac\xd8V\xc0\xe2\xf7\x16\xf1@9D@\xcdX\xb2\xba\xdd\xd7V\xc0\xe8\xa6\x85\x90\x963D@O\xd0\xd7\xbc\xb2\xd8V\xc0H\xe7\xc8V:2D@o\xceV`\xd0\xdaV\xc0{\x99\xe1\x92\x861D@\xf1\x9c\x84F\xb8\xdcV\xc0x\xe8@\xf0\x9b/D@\x89\x95\x92\xcb\x87\xdeV\xc0\xd2-t\xb1\x0c2D@\x86\r\'Oa\xdfV\xc0\x0e\x8cf\xf1\x052D@\xad\to.\x06\xe0V\xc0\xf4\xdbzK\xdc3D@\xf0>\xa7\xb1\xc5\xe1V\xc0Hv\x04n\x804D@#\x95\xf0\x15\xe5\xe1V\xc0\xb0\x91\xf6\xe8\xb07D@\xc2M\x1du|\xe2V\xc0\x98*\x1c#|8D@\xf6\xe4\xa0\x15 
\xe2V\xc0\xccC\x14\xe7K:D@\xec\xa9\x9d\x9d\x14\xe5V\xc0\xca\xbf\xe8\x7f\\;D@D\x17\xc4|\x81\xe5V\xc0\xec\xd6\x05\xbb\x04>D@\x0e\n\x8c\xa3z\xe7V\xc08\r\xc0\xb0\x9f@D@S\xa3j\xa2\xd7\xe7V\xc0\xda\xc2n-6DD@\x8c\xdbI\x0fK\xecV\xc0e\xce(]\x9dFD@x\xb4\xf9J(\xecV\xc0\x8e\x92\xcf\xd6dJD@J\x97|\xf0\xe2\xedV\xc0\x1c\x10\xa7\xb0\xf5KD@\xe2\xa1\x982x\xefV\xc0\xbcA\x01j\rND@\x87\xcb\xa8T\x91\xfcV\xc0z\x13\x19\xac\xdbMD@\x1e,\xaa\xf7\\\x0cW\xc0P\xc9\x0f\xb2\xcfLD@B@\x9b\x08#\x17W\xc0X(\x18\xeb\xbeLD@\xc6I\x82%_)W\xc0L\xd2\xed\t\xb5KD@n\xfb\x82\xaf\xf0-W\xc0t\xab`8zKD@\x9e3\xd8\xc5uFW\xc0\xd9\xa6\xfb\xe4\xcbJD@\x84\'S\x86\xb2WW\xc0\x84\xb2\x82\x8aMJD@\x9e\x94\xda\xb8\x06dW\xc0\xc1n\xa4\x17XJD@\xbf\x17\x02\xcbRrW\xc0\xb2\x99\x95\x98\nJD@\xca| \xe2\'\x81W\xc0*\xf7\x9a\x90yID@\xe3\x03\xb6\xd1A\x8fW\xc0\\\xee\x04m\x15ID@:[\x84\x07\x0e\x9fW\xc03y\xb8\x8f\x7fID@,\xe2\xa8\xbb\xf3\xa8W\xc0\x90\xcf\xc9\xfd\xb1ID@\x80\xd5\x90`\xeb\xbaW\xc0\xfc\xb5\x98J\xe2ID@YeSX\xea\xcdW\xc0~J\xf3r{JD@\x8f\xbb)\xc9{\xd8W\xc0B]\xeew\xcbJD@\x9d\xfaoa\x1e\xf1W\xc0VJ\xd4\xefeKD@enj\xa3{\xf0W\xc0\xddqS\xcbyOD@[\xbay\xe6&\xf1W\xc0"\xc3y\xabQRD@\xb3\x881{\x1a\xf8W\xc0\xba\x98V\xf0~]D@\x8d\xfcf\xbb\x83\xf6W\xc0\x167\xc4\xedseD@\x7f\x83\xe7\xfe+\xf6W\xc0:\xff\xaa\'\x96lD@\xb7\xae\xad\xbff\xf5W\xc0\xf8\xd1\x9b\x00foD@\xa9\xe5u\xe4\x89\xf5W\xc0\x00\x1c\xd8\x84WsD@\x1b\xb7\xcfJ\x9b\xf5W\xc0\\\x1b\x85\x7f\xb4|D@l\xc9\xed\xf0\x18\xf7W\xc0\xce\xe5\xe0\xd8V\x80D@rc\x10\xb1\x02\xf7W\xc0F\xf1\x1f\xf5z\x84D@\xa7\xa5TT>\xf8W\xc0\xfd\xbb\x10yn\x88D@\xd6\xfaD\xf7\xed\xf6W\xc0\x8a\x84\x02\xda\xf9\x8dD@L\xb8\x99\x9c\x1b\xf8W\xc0\xc2\x85G\x95\x04\x95D@"\xbf\xf8\xfb\x06\xf7W\xc0\x80{$\xd8[\x95D@\x18m\xce\x1a\xfd\xf6W\xc0\xfa\xf10\xd9\x1b\x97D@\x96\xc0\x17c\xa1\xfaW\xc0\x8e\xcb\x80\x11\xd7\x98D@\xdd\xf9\x0f&\x06\xfbW\xc0\xf2\xcf\xcb\xf8\x9a\x9aD@\xa0Ys\x84M\xfaW\xc0,\xf8\x13\xd7\xd4\x9cD@R\xbd\x9b\xe4\x88\xfbW\xc0\x0e\x9cr\xc8\xa9\xa6D@\x1eKd#Q\xfaW\xc0\xcdA\x97\xec{\xa7D@9P\xb8"r\xf9W\xc0\xe0\x83S\xf0\xb7\xa4D@\x8a\xab>"\xe7\xf8W\xc0
\xd9\x03?\xed\x93\xa6D@\x02\xf4weX\xfcW\xc0[w\xb6\xa7\x87\xabD@w\xe9\xd6\xe1)\xfcW\xc0\xd6\xa9s\xfb\x88\xb2D@\xa2v\x11\x1c\xd8\xfbW\xc0\xbc\xcb\x06P/\xbbD@\x18U\xdb\xfc\x00\xfdW\xc0\x9e2p0w\xbcD@\x9f\x1b$E@N\xbf\x15\xc9\x98\x17X\xc0\x95\x0c3\xa1%&E@\xfd(5-\xf2\x18X\xc0h\x06\x1d\xc5\x15*E@9}\x19\xb0%\x1bX\xc0w\x03\xb1/\xb5,E@>y\x02LZ\x1aX\xc0D7\xdc\xee\xc10E@\xca\xc2\xfdk\xba\x1aX\xc0\t\x1f\xbdl\x175E@y\x9e\xd1\x08w\x19X\xc0\xdeh_\xaf\x8c8E@\x00w\xf6GY\x19X\xc0\x06\xb8\xd8\xce\xd3;E@\xe6R6\t\x1f\x1cX\xc0\xb4&\xa8m\x9f>E@\x1d"VN\xbc\x1eX\xc0R&\xefS1BE@2JVMQ\x1fX\xc0\x0c^\xdf\x142HE@?+\xd2\xc6\xedjE@\x93\x10\xe0\x19\xae$X\xc0\xd8&\x00\xe5\xcbjE@H\x189\xf7\x98#X\xc0\x8dU\xee]_lE@\xa5\xf7\xbc\x95f"X\xc0\xe4\xcf\xed\xd3\xcdrE@\x0f\xc5\xad5\xd5"X\xc0\x1e\x84\xb8\x92\xf9tE@\xb2\x86!\xb3\xf4 X\xc0\x8c\xc7X\xaa\xe7yE@T/ \xf5\x18!X\xc0\n\xa3\x8dDD~E@\x9ba\xc8\xf2\xef\x1fX\xc0*5\xcf\xdd\x8a\x81E@\xbcI9\xd9G!X\xc0\xc3\x11\x81\xd3\x97\x86E@J\x1c\'T\xb1\x1eX\xc0\xfak\xb3\xd3\xeb\x87E@W\xc0\xe9\xf3\x92\x1dX\xc0\xa0u\xda\xae\xac\x89E@\xf6\x89t\xd5}\x1dX\xc0\x9a\x84\xd8f?\x8bE@\x00\x91mv\xe5\x1cX\xc0\xe2\x16b\xdf*\x90E@\xaf\xa9$\x81G\x1eX\xc0\x07\xbc\x056\xc3\x9aE@Td\xda\x06/\x1fX\xc0\xbc\xe9Ct\xe4\x9bE@\x1f\x06c0\xc0#X\xc0\xe4\xfa\xe9\xd5\xdc\x9cE@\xe6\xf7\xd1\x95I$X\xc0\xc3\xb9bN\xac\x9eE@\xd9\xa8N\xf3\xcf#X\xc0\x08\xe0\x1c\xefj\xa0E@\xa9\xb7\xc4\xb6\x86$X\xc0B+\xd1\x0c\xbe\xa1E@N\xb9\xab|\x10%X\xc0\xea\x83)(!\xa5E@l\x04\xc1\x96\x98"X\xc0#\x80\xd5aa\xa7E@\x1a\xb6\x98\x19w!X\xc0\xc2\xd4\xed\x12\xb1\xadE@\xbfO\x19y\x9a!X\xc0|\xa5\xfeK.\xb1E@\\\x9b\x18~\xb1#X\xc0\xb7\xd6\xcd\x08K\xb3E@\xfa\x9e6\x08\xb4%X\xc0\x9c\xa9\x06\xc1\xbf\xb7E@fV\x19\xeb\\%X\xc0L\xd5\r\x91\xaf\xbdE@\x08\xe52\xccJ&X\xc0\xac1h\x10\xfb\xbfE@\x88(A\x15x\x1dX\xc0\xfc\xd5\x7f\xc5\xf6\xbfE@? 
R\x11\xe8\x03X\xc0\xec\xb5\xff\xf0\xcf\xbfE@;\xdd\xaa}{\xf7W\xc0\xa4}\xb7h\xdd\xbfE@\x8b\x95\xd6\xe0\xbe\xddW\xc0\xd6:\xb7\xf8\xf0\xbfE@\x16\x82\x006a\xd9W\xc0\xa8\xc3\xe1\xf4\n\xc0E@\x9b\x13\x08\xe3\xe8\xbaW\xc0d\x02\xa7f\xeb\xbfE@\x9b\xefY\x9b\x07\xb7W\xc0\x14\xbc\xbe\xfe\x00\xc0E@\x94\xcc\xc1\x9f"\x9dW\xc0\xeaA\x82\xd1\xc1\xbfE@\xd4\xa2\xe4\\\xcb\x8fW\xc0\xe6lE\x8a\xdd\xbfE@\x1aT\x923U~W\xc0VZ\xe4\xc6\t\xc0E@e!\x855\xd6iW\xc08\xc44\xfb\x18\xc0E@\xa4\x15v\x9a\r`W\xc0\x13?\xba\x00\x10\xc0E@3Pp\xf7zCW\xc0:\xe7I\xc1/\xc0E@}\xabd\xd4\xbdAW\xc0sT\xba\xe3)\xc0E@\x85\'_h\xb6#W\xc0\x94}\xbc\x7f\x08\xc0E@\x95\xf6\xbb\xb9\x00\x1dW\xc0\x80\x05\x04b\xee\xbfE@\xb2\x0b\xf6I\xf6\x04W\xc0\xda\x92\xefA\xe4\xbfE@\xb4\xa07R\xbe\xeeV\xc0E\xbd_\xf4\xf1\xbfE@\xadb)@\x1c\xe7V\xc0v\xfdZ\x86\x14\xc0E@\xbd\xb7\xf2\xe8N\xceV\xc0\xae\x03\x15}\x1a\xc0E@\xdd\x9c\x13\n\x19\xcfV\xc0L\xa4\x01\xc7z\xbbE@\xa9\xa1\x00\xa7\x7f\xcdV\xc0\xc8}\xb2PG\xb6E@\xb7{\x96\x04\xb0\xccV\xc0o\xd6f\xfbl\xafE@\x9c\x80P\xc2T\xcbV\xc0E5U\x1dN\xadE@\r\xa6\xa7\x1d\x06\xc5V\xc0\xb9Q\xce 
\x1a\xa8E@%\xd2g\\@\xc4V\xc0\xe2\x8e\xben\xed\xa3E@\xa3^BZk\xc4V\xc0\xc7\x97O\xd0\x02\xa1E@\x03M\xe8\xa0S\xcaV\xc0\x8a[L\xc8\xe3\x92E@F\r3\xdf\xc9\xcaV\xc0\xa2\x01\x06\x16\x9c\x8aE@\xad\xe5\xa1a9\xcaV\xc0D\xe6\x0e/d\x8aE@5\x05\xf8\xe0\xbd\xc9V\xc0f\xb5}"+\x80E@v\x1f\xb9V\xc0N\x00\x92\x94\xd0VE@\x0e\x02/\x14\xb8\xafV\xc0C\xc2\xec\x9e\x18TE@\x0f4\xd9uw\xacV\xc06\x1a\x01\xa3\xa7QE@3\x90\xde4\x85\xaaV\xc0z{1Z#IE@$\x81\xfc\xf7\xe8\xa8V\xc0!M\xfc\xa5!GE@\x0e\xf7\x8c\x96\x0b\xa8V\xc0\x8b<\x80\xee\xa7CE@\xf2}\xbbw\xdc\xa8V\xc0\xe0\xa3\x9d\xd12AE@p\xd8\xc2\xb7\xb8\xa9V\xc0T\xe1tWR?E@_Z\x13\x96\x80\xa9V\xc0\x90\xe5N\x03\xe2^66\x17.E@\xfdG-:a\x9bV\xc0\xef-sD\x9a+E@\xdf\xcd\x02Z\xc2\x9aV\xc0\x92R\xe2\xc3\xc8!E@!\xefc9\x11\x9aV\xc0\xc47\x15\x87\x0f\x1fE@q!u\xfd\x8a\x97V\xc0\xb4\x07\x95\xb2\xe8\x1aE@\x85\xe05\xff\xb7\x94V\xc0\xe6C\xd0YB\x19E@\x04.\xa4\xbd\xc9\x8eV\xc0\xb8\xf5\xb4gr\x14E@\xb84\xa4\xd9D\x8cV\xc0\xac\xea)\xf9\xb4\x0fE@\x01\xd2B\x18G\x8bV\xc0d\xa8\xaaWm\x0fE@\xdaykv\xac\x8aV\xc0\x7f"\xc3?H\rE@J\x1f)8\xc4\x8aV\xc0\xcc\xcb\xcf\x05\xd1\x07E@#s\x99w\xa4\x89V\xc0\xc5\xc4-3H\x04E@2\x83\x02\x93#\x89V\xc0J\x81\x12]\xf3\xfdD@\x9beQ\xb5\xe5\x89V\xc0\xcc\x9c-\x88$\xf7D@\x1c\x9b\xf5\xb1\x8a\x8cV\xc0\r\xe5\x04~0\xe7D@\x04*\xcd\x19Y\x90V\xc0R\xcd\xc2\x04\x11\xe4D@/]\x01c\x85\x93V\xc0\xce\xff\xdb\xe7\xd4\xe0D@KI\xc9\xc2\xdf\x94V\xc0\xe8\xf7\x7f\xac\x83\xdcD@\x8d\x8d\xbf=\xd7\x95V\xc0\x9a\x14\x13q\x16\xd3D@J\xa7\xb8\xfa\xb9\x95V\xc06p\x05\x94)\xcdD@ln\xf4\xbaM\x96V\xc0Z\xb4\x1c\xf6\x1e\xcbD@\xbc\xfd\xff\xa5\x14\x9bV\xc0\xd6IUv\x9d\xc8D@\x92qu\xa6\xd8\x9bV\xc0B+\x0b\x17\x95\xc5D@\xf8\xb6\xfa\xc9 
\x9dV\xc0\xfc\xce}\xb8\x87\xc3D@/\xa6\xc8V\x9f\xa2V\xc0~\x93\x0b\x14T\xc3D@\xe6\x84g"t\xa6V\xc0\x8c\xf8\xb3,;\xc1D@\r\xe0\xb7\xe5+\xaaV\xc0|\xcd\xfaK.\xbbD@j\x931\xadU\xadV\xc0b\xb4\xcf\xa8\x9c\xb9D@\xc4\x86u6\xec\xb1V\xc0\x1e%(\xc3\x94\xb9D@\x1d\xd9\xef\xc0\x08\xb6V\xc0<*e^\xea\xb8D@9A\x1e\x87\xc9\xbcV\xc0<\x13>\xf5\xeb\xb5D@\x87|\xc8\xcc\r\xc0V\xc0B\x8b\xa4\xb0.\xb7D@\xf3\xaf(\xcf\xc4\xc1V\xc0^i\x8cp6\xb6D@\xe5\xd9\x92q\x94\xc3V\xc0\x82\xb8\nQa\xb3D@tV\xba\x10\xb3\xc4V\xc0\x91\xb3\x9a\xd5\xde\xaaD@\t\xf7\x8fL\x8f\xc6V\xc0\x0e\xe9\xf1\xdaH\xa2D@\x80\xb3t\xcc\x81\xc6V\xc0\x98Y\x07\x82\xa3\x9dD@\x9e\xc2\xbf$\x9d\xc3V\xc09\xe2\xc5\xae\x90\x96D@\xf5\xee\xc2\x80-\xc1V\xc0\x96\xf6l\xd0:\x95D@\xa8g`\x1cd\xbfV\xc0D8\x9e\xd7{\x92D@\xc2J\xcf\xbaN\xbdV\xc0N\xe6\x05\xc3\\\x8dD@\xc8\xedxY\x1b\xbdV\xc0\xf6\xbb\xe7\xc7\x02\x89D@\xb1)_\x96~\xbdV\xc0\x8bt\xa1U\xc0\xca\x08#}\xc4\xf6B@\xcb\xbb\x87,\xa1\xa2U\xc0v\xf4$7\xf4\xf5B@\x1c\xc9\x1e\xf4;\xa5U\xc09\x1a\xde3|\xf6B@\x7f/0\xb7J\xa6U\xc0\xaf\xb2\xfd\xb2\xe5\xf5B@\xe5\x86\xc3\x97X\xa7U\xc0|\xd5\xfd"\xd2\xedB@\xf4Y\x9b\xfdP\xa9U\xc0\x0c 
i\xbdI\xecB@\xeb\xf0\xdb\x80\x9e\xaaU\xc0\xbc\x0b\x14\xfev\xecB@}\xa4\x06C\xec\xaaU\xc0\xad>\x05\x7f)\xeeB@\x15\xda\xa5}B\xaaU\xc0\x19\xa8!\x86\x87\xf3B@{\xdd\x80?\xcb\xaaU\xc0xxP\x9e\xe3\xf4B@\xc5\xc3\x19\xa5\x1a\xacU\xc0\xba\xedi\x9c\xb7\xf4B@\xf8N\xaf\xe8\xa5\xaeU\xc0\xfd\xab\xeb\xf3\x82\xf2B@\xb4\x19`\xac>\xb0U\xc08\x8e\x89p\xfd\xf2B@n\x1bTSa\xb3U\xc0^\xd8\x8dTI\xfdB@`\x0f\xeb7\xe2\xb4U\xc0L\xbc\xcdj\xeb\xfeB@\x06\x9f]\xda?\xb7U\xc0L\x15\rhS\xfeB@m;\xeb\xe1\x9a\xb9U\xc0|\xc7S\xc1\x12\xfaB@\xb0\x02\x86\xe5\x9e\xbbU\xc0lE\xe4\xb4\x11\xf8B@\x96\xca\xd7IL\xbfU\xc0Q\xa7\xddo\x1e\xf7B@\xc9Qm\x8d\xd7\xc0U\xc0rU\xbb\xad^\xf6B@R\xa5_\xb1U\xc2U\xc0\xeb\xd7\x0c\x859\xf4B@H\xed\x97P\x90\xc4U\xc04\x8d\x8c>P\xe7B@=\x14[\xb4\xcf\xc6U\xc0\xa98AYb\xe4B@K\x02\xc8\xb5p\xc8U\xc0\xa5\n\xc5\x14\x16\xe5B@\xd7\x90\xbd\xfc\x1d\xcaU\xc0\x92\xd0\x08\x11\xda\xe9B@G\xde\xeb\xfb?\xcbU\xc0\x10vA\x91X\xebB@&\x97\xedE\x83\xceU\xc0X%\x17\xe9\xaf\xecB@\xd6\xab\xcd\xact\xd1U\xc0\xa5\xd5E\x02w\xefB@\xcaL\x054\xe0\xd3U\xc0\x96\xef\x01]e\xf2B@\x06&\xae\x9f\xcd\xd8U\xc0f\xfby\x13\xad\xf7B@Wj\x88J\xf2\xdcU\xc0\x89\x1f5\xe6\xdf\xf7B@\xf2Zh\xb2N\xe0U\xc0F9\x05G3\xf5B@NC\xeeC\xad\xe6U\xc0\xe8Nl\xe2N\xfcB@Z\xc6d\x04#\xe8U\xc0\x96\xc0\xa7\xc03\xf6B@\xd6\x8c@\xdd\x0f\xe6U\xc0z3\xd6\xa1\x04\xf2B@ni\xa0\x1a\xfe\xe5U\xc0[\xf2\xfca\xb5\xeeB@\x7f\xd6\xf1\xb9\xe2\xe6U\xc0\xd7H\xdbE\x02\xecB@FANd\xb5\xe9U\xc0^\x10y\xa6\x01\xeaB@\xed\x0ekh\xd2\xebU\xc0\x01\x8b\xe4?\x0e\xebB@\x8e\xe6\xac\x8d\x80\xebU\xc0Z\xf9u\x83\xd2\xf2B@\x9au\xfa\xf1f\xedU\xc0N?\xa5&+\xf3B@\xb4\xd5V\xd5\x9a\xeeU\xc0\xacB\xee\xc5\x81\xf2B@\xb23\x06\xf8=\xf0U\xc0\x17\xd5\xc4\xde\xf5\xf2B@9\xce\x12\xa3\xb6\xf4U\xc0\x84A\xf6\xabj\xf0B@\xb6\x9d\x07(\xdc\xf6U\xc0\xe2\xbd,\x90\n\xf2B@\x1f@u\xcf\x89\xf9U\xc0r\x90\xd54Y\xf6B@\xcf\xa0\rU\x00\xfbU\xc0\x896\n\x97\xbf\xf5B@\x8cv%\x97\xce\xfbU\xc0b}\xf3\xf7\xbc\xf3B@\x13\xe1\xacV\xf5\xfbU\xc0B\xd4\xb3Q\x07\xf0B@\x81\xc1\x1b.A\xfaU\xc0\x13g\xc3\xaeW\xebB@\xe3\xfa\xe0\x11\xe4\xfaU\xc0!,\xd2-\xa5\xe7B@\xdb\x0e&\x9
3"\xfcU\xc0\xfb\x10\xf5\xb2W\xe6B@\xc7\xaf\xcb\xf7[\xfdU\xc0y\xf6\xe5Q[\xe3B@+_\xd7\xe1\xb7\x00V\xc0\xb7\x1e\xd7\xb6\x92\xe6B@U\x08\xc0cG\x02V\xc0\x88\x92c\x1b"\xe7B@\x1c\xd0[\x0b\x82\x05V\xc0\x82\x08\x18\xff\xa8\xe8B@\xe7\x9d\xee\x0b\xb7\x05V\xc0{\x91\x8a\xdag\xeaB@a\xe3\xc5\xe6\xb2\x02V\xc06\x013\xba\xed\xe9B@9\xb2Z\x841\x02V\xc0\n\xbe\xf9Z\x01\xecB@5\xf3\x94k\xd9\x04V\xc0\xf2\xcf\x0c\xda\x15\xefB@d\xb0\x14\xd1~\x06V\xc0\x80\xbc\x9a\xdf\x9a\xf2B@\xe6k\x1bNh\x06V\xc0$\x1cq\xdd\xfe\xf3B@\xf1\xcfj\xa5\xdf\x02V\xc0\xf4R\xda\xbe\xb1\xf2B@T@\x8a%\xb4\x01V\xc0\x9d\xd8U]\xf1\xf3B@\xc5\x801F\xf3\x01V\xc0h\xe0\xbb\x1bu\xf5B@h\xa6\x1d\xcd`\x05V\xc0\x00&2\xfa;\xf6B@\xc9\x7fS\xee\r\x05V\xc0\xe5\x96{z\xd6\xf8B@\xc6\xd2\xb2K#\x04V\xc0\x92\x89\xa4\x9d\x04\xf7B@Xa\xa8\xe7\xac\x02V\xc0w\xf2\xed\x1d\x9f\xf7B@\xacTq\x07\xb9\x02V\xc0\x82\x8e\xc2`h\xfaB@x\xeb\xc0$d\x01V\xc0\xe6\xd2\x17\x1f\xd0\xfcB@\xd3\xccd#\xdf\x01V\xc0\xb2T\x1b]\x0f\x01C@6\xfc2\x03d\x01V\xc0\xc3\xbdUCL\x04C@\xe8\x0e\xf3\t\xa8\x02V\xc0\x12\xb8\xc5\xa1\xe8\x04C@\xd9\x96#\x88\xc2\x02V\xc0\xd6\xc2\xe9\x02\xc8\x05C@\xd8r^\x879\x02V\xc0\x1e3e\xbe\xed\x06C@i\x14\xe9\xbek\xfeU\xc0\x84\xeb7\x9cc\tC@9\x96\x83\xe0\xc0\xfdU\xc0\xae\xa4\xf7\xc1c\x0cC@\x90\x91\xf5\x86\xca\x00V\xc0\xb1\x9dP\x83\xd3\x0bC@\x88<$g0\x01V\xc0B|\xe7\x84:\rC@\xb5\xba\xb7bN\xfeU\xc0\xa4\xb6\x85\xff\xde\x10C@\x13\xf9Z\x9e\xd6\xfcU\xc0\x0erh\xe2\x87\x11C@\x1dY\x93<\xa6\xfbU\xc0\x19h,]+\x14C@viz\x1e\xab\xfbU\xc02\xfb\xcb#\xe9\x15C@\xfa\x0b\xb5\xe6\x96\xfeU\xc0\x97\xcc\xfc\x83\xb2\x19C@\'\xfe\xaeH\x1b\xffU\xc0\xa3C\x85\xe7\x0f\x1eC@\x07v\xcd$\xb9\xfeU\xc0\xbc;{d\xdd\x1eC@\x81\xba\xf4\xc4B\xfbU\xc0ms\xc2I\x04\'C@\x11G\n\xc1y\xfaU\xc0\xb1\x1d\xff\xca\xb4&C@\x90\x08\x08B\x81\xfaU\xc0\x9aVl\xe6\xfa#C@we\x815C@R\xb5\xefsK\xefU\xc0\xd6[g\n\x079C@k\x98GY\x8e\xf0U\xc0\xe8oy\xac\x83:C@[\x8e\x15Wd\xf0U\xc0 
P7\x89\xab;C@\xbd\xde\x86\x8fW\xecU\xc0fd\xe3l\xa4=C@\xb2\xa6A\x0f\x84\xebU\xc0iu\x13\xd2\x84@C@cFK\xea\xd3\xe9U\xc0\xb46w\x11\x10@C@Z\xcd\xb4\xc9\xb0\xe9U\xc0@\xa872\xf9AC@$\x19\xbc\xed\x11\xebU\xc0\x08\x93\xa8\x8a\x13FC@f\x93\\\xca\xc8\xe9U\xc0H&\x900vIC@8\x0f\x00\xe8\xff\xe8U\xc0\x16\x7f\x07\xcf\xeeKC@~\xd6\xb1\xa4\xab\xe7U\xc0\xbcc\xb9n\xb4LC@\xff\xbbt&<\xe8U\xc0\x87\xc3\xd7K\xbdOC@\xb6\xfc\xf0\x86\x03\xe8U\xc04\x7f\xcf.IRC@\xc5\x9fC\x01\xaa\xe5U\xc0\x8b\x02-\'\x0bVC@\x89!\xa3y\xcf\xe2U\xc0&HL\x8c\xcfWC@\x159\xe2\x94\x88\xe0U\xc0\xd6\x81\xca\x8aK^C@!\x08\x11t\x83\xe0U\xc0\xb4\x84\r\xc5\x87bC@\x02\xa7D\x168\xe1U\xc0\xc6\xf1^dlcC@\xb5v\xea\x95\x81\xe0U\xc0]\x95\x84e\xd6eC@\x92\x0b\'\xdf;\xe3U\xc0\xba\xf8\xd7\xdb\xd0mC@\xf2!\xd7\xfc\xc7\xe3U\xc0`\x14\rtWoC@\xdd:(\x9e\x82\xe2U\xc0G\x87_\xf0\xd3sC@\xbbs\xec\xdd\xee\xe1U\xc0P!T\x93JwC@\x9b\x92\xcb\xbc$\xe2U\xc0d\xfat\n\\{C@\n\xd1\x92=\x11\xe3U\xc0,P\xb3G\x12}C@\x1c\xfe\x02b\xe1\xe5U\xc0\xdcC^\x88?\x7fC@9\xe2\xca\xc1;\xe5U\xc0\xf4\xa3\x11\xe5u\x7fC@\xd5\xf0\xa6Cv\xe5U\xc0\xea9\x1dB\xff\x87C@0\xc4<\x85+\xe7U\xc0*\xbf\x8d\xb9\xd5\x8aC@\xe3S\x01\x8a`\xe8U\xc0vT\xfe\xdad\x8bC@\x8e\x87\xa4\xa9m\xe8U\xc0g\x9a\xadSO\x8dC@n\xec-\xeab\xeaU\xc0`\xda\x8ey\x87\x8eC@\xeb\x9e\xc4\xec4\xeaU\xc0\x06\xe5e\x8f\xba\x90C@\xff\xbd=\r\xe7\xeaU\xc0\x9a\xde:\xb3\xc7\x92C@K]\xbc\xeb;\xe9U\xc0\x82\x7f`\xed\x92\x95C@\xe7`H\xa8\xe8\xe6U\xc00`\xcd\x0b\x1a\x99C@\xef\x05\xd2\xe6\x07\xe6U\xc0\xda\xb1U\x8c]\x99C@\x01\xad\xcd\xe7\xab\xe5U\xc0\x1f\xab\xc3M\xb0\x9aC@\xe6\xdc\xf3\xe4i\xe5U\xc0\n\xfe9e\xd8\x9fC@}L&\xc8\xd7\xe6U\xc0[\x9fj\xc6\x0c\xa1C@2\x82;\xaai\xe7U\xc0jk\x1b\xc3\x06\xa4C@\xfe\x1c\x8b\xc7\x14\xe7U\xc0\xb6XD\x03\x1b\xa6C@Z\xda\x12L\x04\xe8U\xc0\x99\x10\x16=Z\xa7C@8\xc5U\x86@\xe6U\xc0=p\x15\x97M\xabC@\xab.`?\x93\xe2U\xc0W\x89@:\xdf\xacC@\x9b\xa7)?x\xe2U\xc0\xcd\xe0\xc4,\x1e\xbdC@C\x1d\xce9G\xe2U\xc0v\xedq\x0b\x00\xceC@\x13\x83\xe3oJ\xe2U\xc0\x87\xbb\x17V\x94\xf1C@\x89\x13,KC\xe2U\xc0,\xd9\xc1\x11G\x15D@+\xd0\xd8\xd4H\xe2U\xc0\x82\x90P2\xdc=
D@W\x95\x86Wa\xe2U\xc0Z\xc3t\x92P?D@y\x14!\xb2\x17\xe2U\xc0\x80&\x1f\xdaj_D@=X\xff$\r\xe2U\xc0\x84\xd4\xc4zF\x81D@\xdfp;q\x08\xe2U\xc09\xc2@\xc3>\x96D@&\xd0\xef8\x14\xe2U\xc0PYmA\x92\xa6D@g\xa9/\x88\x17\xe2U\xc0\x0c\xd0\xad\xc7 \xbcD@cc\x05\xfc\xe9\xe1U\xc0\xa9\xf0\xec\xc9\x9f\xdcD@HH\x11r\xad\xddU\xc0<,\x88\xc9\xf7\xd5D@\xfaH1\xee\xd5\xdaU\xc0\x1e\xa4O,\x93\xd6D@z;{NI\xdcU\xc0\xe0\x93\x0c\x0f=\xd4D@U\xc0\xad>c\x0e\xeccC@\xe4\xa5\xf0\xcc\x9aAU\xc0<\x91\xa5L\xd4aC@"\xc2\n\x8eaDU\xc0\xf4\xca\xd1\xe7\r`C@~l\xfdv\xa8GU\xc0r\x8d-\xebh[C@\x15\xfe\x14\xbbLJU\xc0\xfb\x18\x01\x8a\xfbXC@\xa1+\xf4`!MU\xc0\x03\x1e\x1a\x8b\x10YC@M\xb9\x18\x86^QU\xc0v\xad\x84\xb9G_C@\xd4\xeb\xbe\xcapUU\xc0U-z9V^C@\x01\xb3^\x90\xc3ZU\xc0t\x9b\xcdu\x84^C@\xd5\xad\xcf\x92\x96\\U\xc0X\x17\xac\x91\xc7\\C@AK\xcd\x14\t]U\xc0\'N\xec\x16\xebXC@\xf9]\x8dp)[U\xc0X\x17\x8d\x0c\xdcJC@H\xe0\x7f\xaf\xb7ZU\xc0\xe3\x9b\xb9l\xdeGC@\xba\xbe`\xf4\xab[U\xc0z\x03\xc3\xf3\xbcDC@j\xb2\x0b5\xd9]U\xc0^\x90\xff\x91SBC@\x81\x82%\xf8u`U\xc0H\x06>xW\x89\x9esU\xc0\xd3\xb2~\x86\xa1$C@K\xe1r\n\xc1uU\xc0\n^\xf0\x83]#C@\xfaGB\xa9\x8cvU\xc0\xba@\xc2-\x89\x1eC@2*\xc9l_zU\xc0v\xb5\xdfH\n\x17C@\xc5\x07\xf0H\x8bzU\xc0\x08\x89\x99\xcdM\x08C@\xbe\xfdOi\x93{U\xc0\'\x06\xc2\xba[\x04C@B\xf6yiY}U\xc0$\xa4)\xfc\x83\x01C@V\xa6\xce+m\x80U\xc0\xb1\xd5\xb6\xe99\x00C@T`\r\x11\x06\x82U\xc0 ~[\xff\x18\xffB@\xb6,\xe5\xaf_\x83U\xc0\x89N\n\x97\xbf\xfbB@\xf6\x1b\\\x18\xb8\x86U\xc0\xb0\xe1Mxs\x01C@\tK\x95#3\x8cU\xc0\xc3\xe1\xf1\xe7E\x02C@\xae\xc0\xe2O#\x90U\xc0\xd4\xef\x8c[6\x05C@2\xee/\xd3\xc5\x91U\xc0\xbd\x98\x899r\x07C@\tY\x1e\xf5\xa6\x92U\xc0\x16n\x7f\xf0\x0b\nC@\x84\xe2h\x18\r\x93U\xc0:\xc1a,=\x13C@\xa0\xbd\x92\xe0\xdc\x95U\xc0\xfe\x90\x94b\xb1\x16C@', 
'\x01\x03\x00\x00\x00\x01\x00\x00\x00F\x01\x00\x00\x06\xdd\x95\xf3\x94\x04V\xc0\xfe\xc0~\xb4i\xc1B@\x9f\xb4nR\xa0\x05V\xc0x\xe00\x19\xf8\xbcB@\x01\xe1\xe2\x95\xf3\x13V\xc0\xdaB\xb0\x0e\xb1\xb8B@\x9b\xcb\xe2]\xfd\x16V\xc0>\xac\xff\xf3e\xb4B@\x98\xfe\x0e\x88\xdf\x1aV\xc0\xfc\x1d\xb1\xcf\xcd\xb5B@g\tn\x92\xee\x1dV\xc0\xae\x03<\xb0M\xb3B@\xfe5o5\xba V\xc0\x15T\xb2\xfe\x00\xa6B@W\x1c\x9a\x12\x18 V\xc0I\x9b+\xc8\x00\xa1B@\x16T\x8f\xe9\xd8\x1cV\xc0G\xa3\x07-U\x9aB@\x8f\xbbK%\x0b\x1bV\xc0\xf1X\xe4p\x17\x94B@>1B-\xd5\x1cV\xc0\xea\x89#\x19\xa3\x8cB@\xd2qH\x91\x84\x1eV\xc0T\xc1\x88\xdc=\x89B@\xb6K\x7fVh\x1fV\xc0\xdc\x0c{\x00\xbc\x88B@\xa6\x8d\xee\xb9\x1b!V\xc0\xf1Q\xf0:L\x88B@\xdc\xaf\x16\xe3\xcb#V\xc0\xcaHJ\xd9S\x89B@\x98:\xe6+P\'V\xc0Ph\xf2\x10\xf7\x8dB@2\xc7\x1f\x18\x0f,V\xc0X\xdf\xd1\xedV\x91B@\x0f\xb0\x14\x80N/V\xc0>\x04z\x08\x14\x92B@\xe5\xbd\xc3\xa4\xc7/V\xc0\x90\xe4\x04\x06z\x93B@\xc9{\x87!A7V\xc0\x9a\xfc\xd6=\xe3\x99B@rb\xab.\xaf;V\xc0\xa8\xeb\xf3y\xf6\x9bB@\xfe!\xad?\x91?V\xc0P\xc9\xfe\xda+\x9cB@\x03\xa5#\xab*DV\xc0\x90\x14\x8a\xf1\xcb\x97B@\xfe\x04\xf3-{GV\xc0\x8e\xf300\\\x8eB@\xfb$\xf8\xee^IV\xc0\xdcL\xae3\xef\x8bB@\xd6\xf75\x13\xdbJV\xc0\xfa\xa1z\x91:\x88B@\xf9\x1d\x91p)KV\xc08\x04[.L\x83B@W\t,\xc8\x9eIV\xc0\xb8e\xa0\x90\xce\x7fB@)1\x17\xc7PHV\xc09:\xdc3|~B@\x11RQ\xafcLV\xc0|\xaf\xf51P~B@\xf8\x96~\xb5rMV\xc0\xfa2\x9b\t\xb7\x83B@/\xfd\x87Y7OV\xc0\xb8\x92>(Y\x85B@f\x1b\xb0\x82\xe7PV\xc0\'\xee\x8c\x87(\x8bB@\x9a\xe0\xef\'2RV\xc0{\x91\x9d\x88\xaf\x8bB@\x05\x94\xfbfjSV\xc0\x18a_\x83\xef\x8aB@\\1\x15d\xd3SV\xc0\xa8\x0b1\x84\xcd\x87B@\xdcOi\x9b\xeaPV\xc0\x1fy\xbbg\x8e\x83B@\x03f\x96\xdb\xc5PV\xc0>d\xeaE\x1e\x81B@\xd5\xe0\xb2\x1a\x1aRV\xc0\x8c\xdfL\xaa\xe7\x7fB@{x\xfc`\xe8SV\xc0c\x1b\xf8\xe8>\x81B@\xaa\x9d\x0b\x89\x83XV\xc0oF\x08CN\x86B@+LK\x12SXV\xc0\x90\xe6\xe9f\xb0\x8cB@\x99\xa6\xc0\xda 
[V\xc0~\xa3\xa6\x84\x91\x91B@mj\xae\xe42\\V\xc0\x88\xf9\xf9\xc1*\x95B@\r|\x83\xb2\xf8]V\xc0\x98\xa8\x07f\xb6\x9cB@\xbb\xbd\xdd\x15\xc9]V\xc0\xe4g]\xe8{\xa0B@\x15\xf6\x89\xf8V_V\xc0\x94J\x81J\xc6\xa0B@\xb1\x1d\xc4\xfc\xe4`V\xc0(<\x7f\xcab\xa3B@\x99\xdb\xf5\x00\xe5`V\xc0lG$\xa5\n\xa7B@M\xe0\xbe\x03\x0b`V\xc0\x1e\x8c\xe0\xc5,\xaaB@\xf7\xe8L]\x01^V\xc0\x0e\x92\x85gs\xabB@\xd3\xf9K\x9c\xe4[V\xc0zs\x8d\xc0\x89\xadB@Zu1\xde^[V\xc0nCZ\xe3\x9d\xb4B@\x01B\x9e\xa7\t]V\xc09\x05c\x0bC@\n\xb9\xc8pD\x90V\xc0\x0ea\xe1\x03\xa5\x0fC@\x96\xe3WQ\x8b\x92V\xc0\x93\xe6\x0f\x02\\\x15C@`~\x90\xb5\x8e\x95V\xc0~bi\x87)\x18C@\xf7\xa6\x91XZ\x97V\xc0\xba\r\x7fJ\xff\x1dC@9\xfc\xe5U\xa5\x97V\xc0\x13\xa7\xa5)l)C@`\xdf\xcf\xb2\xf6\x96V\xc0\x90\xb6\xcd\xe1\xc4.C@/^\x1a\x0f\xbe\x95V\xc0\x9b\x02\xea\x05\t2C@\xf6\xdc\xd2HS\x93V\xc0Z\x12\xfea\xb56C@=ej~\x04\x91V\xc0QO\xf3\x1dfBC@\xf1a\\\xdd\xb9\x90V\xc0A\x9e)u3DC@\x93\xfa\x83{m\x8fV\xc0c\x03\xc8\xb5\x0bHC@Cf\xf1\xb1\xc3\x8bV\xc0\xe2u\x0f\x0b\x1fNC@S\xf6\xae\x90\xc1\x8bV\xc0H|\x8cJTTC@\x95(\x97N\xf3\x8cV\xc0\xfe\xf8$%\xa7YC@\xab\xc0\x14m\x96\x8cV\xc0v\x14\x12\x89\xac\\C@7s=\xe7v\x8aV\xc0\x90\xc5(~\xf6bC@7\xaa\x97\x83\xa8\x88V\xc0\xb8Bu[\x8cdC@x`\x02"\xcc\x87V\xc0\xe6\x95\xae\xbaxfC@\x1b;\xd4!?\x87V\xc0\x10\xce\xd2SNjC@\xb0?\x10\xc4\x81\x88V\xc0/\xe2_\xb41mC@\xf0[\xbcR\x9e\x8fV\xc0\xde\xb777\x10uC@y\xf1X\xd8\xdb\x91V\xc0C\xd1 
\xb6^vC@N\xd2\xe4yx\x94V\xc0\xd01Z\xf8dvC@\x01\x93\xde\xa4q\x9aV\xc0JX\xd76/{C@\xa7\xb8\xb3\xc7\x13\x9eV\xc0\x1e]V\xf6\xc7zC@\x16j1t\xf4\xa1V\xc0\xbc\x19\xdf\xe4!rC@\x17u\x819\x82\xa4V\xc0\x14\xb6\x1fK\x89oC@\xfa\xa7\xe0B&\xa8V\xc0\xd0\xe8v\x8a\xbfpC@\x8e\r\\\xe2\xd0\xaaV\xc0j\x85G\x05\xb8wC@\xf4}ME2\xadV\xc0\\\x06^\xfc\xd7\x84C@\xfdRs(K\xadV\xc0\xb7\xb6\x87\xfes\x87C@9Z]\x841\xacV\xc0\xd5\xed\xa9\xfa\xff\x8bC@\x0e\x8a\xf1\t\xe1\xadV\xc0-\x17A\x17w\x92C@\xd9`\x0b\xe9\xf8\xadV\xc0\xb6j\x85\xf3\x13\x99C@2~y\xa9\xe0\xaeV\xc0\xe2\xd7\xcc\x0f\xc6\x9cC@\x0eG\xc0\xc9>\xafV\xc04\xbf\xa7\xd2\xb9\x9fC@\xf1u\x1f\xd3\xe2\xb1V\xc0J\xbaV1\xff\xa5C@\xc2\x04\xd3\xa0p\xb6V\xc0\x08\x11\xc5)\xdd\xacC@\t\xc6\x08n\xac\xbcV\xc0Y\xe4\xda\xebG\xb3C@\xc9\xefj\x9cU\xc2V\xc0>V\xfd\x02\xe4\xb8C@\x1e)\xde\x1d!\xc4V\xc0\xae\xd5\xe7\x06\xad\xbcC@\xab\xf6-\x01\x00\xc6V\xc0B\x042f\xb5\xc3C@U\xa1\x87L\x01\xcaV\xc0\xd9\x13\xfe\xe2\xbc\xc6C@*\xeayT\x04\xcdV\xc06\xa1\xe4\x01\xcf\xccC@\xb6\xc1R\tW\xd4V\xc0\x1a\xbf`\x9d\xcd\xd7C@\x13u\xcc\xd0\x80\xd7V\xc0\x1c r}\xc2\xdcC@v\xb1!\x8f\xe8\xd7V\xc0\x06\xadR\xe1r\xe1C@\xc8c\xb0rp\xd8V\xc06Q\xc6|\xe3\xe6C@\xe9\x83\xbd\xfd\xc1\xdcV\xc0f\xa3J\xe0y\xeeC@\xc7\x8a\xae\x7f\xdf\xdcV\xc02\xc4\x0e!Q\xf1C@\xa0\x82\x8f\xfc\xc9\xdbV\xc0\xd5\x95\x82\x9ep\xf3C@\x8d4\xd3\xf8\x8d\xdbV\xc0Zt\xc7=\x00\xf6C@h\xaa\xd1\x1f\xa2\xdcV\xc0\xa2S\xf9$\x1a\xf9C@\xf4ENE2\xdfV\xc0\x8am5\x00\xbe\x00D@\x9e\\|)D\xe0V\xc0\x8c?]\x81\x8b\x08D@\x82?:\xcd\n\xe1V\xc0\xee1l9:\x11D@\xcfV6\xcbm\xe0V\xc0\x1a\xfc\x84 \xaa\x19D@d\x15\xc8\x0b\xf1\xdfV\xc0\xb9y\xa9\x9b. 
D@\x13\xa1\xe3\x89(\xdfV\xc0D\'\x866\xa3\'D@\xf1\x9c\x84F\xb8\xdcV\xc0x\xe8@\xf0\x9b/D@o\xceV`\xd0\xdaV\xc0{\x99\xe1\x92\x861D@O\xd0\xd7\xbc\xb2\xd8V\xc0H\xe7\xc8V:2D@\xcdX\xb2\xba\xdd\xd7V\xc0\xe8\xa6\x85\x90\x963D@\x88=H\xdf\xac\xd8V\xc0\xe2\xf7\x16\xf1@9D@\xad/\xb0\x1e\xff\xd7V\xc0|G\xbf\'y@D@f/\xdb\xdev\xd8V\xc0\xfe\xa0(%\xa7CD@\xda\xe1\xc8\x05o\xdaV\xc0\x89\xdeu\xfd%FD@\xb7\xfa\xc7DR\xdaV\xc0\x9ez\x1cxXID@\xf3\x83=}\x0c\xd8V\xc0\xc3S8\xd8>MD@|\x8b\xd3\x11\xc8\xd0V\xc0Lz\xb0\xff\xddQD@\x18\xc12\x8c\xc3\xcdV\xc0j\xa1\xd2\xfbiRD@MA\x8f\xc3h\xcaV\xc0\xb7\xde\xb3Z\x03TD@X\x11E\x81F\xc8V\xc0^\xef*\xfbQWD@j\xcaN?\xb0\xc7V\xc0m\xbb\xdb\xf7KZD@\x02\xb4\x0c\xff\xf1\xc5V\xc0\x1a\xc7\xdd\xb1{aD@\xd3b\xf3\xff\xb2\xc5V\xc0~\xa2\x97\xe3\xb8jD@\xb4(\xaa\x9a(\xc3V\xc0\x9e\xe3\x99\x7f\x97pD@a\xablW\xf0\xbeV\xc0\xbe\xe5A\x7fDvD@\xb1)_\x96~\xbdV\xc0\xf5\xeb\xb5D@\x1d\xd9\xef\xc0\x08\xb6V\xc0<*e^\xea\xb8D@\xc4\x86u6\xec\xb1V\xc0\x1e%(\xc3\x94\xb9D@j\x931\xadU\xadV\xc0b\xb4\xcf\xa8\x9c\xb9D@\r\xe0\xb7\xe5+\xaaV\xc0|\xcd\xfaK.\xbbD@\xe6\x84g"t\xa6V\xc0\x8c\xf8\xb3,;\xc1D@/\xa6\xc8V\x9f\xa2V\xc0~\x93\x0b\x14T\xc3D@\xf8\xb6\xfa\xc9 
\x9dV\xc0\xfc\xce}\xb8\x87\xc3D@\x92qu\xa6\xd8\x9bV\xc0B+\x0b\x17\x95\xc5D@\xbc\xfd\xff\xa5\x14\x9bV\xc0\xd6IUv\x9d\xc8D@ln\xf4\xbaM\x96V\xc0Z\xb4\x1c\xf6\x1e\xcbD@J\xa7\xb8\xfa\xb9\x95V\xc06p\x05\x94)\xcdD@\x8d\x8d\xbf=\xd7\x95V\xc0\x9a\x14\x13q\x16\xd3D@KI\xc9\xc2\xdf\x94V\xc0\xe8\xf7\x7f\xac\x83\xdcD@/]\x01c\x85\x93V\xc0\xce\xff\xdb\xe7\xd4\xe0D@\x04*\xcd\x19Y\x90V\xc0R\xcd\xc2\x04\x11\xe4D@\x1c\x9b\xf5\xb1\x8a\x8cV\xc0\r\xe5\x04~0\xe7D@\x9beQ\xb5\xe5\x89V\xc0\xcc\x9c-\x88$\xf7D@2\x83\x02\x93#\x89V\xc0J\x81\x12]\xf3\xfdD@#s\x99w\xa4\x89V\xc0\xc5\xc4-3H\x04E@J\x1f)8\xc4\x8aV\xc0\xcc\xcb\xcf\x05\xd1\x07E@\xdaykv\xac\x8aV\xc0\x7f"\xc3?H\rE@\x01\xd2B\x18G\x8bV\xc0d\xa8\xaaWm\x0fE@\xb84\xa4\xd9D\x8cV\xc0\xac\xea)\xf9\xb4\x0fE@\x04.\xa4\xbd\xc9\x8eV\xc0\xb8\xf5\xb4gr\x14E@\x85\xe05\xff\xb7\x94V\xc0\xe6C\xd0YB\x19E@q!u\xfd\x8a\x97V\xc0\xb4\x07\x95\xb2\xe8\x1aE@!\xefc9\x11\x9aV\xc0\xc47\x15\x87\x0f\x1fE@\xdf\xcd\x02Z\xc2\x9aV\xc0\x92R\xe2\xc3\xc8!E@\xfdG-:a\x9bV\xc0\xef-sD\x9a+E@\x95\x84\x1c:E\x9cV\xc0>^66\x17.E@\xff\xcb\xaaYo\x9fV\xc0\n\xfe^\xea\xc31E@H\xc8z\xd8\x13\xa4V\xc0\x97\x97\xa4\xf6\xfe5E@\np\xa8\xf8\xc7\xa6V\xc0\xf8\x00\xef\xc5\xf3:E@_Z\x13\x96\x80\xa9V\xc0\x90\xe5N\x03\xe2E@H\x05\x00\xb7\xf60V\xc0\x86\xa6K\x8b\xd6>E@\rz\xd8P9-V\xc0\xb6\x9a%\x8d\xad>E@I\x8f\x9fA\x12\x13V\xc0\xac8\x8dy\xf9>E@\'\xf6\xa4qw\x0cV\xc0"B\x9a=\xac>E@\x9d\xc5\xadO\x08\xf3U\xc0\t\x97u\x8b\x9c>E@\xac\x85\x87\xa8\x91\xf5U\xc0\xa2\xfbO\xdd8(E@\xa3\xe6\xe2\xca\xa8\xf0U\xc0\x96\xbe;\x9d\x07\x14E@m*\x986\xeb\xeaU\xc0\x1e\xa5\n>\xa9\x07E@U>\x90\x166\xe7U\xc0\xb4N\xe4wv\xecD@cc\x05\xfc\xe9\xe1U\xc0\xa9\xf0\xec\xc9\x9f\xdcD@g\xa9/\x88\x17\xe2U\xc0\x0c\xd0\xad\xc7 
\xbcD@&\xd0\xef8\x14\xe2U\xc0PYmA\x92\xa6D@\xdfp;q\x08\xe2U\xc09\xc2@\xc3>\x96D@=X\xff$\r\xe2U\xc0\x84\xd4\xc4zF\x81D@y\x14!\xb2\x17\xe2U\xc0\x80&\x1f\xdaj_D@W\x95\x86Wa\xe2U\xc0Z\xc3t\x92P?D@+\xd0\xd8\xd4H\xe2U\xc0\x82\x90P2\xdc=D@\x89\x13,KC\xe2U\xc0,\xd9\xc1\x11G\x15D@\x13\x83\xe3oJ\xe2U\xc0\x87\xbb\x17V\x94\xf1C@C\x1d\xce9G\xe2U\xc0v\xedq\x0b\x00\xceC@\x9b\xa7)?x\xe2U\xc0\xcd\xe0\xc4,\x1e\xbdC@\xab.`?\x93\xe2U\xc0W\x89@:\xdf\xacC@8\xc5U\x86@\xe6U\xc0=p\x15\x97M\xabC@Z\xda\x12L\x04\xe8U\xc0\x99\x10\x16=Z\xa7C@\xfe\x1c\x8b\xc7\x14\xe7U\xc0\xb6XD\x03\x1b\xa6C@2\x82;\xaai\xe7U\xc0jk\x1b\xc3\x06\xa4C@}L&\xc8\xd7\xe6U\xc0[\x9fj\xc6\x0c\xa1C@\xe6\xdc\xf3\xe4i\xe5U\xc0\n\xfe9e\xd8\x9fC@\x01\xad\xcd\xe7\xab\xe5U\xc0\x1f\xab\xc3M\xb0\x9aC@\xef\x05\xd2\xe6\x07\xe6U\xc0\xda\xb1U\x8c]\x99C@\xe7`H\xa8\xe8\xe6U\xc00`\xcd\x0b\x1a\x99C@K]\xbc\xeb;\xe9U\xc0\x82\x7f`\xed\x92\x95C@\xff\xbd=\r\xe7\xeaU\xc0\x9a\xde:\xb3\xc7\x92C@\xeb\x9e\xc4\xec4\xeaU\xc0\x06\xe5e\x8f\xba\x90C@n\xec-\xeab\xeaU\xc0`\xda\x8ey\x87\x8eC@\x8e\x87\xa4\xa9m\xe8U\xc0g\x9a\xadSO\x8dC@\xe3S\x01\x8a`\xe8U\xc0vT\xfe\xdad\x8bC@0\xc4<\x85+\xe7U\xc0*\xbf\x8d\xb9\xd5\x8aC@\xd5\xf0\xa6Cv\xe5U\xc0\xea9\x1dB\xff\x87C@9\xe2\xca\xc1;\xe5U\xc0\xf4\xa3\x11\xe5u\x7fC@\x1c\xfe\x02b\xe1\xe5U\xc0\xdcC^\x88?\x7fC@\n\xd1\x92=\x11\xe3U\xc0,P\xb3G\x12}C@\x9b\x92\xcb\xbc$\xe2U\xc0d\xfat\n\\{C@\xbbs\xec\xdd\xee\xe1U\xc0P!T\x93JwC@\xdd:(\x9e\x82\xe2U\xc0G\x87_\xf0\xd3sC@\xf2!\xd7\xfc\xc7\xe3U\xc0`\x14\rtWoC@\x92\x0b\'\xdf;\xe3U\xc0\xba\xf8\xd7\xdb\xd0mC@\xb5v\xea\x95\x81\xe0U\xc0]\x95\x84e\xd6eC@\x02\xa7D\x168\xe1U\xc0\xc6\xf1^dlcC@!\x08\x11t\x83\xe0U\xc0\xb4\x84\r\xc5\x87bC@\x159\xe2\x94\x88\xe0U\xc0\xd6\x81\xca\x8aK^C@\x89!\xa3y\xcf\xe2U\xc0&HL\x8c\xcfWC@\xc5\x9fC\x01\xaa\xe5U\xc0\x8b\x02-\'\x0bVC@\xb6\xfc\xf0\x86\x03\xe8U\xc04\x7f\xcf.IRC@\xff\xbbt&<\xe8U\xc0\x87\xc3\xd7K\xbdOC@~\xd6\xb1\xa4\xab\xe7U\xc0\xbcc\xb9n\xb4LC@8\x0f\x00\xe8\xff\xe8U\xc0\x16\x7f\x07\xcf\xeeKC@f\x93\\\xca\xc8\xe9U\xc0H&\x900vIC@$\x19\xbc\xed\x11\xebU\xc0\x
08\x93\xa8\x8a\x13FC@Z\xcd\xb4\xc9\xb0\xe9U\xc0@\xa872\xf9AC@cFK\xea\xd3\xe9U\xc0\xb46w\x11\x10@C@\xb2\xa6A\x0f\x84\xebU\xc0iu\x13\xd2\x84@C@\xbd\xde\x86\x8fW\xecU\xc0fd\xe3l\xa4=C@[\x8e\x15Wd\xf0U\xc0 P7\x89\xab;C@k\x98GY\x8e\xf0U\xc0\xe8oy\xac\x83:C@R\xb5\xefsK\xefU\xc0\xd6[g\n\x079C@\xf2=)\xb5\xe6\xefU\xc0\xce>we\x815C@\x02G\xd4\xd8-\xf2U\xc0L%\xef\xe2g0C@\x00PD\xfeh\xf5U\xc0\xa8\xb4\x91\x06!-C@F4T=h\xf6U\xc0R\xc5\xe4`\xa0$C@\xec\x8c\x83\xfc;\xf7U\xc0\xec\xcd\xe0B\x88$C@\x0f\x1bE\xc0\xf0\xf7U\xc0B\xfdE\x07\x8e(C@\x17t\x16\xe0\x8a\xf8U\xc0\xa82\xf4\x86e(C@1O\x18\xb9B@t\r\xae\x1d"-T\xc0\xb0>\xd6a\xb6\xb1B@\x12\x05\xf5\x03\xb4.T\xc0\x00\xc5\xc8\xa0D\xb2B@\xcc\xab\xe0\xc6\xc3/T\xc0\xa89\xbd`\xa1\xb1B@ C\x1a$\xda/T\xc06\xc0\xf2\xbc\x85\xb0B@\xba\xa2\x1e\xea\xd40T\xc05\x9b}\x81\x8a\xafB@\x9fp~\x0bH1T\xc0\xdao\x81\xd9n\xb1B@\xd2\x0cr\xd1&3T\xc0J\x90c\xf9$\xb2B@\xc3L\xc1\xd4,3T\xc0W\xa9<:\xdf\xb4B@\xf1\x93\x8c\x01o6T\xc0x\xa7\xfb\xf23\xb6B@\xd7\xf7N\xa3&8T\xc0|\x87\xcd\xd5\xc0\xb1B@y\x8d&|L6T\xc0\xa0\xceK\xb6\xeb\xacB@\xda|jZ\xbf6T\xc0\xf7Z\xde\xd4q\xabB@{,\xa2\xa8\xc9;T\xc0`\xd3\xd4M\x93\xa6B@IZm\xf2\xf1=T\xc0<\xf1ZkY\xa5B@\x9d\x98gR\xa0>T\xc0\xe3[\x98\xe7\xf2\xa5B@\xcd\xd7r\x94\x19?T\xc0Q\xaf\xbd\xea2\xa7B@\xfa\x0c\x87}\x98AT\xc0\xba\xa0\xa1\xa8\x9d\xa4B@E\x18\xe7\xd6\x01IT\xc0\x91\xb3\x17\xc10\xa3B@{$\xea\x89DNT\xc0\xee\xb9\x1eX\xbf\x9eB@\xd9\x0eP\xbb\xf5ST\xc0\xa0\x9d\xed3\x98\xa5B@}\xf8q\x80\xf6VT\xc0(\\\x7f\xcab\xabB@\xe0b\x83C\x05YT\xc0v\xdb\xf8\xe7\xd3\xa7B@W\x88~d\xd0YT\xc0\xa2R\xfc\x08-\xa4B@A\xcd}Ml^T\xc0\xeclv\xe9\x90\xa0B@3\xa3/\xd1\xb6_T\xc0\xb0 
\x92f]\xa0B@\xec\x1c\xcf\xb4Z`T\xc04>\xdf\xe9\xff\x9dB@-\xb9=9\xa0cT\xc0\xb7^\x13\xc1i\x9aB@\x89\x02C\xdd\x9djT\xc0Z\xf2\xb3\x80:\x9aB@N>\xb3\x1d\xe9lT\xc0\x18\xe1\x92\xb6"\x9eB@\xf2\xfd\x18\xdeBoT\xc0\x842\xd8\x19\x10\xa0B@\x9d\xf6aa\x1epT\xc0\xbfRIT\xd9\xa2B@\xe8!\xf5\xed\xbarT\xc0\x0c\xc6\xfdp\xc1\xa4B@N\xe1\xee4/tT\xc05C!\xf2\xc7\xa3B@\xc4\xb3\xa1\\\xb0uT\xc0\xeb\xb7\x90p\x8b\xa4B@\x11\xe7K\xa2\xf4vT\xc0\xb2\xc4\xbd\xcdL\xa7B@\xa2n\xda\xbfHwT\xc0\x03\x05\xd8\x8d\xa8\xa9B@g\x1a\x88\nkyT\xc0\x84\x9cBf\x98\xabB@\x95\x99]\x0eQ{T\xc0\x1e\xfd\x1f\xc3\x94\xafB@\xa4\xa0?K\xedzT\xc0\xe0\x89\xe1\xa3/\xb5B@\x84\xd1\xbe\xb4>\x7fT\xc0I\x8b\x17\x1a\xb9\xbbB@\xb4\x1e\xd8\xb3}~T\xc0z:\xc9\xd7\xcf\xbdB@\xea\xc6\xf9\xeb\xab|T\xc0\x06@\xe3|\x1b\xbfB@\xe7\x10\xd2j\xde{T\xc0\x85\x98\xf3\xb8\xd9\xc0B@\x1a\x98g\xaei}T\xc0 \x90\xbat\xfd\xc3B@\x99\xe0`1\x80~T\xc0\xe4\x89\x80S\x89\xc5B@J\xb3?\xbd\xaf\x81T\xc0R\xbd\xf7\x0e\xe8\xc3B@\x07\xb3\x13\x04%\x83T\xc0\xf8w\x16\x06\x96\xc6B@\xf4\x8e\x13\x05\x90\x83T\xc0,\xe2$k>\xc3B@\x16\xf7\xd8\xaef\x85T\xc0P[\xb1\x08/\xc6B@\xd4\x9b6\xdf\x1e\x89T\xc0\x8e\xe2X\x99Z\xc7B@m\x8d\xf4\x9e`\x89T\xc0\xb6\xeaw\x1cp\xc8B@+\xc7\xb2\\\xcc\x88T\xc0\xf3l/]\xf2\xc8B@cO\xb8\x1ao\x88T\xc0\xfaqq\xba\x96\xcbB@\xb2\x02\xb7#2\x8aT\xc0>\xdf\xb3\x14\xfa\xcbB@\xb7\xc1\x0e\n\xe0\x8bT\xc0\xda\xd1\\c\x01\xd2B@\xd5e\xf5\xee%\x8dT\xc0G\x90\xa6\xc4\xdf\xcfB@\xd6o\x95yA\x8fT\xc0\x06\xf0r?\x11\xd4B@\xaa\xe2C\x86\xeb\x92T\xc0| U\xb4\xa3\xd5B@@\xc4\x13\xe9\x14\x95T\xc0\x82\x87\xd8\x01A\xdfB@@y\xe2\xa8r\x94T\xc0\xc9\xf5s\x1d\x14\xe1B@\xf4 \x95\n\xc0\x95T\xc0\x02I(;g\xe4B@\xf6)\xae\xb6\xf8\x99T\xc0\x9a\x1e\x8ak\xe6\xe7B@\xb9\n\xd6\xfd\xf9\x9aT\xc0`4\x90_\xa9\xefB@\xbc\xae\xef\xc1\x01\x9cT\xc0\xbe\\w\x96\x8a\xf2B@Ei\xc1m\x03\xa0T\xc0\xd2=o\xb1\x0c\xf6B@\x84[\xf6\xcd\x93\x9fT\xc0<\xa3"G\xa4\xf8B@\x84\xf2\xdf,\xbb\x9eT\xc0\xc0\xd9\xf6\xa8)\xfaB@_\xa0\xe8*s\x9eT\xc0N\xa5O\x88\xea\xfcB@\x8b 
.T\x94\xa1T\xc0\xc8&l9\x01\x02C@\xf6d\xd5\x1f\xf7\xa5T\xc0\xb3X(?\x13\x0eC@\xfc\xc5f*Z\xa9T\xc0\xcc\xba\r\xf4\xba\x12C@g\xda\x88\nk\xa9T\xc0\x82v\xd7\x0e\xb0\x15C@u\x9b\xc0\xe5G\xa7T\xc07\xdf/\xd4\xcb\x16C@\x02L\xb2F\xd3\xa6T\xc0\x8a+aM\xcf\x18C@\xb44\xb6Hp\xa7T\xc0S*\xe9^\x91\x1eC@\xfa\x9e(\x08\xb4\xa5T\xc0\xa2!\xc9\xe2h\x1fC@\xbb\xef$\xe8\xc5\xa4T\xc0\x0f\x913\xbc\xc3 C@\x1e\xb8\xfc\x86\x1f\xa5T\xc0\xca\xaf\xc5\xfap%C@i\xd5\xef\x88\xa0\xa4T\xc0\xec\x03\xdd\\f\'C@\x15\x84`\x8dI\xa6T\xc0\xba\xd6\x1a\xd7)/C@\xd0a\xa6\xec\x8a\xa5T\xc0\x1e\n;o\xcd4C@\xc8\x11\\\xab\xd3\xa4T\xc0pm\x98\x12\xb33C@5\xaa\xe9\x07\x0b\xa3T\xc0\xcf^\xd3\xf4C3C@\xdf\x99\x19\xdf\xad\x9fT\xc0:\xdf\x9bP\xf23C@\xc7b \x94\x8d\x9aT\xc0\xe5P\xd2\x18\x177C@\x90\xac^\xd1C\x99T\xc0\x9e\xbd\xea\x1d\xd86C@\xda\x14\xf1E\x11\x95T\xc0[O!\xe5\x918C@1\xf5;\x83\x1c\x94T\xc0%;\xc2\xa2\x8c;C@[\x9b\xab\xe3\x8e\x92T\xc0\x9df*\x1b@JC@l\t\x84aV\x91T\xc0\x10\xc6\x0f^%LC@\x1cu\xf1\x97\xac\x8dT\xc0E\xac\x9b\xe2\xdbJC@\x8b\xe6\x12\xb5\xca\x8bT\xc0\xb5s>\x05*LC@K\xf9d3\x1d\x8bT\xc0.\xeb#\x9d\xebPC@\x9a\xbb!4\x18\x8cT\xc0\xb4,\xb15\xc5VC@\xf8\xa3\xd57\xc6\x8bT\xc0iEz8\xebZC@Cp\x89<\xdf\x8dT\xc0\x92\x12\xfaH\xb4cC@\x10\xfeQ{\xa7\x8cT\xc0\\4n\xc4\xfdfC@\xad\xc2\xc3\xb0Y\x89T\xc0\xc8\x8ecb]kC@\x0c\xb4\xfe\x92\xea\x88T\xc0`\x11O|\x1fsC@\xa8\xc0\x11.z\x86T\xc0\x00}e:\xdeyC@.\xdd\x17\xe8p\x85T\xc0\xb2\t\x03\xd6\x14}C@\x96\xdb\x82\x84\xbe\x83T\xc0.\x83\x9f\xb1\x99~C@\xef\x08\xdf\xa1\xbe\x82T\xc0r\x8d\x94Q\xcf\x81C@\x8b\x00\x94\xba\xfa\x7fT\xc045\x99\x15\xf4\x81C@O\xf7\xe0wi~T\xc0\xbc\x93 \xd5\x1a\x7fC@\n\xa7j\xd2\x03|T\xc0\x91\x03\x96\xd5\xde~C@\n\\9\x92a{T\xc0\xf3\xe4\xa2\x9a\xfc}C@\xd7\xbfE\xcc\x82yT\xc0\xdf\\\xc1 SwC@\xe4<\x85s\xa3{T\xc0\x881\xf3\xea\x86rC@\xa6\xb7}m\x93zT\xc0x\x88\xd7\x895qC@\x91VZ\xeb!yT\xc0\xc5\xc4\xdfQ\xcdoC@\xa0\xc7\xd9\xa7ywT\xc0\x0eb\xa7\xec^qC@\xf0\'\xe7\x85\xd1uT\xc0r=8\xc2\x0cxC@(>O\xc4\xb8tT\xc0,J\xea`gyC@b\xc1\xe4\\ 
rT\xc0\x8a\xfc\xa1J7vC@\x80\xd7\x7fz\xc9pT\xc0\x0eD\xd4.\x10wC@\x1f\xbah\xde\x07rT\xc0\xaaF+\xc5\xf8{C@\xa7\xf1\xac\xba\xa4qT\xc0/\x92\xefv\'\x82C@dQ\x10\xc4\x0ftT\xc04\x0f\xc6W\xa5\x85C@!0\xaf\xe4\xc0tT\xc0\xd6R\xd7T\x80\x88C@\x08\xad\xe6\xc1stT\xc0J\x7fy\xb4\xdb\x89C@]\x1e;\xbeSrT\xc0\xc0\x92\xbd\x91\xe3\x89C@5B\x0fY:pT\xc0\x9e\x8d\x00\xcc\x1f\x8cC@:,\xea7\xa9oT\xc0?\xab\x15\xaf\x1c\x90C@\xd1Gd\xfc\x91pT\xc0B2L\x05\x7f\x96C@e=P\xd9FnT\xc0\x1cK\xb4`L\x9bC@D\xc3|r\xaalT\xc0\xef\xe5\x95\xa0)\x9cC@\xa6~\xcd2!lT\xc0\xb4\x87\xf5\x18O\xa1C@\x9f`V\xaf\xb8jT\xc0\xc6!\x93\x97\x9f\xa2C@\x7f\xeb \xe0\xa6dT\xc0t\xfb\x86\x94\t\xa2C@\x0f9\xdc?\xaccT\xc0\xe1\xd0rs\x94\xaaC@\x87\x126\xfb\x99bT\xc0\xdb\xee\x97\x94%\xadC@\xecm\x0e\xb2\xc2]T\xc0\xcd\x18T\xef\x13\xb4C@Y\xd4\xc8P\xab\\T\xc0\too\x8b\x9c\xb4C@\x81\x99\xcdL\xc6[T\xc0\xc7?\xd7\x92\xf8\xb3C@\xa9\xc1\x1f\x03\x0fXT\xc0\x951\xf4\x94?\xacC@\xaf\xe2T~\xafUT\xc0R%\xf17D\xadC@\xe3\xc7\xc7V-RT\xc0\x16\x99T\x96\x8b\xb1C@o\x11\xda/5OT\xc0>\xb5av\xb9\xb1C@}\xebo\x8deNT\xc0g\x92=\x16E\xb4C@\\ 
4\xcd\xd1LT\xc0ZS\x91\x17<\xb5C@\x0c\xfc\x07j\x8eKT\xc0\xf81\xa3\xd7\t\xb8C@\xb6(2h~GT\xc0\xf0\xf4\xa8[\xe0\xbbC@\x87w\x89\xa6IFT\xc0W\xc0\xe2\xb7\x8b\xbfC@\x85\x83\xce|dBT\xc05+\x8cX.\xc4C@Z\xa0;\x99\x15BT\xc0\xe4\x80\xfdt\xa6\xc5C@P\x10~\x0f\xf4>T\xc0\xa46"\x99x\xcaC@]\x060\xe7\xaf;T\xc0f=/A\xb0\xcdC@P\xf2\x06\xe1g:T\xc0\xee\x80J\xc1\xbd\xcdC@\xa8KV\x1cd8T\xc06\x00\xed\xe5\xe1\xcfC@\xd6\xab-\x13\xdb7T\xc0*NE\xe5\xc9\xd4C@\xd8\xf5\xee-B7T\xc0\x80\xfc\xe5\xc0\x15\xd7C@\x08\x16\xfcbL5T\xc0\x9b\xa2\xec\x05\t\xdaC@iU\xf8_D5T\xc0\x0eJ\xb3\xc3\x02\xdcC@\xd3\x1db!\xd06T\xc0\xdf\xa5\xba<@\xdeC@3\xed\xe0\xfe\xb97T\xc0\xf2\xd6jvG\xe1C@\xd0\xb1R4l4T\xc0\x10\x8fQ[\x8d\xe7C@\xa9\x01\xec\xcf\xdb4T\xc0t+_8z\xebC@\x8b\xad\xbd\t\x1b3T\xc0\x0b\xd2\x14\x14\xa9\xedC@\xe0mCF\x9d2T\xc0\x0b\xda\x14\x14\xa9\xefC@q\xa8i\n\xfa3T\xc0\x06\x98\xd3\xce\xd3\xf3C@\x1c\xc7\xae\xa7\xb33T\xc0\x0c\xb6JR<\xf5C@\xea%4\x03\xf22T\xc0\x87[\xdaK\xbd\xf5C@z\xaeI\xff(1T\xc0\xc1\x0e\xccs\xe7\xf4C@\xf37\xeb\x9b\x910T\xc0\xad\x86e\x0e\xec\xf5C@\xb2\xda\xbf\xfa\xd50T\xc0\xaa\x07\xaa\xcc7\xf9C@\xdbu\x08\xf2I/T\xc0\x80d\xb3\x8d\xe2\xfdC@\x15\x04\xeeO?/T\xc0h\x1e`\xa6\x90\x04D@\x08=-\xa3\xee,T\xc0\x8b\xda\xaa;\xb9\x13D@\x12\x98\xdeb\xdb,T\xc0@\xf7\x8a\xf7\x86\x15D@\x93jz{m+T\xc08[\xb1\xf3\xd9\x18D@-\xf2\xb1t\x9b)T\xc0\xa0\xdf\xe2kr\x1fD@\x9e\xcby\rW\'T\xc0\xe1j\xf3md#D@|\xb15i\xb0&T\xc0\xc2"\xee\x033\'D@\x8b\x86D\xe8\xfd&T\xc0\xce\xad\xb6|\xc7/D@\xd2\xcc\xc3\x89E(T\xc0\xcb\x15\x9d\xb8\xbf1D@^6\x84\xaa.(T\xc0\xe6:\xdf\x14\xf92D@\xd7h\x1dc\x84&T\xc0\x9c\xe9\\P\x82=D@/-L&\x04(T\xc0"=\xcfI\x92@D@n\x7f\x10I\x8a(T\xc0b=\r\xa6\x04ED@\xba\xe6\xad\xac\xca*T\xc0\x10]\xd4`\xbdHD@\x99s*\x0b\xbc*T\xc0\x05\xc9(z\x83JD@\xb2!v&\xca(T\xc0\x96\x9ea\xf9\x96ND@\xea4\x8e\x9f#\'T\xc0\xf4\xb1\x81<^OD@\xdc\x04w<\xc3$T\xc0\xe0\x0c}?\xd8ND@(\xc7\xa1oh!T\xc0\xc5t?\xe1\x8fQD@U\x00\xcd\x11\x8f!T\xc0V\x1b[\xceH=D@\xed\xbb\xb7\x13\x82!T\xc0\xb8"\xe9\x98\x963D@\xb03\xc9\xb9\xaa!T\xc0\xc0\x81\x05\x80\xcd\x14D@2\xceZ\xfb\x98!T\xc0T\x89\t\xf1\xeb\x0
2D@&[\xbb\xde\x93!T\xc0\x14\x93Y|\xad\xfaC@\x80\xb8\xb3\xa0\x8d!T\xc0B\xf5\xa4\x96P\xdcC@\xdb\x9f|\x15v\x1bT\xc0\xf8\xcej\xcb#\xdcC@e\x07H\xe8\xc4\xfaS\xc0@x\xa0\x98_\xdcC@j\xeaW\xed\xf7\xf0S\xc0D.\x08/d\xdcC@\x8e.\xb9;\xc8\xdeS\xc0\xca\xbeK\xf31\xdcC@1\xf9\xdc\xf3Y\xdfS\xc0"QT@D\x99C@\x0c\xa0\x95,\x84\xddS\xc0\xd9[&?L\x9bC@K}l\n\xc1\xdcS\xc00\x01\x17\xe0%\x9bC@\xdd\xfeqV\xa1\xd8S\xc0\x91+0ox\xa2C@\xdc;\x03\x0c(\xd6S\xc0_\x85WHc\xa5C@T\xc3\xdb\xc1\xe5\xd2S\xc0<\xe1\xb0#x\xa6C@.\xd3\xaa\x9b\xe8\xd1S\xc0\x8c\x00\x16\x93\xa1\xa9C@\x13\xff\xd0\x98\xa6\xd0S\xc0\xe0q\xbcG\xa0\xacC@\x98\xb2.\xe4n\xcaS\xc0\xfa\x14\\\x0e^\xb2C@zn\xd7\xc1\x1e\xcaS\xc0\xac\xfa8\xa7\xfc\xb4C@\xbe\xac\xad\xdch\xc8S\xc0\xb2D\xa3Ha\xb5C@:\x88\xe77\xa9\xc6S\xc0\xa7\xc5\xbeUA\xb9C@\x83y>\x950\xc6S\xc0\xb0\x8e6\xf1x\xbbC@\x10\x8a\xbf\xb8\xb1\xc6S\xc0\xcew\xae\x8bE\xbcC@K:\xa10\x85\xc4S\xc0\x12X\xbe\xcaD\xbcC@S\x8frP\x1f\xc4S\xc0\x86|\x89\x86/\xbeC@\x03\xfd\xa8m \xc3S\xc0\x95Yo\xa9\xed\xbdC@4\x8e\xba\xa0\x1b\xbeS\xc0h\xd7`\x99!\xb8C@a\xccl!%\xbdS\xc0\xa4\xbc\xb7L\xf0\xbaC@\x085\xf8o\xbb\xb7S\xc0\x06\x8fA\x19M\xc3C@\xaf\xc9\x08\xaa\xa3\xb5S\xc0\xd4\xf9\xef\xd0\x1a\xc8C@\xd3\xd2\x05\x85\x9e\xb3S\xc0L+\x02\xab\x8d\xc8C@\xd4\xa6\x12g\xa2\xb4S\xc0\x19\x10\xfc)\xf8\xcaC@\x8e\x9e=\'\x1a\xb3S\xc0\x11$j\x07\xc6\xceC@X~\xde\xe4\x14\xb3S\xc0\n\x9b\xce%\xbf\xd0C@\xa4\xc1\xcd\x00t\xb1S\xc0\x84bC\x80v\xd2C@\xe6\x9b\xfd\x9f \xb1S\xc0\x16\x16E\xe64\xd0C@\x85\xe3\x13^\xdf\xaeS\xc0\xf1\xd8\xabf@\xd0C@s\x8c\xd0{\xc0\xaeS\xc0\x14\xfb\x06\xc4\x8e\xcfC@\x92D\xa5\xbd\x1e\xafS\xc0\xday\x7f\xe8\xec\xcdC@P\xc5}\x02\x85\xb1S\xc0\x88I<\xcc\x01\xcdC@\x03B\x1b\x9e\xbb\xb0S\xc0\r\xed\x14,x\xcaC@of+\xdb\xe4\xaeS\xc0\xe4 
\xc0j\xcf\xc9C@\xa2>\x00\x1c\xd8\xadS\xc0\x1696\x11\xa0\xc7C@l\x98,\xb6\xa6\xaaS\xc0\xa0\xef\xf5\x19\xba\xc4C@\x1a\x1eh\x94\x8b\xa9S\xc08\xf5a!\xdd\xc4C@t9\xb6\xf4\xc5\xa8S\xc0\x1c\x0fza\xd5\xc3C@\x80\xad\xdc\xef\xad\xa6S\xc0\xe2N\xc1E\x91\xc4C@\x04\xa2\x1d\xa9\x1b\xa4S\xc0\x80.1\x88\xb2\xc2C@\x9a\xd1\xb3\xe7\x8f\xa0S\xc0#`\x0bl8\xc3C@\x11\xec^C\xcd\x9eS\xc0W\xc1\xbbN\x8d\xc2C@g\xdd\x07\x03,\x9dS\xc0n(\xbfpQ\xc4C@\xeeat#\x89\x9cS\xc0\xd49\xd1K/\xc6C@b&\x87\xc0\xee\x9aS\xc0V\xe3\xc9\x0bS\xc6C@#\x86\xd3e\x8e\x9dS\xc0\xdb\x96\xcb\xc7X\xcaC@\x1b6\x89$\xd7\x9cS\xc0\x99\xff\xf5\xc4\xdd\xcbC@\x8em\xc0\x1f\xdb\x99S\xc0\xac@+\xea6\xcbC@\xe6\xa2\xdc\'\xa4\x9bS\xc0\xf4\xa8\xf8]{\xcfC@qxq\xe1\x9d\x98S\xc0\x90?W\xc0\xa7\xceC@\xe3\xea\xeb\xbf+\x98S\xc06\xb1\xe4\x01\xcf\xd0C@c\xef\x1a\x81\xd5\x96S\xc0\xc6\xf9\x18\x83\xf1\xd0C@i\xd9\xf5_D\x96S\xc0$\x94t\xdd\xfe\xd1C@\xc7\xf6:\x19y\x91S\xc0\xf1\xbc\xbd\t(\xcfC@\xb1uR\x96~\x90S\xc0\r\xa5\xae\xc5\x11\xd2C@\x1ejo\xb5\xab\x8eS\xc0\xcc\xab\x12\xe7K\xd4C@\x92\xc1T\xf8\x90\x8eS\xc0y\x85/@E\xd6C@@\xaf6R\x13\x8dS\xc0\x8e\xa3\xf8%\x85\xd6C@\xde\x12R\xd1\xb5\x8bS\xc0GX\x1b\x02\xea\xd8C@\x19(a\xa3\t\x86S\xc0J*\x9f\x12z\xd6C@\x13g@\xd6\xb0\x81S\xc0\x88\xd9\'\x1e\xba\xcfC@\xa4\xd1pq\xb1\x7fS\xc0\x00\x953\x07\xab\xccC@\x90=\n\x0c\xb6}S\xc0\xba\x8f\xd7\xe8?\xceC@\x9d\xeaS\x8az|S\xc0\x01r\x9ek\x02\xcbC@)\x00\\\x8c\xde{S\xc0\xc4\x85I\xab\xc4\xcbC@\xad\xf4\x85\x8c\xa4|S\xc0\xc2\xbfC@@y\xc9\xfa*sS\xc0(4F\xc0\x8b\xbdC@\x8cZ\xf2:?rS\xc0P\xe8\xc3\xdd\xc3\xbaC@\xd5\x1dv\xdawsS\xc0<\xeb/\x1eH\xbbC@\x9c\xb8\xdc[\xeerS\xc0\xa3\xbf\x9e\xa0\xb7\xb9C@\x0f\xffc\x1c\x80sS\xc0N\xee\x91\x85R\xb8C@\x8b\x85^zXsS\xc0\xcc\xdb\x82$V\xb7C@]\x02\x89vrpS\xc0dO\xcc\xc0k\xb6C@E\r#\xd4ioS\xc0^\x7f\xad\xe6\xa3\xb3C@q\xbdr\xd4.oS\xc0\xda\xef\x99\x87\xb6\xb2C@+\xffG\xf6epS\xc0\x80$\xdb\xe9q\xb0C@\xe7P\x0bu\xb5oS\xc0\x9a$\x7f\xae 
\xaeC@\xa0w\xa2tFpS\xc0\x9a6d\x0fW\xabC@h\xa4kv\x01pS\xc0:\xc0$/\xd5\xa9C@\xba\x13\xe0\xd2\x8enS\xc0\xb6B\x1f\x8d\xad\xa8C@\xf6\xcc\xda\xf6\x9apS\xc0\xe8\xcf\x991o\xa4C@\xbc\x92\xad\xb7*qS\xc0\xf8![\xf6\x8e\x9fC@\xff\x12\x17{\x8csS\xc0\x92\xb7\xb7e*\x99C@\x94\x02\xf5\x9a{tS\xc0|\x85\x85\x0e$\x92C@\x8f\x0f\x93\xdd)uS\xc0\xda\x90\xa5Q\xeb\x90C@\xd3i\xe4\x0c"\x82S\xc0\r\x87~x\x00\xa2C@R\xa4sC\xb0\x8eS\xc0\x13\x07\xb2\x00\x10\xb2C@\x1b\x1e7l\xb8\x91S\xc0\xba\x96\x8d\x144\xb6C@e.\xb52>\x96S\xc0\xd2N"\xec~\xbaC@:\xfbi0j\x96S\xc0\xf8\xaf\xdc\xfb\xbe\xb0C@\xad\x9e0\xeec\x97S\xc0\xba\xa8 \xbdK\xaeC@%B\xb7\xeb\x02\x96S\xc06\x18]#\xec\xacC@(\xf4\xdel\xd0\x95S\xc0\nc]\xe8\xb4\xabC@\x80_\xf7\x8ew\x9aS\xc09r(\x15\xf7\xa0C@\xadA\x1aM\x8b\x99S\xc0\x8eB\\\x99Z\x9fC@\xdc\x07!\x91\x13\x9bS\xc0\xe8/\x85\x83\'\x9bC@\'\x1d=\xef#\x9bS\xc0\xe4\x01\xe0\xe2K\x99C@x\x0f\xadM\xc0\x99S\xc0f\x03?\t\xd6\x95C@\xdc\xb7hr\x8e\x9bS\xc0\xa90N0\x06\x93C@7\xe4=\xb1\xab\x9cS\xc0\x90\xfcd\x95<\x8fC@ws%W\x0e\x9fS\xc0\xd7\xf0b1T\x8eC@/J\x048\x1a\xa0S\xc0\xe6}g\xd9\xfd\x8bC@s\xff\x90~X\xa2S\xc0\xf2\x1d\x01\x1dP\x87C@\xda\xf4]\x83\x1b\xa4S\xc0\xbc\x1a\xae\xa4\x7f\x84C@\xb9\xa4F!&\xa3S\xc0F2\xad\xaa\x01\x83C@\xf6d\xa8\xc3g\xa3S\xc0 
_\xb1\xaa\xc8\x81C@\xad\x87(\nQ\xa6S\xc0\xa7\xc5\xe6\xb1\xd0{C@\x10\xf29\xcd_\xa8S\xc0\x1a\xef\xf86g}C@\xf1\xa5\xcb\xf1g\xa9S\xc0\x02/\x8d\xbb\xabyC@\x99C\xf5\xd7\x88\xabS\xc0\x18\xb1\xd3\xc0\xf9uC@\xc3\xa8\xfc\xde\x03\xaeS\xc0Q\x1f\xcb\xc1\xd6sC@1@\x03]X\xaeS\xc0\x83mf\xde\x14wC@"\xe9h\x017\xafS\xc0\xe8vx\xb9\xf2vC@\xfcy\xfc\xc3\xf3\xafS\xc0\xea\xd8\xbe\xbf\xabtC@)\xe5\xe3j\xc1\xb2S\xc0\x00%2\x07\xabpC@\xc3\xf4/\x127\xb4S\xc0\x93\xfe\x06+\xb8jC@\xe5\xbd4\xb9u\xb7S\xc0\xdd\x91\xf3;\xb7aC@\x0613o2\xbfS\xc0\xce.&\xadblC@h\x87V\xd5(\xc2S\xc0Be\x84\x11efC@9\xe1\xfd\xd8\x81\xc3S\xc0*\xe0[y3eC@\\\xc8\xfe\x99\x9e\xc3S\xc0\x9c(\xf1\xf6\x8aaC@Y;\x0c?\x95\xc5S\xc0\xb4d\r\xc5\x87ZC@\x98\x81\xf9\xbd\xaa\xc5S\xc03\xb9b\xd7`TC@\xed+Z\x84\xbf\xc7S\xc0\x7f"7T\xf6TC@pg\xb5\xc4\'\xc8S\xc0\x90\xef\xeeYATC@VF.\x92\xd3\xceS\xc0\x96\x1d\xa3\xe7\x80=C@', '\x01\x06\x00\x00\x00\x02\x00\x00\x00\x01\x03\x00\x00\x00\x01\x00\x00\x00\\\x01\x00\x00\xac(\xff\xc9\xae\xa0U\xc0\xea\xc3\x07z\xd9SB@\x04\xff1sP\xb1U\xc0\xe3cO\x06xSB@0w\x96!]\xc4U\xc0!\xf7d\xc9MSB@\xeb\x07\x9d\x836\xc7U\xc0\x12\xdb$\n^SB@*\x93\x11\xe1.\xd6U\xc0\xd6\x0bE\x85\x1bSB@\xe1\xc3\xfe~\x00\xe9U\xc0_m\x85{\x96RB@DGn\xc4b\xecU\xc0\xad\xd89\x98~RB@n\xf4\xd1Z\xa0\xf6U\xc0*W\x9dg\x1dRB@\xaf\x0e\xcc\xbb\xb9\xf7U\xc0\xbd\xa8*\xaa\xafUB@8h\x02\xdb\x90\x04V\xc0{\xe8H\xdd\xffVB@k3P=\xa1\x02V\xc0\xa1<\x13\x9d\x96JB@UY(\xbd>\x02V\xc0\xba\xa7\xdb\xbf\xe3DB@B\x880\xa2\xbc\x02V\xc0AN\x1b\x9e\x8f?B@\x8c\x9d\xcf\xe0\xbe\x1fV\xc0n\nBB\xc5?B@\xdfs#\xc5\xcf 
V\xc0\xe8\xa5\x9a"\xf1?B@\x08\xc0\xeb\xce\xe23V\xc0\xe5V\xe9\xb7\xe0?B@\x8c\xa2\x07\x12\xe34V\xc0\xb8\xbaG\xff\xfc?B@$\x94\xc5\xd1$5V\xc0\xbe\x9cK:\xfb?B@\xf7\x1d\xf5\xcb/VV\xc0;\x8dL\x89U@B@\xb5\x02]\xd3\x8bZV\xc0l\xec\x1c\xccW@B@\xc7\x80\x0c\xf5\xc3ZV\xc0\xa6\x01\xfe+\\AB@\x03\x12Y\xd1\xeeWV\xc04\x16\x8c1\xe1NB@\xa2\x040\x92EWV\xc0h\x88\xc3\xf2\x18PB@h\n\xe4\xcd\xe9UV\xc0R\x94\xb6\x11\x80PB@\x0cs\x18N\xa1TV\xc0XH\xbe2\xa0OB@T\x8cb\xc9$RV\xc0Ll\xb6\xbc\xa3IB@c\x1aW\xc1wOV\xc0^\xb6r\xc0\xdfHB@\xc9\x12\xe2\xbdrMV\xc0\xfcF\x03{}JB@O\x97N\xde\xcfLV\xc0\xca\tjQ\xd0PB@\x1fTC\x9cVKV\xc0W\xa9&\x8c\x97SB@\xa3\x8b\x9e\xdc\xbeJV\xc0~\x1b\x0c\xeb\xf7UB@\xd1\nt\xe0\xa4LV\xc0\xf89\xb2\x85Q[B@\xfeU\xad?\x91LV\xc0\xba\xf5\xcd\x02\x1e]B@\x1ar?aXKV\xc0c+\xba\xdegaB@\x80#\x99\x1d\xb1IV\xc0>}3\x1a*aB@\x8d\x94\x18\xda\x08HV\xc0\x84\xfc,\xb8PbB@\x96\x166\xd9\x0eHV\xc0\x96_p\x9aoeB@C\x8aLA\x86JV\xc0\xe6\n\xd8\x14\xf9fB@\xfd\x80\xf0"\x1bKV\xc0\xc4e\xaa\x11+jB@n\xf3j\x01\xa9JV\xc0\x80d\x8e\x08\xf7kB@\xdd\xb4D=LHV\xc0\\\x0c\x89I\xe9nB@\x82\xb7\xb2\xe1\xb8FV\xc0\xb9\x99\xc1 \x1azB@\x05a\xab\xa1\xdcFV\xc0\xe8\x08\xe9\xdc\x1e}B@)1\x17\xc7PHV\xc09:\xdc3|~B@W\t,\xc8\x9eIV\xc0\xb8e\xa0\x90\xce\x7fB@\xf9\x1d\x91p)KV\xc08\x04[.L\x83B@\xd6\xf75\x13\xdbJV\xc0\xfa\xa1z\x91:\x88B@\xfb$\xf8\xee^IV\xc0\xdcL\xae3\xef\x8bB@\xfe\x04\xf3-{GV\xc0\x8e\xf300\\\x8eB@\x03\xa5#\xab*DV\xc0\x90\x14\x8a\xf1\xcb\x97B@\xfe!\xad?\x91?V\xc0P\xc9\xfe\xda+\x9cB@rb\xab.\xaf;V\xc0\xa8\xeb\xf3y\xf6\x9bB@\xc9{\x87!A7V\xc0\x9a\xfc\xd6=\xe3\x99B@\xe5\xbd\xc3\xa4\xc7/V\xc0\x90\xe4\x04\x06z\x93B@\x0f\xb0\x14\x80N/V\xc0>\x04z\x08\x14\x92B@2\xc7\x1f\x18\x0f,V\xc0X\xdf\xd1\xedV\x91B@\x98:\xe6+P\'V\xc0Ph\xf2\x10\xf7\x8dB@\xdc\xaf\x16\xe3\xcb#V\xc0\xcaHJ\xd9S\x89B@\xa6\x8d\xee\xb9\x1b!V\xc0\xf1Q\xf0:L\x88B@\xb6K\x7fVh\x1fV\xc0\xdc\x0c{\x00\xbc\x88B@\xd2qH\x91\x84\x1eV\xc0T\xc1\x88\xdc=\x89B@>1B-\xd5\x1cV\xc0\xea\x89#\x19\xa3\x8cB@\x8f\xbbK%\x0b\x1bV\xc0\xf1X\xe4p\x17\x94B@\x16T\x8f\xe9\xd8\x1cV\xc0G\xa3\x07-U\x9aB@W\x1c\x9a\x12\x18 
V\xc0I\x9b+\xc8\x00\xa1B@\xfe5o5\xba V\xc0\x15T\xb2\xfe\x00\xa6B@g\tn\x92\xee\x1dV\xc0\xae\x03<\xb0M\xb3B@\x98\xfe\x0e\x88\xdf\x1aV\xc0\xfc\x1d\xb1\xcf\xcd\xb5B@\x9b\xcb\xe2]\xfd\x16V\xc0>\xac\xff\xf3e\xb4B@\x01\xe1\xe2\x95\xf3\x13V\xc0\xdaB\xb0\x0e\xb1\xb8B@\x9f\xb4nR\xa0\x05V\xc0x\xe00\x19\xf8\xbcB@\x06\xdd\x95\xf3\x94\x04V\xc0\xfe\xc0~\xb4i\xc1B@\xb8D\'\xc5\x96\x08V\xc0\xc6\tq\x12\xb4\xcaB@r\xf7\xe9+\x17\nV\xc08\xd3E\x8ds\xd0B@\xf9\xbd\x00\xae3\nV\xc0\x9a\xbd+\xe9\x92\xd4B@\x16\x9e\xc8\r\x8e\x08V\xc0\x06O\xa4\x7f\xb3\xd9B@L&\xfc\xe8\xa3\x04V\xc0\xde\xae=\x1b#\xdeB@U\x08\xc0cG\x02V\xc0\x88\x92c\x1b"\xe7B@+_\xd7\xe1\xb7\x00V\xc0\xb7\x1e\xd7\xb6\x92\xe6B@\xc7\xaf\xcb\xf7[\xfdU\xc0y\xf6\xe5Q[\xe3B@\xdb\x0e&\x93"\xfcU\xc0\xfb\x10\xf5\xb2W\xe6B@\xe3\xfa\xe0\x11\xe4\xfaU\xc0!,\xd2-\xa5\xe7B@\x81\xc1\x1b.A\xfaU\xc0\x13g\xc3\xaeW\xebB@\x13\xe1\xacV\xf5\xfbU\xc0B\xd4\xb3Q\x07\xf0B@\x8cv%\x97\xce\xfbU\xc0b}\xf3\xf7\xbc\xf3B@\xcf\xa0\rU\x00\xfbU\xc0\x896\n\x97\xbf\xf5B@\x1f@u\xcf\x89\xf9U\xc0r\x90\xd54Y\xf6B@\xb6\x9d\x07(\xdc\xf6U\xc0\xe2\xbd,\x90\n\xf2B@9\xce\x12\xa3\xb6\xf4U\xc0\x84A\xf6\xabj\xf0B@\xb23\x06\xf8=\xf0U\xc0\x17\xd5\xc4\xde\xf5\xf2B@\xb4\xd5V\xd5\x9a\xeeU\xc0\xacB\xee\xc5\x81\xf2B@\x9au\xfa\xf1f\xedU\xc0N?\xa5&+\xf3B@\x8e\xe6\xac\x8d\x80\xebU\xc0Z\xf9u\x83\xd2\xf2B@\xed\x0ekh\xd2\xebU\xc0\x01\x8b\xe4?\x0e\xebB@FANd\xb5\xe9U\xc0^\x10y\xa6\x01\xeaB@\x7f\xd6\xf1\xb9\xe2\xe6U\xc0\xd7H\xdbE\x02\xecB@ni\xa0\x1a\xfe\xe5U\xc0[\xf2\xfca\xb5\xeeB@\xd6\x8c@\xdd\x0f\xe6U\xc0z3\xd6\xa1\x04\xf2B@Z\xc6d\x04#\xe8U\xc0\x96\xc0\xa7\xc03\xf6B@NC\xeeC\xad\xe6U\xc0\xe8Nl\xe2N\xfcB@\xf2Zh\xb2N\xe0U\xc0F9\x05G3\xf5B@Wj\x88J\xf2\xdcU\xc0\x89\x1f5\xe6\xdf\xf7B@\x06&\xae\x9f\xcd\xd8U\xc0f\xfby\x13\xad\xf7B@\xcaL\x054\xe0\xd3U\xc0\x96\xef\x01]e\xf2B@\xd6\xab\xcd\xact\xd1U\xc0\xa5\xd5E\x02w\xefB@&\x97\xedE\x83\xceU\xc0X%\x17\xe9\xaf\xecB@G\xde\xeb\xfb?\xcbU\xc0\x10vA\x91X\xebB@\xd7\x90\xbd\xfc\x1d\xcaU\xc0\x92\xd0\x08\x11\xda\xe9B@K\x02\xc8\xb5p\xc8U\xc0\xa5\n\xc5\x14\x16\xe5B@=\x14[\xb4
\xcf\xc6U\xc0\xa98AYb\xe4B@H\xed\x97P\x90\xc4U\xc04\x8d\x8c>P\xe7B@R\xa5_\xb1U\xc2U\xc0\xeb\xd7\x0c\x859\xf4B@\xc9Qm\x8d\xd7\xc0U\xc0rU\xbb\xad^\xf6B@\x96\xca\xd7IL\xbfU\xc0Q\xa7\xddo\x1e\xf7B@\xb0\x02\x86\xe5\x9e\xbbU\xc0lE\xe4\xb4\x11\xf8B@m;\xeb\xe1\x9a\xb9U\xc0|\xc7S\xc1\x12\xfaB@\x06\x9f]\xda?\xb7U\xc0L\x15\rhS\xfeB@`\x0f\xeb7\xe2\xb4U\xc0L\xbc\xcdj\xeb\xfeB@n\x1bTSa\xb3U\xc0^\xd8\x8dTI\xfdB@\xb4\x19`\xac>\xb0U\xc08\x8e\x89p\xfd\xf2B@\xf8N\xaf\xe8\xa5\xaeU\xc0\xfd\xab\xeb\xf3\x82\xf2B@\xc5\xc3\x19\xa5\x1a\xacU\xc0\xba\xedi\x9c\xb7\xf4B@{\xdd\x80?\xcb\xaaU\xc0xxP\x9e\xe3\xf4B@\x15\xda\xa5}B\xaaU\xc0\x19\xa8!\x86\x87\xf3B@}\xa4\x06C\xec\xaaU\xc0\xad>\x05\x7f)\xeeB@\xeb\xf0\xdb\x80\x9e\xaaU\xc0\xbc\x0b\x14\xfev\xecB@\xf4Y\x9b\xfdP\xa9U\xc0\x0c i\xbdI\xecB@\xe5\x86\xc3\x97X\xa7U\xc0|\xd5\xfd"\xd2\xedB@\x7f/0\xb7J\xa6U\xc0\xaf\xb2\xfd\xb2\xe5\xf5B@\x1c\xc9\x1e\xf4;\xa5U\xc09\x1a\xde3|\xf6B@\xcb\xbb\x87,\xa1\xa2U\xc0v\xf4$7\xf4\xf5B@\x94\xfc>\x8bt\xa1U\xc0\xca\x08#}\xc4\xf6B@@\xf4\x17\xe9\x14\xa1U\xc0\x88\x91\xd2e\x9b\xf8B@\xf8/\rk\xf9\xa1U\xc0\xeay\x81\xa8e\xfeB@\xabSk\t\xc8\xa1U\xc0:\x18K\x8bd\x02C@\x05\x8d\x9e\xca8\xa1U\xc0\x8a\xfdE\xae\x05\x06C@\xafi9\x063\xa0U\xc0\xc7\xc7\xdei\x9c\x06C@5\xf8\xb3\xe3U\x9dU\xc0^q{\x99\x92\x07C@\xba\xf3DbQ\x9cU\xc0\xfa4O7\xba\tC@\xb7\x02\xc3DR\x9cU\xc0L\x03\xccwZ\x0bC@1\xea\xbc\x8a[\x9eU\xc0\xd4PGmL\x0eC@\xf4@\x99\n\xc0\x9dU\xc0K\x86\x192\x88\x10C@p|b(\xf6\x9cU\xc0\x1e;\xe0\xd2\x9b\x10C@D\xea\x0eB\x0f\x9aU\xc0b\xdcl\x19\xda\rC@pO-\x022\x99U\xc0H^\x14\x1c\xc8\x0fC@\xd8\x99P\xbd\x04\x96U\xc0"\xe9p\xc5/\x11C@\xdfS8~w\x95U\xc0\x0c^z\x85o\x12C@\xc2\xd4\x86\xbf\xf5\x95U\xc0\xe6U\x84^\xe9\x13C@\xb1\x11oD\xc6\x98U\xc0\xf3\x94Y\xb9\x81\x15C@\xc6\xb5\xc3\x06\xda\x98U\xc0H\x1d\xfez\xef\x18C@\xfc\xce\xd2\x83Q\x97U\xc0\x06\xef\x15\xbd\xbd\x18C@\xa0\xbd\x92\xe0\xdc\x95U\xc0\xfe\x90\x94b\xb1\x16C@\x84\xe2h\x18\r\x93U\xc0:\xc1a,=\x13C@\tY\x1e\xf5\xa6\x92U\xc0\x16n\x7f\xf0\x0b\nC@2\xee/\xd3\xc5\x91U\xc0\xbd\x98\x899r\x07C@\xae\xc0\xe2O#\x90U\xc0\xd
4\xef\x8c[6\x05C@\tK\x95#3\x8cU\xc0\xc3\xe1\xf1\xe7E\x02C@\xf6\x1b\\\x18\xb8\x86U\xc0\xb0\xe1Mxs\x01C@\xb6,\xe5\xaf_\x83U\xc0\x89N\n\x97\xbf\xfbB@T`\r\x11\x06\x82U\xc0 ~[\xff\x18\xffB@V\xa6\xce+m\x80U\xc0\xb1\xd5\xb6\xe99\x00C@B\xf6yiY}U\xc0$\xa4)\xfc\x83\x01C@\xbe\xfdOi\x93{U\xc0\'\x06\xc2\xba[\x04C@\xc5\x07\xf0H\x8bzU\xc0\x08\x89\x99\xcdM\x08C@2*\xc9l_zU\xc0v\xb5\xdfH\n\x17C@\xfaGB\xa9\x8cvU\xc0\xba@\xc2-\x89\x1eC@K\xe1r\n\xc1uU\xc0\n^\xf0\x83]#C@\xbb\x90>\x89\x9esU\xc0\xd3\xb2~\x86\xa1$C@H\xd8\x19IQrU\xc0\xe0\xa1rf%$C@\xee?\xde\xa3\xcdoU\xc0/\x13\x9e\xb1\x99"C@\xc4C\xc4\xe1\x9bkU\xc0\xc5$\xca\xa3\x85&C@N\x8eF\xe0\xdeiU\xc0\xb0v\x90\x80;+C@xWU\xc0\xad>c\x0e\xeccC@\xe1Wb\xe5f4U\xc0\xc0\xf8\x85x\x8eeC@\xee\xaflf\xc34U\xc0PA\x0c\xb2\xcfjC@\xad\xff\x0f\x85e2U\xc0\xb4\x1ct+\xeenC@^\xc4N\x86y2U\xc0\xca\x07#\x8a3qC@z\x83\xca\x06h3U\xc0.+C"\xd7rC@\x83\xb8\xa8\x08\x067U\xc0\xa6\xd6\xc9\x1fvsC@\x18+\x82*\x048U\xc0@\x876?htC@\x00\x8d$l\x0e8U\xc0X\xe6b\xbd\xbbvC@\x8a<\xbd\x0b*6U\xc0"\x82K\\1zC@CJ\xf6\x88g5U\xc0\x9bZG\x95\xcb}C@\x95\x14s\xc9\x076U\xc0jc\x06\x15\xbf\x80C@\xac\xecc0\x158U\xc0V\xe4O\xea5\x84C@\t\'\xf0\xb2\xf58U\xc0`!E\xa5{\x86C@*\x08\x11\xd4\xbf8U\xc0L=\x0fhS\x88C@*\xdb\xdb\xad\xfb4U\xc0Wq\xa6\xa0E\x8dC@S\x05\xf7J\xef3U\xc0\xdfug\x84!\x8dC@pw!+\x8e2U\xc0R\xbc\xeeD\xb3\x8dC@\xdd\x1d\xbdD\x8b/U\xc0\xc1\xfe\xde!/\x92C@\xdfi\x8c\x1c\xb8*U\xc0n\xdf\xa7\xcfx\x8bC@\xee\xb8&x\xd9\'U\xc0\xe6\r\x01s\x97\x89C@\xf4\x89\xa3\xd4\xf4%U\xc0w\xe4\x97t\xfe\x88C@\xbb\x083\xb2\xfa 
U\xc0\x0e\xa3\xfb\x97\x0e\x8cC@%\x16\x05\xcd}\x1fU\xc0\x13W,\x15\xbe\x8dC@F*\x80\x8ay\x1cU\xc0\x8eZ\xeb[P\x8eC@H)\x10e>\x1bU\xc0\xdf\xfcy?\xd8\x8aC@\xcb~P\x06\xdd\x1aU\xc0\xe6\xc3\x96&\x0f\x86C@\xe9\xe4\xa3B\x0b\x19U\xc0\x90\xcftE\x93\x84C@_`e?!\x16U\xc0\xbd\xff\x18\t\xd7\x84C@\x08\xc5\x82[\r\x14U\xc0\x83\x1dR0\xcd\x81C@\x0b\x02\xbd\x97\x91\x12U\xc0d\x07\x13\xa2\xe6xC@%\xa7\x81\xd4\xbc\x10U\xc0\xea\xf9\xf3\xe5ouC@a\xd7\xb7\x0f\x0f\x0fU\xc0=\xc0\x1en\xf1oC@\x1e\xbb\xdd\x0e\xa3\x0eU\xc0b\xbe\xe2<\x06hC@\x14\xec\xc5\xe8O\x0bU\xc0kH\xae\x83\xeddC@\xce\xa1]\x00\xb0\x05U\xc0\x1a<\x07\x0c\xfcaC@\xee\xcb\xe6zq\x03U\xc0n\xa8\x94\x14\xc2aC@\xf0oE\x15\x94\xfdT\xc0]2\xe5\xf2\x89cC@\x0b\x1d!\x0bg\xfaT\xc0E\x94^\xd8\x04aC@\xd6a\xef"\xe2\xf6T\xc0\xde\xd0$|Y_C@\xb2\x0b\xf8 \x9a\xf5T\xc0\xb8\xfcO\xe5\x1e[C@\x83\x91\xa9\xfb\x96\xf2T\xc03\xe0`\xe4\xcfXC@\xf8\x8b\x8fVK\xf1T\xc0\xa4\xd5\x84\x0cNSC@\x81F\xe6\xed\x9e\xedT\xc0\x02o\x07\xd0YQC@\x14c>\nm\xebT\xc0i\r\xa3\x94zOC@\x99R\xf8\xe4\xf7\xe9T\xc0\x84\xc9\xceO\xdbOC@\xb9\x8cX\x03*\xe9T\xc0\xa8\xe1\x1b\xf0cQC@\xe6\xdfh\x06\x87\xe8T\xc0\xd8\xee|\xd0\x1dUC@/!x\x82\x93\xe7T\xc0\x0bbd\xcc\xc7VC@DF\x16\x19\xb3\xe1T\xc0rOZ-\x1aYC@\xe4\x85U3\x01\xe0T\xc08\xfc\xcekVXC@\x19\x8f\x8d\x0c\x08\xddT\xc01\x85\xef\x8e\xf6TC@\xb4\xfe\xeda\xc5\xd7T\xc05\x86\x1f\xf4\xd6SC@\x87.\xf0\x19\x1f\xd5T\xc0\xad8\xa4\xfe\xe4PC@\x81\xf6\x935\x80\xd4T\xc0\x8ekH\xde\xa3MC@\x96:Y5\x9e\xd3T\xc06\xf1\x9a TLC@\xac\x04\xaa\x11\x90\xd2T\xc0\xa21Y\xa5^LC@\x00Y\x89\xd2t\xd1T\xc0\x8c_1%\xfcMC@u\xa3\'L\xae\xcfT\xc0R\x03H\xe1\xe4OC@\xb9\xc4\x9f\xe4\xa4\xcbT\xc0(\xd3#H\x0fNC@X\x1e\xc4_)\xc9T\xc0T\x88#\x83FOC@P\xb5\x1b\x9c\x1e\xc7T\xc0\xa4\xb4xB\x19UC@\x00-`v\xe5\xc3T\xc0\xe6\x9a\xa8\xe1\xc5WC@Z\xdcGp\xb9\xc1T\xc08\xf4 
$u[C@>\x12|*=\xbeT\xc0\xe8\x8f\x13F\x1d\\C@\xa853\xa6\xf6\xbaT\xc05[\x80\x81\x8a_C@\xb6\x91T\xe0\xfa\xb8T\xc0\x9bZ[C\x13_C@\x8a\xc8\xa6]\xe2\xb7T\xc0"z\x88f\x08\\C@\xa4\x07\xce\x1aR\xb8T\xc0\xfb\xc0HkvWC@\xf9\xe18\xb8\n\xb7T\xc0#\x11\xf7\xb0\x81SC@\xd4\x98\xc8\x94\xa5\xb6T\xc0\xa4\xbc\xd4\xd1\xdbLC@\xce5\x00q\xed\xb4T\xc0.\x8dy;,IC@I\x8b\xae\xefY\xb3T\xc0\xfe\x90\x9f9UGC@+8\x07\x08|\xafT\xc0\x9a\x98\xeb\xe0\xcaFC@\xef\xc2\xed^\x84\xacT\xc0\xe5@\xf1\x9d\x02EC@\xef\x941Z\xdd\xaaT\xc0v<\xab"F@C@P\xc5\xdd\x91G\xa7T\xc0w\x1a\x9de\x80\xdf\xb3\x14\xfa\xcbB@cO\xb8\x1ao\x88T\xc0\xfaqq\xba\x96\xcbB@+\xc7\xb2\\\xcc\x88T\xc0\xf3l/]\xf2\xc8B@m\x8d\xf4\x9e`\x89T\xc0\xb6\xeaw\x1cp\xc8B@\xd4\x9b6\xdf\x1e\x89T\xc0\x8e\xe2X\x99Z\xc7B@\x16\xf7\xd8\xaef\x85T\xc0P[\xb1\x08/\xc6B@\xf4\x8e\x13\x05\x90\x83T\xc0,\xe2$k>\xc3B@\x07\xb3\x13\x04%\x83T\xc0\xf8w\x16\x06\x96\xc6B@J\xb3?\xbd\xaf\x81T\xc0R\xbd\xf7\x0e\xe8\xc3B@\x99\xe0`1\x80~T\xc0\xe4\x89\x80S\x89\xc5B@\x1a\x98g\xaei}T\xc0 \x90\xbat\xfd\xc3B@\x03\xf2z-~\x92T\xc0r?v\xb2\x05\xa7B@ 
\xb6\x0fZ\xa5\x96T\xc0R=\xce\xb2X\xa1B@\x89\x829\x06\xfa\x99T\xc0\xfc6\x9e\x14\x17\xa0B@\xf4J\x1e\xdc3\xa3T\xc0\x17e\xec:\x85\x99B@9\xca\x17zZ\xa4T\xc0H&\x83Y\xd2\x98B@\x8eB`\xac\x05\xaeT\xc0~\xf2\x85\x0c\x15\x8eB@\xe1\\\x95\x0b+\xaeT\xc0"C\x0cE\xeb\x8bB@\xb0\xe7\xb6\x0bc\xadT\xc00\x18\xf2g\xa9\x89B@`\'\x1ak\x15\xaeT\xc0:\xc9\x92_p\x88B@\xc5\xf4!oO\xaeT\xc0\xbbt\x99\xdcY\x84B@vt!s\xfb\xb3T\xc0J\xaa=\x83\xb7\x80B@\xe5\xbd8\xb9u\xb7T\xc0\xacB\x906\xbf|B@\x034c\x99\x14\xb7T\xc0a`\xa5\x18QwB@F\x1ej\xdc1\xb8T\xc0\nT\xac\x93drB@\xf6\xc9\\\xfe\xd9\xbcT\xc0\xc5\x8c\xa8\x1e\x9anB@\x07#R\x0e\xfc\xc2T\xc0\xaau\xdd\xf0\xecmB@\xb8\\~TY\xc4T\xc0\xcap\xe3r\xedlB@\xd9\xab\xce\xc24\xc8T\xc0\t\x17\xbcC\xbbcB@.9\xd1\x06\xf6\xc7T\xc0\r%\x8a@&`B@\xa8F\xc7f\xdd\xc8T\xc0\xb2\xc2\xe1C\xba^B@c\xfbR\xb4\x08\xcdT\xc0\xfe\x12X>\xfc]B@R\xd0&\x8b\x91\xd4T\xc0R\xdc;\xfd\xd1ZB@R\xc1Q\xda\xb1\xd8T\xc03Xq\x92\x17XB@\x8bq\x1c\x99\xdd\xd9T\xc0\xc2-\xaf\xd2\x0eVB@(P\xeeCt\xddT\xc0\xa0E>\xec\xb6TB@\x9b\r,0\xfa\xe1T\xc0\xc6\x82\xd7k\xabTB@p\xc3]5e\xe9T\xc0\xb7&;\x1d\xf9NB@=\x0c\xbe\x1a6\xebT\xc0/j2X\xa2LB@>F\x91\xd8\x84\xecT\xc0\x08A\xde\xae\xc8JB@\xfcV\xf9\xdf\xe0\xfbT\xc09\xa2\x0eg\xafKB@\xa28\xed\x87n\x00U\xc0\x1d\x96*\x8d\xc9KB@-n0\x89I\x10U\xc0\xc2\x95q\xc87LB@\xc3w\xf3\to\x10U\xc08\xd5QJ9LB@\tS\xd1-\n2U\xc0\x8a;h$sMB@\x93\xba\xb1\xae\xa02U\xc0ZP\x15\x01\x7fMB@{3\x1f\xca\xe6?U\xc0c+\x9bY|OB@\xd9\xb2\x9b\x9cpQU\xc0Rgj2\x14PB@\xf1\x19\x9f\xbe4SU\xc0M\xf9\xe3\x16$PB@[\xd6\xac\xf0\xfd[U\xc0\xea+\x1c(!OB@~\x9aI>ErU\xc0\xe9l\xd697PB@\x0e\xe2\x9aQ\xc2~U\xc0\x10\xd8k\xd3\tQB@5\x1c\x1aJ\xbc\x8cU\xc0\x06o\xacVWRB@\x8e\xdfe\x9f\x96\x9aU\xc0\xab|j\xc0QSB@\xac(\xff\xc9\xae\xa0U\xc0\xea\xc3\x07z\xd9SB@\x01\x03\x00\x00\x00\x01\x00\x00\x00\x0b\x00\x00\x00\x06_\x0e"!bV\xc0L%\xe1\x0b\xc4?B@ 
\x1f\xfa\xc7JdV\xc0\x8e\\y\x04hBB@\xd4\xd4\xba\xe6]dV\xc0\x00\xb57\xdeNEB@\x02R\xc7\x03\x99cV\xc0i\xa5\xbd\x19fGB@\xa5\x01-\xc4\xf2aV\xc0\xea\xc0SYEHB@\x85qK\xa0\x90_V\xc0e.\xb0\x1f\x93GB@[R\xee\xfa\xd4^V\xc0I\x1b|\x80\x1fFB@2\x86\x999,^V\xc0Qg\x9deGCB@\x9dA|\x1c\xd5^V\xc0\x90n\x07\xec\x9b@B@l)\xdd\x19u^V\xc0\xd1wzn\xd2?B@\x06_\x0e"!bV\xc0L%\xe1\x0b\xc4?B@', '\x01\x03\x00\x00\x00\x01\x00\x00\x00P\x01\x00\x00\x82\xb7\xb2\xe1\xb8FV\xc0\xb9\x99\xc1 \x1azB@\xdd\xb4D=LHV\xc0\\\x0c\x89I\xe9nB@n\xf3j\x01\xa9JV\xc0\x80d\x8e\x08\xf7kB@\xfd\x80\xf0"\x1bKV\xc0\xc4e\xaa\x11+jB@C\x8aLA\x86JV\xc0\xe6\n\xd8\x14\xf9fB@\x96\x166\xd9\x0eHV\xc0\x96_p\x9aoeB@\x8d\x94\x18\xda\x08HV\xc0\x84\xfc,\xb8PbB@\x80#\x99\x1d\xb1IV\xc0>}3\x1a*aB@\x1ar?aXKV\xc0c+\xba\xdegaB@\xfeU\xad?\x91LV\xc0\xba\xf5\xcd\x02\x1e]B@\xd1\nt\xe0\xa4LV\xc0\xf89\xb2\x85Q[B@\xa3\x8b\x9e\xdc\xbeJV\xc0~\x1b\x0c\xeb\xf7UB@\x1fTC\x9cVKV\xc0W\xa9&\x8c\x97SB@O\x97N\xde\xcfLV\xc0\xca\tjQ\xd0PB@\xc9\x12\xe2\xbdrMV\xc0\xfcF\x03{}JB@c\x1aW\xc1wOV\xc0^\xb6r\xc0\xdfHB@T\x8cb\xc9$RV\xc0Ll\xb6\xbc\xa3IB@\x0cs\x18N\xa1TV\xc0XH\xbe2\xa0OB@h\n\xe4\xcd\xe9UV\xc0R\x94\xb6\x11\x80PB@\xa2\x040\x92EWV\xc0h\x88\xc3\xf2\x18PB@\x03\x12Y\xd1\xeeWV\xc04\x16\x8c1\xe1NB@\xc7\x80\x0c\xf5\xc3ZV\xc0\xa6\x01\xfe+\\AB@\xb5\x02]\xd3\x8bZV\xc0l\xec\x1c\xccW@B@k\xe32\xb8\xb5\\V\xc0\x03f\x0b\xb4l:B@g2{\\#^V\xc0\xe4X\xce\x18\x179B@ ?-\xfb}_V\xc0d\xe8aM\x96;B@l)\xdd\x19u^V\xc0\xd1wzn\xd2?B@\x9dA|\x1c\xd5^V\xc0\x90n\x07\xec\x9b@B@2\x86\x999,^V\xc0Qg\x9deGCB@[R\xee\xfa\xd4^V\xc0I\x1b|\x80\x1fFB@\x85qK\xa0\x90_V\xc0e.\xb0\x1f\x93GB@\xa5\x01-\xc4\xf2aV\xc0\xea\xc0SYEHB@\x02R\xc7\x03\x99cV\xc0i\xa5\xbd\x19fGB@\xd4\xd4\xba\xe6]dV\xc0\x00\xb57\xdeNEB@ 
\x1f\xfa\xc7JdV\xc0\x8e\\y\x04hBB@\x06_\x0e"!bV\xc0L%\xe1\x0b\xc4?B@\xc5D\x14\xc1\x07aV\xc04\x1eDPfaV\xc0\xca\xb0\xecI\x91-B@\xd0\x80YA\xdbbV\xc0d\x97\x03\xcbB,B@e\xb7Q\xe8\xc4fV\xc0\x9cw\xc1\xa7j-B@\xf5_\x15,\xddgV\xc0\x0b\xfa@G\xdc*B@\x8a\xebc\x89\xd6fV\xc0\xba=R\xeeo\'B@\x046\xc2B\xb5bV\xc0\xab\xfcs\x97\xf5#B@\x88\x08\x07\xe2DbV\xc0\xf6n\xa6\x96\xde!B@\x95\x04\x82\xa0\xabbV\xc0:\x9e\xbe\xb9\xf0 B@\x82\xab\xf0\xeb\x8fgV\xc0\\nX\xfc\xd7\x1eB@\xdc\xacB2\xecjV\xc0\xe2\x1c\xea\x92\xa2 B@\xa6\x933\xb5tlV\xc0\xc64"3H B@E\xf8\xa7\xf5\x86lV\xc0y\x9dQ\x9c\xd4\x1eB@87\x87\xd3QkV\xc0\x1cp#\x9cG\x1cB@\xf7\xbe4\xc9\x97gV\xc0Sjt!\x87\x17B@\x19r\x99\xe5\xbaeV\xc0\xee\x11\x08\xaaw\x13B@\xf8r\x93c\xbaeV\xc0r\x7f\rL\x9f\x10B@\xa2Q\xe0\xcc\xb7jV\xc0\x9cC9\xb1\xb8\x0cB@(\xef\xaaohkV\xc0m\x85\t\x11\xa1\nB@\xb4nyM\x17lV\xc0\xbb\xcc&\x9fO\x03B@H\xdc\xcb\x902nV\xc0\xbe\x86\xfcg\xfe\xffA@B7x\x90\xa6}V\xc0\x87\xc7s\xb1\x9a\xffA@\xdaP\xda\xc0%\x92V\xc0\xba\xa5(\x92\xe0\xfeA@\xb6\x89p\x8eB\x98V\xc0~YD\x0f\xad\xfeA@49\xce\x84.\x94V\xc0\xdc\rR\x97\xbd\x0bB@~v\xdd\x00;\x92V\xc0>\x15\xad.\xd8\x0eB@\xee\x0b\xc4\x1e\xe2\x90V\xc0\xfd\xde\xed\xcc5\x0fB@S~\xc3>\t\x8fV\xc0\xae=\x8fN\x8e\x11B@/#E^\xde\x8eV\xc0\x84\xe4\xb0\xa3\xa2\x14B@\xad\xab\x1f\\\t\x8eV\xc0U5\xd1\xc8\x18\x16B@H"\xd0vT\x8aV\xc0U-#\x817\x19B@\xd1"\xd1og\x88V\xc0dQc@\'\x1bB@\x8d\x85\xf2p\n\x87V\xc0\xe6u\xd2\x16\x08!B@\x82\n\x93i<\x84V\xc0\xb6\x7f\xc1\xfb\xdb"B@\x0c#B\xaa0\x83V\xc0*v\xb9\xf9w&B@i\xef0\xadU\x84V\xc0W\x84!\x8e\xa6)B@J\xf5\xb1\t8\x83V\xc0w\x9a\xb7\xeak.B@~<}\x8bV\x83V\xc0N\xf8G\x8a\xf90B@l\xa5X.#\x85V\xc0\xed\xeaG\xa7\xdf2B@\x06\xc2+\xb4{\x87V\xc0\x1a\xde\xf1C\xd63B@\r\x80\x13u\xee\x87V\xc0BJ\xe0\x9e\x186B@/CO5\x82\x87V\xc0\x99\x8f\x935\x1b:B@\x8b\xda\x1a\xb5\xca\x88V\xc0\x1a~\xdd\x95\x8e:B@\xa1\xc0\x19\xd9\x9d\x89V\xc0\x04\x9a\xd0\xb4\xf5>B@\x92\x8d-\xc5]\x8eV\xc0\xecCTq\x14?B@\xef5\x08\x1a;\xa5V\xc0\xf6\x03\x1c\xd2\xd9>B@0\x1f\xec\xd9{\xb3V\xc0\x10>S?\xa0>B@\x8c\xc7]\xbd\x92\xc8V\xc0\x10\xae\x90Iw>B@\xffo\xca\xdeZ\xdaV\x
c0M9\xcfh\xdc>B@Io\xc1\xc6\xfd\xdcV\xc0Z\x90)\xaf\xc6>B@L\xe4$_\x11\xecV\xc0\xb4\x14\x8e\xb0\xd9>B@|\xe5\x88J+\x08W\xc0P\xc3\x9aZ\xe7>B@C\xf6\x8cK]\tW\xc0\x8d\x95\n\xba\xee>B@\xa0o\xe4\xa7y!W\xc0\xb6*\xdc\x82\xd6>B@#\xf4\xc6\xc6\xc41W\xc0\xcd\x80X\xc6\xb7>B@\x8b\xc5\xa0\xad\x8b6W\xc0-\x9f\xdf\x87\xb4>B@\xff>n\\\x07SW\xc0@\x17\x98\xa5\xce>B@($!\xa0\x03UW\xc04\xf5`\xe2\xc0>B@2\xa6\x9d9,fW\xc0qi\xa1\xf4\xb6>B@\xcf\xa4\xc1\x9c\xe1vW\xc0^9\xcaQ\xb1>B@\nE\xeb\xf5/\x85W\xc0\x98\xfb\xe2\xe2\xd9>B@7\x8b\xb2$\x81\xa7W\xc0hN:!\xa5>B@\x17Y)\xaa\xbf\xa7W\xc0\xdeXh]\xd4UB@c\x9a\xe1\xac\xc9\xa7W\xc00\xc0\xd2\xe2\xbdaB@~\x85PK\xb4\xa7W\xc0\xe8\x85\x167\x9f\x7fB@\x0b\xf1\xb0\xf6\xb8\xa7W\xc0]R\xa4\xe8\xb2\x87B@\x99{\xe6R\x9d\xa7W\xc0\x96\xceP"\xf3\xa9B@]y\x83\xd5\x99\xa7W\xc0z\xb3\x97\x97-\xaeB@\xd6\x93\x97\xa2\x9d\xa7W\xc0<\x9c\x94w?\xd3B@F\xe6L\x80\xa2\xa7W\xc0\xf7\xdd\x92\xf5\x05\xd7B@\xa8OE\x97x\xa7W\xc0\xd2\xe5\xac\xbb\xe3\x03C@p\xb2\xe1V\x82\xa7W\xc0#(\'\xf1#\x07C@\x8c\xdc\xaa\x91\x9e\xa7W\xc0\x82\x80\xd1\x1d.2C@\xba\x9ba\x10\x99\xa7W\xc0B$:=Y\xa5jM]^C@\x8a\xaf^\xb22\xa7W\xc08\xfeMf&kC@c\xbf\xedv\xfe\xa6W\xc0tk\x01\xa9\xb7\x85C@\x8d\x19\xa5\xb8\xeb\xa6W\xc0\xc8mlFp\x8eC@\x14\xddkuz\xa6W\xc0\xea\xa9\xfe\xbc\x13\x92C@w.\x1f\xb65\xa7W\xc0\x98C\x11?i\x93C@\x18\xbe\xa8\xbc^\xa9W\xc0\x98q\rYG\x94C@\xf7\nD\xa0;\xabW\xc0 
@$\xdbc\x96C@c\x07J\x06\xc1\xaeW\xc0\x880\x81\xba\xeb\x95C@\x1e\xf8\xdf*\x90\xb2W\xc0*\xb0\\y3\x99C@\xa9\xec\x9bM\x88\xb4W\xc0NO\xe50\x02\x9bC@\x174\xea\xacW\xb5W\xc0\xe0Yj\x8f\x81\xa1C@\xf0\x11\xcf\x0fd\xb8W\xc0\x83_\xd1*\x9d\xa4C@\xdeS>s\x17\xbaW\xc0\xa6\xb7Q\xfeX\xa9C@\x08\xe4\xc8rS\xbaW\xc00\xbf\xcb\x18\x89\xabC@0,\xc9p}\xb9W\xc0\xdd\xc2\xcd\xd2\xb8\xb0C@@\xe1\xbbu?\xbbW\xc0\xad\x86\x12V\xcd\xb0C@l\xaei\xf8W\xbdW\xc0\x16\x91\xa6N\xaa\xb4C@\xef\xa6\x93\xf8\x1d\xbfW\xc0\xb0\x89\x1dE@\xb8C@?\xf1\xa4\xbc\x97\xc2W\xc0\xfc\'>\xa1A\xbbC@z\xff\xde\xdd\x0b\xc3W\xc0\xc5\x1cf=\x1f\xbeC@\xa1wd\xc7\x87\xc6W\xc0\x0e)\r`4\xc4C@\x95\xe4\xff\xa9\xf9\xc6W\xc06\xe9s\xc4\xc4\xc7C@aUjfn\xc3W\xc0\x06p\xe7|\x1b\xcbC@I\xae\x85\xc9\x95\xc3W\xc0\xc4\xb5\xce\x96\x16\xd0C@*\x95q\x8a\xcf\xc1W\xc0\x98r\xbe\x93\xb9\xd4C@^\x8fB\xe5\xa0\xbeW\xc0\xe0\xab\xdf\xb2\xad\xd7C@\xec\xc4\x0f\xe8\x8d\xbdW\xc0dg\xe1n\xb3\xddC@\x16o\x7fH\x00\xbdW\xc0\x10\xef<\xadE\xdeC@:\x1f=&\x93\xbbW\xc0\x9fY-3\x0f\xddC@\xbfs\r\xa2\xf6\xb9W\xc0\xebG\xddQ\x06\xddC@A]\xe7\xdc.\xb8W\xc0\xa4\x16\xe5\x8e\xa1\xdeC@\xb2\xae\xec\x7f\xc1\xb7W\xc0XeX\xf3\x86\xe0C@\xd9\x18\xd2\xde!\xb8W\xc0\xa0\x9f\x00\xf1]\xe1C@\xa0\'\x9f\xc6\xfe\xbaW\xc0\\\x06"\xf2\x00\xe1C@\x17\xd0\x95\xe9\xd8\xbbW\xc0\x94\x02\xce+A\xe3C@|\xa9\xb4\xa6\xba\xbbW\xc0\xfab\xe8\xea1\xe4C@}\'@\x83\x8e\xb9W\xc0\x02\x81_n\x9a\xe5C@f\x17EE\xdd\xb8W\xc05\xbc<\x93\xa0\xe8C@2\x12$%~\xb9W\xc0:\xff\x7f\xcb\x06\xeaC@\xf9.\xff\xc9 \xbbW\xc0\x88\x08n\x0c\xa4\xeaC@q$\x07\xcd\xef\xbbW\xc0\x9b\xc3@\xb1\xb8\xecC@\xb7\xfb\x8f-\x0c\xbcW\xc0\x04\xcdM\xcb\xb2\xf2C@\x87\x0e\x06\xf1\xc2\xbdW\xc0\xe6D\x15.N\xf3C@9\xe2\x94\xb7d\xc1W\xc0\x88\xa2\xe2/\xd0\xf2C@Nu\x8b\xf7$\xc2W\xc0\xcdyB4]\xf0C@oU%:\x0c\xc4W\xc02T\xa8\xba\xea\xeeC@\xa9\xf7\xe1;r\xc6W\xc0\xb4n\x8e\xbfW\xefC@\x88]\xdb\xa1\xa2\xc9W\xc0\xd3\x8a\x1b 
;\xf4C@\xe6x\xfb\xe4i\xccW\xc0=\xc8I\xca\x80\xf4C@\xa9\x83\x1fFI\xcdW\xc0.\x0c\xc2)\x16\xf8C@|\xc3\xe1\xe8k\xcfW\xc0\xf7N\x99\xe8\x96\xf8C@\x95\x05B\xb2\xc1\xd3W\xc0\xb5k\x9d\x94\xec\xffC@r2\x18\x95\x15\xd6W\xc0y\xe6\'\\2\x03D@8\xf3cw\xc2\xd7W\xc0v\xa5\xed\x1f\xae\x03D@KE {\xfe\xd8W\xc0\xa6\xe9\xa8\x9c\x99\x05D@]Y2\x1d{\xdaW\xc0>\x1ca\x83(\x06D@\x82\xef\n\x9a\xd7\xd9W\xc0N\x87\xfb\xdeI\nD@*\xbd>W\x9c\xd8W\xc0v\x0e\x7f\xd54\x0cD@\x9e_U\xda#\xd9W\xc0p,\xa4\xf6\xc5\x0eD@\xe4 \xf9\xd9\t\xdbW\xc0&\xbaq\xf7\xdc\x10D@Q\x96\x03>\x80\xddW\xc0,\xe8\xe8zE\x16D@\xd8\x1ep^\xdd\xddW\xc0\x84\x01\xa9\xf3K\x1bD@\x95Y\x9eA\x84\xdeW\xc0\\\xa5\xba\x98\t\x1dD@[\xc6\x1aL\x04\xe3W\xc0$\x1cWX\x13"D@Ker3\x1d\xe6W\xc08c~\xc0\xa6\'D@\x1eOE\x9ee\xe9W\xc0\x04\xcbJ\xe5\x90\'D@X9\xba\xbeP\xe9W\xc0:\xdeK\xa5B)D@{\x04\r8\x8c\xe7W\xc0w\x8a\xbf\xeak*D@6-\x84\xd7o\xe7W\xc0\xce\x7f\x91\x06Z,D@L(\xe1}\x96\xe8W\xc0^\xd8\x9c+\xed-D@\x0c\xa4I\x9d\xc1\xe8W\xc0\xaa?c\xeb\xbc2D@*\x0b\x9f\xcc\x80\xecW\xc0\xea\xf2J\x91\x1f>D@s\xbd\x9b\x8d\xd6\xebW\xc0f,\x06\xf2\x8fAD@\xb2:\xe3\xa8\x1d\xeaW\xc0\x07LR\xee\xe1CD@vM\xde\xadm\xeaW\xc0\x8c\x81\x9fq\x84GD@Z\xd4\x0c\x8f>\xebW\xc0\xacR\x12KmHD@\xbd\x04K\x94\xfe\xebW\xc0~\x187n\xd4GD@\xf0\xbac\xd0J\xecW\xc0N7\xc9\xab\x16CD@\xb9\x99=\x9a+\xefW\xc08mx\xcf$DD@\t\xfa3\xbf\xdb\xf0W\xc0\x07\x04\x9a\xcf\\FD@\x9d\xfaoa\x1e\xf1W\xc0VJ\xd4\xefeKD@\x8f\xbb)\xc9{\xd8W\xc0B]\xeew\xcbJD@YeSX\xea\xcdW\xc0~J\xf3r{JD@\x80\xd5\x90`\xeb\xbaW\xc0\xfc\xb5\x98J\xe2ID@,\xe2\xa8\xbb\xf3\xa8W\xc0\x90\xcf\xc9\xfd\xb1ID@:[\x84\x07\x0e\x9fW\xc03y\xb8\x8f\x7fID@\xe3\x03\xb6\xd1A\x8fW\xc0\\\xee\x04m\x15ID@\xca| 
\xe2\'\x81W\xc0*\xf7\x9a\x90yID@\xbf\x17\x02\xcbRrW\xc0\xb2\x99\x95\x98\nJD@\x9e\x94\xda\xb8\x06dW\xc0\xc1n\xa4\x17XJD@\x84\'S\x86\xb2WW\xc0\x84\xb2\x82\x8aMJD@\x9e3\xd8\xc5uFW\xc0\xd9\xa6\xfb\xe4\xcbJD@n\xfb\x82\xaf\xf0-W\xc0t\xab`8zKD@\xc6I\x82%_)W\xc0L\xd2\xed\t\xb5KD@B@\x9b\x08#\x17W\xc0X(\x18\xeb\xbeLD@\x1e,\xaa\xf7\\\x0cW\xc0P\xc9\x0f\xb2\xcfLD@\x87\xcb\xa8T\x91\xfcV\xc0z\x13\x19\xac\xdbMD@\xe2\xa1\x982x\xefV\xc0\xbcA\x01j\rND@J\x97|\xf0\xe2\xedV\xc0\x1c\x10\xa7\xb0\xf5KD@x\xb4\xf9J(\xecV\xc0\x8e\x92\xcf\xd6dJD@\x8c\xdbI\x0fK\xecV\xc0e\xce(]\x9dFD@S\xa3j\xa2\xd7\xe7V\xc0\xda\xc2n-6DD@\x0e\n\x8c\xa3z\xe7V\xc08\r\xc0\xb0\x9f@D@D\x17\xc4|\x81\xe5V\xc0\xec\xd6\x05\xbb\x04>D@\xec\xa9\x9d\x9d\x14\xe5V\xc0\xca\xbf\xe8\x7f\\;D@\xf6\xe4\xa0\x15 \xe2V\xc0\xccC\x14\xe7K:D@\xc2M\x1du|\xe2V\xc0\x98*\x1c#|8D@#\x95\xf0\x15\xe5\xe1V\xc0\xb0\x91\xf6\xe8\xb07D@\xf0>\xa7\xb1\xc5\xe1V\xc0Hv\x04n\x804D@\xad\to.\x06\xe0V\xc0\xf4\xdbzK\xdc3D@\x86\r\'Oa\xdfV\xc0\x0e\x8cf\xf1\x052D@\x89\x95\x92\xcb\x87\xdeV\xc0\xd2-t\xb1\x0c2D@\xf1\x9c\x84F\xb8\xdcV\xc0x\xe8@\xf0\x9b/D@\x13\xa1\xe3\x89(\xdfV\xc0D\'\x866\xa3\'D@d\x15\xc8\x0b\xf1\xdfV\xc0\xb9y\xa9\x9b. 
D@\xcfV6\xcbm\xe0V\xc0\x1a\xfc\x84 \xaa\x19D@\x82?:\xcd\n\xe1V\xc0\xee1l9:\x11D@\x9e\\|)D\xe0V\xc0\x8c?]\x81\x8b\x08D@\xf4ENE2\xdfV\xc0\x8am5\x00\xbe\x00D@h\xaa\xd1\x1f\xa2\xdcV\xc0\xa2S\xf9$\x1a\xf9C@\x8d4\xd3\xf8\x8d\xdbV\xc0Zt\xc7=\x00\xf6C@\xa0\x82\x8f\xfc\xc9\xdbV\xc0\xd5\x95\x82\x9ep\xf3C@\xc7\x8a\xae\x7f\xdf\xdcV\xc02\xc4\x0e!Q\xf1C@\xe9\x83\xbd\xfd\xc1\xdcV\xc0f\xa3J\xe0y\xeeC@\xc8c\xb0rp\xd8V\xc06Q\xc6|\xe3\xe6C@v\xb1!\x8f\xe8\xd7V\xc0\x06\xadR\xe1r\xe1C@\x13u\xcc\xd0\x80\xd7V\xc0\x1c r}\xc2\xdcC@\xb6\xc1R\tW\xd4V\xc0\x1a\xbf`\x9d\xcd\xd7C@*\xeayT\x04\xcdV\xc06\xa1\xe4\x01\xcf\xccC@U\xa1\x87L\x01\xcaV\xc0\xd9\x13\xfe\xe2\xbc\xc6C@\xab\xf6-\x01\x00\xc6V\xc0B\x042f\xb5\xc3C@\x1e)\xde\x1d!\xc4V\xc0\xae\xd5\xe7\x06\xad\xbcC@\xc9\xefj\x9cU\xc2V\xc0>V\xfd\x02\xe4\xb8C@\t\xc6\x08n\xac\xbcV\xc0Y\xe4\xda\xebG\xb3C@\xc2\x04\xd3\xa0p\xb6V\xc0\x08\x11\xc5)\xdd\xacC@\xf1u\x1f\xd3\xe2\xb1V\xc0J\xbaV1\xff\xa5C@\x0eG\xc0\xc9>\xafV\xc04\xbf\xa7\xd2\xb9\x9fC@2~y\xa9\xe0\xaeV\xc0\xe2\xd7\xcc\x0f\xc6\x9cC@\xd9`\x0b\xe9\xf8\xadV\xc0\xb6j\x85\xf3\x13\x99C@\x0e\x8a\xf1\t\xe1\xadV\xc0-\x17A\x17w\x92C@9Z]\x841\xacV\xc0\xd5\xed\xa9\xfa\xff\x8bC@\xfdRs(K\xadV\xc0\xb7\xb6\x87\xfes\x87C@\xf4}ME2\xadV\xc0\\\x06^\xfc\xd7\x84C@\x8e\r\\\xe2\xd0\xaaV\xc0j\x85G\x05\xb8wC@\xfa\xa7\xe0B&\xa8V\xc0\xd0\xe8v\x8a\xbfpC@\x17u\x819\x82\xa4V\xc0\x14\xb6\x1fK\x89oC@\x16j1t\xf4\xa1V\xc0\xbc\x19\xdf\xe4!rC@\xa7\xb8\xb3\xc7\x13\x9eV\xc0\x1e]V\xf6\xc7zC@\x01\x93\xde\xa4q\x9aV\xc0JX\xd76/{C@N\xd2\xe4yx\x94V\xc0\xd01Z\xf8dvC@y\xf1X\xd8\xdb\x91V\xc0C\xd1 
\xb6^vC@\xf0[\xbcR\x9e\x8fV\xc0\xde\xb777\x10uC@\xb0?\x10\xc4\x81\x88V\xc0/\xe2_\xb41mC@\x1b;\xd4!?\x87V\xc0\x10\xce\xd2SNjC@x`\x02"\xcc\x87V\xc0\xe6\x95\xae\xbaxfC@7\xaa\x97\x83\xa8\x88V\xc0\xb8Bu[\x8cdC@7s=\xe7v\x8aV\xc0\x90\xc5(~\xf6bC@\xab\xc0\x14m\x96\x8cV\xc0v\x14\x12\x89\xac\\C@\x95(\x97N\xf3\x8cV\xc0\xfe\xf8$%\xa7YC@S\xf6\xae\x90\xc1\x8bV\xc0H|\x8cJTTC@Cf\xf1\xb1\xc3\x8bV\xc0\xe2u\x0f\x0b\x1fNC@\x93\xfa\x83{m\x8fV\xc0c\x03\xc8\xb5\x0bHC@\xf1a\\\xdd\xb9\x90V\xc0A\x9e)u3DC@=ej~\x04\x91V\xc0QO\xf3\x1dfBC@\xf6\xdc\xd2HS\x93V\xc0Z\x12\xfea\xb56C@/^\x1a\x0f\xbe\x95V\xc0\x9b\x02\xea\x05\t2C@`\xdf\xcf\xb2\xf6\x96V\xc0\x90\xb6\xcd\xe1\xc4.C@9\xfc\xe5U\xa5\x97V\xc0\x13\xa7\xa5)l)C@\xf7\xa6\x91XZ\x97V\xc0\xba\r\x7fJ\xff\x1dC@`~\x90\xb5\x8e\x95V\xc0~bi\x87)\x18C@\x96\xe3WQ\x8b\x92V\xc0\x93\xe6\x0f\x02\\\x15C@\n\xb9\xc8pD\x90V\xc0\x0ea\xe1\x03\xa5\x0fC@Ma<\x0bJ\x8dV\xc0\xdb>9\x05c\x0bC@\xb8\xe8\x99\x02\xa1\x88V\xc0\x81Bn\x9d\xe9\x06C@n\x17_\x1f\xa5\x87V\xc0 6\xcc<#\x04C@\xd1{\xbe\xb4\xb0\x82V\xc0\xf6t\x0c\x17#\xffB@\x1a\x15]\xf3\xb2\x80V\xc0d\x82^\\\x14\xfcB@\xe6\xf4\x14jU}V\xc0\x86\xf5!\x13Z\xfbB@T\x9bBQ\xa8~V\xc0$\x8e\x0e]\xba\xf4B@\xd8\x1b\x06\xeb\x07|V\xc0\xd8\xd6\x8f~e\xf0B@\x82;\xd2f\xa4yV\xc06X\x8dV\x1f\xf0B@f\x8ad\xa3{wV\xc0\xcf3f\xb5*\xf2B@\xaa\xb1\xa5"\x1dwV\xc0\x9c)\xbe\xb6\xe8\xf3B@\x95\xb9\x98A\x84vV\xc0N\x18\xf9\xd1\xda\xf3B@!7\x8c\x95\xa0nV\xc0\x11\xac\x14O\xa7\xebB@3\'\xf0\xef;lV\xc0:\xfe\xa24\x05\xe7B@\x93\xe5\xe7\xeeBkV\xc0Z\xfb\xdf\xcfZ\xe4B@\xcd{\xa4\xf0\xa8jV\xc0\xeb\xb7?\xb8l\xdfB@7\x12\xc0\xdf7eV\xc0G\x17\x86Lc\xdaB@m\xe2\xd45baV\xc0\xdcu\x88\xca\xf0\xd8B@\x8e~\xa4\xb6\xdc`V\xc0\x10\x06\xbc\xa5\x06\xd7B@\xc3\x87\xc5\xd6;aV\xc0\xc4Ff#A\xd3B@L\x8f\x16\x95\xdc`V\xc0!\xfc\x04a\xd8\xceB@Zp\xfc\xb7\x9aaV\xc0V\xed%\x817\xc9B@\x14\x00\xc1\x11\xac_V\xc054\xbd~\xf2\xbeB@\x01B\x9e\xa7\t]V\xc0\x81B@\xd5\xe0\xb2\x1a\x1aRV\xc0\x8c\xdfL\xaa\xe7\x7fB@\x03f\x96\xdb\xc5PV\xc0>d\xeaE\x1e\x81B@\xdcOi\x9b\xeaPV\xc0\x1fy\xbbg\x8e\x83B@\\1\x15d\xd3SV\xc0\xa8\x0b1\x84\xcd
\x87B@\x05\x94\xfbfjSV\xc0\x18a_\x83\xef\x8aB@\x9a\xe0\xef\'2RV\xc0{\x91\x9d\x88\xaf\x8bB@f\x1b\xb0\x82\xe7PV\xc0\'\xee\x8c\x87(\x8bB@/\xfd\x87Y7OV\xc0\xb8\x92>(Y\x85B@\xf8\x96~\xb5rMV\xc0\xfa2\x9b\t\xb7\x83B@\x11RQ\xafcLV\xc0|\xaf\xf51P~B@)1\x17\xc7PHV\xc09:\xdc3|~B@\x05a\xab\xa1\xdcFV\xc0\xe8\x08\xe9\xdc\x1e}B@\x82\xb7\xb2\xe1\xb8FV\xc0\xb9\x99\xc1 \x1azB@', '\x01\x03\x00\x00\x00\x01\x00\x00\x00\xfa\x00\x00\x00\x0b\xf3MM\x18\xfdT\xc0\xae\x14\x11GO\xbaA@w9\xd5\xb8\xcd\x00U\xc0j;\xd6\xb8/\xb4A@\xba\xcb#\xdde\x00U\xc0m\x1b\xa8\x9b\xbc\xafA@\xcb\x1aP\x06\xdd\x01U\xc0<\\\xf6\xdd\xa5\xa9A@\x1d\x91\x14(\xf8\x01U\xc0(\xa2\xaa\x17t\xa5A@ \xa3\xcbk\xbb\x02U\xc0RD\x1ax\xe6\xa2A@\xe5\x9d\xaa\xf6\x7f\x06U\xc0*\xa0\xf8\xe9p\x9fA@\xe3\xcbOB\x7f\x0bU\xc0>?]\\\xdb\x9eA@\x94\xb0<\x8bt\x0eU\xc0H5!\xd7~\xa1A@\xab\x87\xa6\x13\x9f\x12U\xc04\x8ej\xac\xf5\x9aA@\x87\xc6\x8a\xb3\xb8\x14U\xc0\x8c\x9aN\x83\x9a~A@\xca \xee\x85\x98\'U\xc0\xfa\x83\xad\xaa\x8f~A@oY\x93CZ1U\xc0&\xd9\x1d#\xd1~A@\xf75a\t:3U\xc0\x07lz!\x15\x7fA@\x89c\xa4_\xf1=U\xc0\xae[\x93?\x10\x7fA@y\t\xba>B>U\xc0\xb4=\x97z\x0e\x7fA@\x87\x08\xa4G"QU\xc0\x98#2V\xc0\x113\x92Gh\x80A@5J\x7f\xca\x004V\xc0(]r\xe6O\x80A@\xad7\x14\x85e@V\xc07f\xf4\xad\x07\x80A@\xd4\x9ff;\xaeLV\xc0\xc3I)\xf2\x1c\x80A@s\xb8\x9dq\xe9UV\xc0\xd6\xe9=\x9f\xf9\x7fA@\x15\xc2\xfa5aiV\xc0\x9a\xbd\xde\x07\x18\x80A@\x02\xb0\x9a\xc1\xe7mV\xc0\xce\xce\xd6\xcb\xe7\x7fA@\x17\xd5\xf1v\x8c\x93V\xc0dW>\xd5\x19\x80A@\xc9\t\xae\x97\xae\x92V\xc0\x94\xeaS\xee6\x86A@\r,\xad\x0e\x88\x8cV\xc0\xe7\t\x10,?\x85A@!\xdb\xbf\xc8\xd3\x8aV\xc0\xe1\xbf\xf7B\xf9\x89A@\x87a\x96\x8ck\x8bV\xc0\xe0[z#\xeb\x8dA@\xc1\x86eI\x88\x8aV\xc0\xc5\x14\xa5\x1e\x9a\x90A@\xe4]\x8ff4\x89V\xc0\xdcc\xfa\xf8|\x91A@\xc76\x84#P\x85V\xc0\xaa/m\x99\x04\x90A@/\x89\xa7\xde"\x84V\xc0y\x06\x99p\xe0\x92A@\x9f@s]\x00\x84V\xc0\x841\x80n`\x95A@\xb3_\xec}\xb2\x84V\xc0\xecLr\xe9\x90\x98A@\x8b\x1cs^k\x84V\xc0$\x85\xe8\xe4=\x9bA@\x91\xc6l\xc2\xc5\x85V\xc0\x0caQ\x02\x93\xa0A@\xb5\x8a\x01D\xc9\x86V\xc0PcF\xa2\xc8\xa1A@\x05\x81Z\
xe9\xbd\x89V\xc0V\xf4\xe1\x83\xcf\xa1A@\x83m\xc4\xa9\xde\x8aV\xc0@Y=\xfc-\xa4A@\\\xf7\x07\xa7\r\x8aV\xc0\x9d6\xb3\xdd5\xa7A@8=\x81\xe2\xcf\x86V\xc0\xf6\xb3\xf0uJ\xa8A@\xac\xfc\x0c\xa1R\x86V\xc0\xcfcB0?\xacA@\x80j\xa2\x01\xc4\x86V\xc0\x9c\x87\x96K\xdb\xaeA@\x17L\x89\x1d\x95\x85V\xc0\xfc\xad\x1d\r\xd8\xb0A@\xf4\x19W\x1c\xd6\x84V\xc0F\x9a\x16\xe8\x0b\xb4A@ws\x97\xbdt\x85V\xc0\x1au\x02\xff\x8c\xb5A@\xfa\xc7P\x800\x87V\xc0\xca\x9eW\xa1y\xb5A@\xe9\xac\x80\xe6{\x88V\xc0P\x13\xad\x08/\xb4A@\x89>A\x06\xfa\x88V\xc0\xb8\x0chj\n\xb1A@\x9e\xb4\xc2\n\xbf\x8aV\xc0T\xc8;\x082\xb1A@\x9b\xf9\x13\xab\x0e\x8bV\xc0.\x94{\x1f?\xb6A@\xea\xe2S\xa4\xca\x88V\xc0\xca\x19\xa5[\xa7\xb8A@U\xda\x17\x02\x88\x86V\xc0\x06\x88\x97\x9b\xa0\xbcA@^\xe7G\xbcD\x85V\xc05\xb3\xf4\x958\xbdA@\xb2\x86X\xbd\xcb\x84V\xc0b\x0f\x8cwx\xbcA@\xf5\xf2\xea\xdc\xbc\x84V\xc0\x8bzO\x83\x9a\xb6A@\xef\xf88Z\xdd\x83V\xc0*\x99\x19b\xed\xb4A@\xd9n\xc9\xf8\xff\x82V\xc0\xbd\xb7:ef\xb5A@\xddt\xc0Q\xfa\x7fV\xc0\xfa1D^\x07\xb9A@\xc9\xc9\xad\x97\xae\x82V\xc0\xc0\x01\xc9L\x9a\xc1A@\xb3\x04\xe4\x99\x9f\x82V\xc06Z0\xad~\xc5A@E\xf3h\xf8\x1e\x82V\xc0\x93n\x00+\xb8\xc6A@\xeb\xc8\xca\xd2V\x7fV\xc0\xbc\x83\xe6\xa1\xe7\xc7A@\xa8\x93\x92O\x97}V\xc08mn\xcf$\xc4A@:v@\n\xa6|V\xc0\xca\x11r(t\xc3A@\xa4\x8f\xe9\xc8\x99{V\xc0(uJ\x8a\xc0\xc3A@\x8b\xd6M\xe8\xfdzV\xc0\x10\xd6e\xed\xe7\xc5A@P\xc1U\xcfQ}V\xc0\xc0\xe7^\x00\x12\xcaA@\xb9\x0e\xf2\xabA}V\xc0\xe2\xae\xc3\x1c5\xcdA@@\x98\xfcc)xV\xc0\x03\xe8\xc5\xb8\x13\xd1A@\xb4\xce\xac\x80JwV\xc0 
\x16\x0f\x1e\x9e\xd0A@c\xbf\xde\x9fZvV\xc0\xc8\r(<\x99\xd2A@p\xb2\xd2\x7f\xdevV\xc0\xe0\x91\xc9\xf5\xe7\xd5A@\x12\x14\xa0\x81`wV\xc0\x94eou1\xd6A@\x12\xddE\xe5.yV\xc0\xc4\xceMu\xf9\xd3A@\xb8\xc8GJ\x82{V\xc03\x00\xb7s\x92\xd6A@\xd6\x07\x18\x8e\xef|V\xc0\xc4\xcd\x18O5\xdbA@\xa24\xca+\xdf|V\xc07/^\x07\xff\xddA@\xea\xb2*H;zV\xc0\x9ca\xa5\x08\xa1\xe0A@\xd7\xfc\xde\x81\twV\xc0\xec\x86XJ\xc7\xdfA@\x8e!\xad\x9a\xeftV\xc0r\xc0\xbc\x86\x11\xe1A@\xf6{\xa7\xf92sV\xc0\x91k3F\x1c\xe3A@\x9b\xd3}\xf7\x96rV\xc0\xb8,J\xe5\x1e\xe7A@\x14\xa0P\xd4\xa1pV\xc04\xa6\xe6\xc0\xa3\xe8A@\x1a*\x9c\xf0\x1aoV\xc0J\xe9\xfb\x85O\xe7A@g\xf2f\xae\xdblV\xc0\xf1 \x1f{\xee\xe9A@\xcbu\xed\xac\xe5lV\xc0y-\xe0^\xca\xebA@ag\x94\xb3\x7fpV\xc0\x8c\xd0-\x18\x8d\xefA@\xeb*\x04\xf7\x0bqV\xc0enH\xb9,\xf1A@1\xed.\xd5\xd4pV\xc0l\x9f}4\xcd\xf2A@\x11ChPaV\xc0\xca\xb0\xecI\x91-B@=7\xcc\xfeHaV\xc0g\xe1?\xfaW3B@\xb9\xe2\xfb\x82\xe5bV\xc0\xc7\xff\x18ts8B@\xc5D\x14\xc1\x07aV\xc04\x1eDPf\x02V\xc0\xba\xa7\xdb\xbf\xe3DB@k3P=\xa1\x02V\xc0\xa1<\x13\x9d\x96JB@8h\x02\xdb\x90\x04V\xc0{\xe8H\xdd\xffVB@\xaf\x0e\xcc\xbb\xb9\xf7U\xc0\xbd\xa8*\xaa\xafUB@n\xf4\xd1Z\xa0\xf6U\xc0*W\x9dg\x1dRB@DGn\xc4b\xecU\xc0\xad\xd89\x98~RB@\xe1\xc3\xfe~\x00\xe9U\xc0_m\x85{\x96RB@*\x93\x11\xe1.\xd6U\xc0\xd6\x0bE\x85\x1bSB@\xeb\x07\x9d\x836\xc7U\xc0\x12\xdb$\n^SB@0w\x96!]\xc4U\xc0!\xf7d\xc9MSB@\x04\xff1sP\xb1U\xc0\xe3cO\x06xSB@\xac(\xff\xc9\xae\xa0U\xc0\xea\xc3\x07z\xd9SB@\x8e\xdfe\x9f\x96\x9aU\xc0\xab|j\xc0QSB@5\x1c\x1aJ\xbc\x8cU\xc0\x06o\xacVWRB@\x0e\xe2\x9aQ\xc2~U\xc0\x10\xd8k\xd3\tQB@~\x9aI>ErU\xc0\xe9l\xd697PB@[\xd6\xac\xf0\xfd[U\xc0\xea+\x1c(!OB@\xf1\x19\x9f\xbe4SU\xc0M\xf9\xe3\x16$PB@\xd9\xb2\x9b\x9cpQU\xc0Rgj2\x14PB@{3\x1f\xca\xe6?U\xc0c+\x9bY|OB@\x93\xba\xb1\xae\xa02U\xc0ZP\x15\x01\x7fMB@\tS\xd1-\n2U\xc0\x8a;h$sMB@\xc3w\xf3\to\x10U\xc08\xd5QJ9LB@-n0\x89I\x10U\xc0\xc2\x95q\xc87LB@\xa28\xed\x87n\x00U\xc0\x1d\x96*\x8d\xc9KB@\xfcV\xf9\xdf\xe0\xfbT\xc09\xa2\x0eg\xafKB@>F\x91\xd8\x84\xecT\xc0\x08A\xde\xae\xc8JB@=\x0c\xbe\x1a6\xebT\xc0/j2X\xa2LB@\xfas\
x02\x9f\xb5\xddT\xc0\xf10\x0c\xcd\xa6LB@\xc9\xbd\xc5\x1c\x9a\xd1T\xc0\xf8\x16P\x1d\xdcLB@BL\xee\x97\xe5\xcfT\xc0\xb6I~\x00\x83KB@\xbb\x0c\xea\xd0\x7f\xcdT\xc0Tx\x1f\x83FKB@._R\x0c&\xbfT\xc0h&\xab^\xafKB@.\x13\xf6|c\xb6T\xc0p4K>\xa7KB@\xce\x82\x9b\xfd\x17\xa7T\xc0\xe5\xf6&\xc2\xb7KB@>\x8e\xd7\xb9\xff\x92T\xc0b\xc7\xa2%\xc0KB@\x86j>\xc7\xdd\x8dT\xc0\xe2%*\x90\nLB@\xab0!\x1b\xde\x89T\xc0\xce\x85\x15\xe3-LB@X\x11o\xb4y{T\xc0Tq\xf8\x19HLB@\xfbPWU\xc5zT\xc0\x00\xbc\x02\xb8\x8aNB@\xee W\xab\x0cuT\xc0n\xa1!\xcaHNB@\xd2\x8b3\xd4\xbeiT\xc0\x8aE\x9f=\xc8MB@\xfa7\xc3\x94\xdejT\xc0\xe4\x085\x7f}KB@j\xdaY\xed\xf7lT\xc0\xdf\rX\xad}BB@\x97\x07\xae\xeb\xadlT\xc0\x02\xa9%;gAy\xb5{T\xc0p\xeay\x8b\xb8!B@J5\x8b\x84L\x81T\xc0tp\x8fi\x9e\x10B@3\xf9\x85o\xf7\x84T\xc0d\xf9\xddT\xd5\x0cB@9\x95q\x16\x8c\x87T\xc0\xa2\x82\xbc\xfeU\x0cB@\xa6z\xb9\x84\xd9\x89T\xc0v\x06\xf2\xe9\xe2\x11B@\xde\xde\x8b\x0fI\x8dT\xc0\xe02\xc2\x11\xd5\x12B@7\xb6\xe1;\xc7\x90T\xc0B\xc3=!l\x0fB@4K\xd4A\xf4\x93T\xc00F>:\xa6\x0fB@\x19\x84\x13\xeb\xea\x97T\xc0\x11;8\xb8\xa5\x0cB@\x00h*P!\x9aT\xc0\x94\x02-\x92\xa7\tB@\x8f\x8b;Ng\x9eT\xc0#\x08#\xf1#\xffA@.T?Qo\xa0T\xc0\xcee\x99\xce\x7f\xfcA@\x00\xa1\x88\xd2t\xa3T\xc0\xf5\x177Le\xfaA@\'rM\xb9X\xa6T\xc0\xae\r\xeb\xddP\xfbA@\xdc\x0cy<\xc3\xa6T\xc0w{\x00\xbfw\x00B@W\x00a\xdf\xe4\xa5T\xc0@ox \xe3\x02B@\xf7\x02\x0fD\xac\xa6T\xc0f\xa3\xb3\x1d\x84\x05B@\xb5v`\xaa/\xa8T\xc0x\x08.\x19\xf8\x06B@4lQI3\xa9T\xc0/R1X\xa2\x06B@r\xd1\x8e\x8d\xd6\xb0T\xc0\x9b\xf2\x03\x8b\xf4\xffA@L\x0f\x1al\x80\xb1T\xc0\xcaY\x02\xebi\xfeA@H\x8d\xe5\x08\xaf\xb1T\xc0e\x0e]gt\xfaA@\x9dX\xbb\n\xbf\xb3T\xc0\xb0\xc8G\x85\xe2\xf5A@&\xb3\xfd\xf3\xca\xb6T\xc0s$\xfd\x9bH\xf9A@\x87\xdc\xd4zU\xb9T\xc0\x1a\xdf\xf26g\xf9A@7\xed\xf4\xf6}\xbaT\xc0\x9an4\xb3\xc7\xf6A@\xedor2\x07\xbaT\xc0\x9cWd\x18\xa8\xefA@\x9f\xb5\xde\x8d\x9b\xbdT\xc0[\xcf\xe8\xb1^\xe5A@\xc2\xac\xb6\xf2(\xbfT\xc0\\V\xfbl\x15\xe3A@\xa0\xc6\xf79\xd4\xc3T\xc0\xfbx.\xbd.\xe4A@\xf9\x80\x18@\x8e\xc7T\xc0\x82\xd8e\xb7\xc7\xe1A@\xcd Date: Tue, 6 Jan 2015 16:34:35 
-0700 Subject: [PATCH 53/71] minor --- .../test_interface/test_base/test_dimension/test_spatial.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py index 200f2f607..4a10f8b45 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py @@ -546,10 +546,6 @@ def test_get_intersects_no_grid(self): Test an intersects operation without a grid. """ - #todo: test field get_intersects - #todo: test with point only - #todo: test with point and polygon - poly = 'POLYGON((-94.32738470163435807 42.05244651522352228,-93.32425350681413079 37.7891389372375528,-90.87912121943982413 37.03679054112238589,-88.6220760310943092 35.97096364662588996,-84.04528995472702491 36.9113991417698486,-80.97320067059007442 38.03992173594261317,-81.03589637026632886 38.60418303302898835,-83.98259425505075626 37.91453033659008298,-86.55311794177760021 37.47566043885623088,-86.74120504080639193 38.79227013205778007,-87.36816203756902155 37.28757333982743916,-90.18946852300092587 37.03679054112238589,-93.13616640778533906 41.67627231716593883,-94.64086320001567287 44.93644870033168104,-98.15182238188647545 45.50070999741805622,-95.83208149386470609 44.4348831029215674,-94.95434169839700189 42.24053361425231401,-94.95434169839700189 42.24053361425231401,-94.32738470163435807 42.05244651522352228))' poly = wkt.loads(poly) path = ShpCabinet().get_shp_path('state_boundaries') From 5c1ba9199d6c2aaa6ccf9ad09bd3541ed51feff5 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Thu, 8 Jan 2015 15:48:34 -0700 Subject: [PATCH 54/71] time region subset fails with year boundary #349 Fixed time region selection for a month when the bounds crossed the year boundary. 
This required incrementing the upper month to account for the year chancge. fixes #349 --- .../test/test_ocgis/test_util/test_helpers.py | 26 +++++++++++-------- src/ocgis/util/helpers.py | 25 ++++++++++++++---- 2 files changed, 35 insertions(+), 16 deletions(-) diff --git a/src/ocgis/test/test_ocgis/test_util/test_helpers.py b/src/ocgis/test/test_ocgis/test_util/test_helpers.py index 4bd12bbdc..538628859 100644 --- a/src/ocgis/test/test_ocgis/test_util/test_helpers.py +++ b/src/ocgis/test/test_ocgis/test_util/test_helpers.py @@ -430,19 +430,23 @@ def test_get_trimmed_array_by_mask_all_masked(self): self.assertEqual(adjust,{'col': slice(4, -5), 'row': slice(4, -5)}) def test_get_is_date_between(self): - lower = dt(1971,1,1) - upper = dt(2000,2,1) - self.assertFalse(get_is_date_between(lower,upper,month=6)) - self.assertFalse(get_is_date_between(lower,upper,month=2)) - self.assertTrue(get_is_date_between(lower,upper,month=1)) - - self.assertFalse(get_is_date_between(lower,upper,year=1968)) - self.assertTrue(get_is_date_between(lower,upper,year=1995)) - + lower = dt(1971, 1, 1) + upper = dt(2000, 2, 1) + self.assertFalse(get_is_date_between(lower, upper, month=6)) + self.assertFalse(get_is_date_between(lower, upper, month=2)) + self.assertTrue(get_is_date_between(lower, upper, month=1)) + + self.assertFalse(get_is_date_between(lower, upper, year=1968)) + self.assertTrue(get_is_date_between(lower, upper, year=1995)) + lower = dt(2013, 1, 1, 0, 0) upper = dt(2013, 1, 2, 0, 0) - self.assertTrue(get_is_date_between(lower,upper,year=2013)) - + self.assertTrue(get_is_date_between(lower, upper, year=2013)) + + lower = dt(2001, 12, 1) + upper = dt(2002, 1, 1) + self.assertTrue(get_is_date_between(lower, upper, month=12)) + def test_get_formatted_slice(self): ret = get_formatted_slice(slice(None,None,None),10) diff --git a/src/ocgis/util/helpers.py b/src/ocgis/util/helpers.py index 6ab883b47..932dde5b3 100644 --- a/src/ocgis/util/helpers.py +++ b/src/ocgis/util/helpers.py @@ 
-324,7 +324,18 @@ def _format_(slc): return ret -def get_is_date_between(lower,upper,month=None,year=None): +def get_is_date_between(lower, upper, month=None, year=None): + """ + :param lower: The lower boundary time coordinate. + :type lower: :class:`datetime.datetime` + :param upper: The upper boundary time coordinate. + :type upper: :class:`datetime.datetime` + :param int month: The month to check. + :param int year: The year to check. + :returns: ``True`` if the check value occurs in the interval. + :rtype: bool + """ + if month is not None: attr = 'month' to_test = month @@ -332,12 +343,16 @@ def get_is_date_between(lower,upper,month=None,year=None): attr = 'year' to_test = year - part_lower,part_upper = getattr(lower,attr),getattr(upper,attr) + part_lower, part_upper = getattr(lower, attr), getattr(upper, attr) if part_lower != part_upper: - ret = np.logical_and(to_test >= part_lower,to_test < part_upper) + if part_lower > part_upper: + # in the case of a year overlap, increment the upper into another year by adding 12 months + part_upper += 12 + ret = np.logical_and(to_test >= part_lower, to_test < part_upper) else: - ret = np.logical_and(to_test >= part_lower,to_test <= part_upper) - return(ret) + ret = np.logical_and(to_test >= part_lower, to_test <= part_upper) + + return ret def get_is_increasing(vec): From 48f630e22523d1e73621ef7ab2e94fedf696c568 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Fri, 9 Jan 2015 15:23:58 -0700 Subject: [PATCH 55/71] added column iteration to variable collection --- src/ocgis/interface/base/variable.py | 17 +++++++++++++++++ .../test_interface/test_base/test_variable.py | 14 ++++++++++++++ 2 files changed, 31 insertions(+) diff --git a/src/ocgis/interface/base/variable.py b/src/ocgis/interface/base/variable.py index 9f29c40ba..5e1ee1cb4 100644 --- a/src/ocgis/interface/base/variable.py +++ b/src/ocgis/interface/base/variable.py @@ -1,4 +1,5 @@ import abc +from collections import OrderedDict from copy import copy, deepcopy 
import numpy as np @@ -320,6 +321,22 @@ def get_sliced_variables(self, slc): ret = VariableCollection(variables=variables) return ret + def iter_columns(self): + """ + :returns: An iterator over each variable index. + :rtype: :class:`collections.OrderedDict` + """ + + self_itervalues = self.itervalues + dmap = {v.alias: v.value.data for v in self_itervalues()} + for idx in iter_array(self.first().value.data): + yld = OrderedDict() + for v in self_itervalues(): + alias = v.alias + yld[alias] = dmap[alias][idx] + yld['slice'] = idx + yield yld + def iter_melted(self, **kwargs): """ :returns: Call :meth:`~ocgis.Variable.iter_melted` passing ``kwargs`` for each variable in the collection. diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_variable.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_variable.py index e3cfb9a7c..c5f9bdf71 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_variable.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_variable.py @@ -320,6 +320,20 @@ def test_get_sliced_variables(self): for k, v in ret.iteritems(): self.assertTrue(np.may_share_memory(v.value, ret[k].value)) + def test_iter_columns(self): + variables = [self.get_variable(), self.get_variable('tas_foo2')] + variables[1].value *= 2 + variables[1].value.mask[2] = True + vc = VariableCollection(variables=variables) + rows = list(vc.iter_columns()) + self.assertEqual(len(rows), 3) + self.assertEqual(rows[1].keys(), ['tas_foo', 'tas_foo2', 'slice']) + self.assertIsInstance(rows[2], OrderedDict) + for row in rows: + for k, v in row.iteritems(): + if k != 'slice': + self.assertTrue(v < 20) + def test_iter_melted(self): variables = [self.get_variable(), self.get_variable('tas_foo2')] vc = VariableCollection(variables=variables) From b30b4709254bf0728586abe4489574b6646c242b Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Sun, 11 Jan 2015 09:18:39 -0700 Subject: [PATCH 56/71] added non-melted iterator to field --- 
src/ocgis/interface/base/field.py | 105 ++++++++++-------- src/ocgis/interface/base/variable.py | 2 +- .../test_interface/test_base/test_field.py | 13 ++- .../test_interface/test_base/test_variable.py | 8 +- 4 files changed, 77 insertions(+), 51 deletions(-) diff --git a/src/ocgis/interface/base/field.py b/src/ocgis/interface/base/field.py index 161a9a882..3f57fdb0e 100644 --- a/src/ocgis/interface/base/field.py +++ b/src/ocgis/interface/base/field.py @@ -2,7 +2,6 @@ from copy import copy, deepcopy from collections import deque, OrderedDict import itertools -import logging import numpy as np from shapely.ops import cascaded_union @@ -12,10 +11,9 @@ from ocgis.interface.base.attributes import Attributes from ocgis.util.helpers import get_default_or_apply, get_none_or_slice, get_formatted_slice, get_reduced_slice, \ - set_name_attributes, iter_array + set_name_attributes from ocgis.interface.base.variable import Variable, VariableCollection from ocgis import SpatialCollection -from ocgis.util.logging_ocgis import ocgis_lh class Field(Attributes): @@ -183,12 +181,13 @@ def get_intersects(self, polygon, use_spatial_index=True, select_nearest=False): return(self._get_spatial_operation_('get_intersects', polygon, use_spatial_index=use_spatial_index, select_nearest=select_nearest)) - def get_iter(self, add_masked_value=True, value_keys=None): + def get_iter(self, add_masked_value=True, value_keys=None, melted=True): """ :param bool add_masked_value: If ``False``, do not yield masked variable values. :param value_keys: A sequence of keys if the variable is a structure array. :type value_keys: [str, ...] - :returns: A dictionary for each value for each variable. + :param bool melted: If ``True``, do not use a melted format but place variable values as columns. + :returns: A dictionary containing variable values. 
:rtype: dict """ @@ -209,48 +208,66 @@ def _get_dimension_iterator_1d_(target): r_gid_name = self.spatial.name_uid r_name = self.name - for variable in self.variables.itervalues(): - yld = self._get_variable_iter_yield_(variable) - yld['name'] = r_name - ref_value = variable.value - masked_value = ref_value.fill_value + if melted: + for variable in self.variables.itervalues(): + yld = self._get_variable_iter_yield_(variable) + yld['name'] = r_name + ref_value = variable.value + masked_value = ref_value.fill_value + iters = map(_get_dimension_iterator_1d_, ['realization', 'temporal', 'level']) + iters.append(self.spatial.get_geom_iter()) + for [(ridx, rlz), (tidx, t), (lidx, l), (sridx, scidx, geom, gid)] in itertools.product(*iters): + to_yld = deepcopy(yld) + ref_idx = ref_value[ridx, tidx, lidx, sridx, scidx] + + # determine if the data is masked + if is_masked(ref_idx): + if add_masked_value: + ref_idx = masked_value + else: + continue + + # realization, time, and level values. + to_yld.update(rlz) + to_yld.update(t) + to_yld.update(l) + + # add geometries to the output + to_yld['geom'] = geom + to_yld[r_gid_name] = gid + + # the target value is a structure array, multiple value elements need to be added. these outputs do + # not a specific value, so it is not added. there may also be multiple elements in the structure + # which changes how the loop progresses. + if has_value_keys: + for ii in range(ref_idx.shape[0]): + for vk in value_keys: + try: + to_yld[vk] = ref_idx.data[vk][ii] + # attempt to access the data directly. masked determination is done above. 
+ except ValueError: + to_yld[vk] = ref_idx.data[vk][ii] + yield (to_yld) + else: + to_yld['value'] = ref_idx + yield to_yld + else: iters = map(_get_dimension_iterator_1d_, ['realization', 'temporal', 'level']) iters.append(self.spatial.get_geom_iter()) - for [(ridx, rlz), (tidx, t), (lidx, l), (sridx, scidx, geom, gid)] in itertools.product(*iters): - to_yld = deepcopy(yld) - ref_idx = ref_value[ridx, tidx, lidx, sridx, scidx] - # determine if the data is masked - if is_masked(ref_idx): - if add_masked_value: - ref_idx = masked_value - else: - continue - - # realization, time, and level values. - to_yld.update(rlz) - to_yld.update(t) - to_yld.update(l) - - # add geometries to the output - to_yld['geom'] = geom - to_yld[r_gid_name] = gid - - # the target value is a structure array, multiple value elements need to be added. these outputs do not - # a specific value, so it is not added. there may also be multiple elements in the structure which - # changes how the loop progresses. - if has_value_keys: - for ii in range(ref_idx.shape[0]): - for vk in value_keys: - try: - to_yld[vk] = ref_idx.data[vk][ii] - # attempt to access the data directly. masked determination is done above. 
- except ValueError: - to_yld[vk] = ref_idx.data[vk][ii] - yield (to_yld) - else: - to_yld['value'] = ref_idx - yield to_yld + for [(ridx, rlz), (tidx, t), (lidx, l), (sridx, scidx, geom, gid)] in itertools.product(*iters): + yld = {} + for element in [rlz, t, l]: + yld.update(element) + yld['geom'] = geom + yld[r_gid_name] = gid + for variable_alias, variable in self.variables.iteritems(): + ref_idx = variable.value[ridx, tidx, lidx, sridx, scidx] + # determine if the data is masked + if is_masked(ref_idx): + ref_idx = variable.value.fill_value + yld[variable_alias] = ref_idx + yield yld def get_shallow_copy(self): return copy(self) diff --git a/src/ocgis/interface/base/variable.py b/src/ocgis/interface/base/variable.py index 5e1ee1cb4..003b2316a 100644 --- a/src/ocgis/interface/base/variable.py +++ b/src/ocgis/interface/base/variable.py @@ -334,7 +334,7 @@ def iter_columns(self): for v in self_itervalues(): alias = v.alias yld[alias] = dmap[alias][idx] - yld['slice'] = idx + yld = (idx, yld) yield yld def iter_melted(self, **kwargs): diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py index fb193f0e5..14dcbfc69 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py @@ -5,7 +5,6 @@ import numpy as np from shapely import wkt -from shapely.geometry import Point from shapely.ops import cascaded_union from datetime import datetime as dt @@ -271,6 +270,18 @@ def test_get_iter(self): rows = list(field.get_iter()) self.assertAsSetEqual(rows[10].keys(), ['vid', 'gid', 'month', 'year', 'alias', 'geom', 'realization', 'realization_uid', 'time_bounds_lower', 'level_bounds_upper', 'variable', 'day', 'realization_bounds_lower', 'name', 'level', 'did', 'level_bounds_lower', 'value', 'realization_bounds_upper', 'level_uid', 'time', 'tid', 'time_bounds_upper']) + # test not melted + field = 
self.get_field(with_value=True) + other_variable = deepcopy(field.variables.first()) + other_variable.alias = 'two' + other_variable.value *= 2 + field.variables.add_variable(other_variable, assign_new_uid=True) + rows = list(field.get_iter(melted=False)) + self.assertEqual(len(rows), 1488) + for row in rows: + for variable in field.variables.itervalues(): + self.assertIn(variable.alias, row) + def test_get_iter_spatial_only(self): """Test with only a spatial dimension.""" diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_variable.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_variable.py index c5f9bdf71..e56059d43 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_variable.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_variable.py @@ -327,12 +327,10 @@ def test_iter_columns(self): vc = VariableCollection(variables=variables) rows = list(vc.iter_columns()) self.assertEqual(len(rows), 3) - self.assertEqual(rows[1].keys(), ['tas_foo', 'tas_foo2', 'slice']) - self.assertIsInstance(rows[2], OrderedDict) + self.assertEqual(rows[1][1].keys(), ['tas_foo', 'tas_foo2']) + self.assertIsInstance(rows[2][1], OrderedDict) for row in rows: - for k, v in row.iteritems(): - if k != 'slice': - self.assertTrue(v < 20) + self.assertTrue(row[0], 20) def test_iter_melted(self): variables = [self.get_variable(), self.get_variable('tas_foo2')] From f4e39ee1a3cee4138209d0c05eb70ccde94599f7 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Sun, 11 Jan 2015 09:36:44 -0700 Subject: [PATCH 57/71] changed dimension iterators to use ordered dictionaries --- src/ocgis/interface/base/dimension/base.py | 15 ++++++++++----- .../test_base/test_dimension/test_base.py | 1 + 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/src/ocgis/interface/base/dimension/base.py b/src/ocgis/interface/base/dimension/base.py index 6ae26155a..8b6d3d18f 100644 --- a/src/ocgis/interface/base/dimension/base.py +++ 
b/src/ocgis/interface/base/dimension/base.py @@ -1,4 +1,5 @@ import abc +from collections import OrderedDict from copy import copy, deepcopy from operator import mul import numpy as np @@ -344,13 +345,17 @@ def get_iter(self): ref_name_bounds_upper = '{0}_upper'.format(self.name_bounds) for ii in range(self.value.shape[0]): - yld = {ref_name_value: ref_value[ii], ref_name_uid: ref_uid[ii]} + # yld = {ref_name_value: ref_value[ii], ref_name_uid: ref_uid[ii]} + yld = OrderedDict([(ref_name_value, ref_value[ii]), (ref_name_uid, ref_uid[ii])]) if has_bounds: - yld.update({ref_name_bounds_lower: ref_bounds[ii, 0], - ref_name_bounds_upper: ref_bounds[ii, 1]}) + ref_name_bounds_lower_value = ref_bounds[ii, 0] + ref_name_bounds_upper_value = ref_bounds[ii, 1] + else: - yld.update({ref_name_bounds_lower: None, - ref_name_bounds_upper: None}) + ref_name_bounds_lower_value = None + ref_name_bounds_upper_value = None + yld[ref_name_bounds_lower] = ref_name_bounds_lower_value + yld[ref_name_bounds_upper] = ref_name_bounds_upper_value yield ii, yld def set_extrapolated_bounds(self): diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_base.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_base.py index f652a80b0..6fb8cf481 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_base.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_base.py @@ -153,6 +153,7 @@ def test_get_iter(self): vdim = VectorDimension(value=[10, 20, 30, 40, 50], name='foo') tt = list(vdim.get_iter()) self.assertEqual(tt[3], (3, {'foo_uid': 4, 'foo': 40, 'foo_bounds_lower': None, 'foo_bounds_upper': None})) + self.assertIsInstance(tt[0][1], OrderedDict) vdim = VectorDimension(value=[10, 20, 30, 40, 50], bounds=[(ii - 5, ii + 5) for ii in [10, 20, 30, 40, 50]], name='foo', name_uid='hi') From bb7e9685d4c8fddd9200de1977b52a158dacb063 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Mon, 12 Jan 2015 
17:47:47 -0700 Subject: [PATCH 58/71] variable attributes written after value set #350 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit When creating a netCDF file, variable attributes are now written before the value is set. This way scales and offsets may be appropriately applied by netCDF4-python. Also, the keys “scale_value” and “add_offset” are removed from the attributes dictionary when units are updated. The data type of a conformed units value is no also appropriately updated to ensure it matches the output from cfunits-python. closes #350 --- src/ocgis/constants.py | 4 + src/ocgis/interface/base/dimension/base.py | 29 ++- src/ocgis/interface/base/field.py | 3 +- src/ocgis/interface/base/variable.py | 43 ++-- .../test_request/test_driver/test_nc.py | 6 + .../test_base/test_dimension/test_base.py | 129 ++++++------ .../test_interface/test_base/test_field.py | 15 ++ .../test_interface/test_base/test_variable.py | 189 +++++++++++++----- .../test/test_ocgis/test_util/test_units.py | 122 ++--------- 9 files changed, 290 insertions(+), 250 deletions(-) diff --git a/src/ocgis/constants.py b/src/ocgis/constants.py index 88e60264b..5ae618362 100644 --- a/src/ocgis/constants.py +++ b/src/ocgis/constants.py @@ -121,3 +121,7 @@ class HEADERS(object): # The standard key used to identify geometries in a dictionary. DEFAULT_GEOMETRY_KEY = 'geom' + +# Attributes to remove when a value is changed if they are present in the attributes dictionary. These attributes are +# tuned to specific value ranges and will not apply when a value is changed. 
+NETCDF_ATTRIBUTES_TO_REMOVE_ON_VALUE_CHANGE = ('scale_value', 'add_offset') diff --git a/src/ocgis/interface/base/dimension/base.py b/src/ocgis/interface/base/dimension/base.py index 8b6d3d18f..d72e56c3f 100644 --- a/src/ocgis/interface/base/dimension/base.py +++ b/src/ocgis/interface/base/dimension/base.py @@ -5,7 +5,6 @@ import numpy as np from ocgis import constants -from ocgis.interface.base.attributes import Attributes from ocgis.util.helpers import get_none_or_1d, get_none_or_2d, get_none_or_slice,\ get_formatted_slice, get_bounds_from_1d from ocgis.exc import EmptySubsetError, ResolutionError, BoundsAlreadyAvailableError @@ -135,27 +134,26 @@ def _get_uid_(self): return ret -class AbstractUidValueDimension(AbstractValueDimension,AbstractUidDimension): - - def __init__(self,*args,**kwds): - kwds_value = ['value','name_value','units','name','dtype','fill_value'] - kwds_uid = ['uid','name_uid','meta','properties','name'] - +class AbstractUidValueDimension(AbstractValueDimension, AbstractUidDimension): + def __init__(self, *args, **kwds): + kwds_value = ['value', 'name_value', 'units', 'name', 'dtype', 'fill_value', 'attrs'] + kwds_uid = ['uid', 'name_uid', 'meta', 'properties', 'name'] + kwds_all = kwds_value + kwds_uid for key in kwds.keys(): try: - assert(key in kwds_all) + assert (key in kwds_all) except AssertionError: - raise(ValueError('"{0}" is not a valid keyword argument for "{1}".'.format(key,self.__class__.__name__))) - - kwds_value = {key:kwds.get(key,None) for key in kwds_value} - kwds_uid = {key:kwds.get(key,None) for key in kwds_uid} + raise ValueError('"{0}" is not a valid keyword argument for "{1}".'.format(key, self.__class__.__name__)) + + kwds_value = {key: kwds.get(key, None) for key in kwds_value} + kwds_uid = {key: kwds.get(key, None) for key in kwds_uid} - AbstractValueDimension.__init__(self,*args,**kwds_value) - AbstractUidDimension.__init__(self,*args,**kwds_uid) + AbstractValueDimension.__init__(self, *args, **kwds_value) + 
AbstractUidDimension.__init__(self, *args, **kwds_uid) -class VectorDimension(AbstractSourcedVariable, AbstractUidValueDimension, Attributes): +class VectorDimension(AbstractSourcedVariable, AbstractUidValueDimension): _attrs_slice = ('uid', '_value', '_src_idx') _ndims = 1 @@ -173,7 +171,6 @@ def __init__(self, *args, **kwargs): self._has_interpolated_bounds = False AbstractSourcedVariable.__init__(self, kwargs.pop('data', None), kwargs.pop('src_idx', None)) - Attributes.__init__(self, attrs=kwargs.pop('attrs', None)) AbstractUidValueDimension.__init__(self, *args, **kwargs) # setting bounds requires checking the data type of value set in a superclass. diff --git a/src/ocgis/interface/base/field.py b/src/ocgis/interface/base/field.py index 3f57fdb0e..a1da1badc 100644 --- a/src/ocgis/interface/base/field.py +++ b/src/ocgis/interface/base/field.py @@ -446,9 +446,10 @@ def name_scope(target, name, axis): for variable in self.variables.itervalues(): kwargs['fill_value'] = variable.fill_value nc_variable = dataset.createVariable(variable.alias, variable.dtype, **kwargs) + # be sure and write attributes before filling to account for offset and scale factor + variable.write_attributes_to_netcdf_object(nc_variable) if not file_only: nc_variable[:] = variable.value - variable.write_attributes_to_netcdf_object(nc_variable) try: nc_variable.grid_mapping = variable_crs._name diff --git a/src/ocgis/interface/base/variable.py b/src/ocgis/interface/base/variable.py index 003b2316a..e4be2671b 100644 --- a/src/ocgis/interface/base/variable.py +++ b/src/ocgis/interface/base/variable.py @@ -4,12 +4,13 @@ import numpy as np from ocgis.api.collection import AbstractCollection +from ocgis.constants import NETCDF_ATTRIBUTES_TO_REMOVE_ON_VALUE_CHANGE from ocgis.interface.base.attributes import Attributes from ocgis.util.helpers import get_iter, iter_array from ocgis.exc import NoUnitsError, VariableInCollectionError -class AbstractValueVariable(object): +class 
AbstractValueVariable(Attributes): """ :param array-like value: :param units: @@ -20,12 +21,21 @@ class AbstractValueVariable(object): :param str name: :param conform_units_to: :type units: str or :class:`cfunits.Units` + :param str alias: An alternate name for the variable used to maintain uniqueness. + :param dict attrs: A dictionary of arbitrary key-value attributes. """ + __metaclass__ = abc.ABCMeta _value = None _conform_units_to = None - def __init__(self, value=None, units=None, dtype=None, fill_value=None, name=None, conform_units_to=None): + def __init__(self, value=None, units=None, dtype=None, fill_value=None, name=None, conform_units_to=None, + alias=None, attrs=None): + self.name = name + self.alias = alias or self.name + + Attributes.__init__(self, attrs=attrs) + # if the units value is not None, then convert to string. cfunits.Units may be easily handled this way without # checking for the module presence. self.units = str(units) if units is not None else None @@ -33,7 +43,6 @@ def __init__(self, value=None, units=None, dtype=None, fill_value=None, name=Non self.value = value self._dtype = dtype self._fill_value = fill_value - self.name = name # if the units value is not None, then convert to string. cfunits.Units may be easily handled this way without # checking for the module presence. self.units = str(units) if units is not None else None @@ -104,9 +113,10 @@ def _format_private_value_(self, value): def _get_value_(self): """Return the value field.""" - def cfunits_conform(self,to_units,value=None,from_units=None): - ''' - Conform units of value variable in-place using :mod:`cfunits`. + def cfunits_conform(self, to_units, value=None, from_units=None): + """ + Conform units of value variable in-place using :mod:`cfunits`. If there are an scale or offset parameters in the + attribute dictionary, they will be removed. :param to_units: Target conform units. 
:type t_units: str or :class:`cfunits.Units` @@ -115,14 +125,16 @@ def cfunits_conform(self,to_units,value=None,from_units=None): :param from_units: Source units to use in place of the object's value. :type from_units: str or :class:`cfunits.Units` :rtype: np.ndarray - ''' + :raises: NoUnitsError + """ from cfunits import Units + # units are required for conversion if self.cfunits == Units(None): - raise(NoUnitsError(self.alias)) + raise (NoUnitsError(self.alias)) # allow string unit representations to be passed - if not isinstance(to_units,Units): + if not isinstance(to_units, Units): to_units = Units(to_units) # pick the value to convert. this is added to keep the import of the units library in the # AbstractValueVariable.cfunits property @@ -133,7 +145,12 @@ def cfunits_conform(self,to_units,value=None,from_units=None): self.cfunits.conform(convert_value, from_units, to_units, inplace=True) # update the units attribute with the destination units self.units = str(to_units) - + # let the data type load natively from the value array + self._dtype = None + # remove any compression attributes if present + for remove in NETCDF_ATTRIBUTES_TO_REMOVE_ON_VALUE_CHANGE: + self.attrs.pop(remove, None) + return convert_value @@ -165,7 +182,7 @@ def _set_value_from_source_(self): """Should set ``_value`` using the data source and index.""" -class Variable(AbstractSourcedVariable, AbstractValueVariable, Attributes): +class Variable(AbstractSourcedVariable, AbstractValueVariable): """ :param name: Representative name for the variable. 
:type name: str @@ -195,15 +212,13 @@ class Variable(AbstractSourcedVariable, AbstractValueVariable, Attributes): def __init__(self, name=None, alias=None, units=None, meta=None, uid=None, value=None, did=None, data=None, conform_units_to=None, dtype=None, fill_value=None, attrs=None): - self.alias = alias or name self.meta = meta or {} self.uid = uid self.did = did AbstractSourcedVariable.__init__(self, data, None) - Attributes.__init__(self, attrs=attrs) AbstractValueVariable.__init__(self, value=value, units=units, dtype=dtype, fill_value=fill_value, name=name, - conform_units_to=conform_units_to) + conform_units_to=conform_units_to, alias=alias, attrs=attrs) def __getitem__(self, slc): ret = copy(self) diff --git a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py index 4532a19ae..18e395d85 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py +++ b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py @@ -6,6 +6,7 @@ from collections import OrderedDict import numpy as np +from cfunits import Units import fiona from shapely.geometry.geo import shape @@ -196,6 +197,11 @@ def test_get_field_datetime_slicing(self): self.assertEqual(slced.temporal.value_datetime,np.array([dt(2001,8,28,12)])) self.assertNumpyAll(slced.temporal.bounds_datetime,np.array([dt(2001,8,28),dt(2001,8,29)]).reshape(1, 2)) + def test_get_field_units_read_from_file(self): + rd = self.test_data.get_rd('cancm4_tas') + field = rd.get() + self.assertEqual(field.variables['tas'].cfunits, Units('K')) + def test_get_field_value_datetime_after_slicing(self): ref_test = self.test_data['cancm4_tas'] uri = self.test_data.get_uri('cancm4_tas') diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_base.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_base.py index 6fb8cf481..4733b937b 100644 --- 
a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_base.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_base.py @@ -5,7 +5,6 @@ from cfunits.cfunits import Units -from ocgis.interface.base.attributes import Attributes from ocgis.interface.base.variable import AbstractSourcedVariable, AbstractValueVariable from ocgis import constants from ocgis.exc import EmptySubsetError, ResolutionError, BoundsAlreadyAvailableError @@ -50,7 +49,6 @@ class FakeAbstractUidDimension(AbstractUidDimension): class TestAbstractUidDimension(TestBase): - def test_init(self): au = FakeAbstractUidDimension(uid=[1, 2, 3]) self.assertEqual(au.name_uid, 'None_uid') @@ -62,7 +60,6 @@ def test_init(self): class FakeAbstractValueDimension(AbstractValueDimension): - def _get_value_(self): pass @@ -86,9 +83,8 @@ def test_name_value(self): class TestVectorDimension(TestBase): - def test_init(self): - self.assertEqual(VectorDimension.__bases__, (AbstractSourcedVariable, AbstractUidValueDimension, Attributes)) + self.assertEqual(VectorDimension.__bases__, (AbstractSourcedVariable, AbstractUidValueDimension)) vd = VectorDimension(value=[4, 5]) self.assertIsInstance(vd.attrs, OrderedDict) @@ -109,16 +105,16 @@ def test_init(self): VectorDimension() def test_bad_dtypes(self): - vd = VectorDimension(value=181.5,bounds=[181,182]) - self.assertEqual(vd.value.dtype,vd.bounds.dtype) + vd = VectorDimension(value=181.5, bounds=[181, 182]) + self.assertEqual(vd.value.dtype, vd.bounds.dtype) with self.assertRaises(ValueError): - VectorDimension(value=181.5,bounds=['a','b']) + VectorDimension(value=181.5, bounds=['a', 'b']) def test_bad_keywords(self): - ## there should be keyword checks on the bad keywords names + # # there should be keyword checks on the bad keywords names with self.assertRaises(ValueError): - VectorDimension(value=40,bounds=[38,42],ddtype=float) + VectorDimension(value=40, bounds=[38, 42], ddtype=float) def test_boolean_slice(self): """Test 
slicing with boolean values.""" @@ -130,20 +126,20 @@ def test_boolean_slice(self): self.assertNumpyAll(vdim_slc.bounds, np.array([[3, 5], [5, 7]])) def test_bounds_only_two_dimensional(self): - value = [10,20,30,40,50] + value = [10, 20, 30, 40, 50] bounds = [ - [[b-5,b+5,b+10] for b in value], - value, - 5 - ] + [[b - 5, b + 5, b + 10] for b in value], + value, + 5 + ] for b in bounds: with self.assertRaises(ValueError): - VectorDimension(value=value,bounds=b) + VectorDimension(value=value, bounds=b) def test_dtype(self): - value = [10,20,30,40,50] + value = [10, 20, 30, 40, 50] vdim = VectorDimension(value=value) - self.assertEqual(vdim.dtype,np.array(value).dtype) + self.assertEqual(vdim.dtype, np.array(value).dtype) def test_get_iter(self): vdim = VectorDimension(value=[10, 20, 30, 40, 50]) @@ -161,14 +157,15 @@ def test_get_iter(self): self.assertEqual(tt[3], (3, {'hi': 4, 'foo': 40, 'foo_bounds_lower': 35, 'foo_bounds_upper': 45})) def test_interpolate_bounds(self): - value = [10,20,30,40,50] + value = [10, 20, 30, 40, 50] vdim = VectorDimension(value=value) - self.assertEqual(vdim.bounds,None) + self.assertEqual(vdim.bounds, None) vdim = VectorDimension(value=value) vdim.set_extrapolated_bounds() - self.assertEqual(vdim.bounds.tostring(),'\x05\x00\x00\x00\x00\x00\x00\x00\x0f\x00\x00\x00\x00\x00\x00\x00\x0f\x00\x00\x00\x00\x00\x00\x00\x19\x00\x00\x00\x00\x00\x00\x00\x19\x00\x00\x00\x00\x00\x00\x00#\x00\x00\x00\x00\x00\x00\x00#\x00\x00\x00\x00\x00\x00\x00-\x00\x00\x00\x00\x00\x00\x00-\x00\x00\x00\x00\x00\x00\x007\x00\x00\x00\x00\x00\x00\x00') + self.assertEqual(vdim.bounds.tostring(), + '\x05\x00\x00\x00\x00\x00\x00\x00\x0f\x00\x00\x00\x00\x00\x00\x00\x0f\x00\x00\x00\x00\x00\x00\x00\x19\x00\x00\x00\x00\x00\x00\x00\x19\x00\x00\x00\x00\x00\x00\x00#\x00\x00\x00\x00\x00\x00\x00#\x00\x00\x00\x00\x00\x00\x00-\x00\x00\x00\x00\x00\x00\x00-\x00\x00\x00\x00\x00\x00\x007\x00\x00\x00\x00\x00\x00\x00') def test_load_from_source(self): """Test loading from a fake data 
source.""" @@ -212,8 +209,8 @@ def test_one_value(self): vdim.resolution def test_resolution_with_units(self): - vdim = VectorDimension(value=[5,10,15],units='large') - self.assertEqual(vdim.resolution,5.0) + vdim = VectorDimension(value=[5, 10, 15], units='large') + self.assertEqual(vdim.resolution, 5.0) def test_set_extrapolated_bounds(self): value = np.array([1, 2, 3, 4], dtype=float) @@ -250,16 +247,16 @@ def test_set_reference(self): self.assertNumpyAll(vdim.value, vdim_slc2.value) def test_slice_source_idx_only(self): - vdim = VectorDimension(src_idx=[4,5,6],data='foo') + vdim = VectorDimension(src_idx=[4, 5, 6], data='foo') vdim_slice = vdim[0] - self.assertEqual(vdim_slice._src_idx[0],4) + self.assertEqual(vdim_slice._src_idx[0], 4) def test_units_with_bounds(self): - value = [5.,10.,15.] - vdim = VectorDimension(value=value,units='celsius', + value = [5., 10., 15.] + vdim = VectorDimension(value=value, units='celsius', bounds=get_bounds_from_1d(np.array(value))) vdim.cfunits_conform(Units('kelvin')) - self.assertNumpyAll(vdim.bounds,np.array([[275.65,280.65],[280.65,285.65],[285.65,290.65]])) + self.assertNumpyAll(vdim.bounds, np.array([[275.65, 280.65], [280.65, 285.65], [285.65, 290.65]])) def test_with_bounds(self): """Test passing bounds to the constructor.""" @@ -270,22 +267,22 @@ def test_with_bounds(self): self.assertEqual(vdim.resolution, 2.0) def test_with_units(self): - vdim = VectorDimension(value=[5,10,15],units='celsius') - self.assertEqual(vdim.cfunits,Units('celsius')) + vdim = VectorDimension(value=[5, 10, 15], units='celsius') + self.assertEqual(vdim.cfunits, Units('celsius')) vdim.cfunits_conform(Units('kelvin')) - self.assertNumpyAll(vdim.value,np.array([278.15,283.15,288.15])) + self.assertNumpyAll(vdim.value, np.array([278.15, 283.15, 288.15])) def test_with_units_and_bounds_convert_after_load(self): - vdim = VectorDimension(value=[5.,10.,15.],units='celsius') + vdim = VectorDimension(value=[5., 10., 15.], units='celsius') 
vdim.set_extrapolated_bounds() vdim.cfunits_conform(Units('kelvin')) - self.assertNumpyAll(vdim.bounds,np.array([[275.65,280.65],[280.65,285.65],[285.65,290.65]])) + self.assertNumpyAll(vdim.bounds, np.array([[275.65, 280.65], [280.65, 285.65], [285.65, 290.65]])) def test_with_units_and_bounds_interpolation(self): - vdim = VectorDimension(value=[5.,10.,15.],units='celsius') + vdim = VectorDimension(value=[5., 10., 15.], units='celsius') vdim.set_extrapolated_bounds() vdim.cfunits_conform(Units('kelvin')) - self.assertNumpyAll(vdim.bounds,np.array([[275.65,280.65],[280.65,285.65],[285.65,290.65]])) + self.assertNumpyAll(vdim.bounds, np.array([[275.65, 280.65], [280.65, 285.65], [285.65, 290.65]])) def test_write_to_netcdf_dataset(self): path = os.path.join(self.current_dir_output, 'foo.nc') @@ -376,17 +373,17 @@ def test_write_to_netcdf_dataset_bounds_dimension_exists(self): def test_get_between(self): vdim = VectorDimension(value=[0]) with self.assertRaises(EmptySubsetError): - vdim.get_between(100,200) + vdim.get_between(100, 200) - vdim = VectorDimension(value=[100,200,300,400]) - vdim_between = vdim.get_between(100,200) - self.assertEqual(len(vdim_between),2) + vdim = VectorDimension(value=[100, 200, 300, 400]) + vdim_between = vdim.get_between(100, 200) + self.assertEqual(len(vdim_between), 2) def test_get_between_bounds(self): - value = [0.,5.,10.] - bounds = [[-2.5,2.5],[2.5,7.5],[7.5,12.5]] + value = [0., 5., 10.] 
+ bounds = [[-2.5, 2.5], [2.5, 7.5], [7.5, 12.5]] - ## a reversed copy of these bounds are created here + # # a reversed copy of these bounds are created here value_reverse = deepcopy(value) value_reverse.reverse() bounds_reverse = deepcopy(bounds) @@ -394,44 +391,48 @@ def test_get_between_bounds(self): for ii in range(len(bounds)): bounds_reverse[ii].reverse() - data = {'original':{'value':value,'bounds':bounds}, - 'reversed':{'value':value_reverse,'bounds':bounds_reverse}} - for key in ['original','reversed']: + data = {'original': {'value': value, 'bounds': bounds}, + 'reversed': {'value': value_reverse, 'bounds': bounds_reverse}} + for key in ['original', 'reversed']: vdim = VectorDimension(value=data[key]['value'], bounds=data[key]['bounds']) - vdim_between = vdim.get_between(1,3) - self.assertEqual(len(vdim_between),2) + vdim_between = vdim.get_between(1, 3) + self.assertEqual(len(vdim_between), 2) if key == 'original': - self.assertEqual(vdim_between.bounds.tostring(),'\x00\x00\x00\x00\x00\x00\x04\xc0\x00\x00\x00\x00\x00\x00\x04@\x00\x00\x00\x00\x00\x00\x04@\x00\x00\x00\x00\x00\x00\x1e@') + self.assertEqual(vdim_between.bounds.tostring(), + '\x00\x00\x00\x00\x00\x00\x04\xc0\x00\x00\x00\x00\x00\x00\x04@\x00\x00\x00\x00\x00\x00\x04@\x00\x00\x00\x00\x00\x00\x1e@') else: - self.assertEqual(vdim_between.bounds.tostring(),'\x00\x00\x00\x00\x00\x00\x1e@\x00\x00\x00\x00\x00\x00\x04@\x00\x00\x00\x00\x00\x00\x04@\x00\x00\x00\x00\x00\x00\x04\xc0') - self.assertEqual(vdim.resolution,5.0) + self.assertEqual(vdim_between.bounds.tostring(), + '\x00\x00\x00\x00\x00\x00\x1e@\x00\x00\x00\x00\x00\x00\x04@\x00\x00\x00\x00\x00\x00\x04@\x00\x00\x00\x00\x00\x00\x04\xc0') + self.assertEqual(vdim.resolution, 5.0) ## preference is given to the lower bound in the case of "ties" where ## the value could be assumed part of the lower or upper cell - vdim_between = vdim.get_between(2.5,2.5) - self.assertEqual(len(vdim_between),1) + vdim_between = vdim.get_between(2.5, 2.5) + 
self.assertEqual(len(vdim_between), 1) if key == 'original': - self.assertNumpyAll(vdim_between.bounds,np.array([[2.5,7.5]])) + self.assertNumpyAll(vdim_between.bounds, np.array([[2.5, 7.5]])) else: - self.assertNumpyAll(vdim_between.bounds,np.array([[7.5,2.5]])) + self.assertNumpyAll(vdim_between.bounds, np.array([[7.5, 2.5]])) ## if the interval is closed and the subset range falls only on bounds ## value then the subset will be empty with self.assertRaises(EmptySubsetError): - vdim.get_between(2.5,2.5,closed=True) + vdim.get_between(2.5, 2.5, closed=True) - vdim_between = vdim.get_between(2.5,7.5) + vdim_between = vdim.get_between(2.5, 7.5) if key == 'original': - self.assertEqual(vdim_between.bounds.tostring(),'\x00\x00\x00\x00\x00\x00\x04@\x00\x00\x00\x00\x00\x00\x1e@\x00\x00\x00\x00\x00\x00\x1e@\x00\x00\x00\x00\x00\x00)@') + self.assertEqual(vdim_between.bounds.tostring(), + '\x00\x00\x00\x00\x00\x00\x04@\x00\x00\x00\x00\x00\x00\x1e@\x00\x00\x00\x00\x00\x00\x1e@\x00\x00\x00\x00\x00\x00)@') else: - self.assertEqual(vdim_between.bounds.tostring(),'\x00\x00\x00\x00\x00\x00)@\x00\x00\x00\x00\x00\x00\x1e@\x00\x00\x00\x00\x00\x00\x1e@\x00\x00\x00\x00\x00\x00\x04@') + self.assertEqual(vdim_between.bounds.tostring(), + '\x00\x00\x00\x00\x00\x00)@\x00\x00\x00\x00\x00\x00\x1e@\x00\x00\x00\x00\x00\x00\x1e@\x00\x00\x00\x00\x00\x00\x04@') def test_get_between_use_bounds(self): - value = [3.,5.] - bounds = [[2.,4.],[4.,6.]] - vdim = VectorDimension(value=value,bounds=bounds) - ret = vdim.get_between(3,4.5,use_bounds=False) - self.assertNumpyAll(ret.value,np.array([3.])) - self.assertNumpyAll(ret.bounds,np.array([[2.,4.]])) + value = [3., 5.] 
+ bounds = [[2., 4.], [4., 6.]] + vdim = VectorDimension(value=value, bounds=bounds) + ret = vdim.get_between(3, 4.5, use_bounds=False) + self.assertNumpyAll(ret.value, np.array([3.])) + self.assertNumpyAll(ret.bounds, np.array([[2., 4.]])) diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py index 14dcbfc69..c8beecc2a 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py @@ -607,6 +607,21 @@ def test_write_to_netcdf_dataset(self): level_shape = 1 self.assertEqual(new_field.shape, (1, 31, level_shape, 3, 4)) + def test_write_to_netcdf_dataset_scale_offset(self): + """Test with a scale and offset in the attributes.""" + + var = Variable(value=np.random.rand(1, 1, 1, 3, 4), attrs={'scale_value': 2, 'add_offset': 10}, name='tas') + grid = SpatialGridDimension(value=np.ma.array(np.random.rand(2, 3, 4))) + sdim = SpatialDimension(grid=grid) + field = Field(variables=var, spatial=sdim) + path = os.path.join(self.current_dir_output, 'foo.nc') + with self.nc_scope(path, 'w') as ds: + field.write_to_netcdf_dataset(ds) + with self.nc_scope(path, 'r') as out: + var_out = out.variables['tas'][:] + target = var_out.reshape(*var.shape) + self.assertNumpyAllClose(var.value.data, target) + def test_write_to_netcdf_dataset_with_metadata(self): """Test writing to netCDF with a source metadata dictionary attached and data loaded from file.""" diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_variable.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_variable.py index e56059d43..8dd69f305 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_variable.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_variable.py @@ -4,6 +4,8 @@ from cfunits import Units +from ocgis.constants import NETCDF_ATTRIBUTES_TO_REMOVE_ON_VALUE_CHANGE + from ocgis.exc 
import VariableInCollectionError, NoUnitsError from ocgis.interface.base.attributes import Attributes from ocgis.test.base import TestBase @@ -60,12 +62,140 @@ def _get_value_(self): class TestAbstractValueVariable(TestBase): create_dir = False + @property + def value(self): + return np.array([5, 5, 5]) + def test_init(self): + self.assertEqual(AbstractValueVariable.__bases__, (Attributes,)) + kwds = dict(value=[[4, 5, 6]]) for k in self.iter_product_keywords(kwds): av = FakeAbstractValueVariable(value=k.value) self.assertEqual(av.value, k.value) + self.assertIsNone(av.alias) + + fav = FakeAbstractValueVariable(name='foo') + self.assertEqual(fav.alias, 'foo') + + def test_init_conform_units_to(self): + """Test using the conform_units_to keyword argument.""" + + def _get_units_(v): + try: + v = Units(v) + except AttributeError: + pass + return v + + value = np.array([1, 2, 3, 4, 5]) + value_masked = np.ma.array(value, mask=[False, True, False, True, False]) + + kwds = dict(units=['celsius', None, 'mm/day'], + conform_units_to=['kelvin', Units('kelvin'), None], + value=[value, value_masked]) + + for k in itr_products_keywords(kwds, as_namedtuple=True): + try: + var = Variable(**k._asdict()) + except NoUnitsError: + # without units defined on the input array, the values may not be conformed + if k.units is None: + continue + else: + raise + except ValueError: + # units are not convertible + if _get_units_(k.units) == _get_units_('mm/day') and _get_units_(k.conform_units_to) == _get_units_('kelvin'): + continue + else: + raise + + if k.conform_units_to is not None: + try: + self.assertEqual(var.conform_units_to, Units(k.conform_units_to)) + # may already be a Units object + except AttributeError: + self.assertEqual(var.conform_units_to, k.conform_units_to) + else: + self.assertIsNone(var.conform_units_to) + + if k.conform_units_to is not None: + actual = [274.15, 275.15, 276.15, 277.15, 278.15] + if isinstance(k.value, MaskedArray): + mask = value_masked.mask + else: + 
mask = False + actual = np.ma.array(actual, mask=mask, fill_value=var.value.fill_value) + self.assertNumpyAll(actual, var.value) + + def test_init_units(self): + # string-based units + var = Variable(name='tas', units='celsius', value=self.value) + self.assertEqual(var.units, 'celsius') + self.assertEqual(var.cfunits, Units('celsius')) + self.assertNotEqual(var.cfunits, Units('kelvin')) + self.assertTrue(var.cfunits.equivalent(Units('kelvin'))) + + # constructor with units objects v. string + var = Variable(name='tas', units=Units('celsius'), value=self.value) + self.assertEqual(var.units, 'celsius') + self.assertEqual(var.cfunits, Units('celsius')) + + # test no units + var = Variable(name='tas', units=None, value=self.value) + self.assertEqual(var.units, None) + self.assertEqual(var.cfunits, Units(None)) + + def test_cfunits_conform(self): + # conversion of celsius units to kelvin + attrs = {k: 1 for k in NETCDF_ATTRIBUTES_TO_REMOVE_ON_VALUE_CHANGE} + var = Variable(name='tas', units='celsius', value=self.value, attrs=attrs) + self.assertEqual(len(var.attrs), 2) + var.cfunits_conform(Units('kelvin')) + self.assertNumpyAll(var.value, np.ma.array([278.15] * 3)) + self.assertEqual(var.cfunits, Units('kelvin')) + self.assertEqual(var.units, 'kelvin') + self.assertEqual(len(var.attrs), 0) + + # if there are no units associated with a variable, conforming the units should fail + var = Variable(name='tas', units=None, value=self.value) + with self.assertRaises(NoUnitsError): + var.cfunits_conform(Units('kelvin')) + + # conversion should fail for nonequivalent units + var = Variable(name='tas', units='kelvin', value=self.value) + with self.assertRaises(ValueError): + var.cfunits_conform(Units('grams')) + + # the data type should always be updated to match the output from cfunits + av = FakeAbstractValueVariable(value=np.array([4, 5, 6]), dtype=int) + self.assertEqual(av.dtype, np.dtype(int)) + with self.assertRaises(NoUnitsError): + av.cfunits_conform('K') + av.units = 
'celsius' + av.cfunits_conform('K') + self.assertIsNone(av._dtype) + self.assertEqual(av.dtype, av.value.dtype) + + def test_cfunits_conform_from_file(self): + """Test conforming units on data read from file.""" + + rd = self.test_data.get_rd('cancm4_tas') + field = rd.get() + sub = field.get_time_region({'month': [5], 'year': [2005]}) + sub.variables['tas'].cfunits_conform(Units('celsius')) + self.assertAlmostEqual(sub.variables['tas'].value[:, 6, :, 30, 64], + np.ma.array([[28.2539310455]], mask=[[False]])) + self.assertEqual(sub.variables['tas'].units, 'celsius') + + def test_cfunits_conform_masked_array(self): + # assert mask is respected by inplace unit conversion + value = np.ma.array(data=[5, 5, 5], mask=[False, True, False]) + var = Variable(name='tas', units=Units('celsius'), value=value) + var.cfunits_conform(Units('kelvin')) + self.assertNumpyAll(np.ma.array([278.15, 278.15, 278.15], mask=[False, True, False]), var.value) class TestDerivedVariable(TestBase): @@ -99,12 +229,17 @@ def test_iter_melted(self): class TestVariable(TestBase): def test_init(self): - self.assertEqual(Variable.__bases__, (AbstractSourcedVariable, AbstractValueVariable, Attributes)) + self.assertEqual(Variable.__bases__, (AbstractSourcedVariable, AbstractValueVariable)) # test passing attributes var = Variable(attrs={'a': 6}, value=np.array([5])) self.assertEqual(var.attrs['a'], 6) + # test the alias transmits to superclass + var = Variable(value=np.array([4, 5]), name='tas', alias='foo') + self.assertEqual(var.name, 'tas') + self.assertEqual(var.alias, 'foo') + def test_init_with_value_with_dtype_fill_value(self): var = Variable(data='foo',dtype=np.float,fill_value=9,value=np.array([1,2,3,4])) self.assertEqual(var.dtype,np.float) @@ -133,58 +268,6 @@ def test_str(self): var = Variable(name='toon') self.assertEqual(str(var), 'Variable(name="toon", alias="toon", units=None)') - def test_conform_units_to(self): - """Test using the conform_units_to keyword argument.""" - - def 
_get_units_(v): - try: - v = Units(v) - except AttributeError: - pass - return v - - value = np.array([1, 2, 3, 4, 5]) - value_masked = np.ma.array(value, mask=[False, True, False, True, False]) - - kwds = dict(units=['celsius', None, 'mm/day'], - conform_units_to=['kelvin', Units('kelvin'), None], - value=[value, value_masked]) - - for k in itr_products_keywords(kwds, as_namedtuple=True): - - try: - var = Variable(**k._asdict()) - except NoUnitsError: - # without units defined on the input array, the values may not be conformed - if k.units is None: - continue - else: - raise - except ValueError: - # units are not convertible - if _get_units_(k.units) == _get_units_('mm/day') and _get_units_(k.conform_units_to) == _get_units_('kelvin'): - continue - else: - raise - - if k.conform_units_to is not None: - try: - self.assertEqual(var.conform_units_to, Units(k.conform_units_to)) - # may already be a Units object - except AttributeError: - self.assertEqual(var.conform_units_to, k.conform_units_to) - else: - self.assertIsNone(var.conform_units_to) - - if k.conform_units_to is not None: - actual = [274.15, 275.15, 276.15, 277.15, 278.15] - if isinstance(k.value, MaskedArray): - mask = value_masked.mask - else: - mask = False - actual = np.ma.array(actual, mask=mask, fill_value=var.value.fill_value) - self.assertNumpyAll(actual, var.value) - def test_get_empty_like(self): kwargs = dict(name='tas', alias='tas2', units='celsius', meta={'foo': 5}, uid=5, data='foo', did=5) value = np.array([1, 2, 3, 4, 5]) diff --git a/src/ocgis/test/test_ocgis/test_util/test_units.py b/src/ocgis/test/test_ocgis/test_util/test_units.py index b962b52ed..ebc10dd3c 100644 --- a/src/ocgis/test/test_ocgis/test_util/test_units.py +++ b/src/ocgis/test/test_ocgis/test_util/test_units.py @@ -1,131 +1,49 @@ -import unittest -from ocgis.test.base import TestBase -from ocgis.interface.base.variable import Variable -import numpy as np from cfunits.cfunits import Units -from ocgis.exc import NoUnitsError, 
UnitsValidationError -from ocgis.util.units import get_are_units_equivalent, get_are_units_equal,\ - get_are_units_equal_by_string_or_cfunits - -class TestField(TestBase): - - def test_units_read_from_file(self): - rd = self.test_data.get_rd('cancm4_tas') - field = rd.get() - self.assertEqual(field.variables['tas'].cfunits,Units('K')) - - def test_units_conform_from_file(self): - rd = self.test_data.get_rd('cancm4_tas') - field = rd.get() - sub = field.get_time_region({'month':[5],'year':[2005]}) - sub.variables['tas'].cfunits_conform(Units('celsius')) - self.assertAlmostEqual(sub.variables['tas'].value[:,6,:,30,64],np.ma.array([[28.2539310455]],mask=[[False]])) - self.assertEqual(sub.variables['tas'].units,'celsius') +from ocgis.test.base import TestBase +from ocgis.util.units import get_are_units_equivalent, get_are_units_equal, get_are_units_equal_by_string_or_cfunits -class TestUnits(unittest.TestCase): +class TestUnits(TestBase): _create_dir = False - + def test_get_are_units_equivalent(self): - units = [Units('celsius'),Units('kelvin'),Units('fahrenheit')] + units = [Units('celsius'), Units('kelvin'), Units('fahrenheit')] self.assertTrue(get_are_units_equivalent(units)) - - units = [Units('celsius'),Units('kelvin'),Units('coulomb')] + + units = [Units('celsius'), Units('kelvin'), Units('coulomb')] self.assertFalse(get_are_units_equivalent(units)) - + units = [Units('celsius')] with self.assertRaises(ValueError): get_are_units_equivalent(units) - + def test_get_are_units_equal(self): - units = [Units('celsius'),Units('kelvin'),Units('fahrenheit')] + units = [Units('celsius'), Units('kelvin'), Units('fahrenheit')] self.assertFalse(get_are_units_equal(units)) - - units = [Units('celsius'),Units('celsius'),Units('celsius')] + + units = [Units('celsius'), Units('celsius'), Units('celsius')] self.assertTrue(get_are_units_equal(units)) - + units = [Units('celsius')] with self.assertRaises(ValueError): get_are_units_equal(units) - + def 
test_get_are_units_equal_by_string_or_cfunits(self): - _try_cfunits = [True,False] - + _try_cfunits = [True, False] + source = 'K' target = 'K' for try_cfunits in _try_cfunits: - match = get_are_units_equal_by_string_or_cfunits(source,target,try_cfunits=try_cfunits) + match = get_are_units_equal_by_string_or_cfunits(source, target, try_cfunits=try_cfunits) self.assertTrue(match) - + source = 'K' target = 'Kelvin' for try_cfunits in _try_cfunits: - match = get_are_units_equal_by_string_or_cfunits(source,target,try_cfunits=try_cfunits) - ## cfunits.Units will allow comparison of abbreviated and full name - ## form while string comparison will not + match = get_are_units_equal_by_string_or_cfunits(source, target, try_cfunits=try_cfunits) + # cfunits.Units will allow comparison of abbreviated and full name form while string comparison will not if try_cfunits: self.assertTrue(match) else: self.assertFalse(match) - - -class TestVariableUnits(TestBase): - create_dir = False - - @property - def value(self): - return(np.array([5,5,5])) - - def test_as_string(self): - ## string-based units - var = Variable(name='tas',units='celsius',value=self.value) - self.assertEqual(var.units,'celsius') - self.assertEqual(var.cfunits,Units('celsius')) - self.assertNotEqual(var.cfunits,Units('kelvin')) - self.assertTrue(var.cfunits.equivalent(Units('kelvin'))) - - def test_conform(self): - ## conversion of celsius units to kelvin - var = Variable(name='tas',units='celsius',value=self.value) - var.cfunits_conform(Units('kelvin')) - self.assertNumpyAll(var.value,np.ma.array([278.15]*3)) - self.assertEqual(var.cfunits,Units('kelvin')) - self.assertEqual(var.units,'kelvin') - - def test_conform_no_units(self): - ## if there are no units associate with a variable, conforming the units - ## should fail - var = Variable(name='tas',units=None,value=self.value) - with self.assertRaises(NoUnitsError): - var.cfunits_conform(Units('kelvin')) - - def test_conform_nonequivalent_units(self): - ## 
conversion should fail for nonequivalent units - var = Variable(name='tas',units='kelvin',value=self.value) - with self.assertRaises(ValueError): - var.cfunits_conform(Units('grams')) - - def test_as_object(self): - ## constructor with units objects v. string - var = Variable(name='tas',units=Units('celsius'),value=self.value) - self.assertEqual(var.units,'celsius') - self.assertEqual(var.cfunits,Units('celsius')) - - def test_no_units(self): - ## test no units - var = Variable(name='tas',units=None,value=self.value) - self.assertEqual(var.units,None) - self.assertEqual(var.cfunits,Units(None)) - - def test_masked_array(self): - ## assert mask is respected by inplace unit conversion - value = np.ma.array(data=[5,5,5],mask=[False,True,False]) - var = Variable(name='tas',units=Units('celsius'),value=value) - var.cfunits_conform(Units('kelvin')) - self.assertNumpyAll(np.ma.array([278.15,278.15,278.15],mask=[False,True,False]),var.value) - - -if __name__ == "__main__": - #import sys;sys.argv = ['', 'Test.testName'] - unittest.main() From 556dc091bf544ca31e61c46267c1a99cdd48a477 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Wed, 14 Jan 2015 12:22:59 -0700 Subject: [PATCH 59/71] option to conform time units #351 An option to request datasets "t_conform_units_to" was added. This option allows time units to be conformed using the "cfunits" library. The time dimension's calendar may not be changed. 
closes #351 --- src/ocgis/api/request/base.py | 13 +++-- src/ocgis/api/request/driver/nc.py | 6 +-- src/ocgis/interface/base/crs.py | 1 - src/ocgis/interface/base/dimension/base.py | 51 +++++++++++-------- .../interface/base/dimension/temporal.py | 14 ++++- src/ocgis/interface/base/variable.py | 18 ++++--- .../test_api/test_request/test_base.py | 28 ++++++++++ .../test_request/test_driver/test_nc.py | 12 +++++ .../test_interface/test_base/test_crs.py | 3 +- .../test_base/test_dimension/test_base.py | 32 +++++++++++- .../test_base/test_dimension/test_temporal.py | 47 ++++++++++++++--- .../test_interface/test_base/test_variable.py | 10 +++- src/ocgis/test/test_simple/test_simple.py | 2 +- 13 files changed, 188 insertions(+), 49 deletions(-) diff --git a/src/ocgis/api/request/base.py b/src/ocgis/api/request/base.py index 5c0ee6f08..3eb7b3461 100644 --- a/src/ocgis/api/request/base.py +++ b/src/ocgis/api/request/base.py @@ -78,6 +78,11 @@ class RequestDataset(object): :type t_units: str :param t_calendar: Overload the autodiscover `time calendar`_. :type t_calendar: str + :param str t_conform_units_to: Conform the time dimension to the provided units. The calendar may not be changed. + The option dependency ``cfunits-python`` is required. + + >>> t_conform_units_to = 'days since 1949-1-1' + :param s_abstraction: Abstract the geometry data to either ``'point'`` or ``'polygon'``. If ``'polygon'`` is not possible due to missing bounds, ``'point'`` will be used instead. 
:type s_abstraction: str @@ -123,9 +128,9 @@ class RequestDataset(object): _Drivers[DriverVector.key] = DriverVector def __init__(self, uri=None, variable=None, alias=None, units=None, time_range=None, time_region=None, - level_range=None, conform_units_to=None, crs=None, t_units=None, t_calendar=None, did=None, - meta=None, s_abstraction=None, dimension_map=None, name=None, driver=None, regrid_source=True, - regrid_destination=False): + level_range=None, conform_units_to=None, crs=None, t_units=None, t_calendar=None, + t_conform_units_to=None, did=None, meta=None, s_abstraction=None, dimension_map=None, name=None, + driver=None, regrid_source=True, regrid_destination=False): self._is_init = True @@ -159,6 +164,8 @@ def __init__(self, uri=None, variable=None, alias=None, units=None, time_range=N self.t_units = t_units self.t_calendar = t_calendar + self.t_conform_units_to = t_conform_units_to + self.dimension_map = deepcopy(dimension_map) self.did = did self.meta = meta or {} diff --git a/src/ocgis/api/request/driver/nc.py b/src/ocgis/api/request/driver/nc.py index 9b2047c52..5cf6de40a 100644 --- a/src/ocgis/api/request/driver/nc.py +++ b/src/ocgis/api/request/driver/nc.py @@ -212,11 +212,11 @@ def _get_field_(self, format_time=True): source_metadata = self.rd.source_metadata def _get_temporal_adds_(ref_attrs): - ## calendar should default to standard if it is not present and the - ## t_calendar overload is not used. + # calendar should default to standard if it is not present and the t_calendar overload is not used. 
calendar = self.rd.t_calendar or ref_attrs.get('calendar', None) or 'standard' - return {'units': self.rd.t_units or ref_attrs['units'], 'calendar': calendar, 'format_time': format_time} + return {'units': self.rd.t_units or ref_attrs['units'], 'calendar': calendar, 'format_time': format_time, + 'conform_units_to': self.rd.t_conform_units_to} # parameters for the loading loop to_load = {'temporal': {'cls': NcTemporalDimension, 'adds': _get_temporal_adds_, 'axis': 'T', 'name_uid': 'tid', diff --git a/src/ocgis/interface/base/crs.py b/src/ocgis/interface/base/crs.py index de797be27..78d980be1 100644 --- a/src/ocgis/interface/base/crs.py +++ b/src/ocgis/interface/base/crs.py @@ -95,7 +95,6 @@ def write_to_rootgrp(self, rootgrp): """ variable = rootgrp.createVariable(self.name, 'c') - variable.proj4 = self.proj4 return variable diff --git a/src/ocgis/interface/base/dimension/base.py b/src/ocgis/interface/base/dimension/base.py index d72e56c3f..c8a2beeb9 100644 --- a/src/ocgis/interface/base/dimension/base.py +++ b/src/ocgis/interface/base/dimension/base.py @@ -58,7 +58,7 @@ def _get_none_or_array_(self, arr, masked=False): elif self._ndims == 2: ret = get_none_or_2d(arr) else: - raise (NotImplementedError) + raise NotImplementedError if ret is not None and masked and not isinstance(ret, np.ma.MaskedArray): ret = np.ma.array(ret, mask=False) return ret @@ -136,7 +136,7 @@ def _get_uid_(self): class AbstractUidValueDimension(AbstractValueDimension, AbstractUidDimension): def __init__(self, *args, **kwds): - kwds_value = ['value', 'name_value', 'units', 'name', 'dtype', 'fill_value', 'attrs'] + kwds_value = ['value', 'name_value', 'units', 'name', 'dtype', 'fill_value', 'attrs', 'conform_units_to'] kwds_uid = ['uid', 'name_uid', 'meta', 'properties', 'name'] kwds_all = kwds_value + kwds_uid @@ -175,6 +175,11 @@ def __init__(self, *args, **kwargs): # setting bounds requires checking the data type of value set in a superclass. 
self.bounds = bounds + + # conform any units if they provided. check they are not equivalent first + if self.conform_units_to is not None: + if not self.conform_units_to.equals(self.cfunits): + self.cfunits_conform(self.conform_units_to) def __len__(self): return self.shape[0] @@ -233,30 +238,33 @@ def resolution(self): def shape(self): return(self.uid.shape) - def cfunits_conform(self,to_units): - ## get the original units for bounds conversion. the "cfunits_conform" - ## method updates the object's internal "units" attribute. + def cfunits_conform(self, to_units): + """ + Convert and set value and bounds for the dimension object to new units. + + :param to_units: The destination units. + :type to_units: :class:`cfunits.cfunits.Units` + """ + + # get the original units for bounds conversion. the "cfunits_conform" method updates the object's internal + # "units" attribute. original_units = deepcopy(self.cfunits) - ## call the superclass unit conversion - AbstractValueVariable.cfunits_conform(self,to_units) - ## if the bounds are already loaded, convert + # call the superclass unit conversion + AbstractValueVariable.cfunits_conform(self, to_units) + # if the bounds are already loaded, convert if self._bounds is not None: - AbstractValueVariable.cfunits_conform(self,to_units,value=self._bounds,from_units=original_units) - ## if the bound are not set, they may be interpolated + AbstractValueVariable.cfunits_conform(self, to_units, value=self._bounds, from_units=original_units) + # if the bound are not set, they may be interpolated elif self.bounds is not None: - ## if the bounds were interpolated, then this should be set to - ## "None" so the units conforming will use the source value units - ## spec. + # if the bounds were interpolated, then this should be set to "None" so the units conforming will use the + # source value units spec. 
if self._has_interpolated_bounds: from_units = None else: from_units = original_units - ## conform the bounds value - AbstractValueVariable.cfunits_conform(self, - to_units, - value=self.bounds, - from_units=from_units) - + # conform the bounds value + AbstractValueVariable.cfunits_conform(self, to_units, value=self.bounds, from_units=from_units) + def get_between(self,lower,upper,return_indices=False,closed=False,use_bounds=True): assert(lower <= upper) @@ -406,8 +414,9 @@ def write_to_netcdf_dataset(self, dataset, unlimited=False, bounds_dimension_nam # data mode issues require that this be last...? self.write_attributes_to_netcdf_object(variable) - def _format_private_value_(self,value): - return(self._get_none_or_array_(value,masked=False)) + def _format_private_value_(self, value): + value = self._get_none_or_array_(value, masked=False) + return value def _format_slice_state_(self,state,slc): state.bounds = get_none_or_slice(state._bounds,(slc,slice(None))) diff --git a/src/ocgis/interface/base/dimension/temporal.py b/src/ocgis/interface/base/dimension/temporal.py index fcc9ec996..e5bdfc57a 100644 --- a/src/ocgis/interface/base/dimension/temporal.py +++ b/src/ocgis/interface/base/dimension/temporal.py @@ -37,10 +37,10 @@ def __init__(self, *args, **kwargs): kwargs['axis'] = kwargs.get('axis') or 'T' kwargs['name'] = kwargs.get('name') or 'time' kwargs['name_uid'] = kwargs.get('name_uid') or 'tid' + kwargs['units'] = kwargs.get('units') or constants.DEFAULT_TEMPORAL_UNITS super(TemporalDimension, self).__init__(*args, **kwargs) - self.units = self.units or constants.DEFAULT_TEMPORAL_UNITS # test if the units are the special case with months in the time units if self.units.startswith('months'): self._has_months_units = True @@ -74,6 +74,18 @@ def bounds_numtime(self): self._bounds_numtime = self.bounds return self._bounds_numtime + @base.VectorDimension.conform_units_to.setter + def conform_units_to(self, value): + 
base.VectorDimension._conform_units_to_setter_(self, value) + if self._conform_units_to is not None: + self._conform_units_to.calendar = self.calendar + + @property + def cfunits(self): + ret = super(TemporalDimension, self).cfunits + ret.calendar = self.calendar + return ret + @property def extent_datetime(self): if not self.format_time: diff --git a/src/ocgis/interface/base/variable.py b/src/ocgis/interface/base/variable.py index e4be2671b..9adb087fb 100644 --- a/src/ocgis/interface/base/variable.py +++ b/src/ocgis/interface/base/variable.py @@ -43,9 +43,6 @@ def __init__(self, value=None, units=None, dtype=None, fill_value=None, name=Non self.value = value self._dtype = dtype self._fill_value = fill_value - # if the units value is not None, then convert to string. cfunits.Units may be easily handled this way without - # checking for the module presence. - self.units = str(units) if units is not None else None @property def cfunits(self): @@ -53,18 +50,18 @@ def cfunits(self): from cfunits import Units return Units(self.units) - @property - def conform_units_to(self): + def _conform_units_to_getter_(self): return self._conform_units_to - @conform_units_to.setter - def conform_units_to(self, value): + def _conform_units_to_setter_(self, value): if value is not None: from cfunits import Units if not isinstance(value, Units): value = Units(value) self._conform_units_to = value + conform_units_to = property(_conform_units_to_getter_, _conform_units_to_setter_) + @property def dtype(self): if self._dtype is None: @@ -144,7 +141,12 @@ def cfunits_conform(self, to_units, value=None, from_units=None): # units are always converted in place. 
users need to execute their own deep copies self.cfunits.conform(convert_value, from_units, to_units, inplace=True) # update the units attribute with the destination units - self.units = str(to_units) + if hasattr(to_units, 'calendar'): + str_to_units = deepcopy(to_units) + delattr(str_to_units, 'calendar') + else: + str_to_units = to_units + self.units = str(str_to_units) # let the data type load natively from the value array self._dtype = None # remove any compression attributes if present diff --git a/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py b/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py index 8d58dd9ae..fc098fe9d 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py +++ b/src/ocgis/test/test_ocgis/test_api/test_request/test_base.py @@ -65,6 +65,9 @@ def test_init(self): rd = RequestDataset(uri=self.uri, crs=CFWGS84()) self.assertTrue(rd._has_assigned_coordinate_system) + rd = RequestDataset(uri=self.uri, t_conform_units_to='days since 1949-1-1') + self.assertEqual(rd.t_conform_units_to, 'days since 1949-1-1') + def test_init_driver(self): uri = ShpCabinet().get_shp_path('state_boundaries') rd = RequestDataset(uri=uri, driver='vector') @@ -78,6 +81,31 @@ def test_init_driver(self): with self.assertRaises(ValueError): RequestDataset(uri_nc, driver='vector') + def test_conform_units_to(self): + rd = RequestDataset(uri=self.uri) + self.assertIsNone(rd.conform_units_to) + rd = RequestDataset(uri=self.uri, conform_units_to=None) + self.assertIsNone(rd.conform_units_to) + + # these are exceptions + problems = ['K', 'not_real'] + for prob in problems: + with self.assertRaises(RequestValidationError): + RequestDataset(uri=self.uri, variable=['one', 'two'], conform_units_to=prob) + + # test for univariate + poss = ['K', ['K']] + for p in poss: + rd = RequestDataset(uri=self.uri, conform_units_to=p) + self.assertEqual(rd.conform_units_to, 'K') + + # test for multivariate + target = ['K', 'celsius'] + poss = [target] 
+ for p in poss: + rd = RequestDataset(uri=self.uri, variable=['one', 'two'], conform_units_to=p) + self.assertEqual(rd.conform_units_to, tuple(target)) + def test_str(self): rd = self.test_data.get_rd('cancm4_tas') ss = str(rd) diff --git a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py index 18e395d85..790edbdd6 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py +++ b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py @@ -132,6 +132,18 @@ def test_get_field(self): ds.close() + def test_get_field_t_conform_units_to(self): + """ + Test conforming time units is appropriately passed to field object. + """ + + uri = self.test_data.get_uri('cancm4_tas') + target = Units('days since 1949-1-1') + target.calendar = '365_day' + rd = RequestDataset(uri=uri, t_conform_units_to=target) + field = rd.get() + self.assertEqual(field.temporal.conform_units_to, target) + def test_get_field_different_dimension_names_and_values(self): """Test dimension names and dimension values are correctly read from netCDF.""" diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_crs.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_crs.py index 22511caad..a8b283407 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_crs.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_crs.py @@ -68,7 +68,8 @@ def test_write_to_rootgrp(self): with nc_scope(path, 'w') as ds: variable = crs.write_to_rootgrp(ds) self.assertIsInstance(variable, nc.Variable) - self.assertEqual(variable.proj4, crs.proj4) + with self.assertRaises(AttributeError): + variable.proj4 class TestWrappableCoordinateSystem(TestBase): diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_base.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_base.py index 4733b937b..8b4c011bb 100644 --- 
a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_base.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_base.py @@ -59,6 +59,22 @@ def test_init(self): self.assertEqual(au.name_uid, 'hello') +class FakeAbstractUidValueDimension(AbstractUidValueDimension): + _ndims = 1 + _attrs_slice = None + + def _get_value_(self): + pass + + +class TestAbstractUidValueDimension(TestBase): + + def test_init(self): + c = 'celsius' + ff = FakeAbstractUidValueDimension(conform_units_to=c) + self.assertEqual(ff.conform_units_to, Units(c)) + + class FakeAbstractValueDimension(AbstractValueDimension): def _get_value_(self): pass @@ -104,6 +120,20 @@ def test_init(self): with self.assertRaises(ValueError): VectorDimension() + def test_init_conform_units_to(self): + target = np.array([4, 5, 6]) + target_copy = target.copy() + vd = VectorDimension(value=target, units='celsius', conform_units_to='kelvin') + self.assertNumpyNotAll(vd.value, target_copy) + self.assertNumpyAll(vd.value, np.array([277.15, 278.15, 279.15])) + self.assertEqual(vd.units, 'kelvin') + self.assertEqual(vd.cfunits, Units('kelvin')) + + target = np.array([4., 5., 6.]) + target_bounds = np.array([[3.5, 4.5], [4.5, 5.5], [5.5, 6.5]]) + vd = VectorDimension(value=target, bounds=target_bounds, units='celsius', conform_units_to='kelvin') + self.assertNumpyAll(vd.bounds, np.array([[276.65, 277.65], [277.65, 278.65], [278.65, 279.65]])) + def test_bad_dtypes(self): vd = VectorDimension(value=181.5, bounds=[181, 182]) self.assertEqual(vd.value.dtype, vd.bounds.dtype) @@ -112,7 +142,7 @@ def test_bad_dtypes(self): VectorDimension(value=181.5, bounds=['a', 'b']) def test_bad_keywords(self): - # # there should be keyword checks on the bad keywords names + # there should be keyword checks on the bad keywords names with self.assertRaises(ValueError): VectorDimension(value=40, bounds=[38, 42], ddtype=float) diff --git 
a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_temporal.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_temporal.py index 2a95b451b..439a80133 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_temporal.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_temporal.py @@ -5,6 +5,7 @@ import itertools import numpy as np +from cfunits import Units import netcdftime from datetime import datetime as dt @@ -146,14 +147,6 @@ def test_init(self): self.assertTrue(td._has_months_units) self.assertEqual(td.axis, 'foo') - def test_getitem(self): - td = self.get_temporal_dimension() - self.assertIsNotNone(td.value_datetime) - self.assertIsNotNone(td.value_numtime) - sub = td[3] - self.assertEqual(sub.value_datetime.shape, (1,)) - self.assertEqual(sub.value_numtime.shape, (1,)) - def test_360_day_calendar(self): months = range(1, 13) days = range(1, 31) @@ -190,6 +183,36 @@ def test_bounds_datetime_and_bounds_numtime(self): except CannotFormatTimeError: self.assertFalse(format_time) + def test_cfunits(self): + temporal = TemporalDimension(value=[4, 5, 6]) + self.assertEqual(temporal.cfunits.calendar, temporal.calendar) + + def test_cfunits_conform(self): + + def _get_temporal_(kwds=None): + rd = self.test_data.get_rd('cancm4_tas', kwds=kwds) + field = rd.get() + temporal = field.temporal + return temporal + + target = Units('days since 1949-1-1') + target.calendar = '365_day' + kwds = {'t_conform_units_to': target} + temporal = _get_temporal_(kwds) + temporal_orig = _get_temporal_() + self.assertNumpyNotAll(temporal.value, temporal_orig.value) + self.assertNumpyAll(temporal.value_datetime, temporal_orig.value_datetime) + + def test_conform_units_to(self): + d = 'days since 1949-1-1' + td = TemporalDimension(value=[4, 5, 6], conform_units_to=d) + actual = Units(d) + actual.calendar = constants.DEFAULT_TEMPORAL_CALENDAR + self.assertTrue(td.cfunits.equals(actual)) + + 
td = TemporalDimension(value=[4, 5, 6]) + self.assertIsNone(td.conform_units_to) + def test_extent_datetime_and_extent_numtime(self): value_numtime = np.array([6000., 6001., 6002]) value_datetime = TemporalDimension(value=value_numtime).value_datetime @@ -213,6 +236,14 @@ def test_format_slice_state(self): for element in elements: self.assertEqual(element.shape, (1, 2)) + def test_getitem(self): + td = self.get_temporal_dimension() + self.assertIsNotNone(td.value_datetime) + self.assertIsNotNone(td.value_numtime) + sub = td[3] + self.assertEqual(sub.value_datetime.shape, (1,)) + self.assertEqual(sub.value_numtime.shape, (1,)) + def test_get_between(self): keywords = dict(as_datetime=[False, True]) diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_variable.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_variable.py index 8dd69f305..10d5ebdce 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_variable.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_variable.py @@ -5,7 +5,6 @@ from cfunits import Units from ocgis.constants import NETCDF_ATTRIBUTES_TO_REMOVE_ON_VALUE_CHANGE - from ocgis.exc import VariableInCollectionError, NoUnitsError from ocgis.interface.base.attributes import Attributes from ocgis.test.base import TestBase @@ -179,6 +178,15 @@ def test_cfunits_conform(self): self.assertIsNone(av._dtype) self.assertEqual(av.dtype, av.value.dtype) + # calendar can be finicky - those need to be stripped from the string conversion + conform_units_to = Units('days since 1949-1-1') + conform_units_to.calendar = 'standard' + units = Units('days since 1900-1-1') + units.calendar = 'standard' + av = FakeAbstractValueVariable(value=np.array([4000, 5000, 6000]), units=units, + conform_units_to=conform_units_to) + self.assertEqual(av.units, 'days since 1949-1-1') + def test_cfunits_conform_from_file(self): """Test conforming units on data read from file.""" diff --git a/src/ocgis/test/test_simple/test_simple.py 
b/src/ocgis/test/test_simple/test_simple.py index 54a2ae908..bd67293f7 100644 --- a/src/ocgis/test/test_simple/test_simple.py +++ b/src/ocgis/test/test_simple/test_simple.py @@ -1510,7 +1510,7 @@ def test_nc_projection(self): ret = self.get_ret(kwds={'output_format': 'nc'}) self.assertNcEqual(dataset['uri'], ret, ignore_attributes={'global': ['history'], 'time_bnds': ['calendar', 'units'], - 'crs': ['proj4', 'units']}) + 'crs': ['units']}) def test_nc_projection_to_shp(self): ret = self.get_ret(kwds={'output_format': constants.OUTPUT_FORMAT_SHAPEFILE}) From 6b7b10a03cc958c77df095b881b08355467977cf Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Fri, 16 Jan 2015 10:08:05 -0700 Subject: [PATCH 60/71] seasonal time region subsets choking on mid-year origin time series #352 A check was added to the extraction of time regions for the purposes of seasonal aggregation. This check ensures each generated time region is valid for the time series used to construct it. This issue only occurred with time-series beginning in mid-year where certain month-year combinations were not actually in the time series. --- .../interface/base/dimension/temporal.py | 12 +++ .../test_base/test_dimension/test_temporal.py | 86 ++++++++++++------- 2 files changed, 66 insertions(+), 32 deletions(-) diff --git a/src/ocgis/interface/base/dimension/temporal.py b/src/ocgis/interface/base/dimension/temporal.py index e5bdfc57a..1f032df6c 100644 --- a/src/ocgis/interface/base/dimension/temporal.py +++ b/src/ocgis/interface/base/dimension/temporal.py @@ -844,6 +844,18 @@ def get_time_regions(seasons, dates, raise_if_incomplete=True): for year, season in itertools.product(years, seasons): time_regions.append([{'year': [year], 'month': season}]) + # ensure each time region is valid. 
if it is not, remove it from the returned list + td = TemporalDimension(value=dates) + remove = [] + for idx, time_region in enumerate(time_regions): + try: + for sub_time_region in time_region: + td.get_time_region(sub_time_region) + except EmptySubsetError: + remove.append(idx) + for xx in remove: + time_regions.pop(xx) + return time_regions diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_temporal.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_temporal.py index 439a80133..1e3917624 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_temporal.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_temporal.py @@ -565,42 +565,64 @@ def test_months_not_in_time_units(self): td = TemporalDimension(value=value, units=units, calendar='standard') self.assertFalse(td._has_months_units) - def test_seasonal_get_time_regions(self): - dates = get_date_list(dt(2012,1,1),dt(2013,12,31),1) + def test_get_time_regions(self): + dates = get_date_list(dt(2012, 1, 1), dt(2013, 12, 31), 1) - ## two simple seasons - calc_grouping = [[6,7,8],[9,10,11]] - time_regions = get_time_regions(calc_grouping,dates) - correct = [[{'month': [6, 7, 8], 'year': [2012]}], [{'month': [9, 10, 11], 'year': [2012]}], [{'month': [6, 7, 8], 'year': [2013]}], [{'month': [9, 10, 11], 'year': [2013]}]] - self.assertEqual(time_regions,correct) + # two simple seasons + calc_grouping = [[6, 7, 8], [9, 10, 11]] + time_regions = get_time_regions(calc_grouping, dates) + correct = [[{'month': [6, 7, 8], 'year': [2012]}], [{'month': [9, 10, 11], 'year': [2012]}], + [{'month': [6, 7, 8], 'year': [2013]}], [{'month': [9, 10, 11], 'year': [2013]}]] + self.assertEqual(time_regions, correct) - ## add an interannual season at the back - calc_grouping = [[6,7,8],[9,10,11],[12,1,2]] + # add an interannual season at the back + calc_grouping = [[6, 7, 8], [9, 10, 11], [12, 1, 2]] with 
self.assertRaises(IncompleteSeasonError): - get_time_regions(calc_grouping,dates) - time_regions = get_time_regions(calc_grouping,dates,raise_if_incomplete=False) - correct = [[{'month': [6, 7, 8], 'year': [2012]}], [{'month': [9, 10, 11], 'year': [2012]}], [{'month': [12], 'year': [2012]}, {'month': [2, 1], 'year': [2013]}], [{'month': [6, 7, 8], 'year': [2013]}], [{'month': [9, 10, 11], 'year': [2013]}]] - self.assertEqual(time_regions,correct) - - ## put the interannual season in the middle - calc_grouping = [[9,10,11],[12,1,2],[6,7,8]] + get_time_regions(calc_grouping, dates) + time_regions = get_time_regions(calc_grouping, dates, raise_if_incomplete=False) + correct = [[{'month': [6, 7, 8], 'year': [2012]}], [{'month': [9, 10, 11], 'year': [2012]}], + [{'month': [12], 'year': [2012]}, {'month': [2, 1], 'year': [2013]}], + [{'month': [6, 7, 8], 'year': [2013]}], [{'month': [9, 10, 11], 'year': [2013]}]] + self.assertEqual(time_regions, correct) + + # put the interannual season in the middle + calc_grouping = [[9, 10, 11], [12, 1, 2], [6, 7, 8]] with self.assertRaises(IncompleteSeasonError): - get_time_regions(calc_grouping,dates) - time_regions = get_time_regions(calc_grouping,dates,raise_if_incomplete=False) - correct = [[{'month': [9, 10, 11], 'year': [2012]}], [{'month': [12], 'year': [2012]}, {'month': [2, 1], 'year': [2013]}], [{'month': [6, 7, 8], 'year': [2012]}], [{'month': [9, 10, 11], 'year': [2013]}], [{'month': [6, 7, 8], 'year': [2013]}]] - self.assertEqual(time_regions,correct) - - ## odd seasons, but covering the whole year - calc_grouping = [[1,2,3],[4,5,6],[7,8,9],[10,11,12]] - time_regions = get_time_regions(calc_grouping,dates) - correct = [[{'month': [1, 2, 3], 'year': [2012]}], [{'month': [4, 5, 6], 'year': [2012]}], [{'month': [7, 8, 9], 'year': [2012]}], [{'month': [10, 11, 12], 'year': [2012]}], [{'month': [1, 2, 3], 'year': [2013]}], [{'month': [4, 5, 6], 'year': [2013]}], [{'month': [7, 8, 9], 'year': [2013]}], [{'month': [10, 11, 12], 
'year': [2013]}]] - self.assertEqual(time_regions,correct) - - ## standard seasons - calc_grouping = [[3,4,5],[6,7,8],[9,10,11],[12,1,2]] - time_regions = get_time_regions(calc_grouping,dates,raise_if_incomplete=False) - correct = [[{'month': [3, 4, 5], 'year': [2012]}], [{'month': [6, 7, 8], 'year': [2012]}], [{'month': [9, 10, 11], 'year': [2012]}], [{'month': [12], 'year': [2012]}, {'month': [2, 1], 'year': [2013]}], [{'month': [3, 4, 5], 'year': [2013]}], [{'month': [6, 7, 8], 'year': [2013]}], [{'month': [9, 10, 11], 'year': [2013]}]] - self.assertEqual(time_regions,correct) + get_time_regions(calc_grouping, dates) + time_regions = get_time_regions(calc_grouping, dates, raise_if_incomplete=False) + correct = [[{'month': [9, 10, 11], 'year': [2012]}], + [{'month': [12], 'year': [2012]}, {'month': [2, 1], 'year': [2013]}], + [{'month': [6, 7, 8], 'year': [2012]}], [{'month': [9, 10, 11], 'year': [2013]}], + [{'month': [6, 7, 8], 'year': [2013]}]] + self.assertEqual(time_regions, correct) + + # odd seasons, but covering the whole year + calc_grouping = [[1, 2, 3], [4, 5, 6], [7, 8, 9], [10, 11, 12]] + time_regions = get_time_regions(calc_grouping, dates) + correct = [[{'month': [1, 2, 3], 'year': [2012]}], [{'month': [4, 5, 6], 'year': [2012]}], + [{'month': [7, 8, 9], 'year': [2012]}], [{'month': [10, 11, 12], 'year': [2012]}], + [{'month': [1, 2, 3], 'year': [2013]}], [{'month': [4, 5, 6], 'year': [2013]}], + [{'month': [7, 8, 9], 'year': [2013]}], [{'month': [10, 11, 12], 'year': [2013]}]] + self.assertEqual(time_regions, correct) + + # standard seasons + calc_grouping = [[3, 4, 5], [6, 7, 8], [9, 10, 11], [12, 1, 2]] + time_regions = get_time_regions(calc_grouping, dates, raise_if_incomplete=False) + correct = [[{'month': [3, 4, 5], 'year': [2012]}], [{'month': [6, 7, 8], 'year': [2012]}], + [{'month': [9, 10, 11], 'year': [2012]}], + [{'month': [12], 'year': [2012]}, {'month': [2, 1], 'year': [2013]}], + [{'month': [3, 4, 5], 'year': [2013]}], [{'month': [6, 
7, 8], 'year': [2013]}], + [{'month': [9, 10, 11], 'year': [2013]}]] + self.assertEqual(time_regions, correct) + + # in this case, the time series starts in december. the first season/year combination will not actually be + # present in the time series and should be removed by the code. + actual = [[{'month': [3, 4, 5], 'year': [1950]}], [{'month': [3, 4, 5], 'year': [1951]}]] + raise_if_incomplete = False + seasons = [[3, 4, 5]] + dates = get_date_list(dt(1949, 12, 16), dt(1951, 12, 16), 1) + target = get_time_regions(seasons, dates, raise_if_incomplete=raise_if_incomplete) + self.assertEqual(actual, target) def test_time_range_subset(self): dt1 = datetime.datetime(1950,01,01,12) From 0f17e16dc06d7be6e537e6ae2e160b71e56971f2 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Mon, 19 Jan 2015 15:52:51 -0700 Subject: [PATCH 61/71] add write to fiona as method on field objects Added a method to write to a fiona file on field objects. This included includes some changes to the way constants are used. 
--- src/ocgis/api/request/driver/nc.py | 6 +- src/ocgis/constants.py | 12 + src/ocgis/interface/base/dimension/base.py | 71 ++++-- .../interface/base/dimension/temporal.py | 2 - src/ocgis/interface/base/field.py | 79 +++++- src/ocgis/test/base.py | 16 +- .../test_request/test_driver/test_nc.py | 30 +-- .../test_base/test_dimension/test_base.py | 50 +++- .../test_base/test_dimension/test_temporal.py | 5 +- .../test_interface/test_base/test_field.py | 103 +++++++- src/ocgis/util/addict/__init__.py | 9 + src/ocgis/util/addict/addict.py | 231 ++++++++++++++++++ 12 files changed, 537 insertions(+), 77 deletions(-) create mode 100644 src/ocgis/util/addict/__init__.py create mode 100644 src/ocgis/util/addict/addict.py diff --git a/src/ocgis/api/request/driver/nc.py b/src/ocgis/api/request/driver/nc.py index 5cf6de40a..f9771f435 100644 --- a/src/ocgis/api/request/driver/nc.py +++ b/src/ocgis/api/request/driver/nc.py @@ -194,8 +194,8 @@ def _get_vector_dimension_(self, k, v, source_metadata): # check for the name of the bounds dimension in the source metadata. loop through the dimension map, # look for a bounds variable, and choose the bounds dimension if possible - name_bounds_suffix = self._get_name_bounds_suffix_(source_metadata) - kwds['name_bounds_suffix'] = name_bounds_suffix + name_bounds_dimension = self._get_name_bounds_dimension_(source_metadata) + kwds['name_bounds_dimension'] = name_bounds_dimension # create instance of the dimension fill = v['cls'](**kwds) @@ -294,7 +294,7 @@ def _get_temporal_adds_(ref_attrs): return ret @staticmethod - def _get_name_bounds_suffix_(source_metadata): + def _get_name_bounds_dimension_(source_metadata): """ :param dict source_metadata: Metadata dictionary as returned from :attr:`~ocgis.RequestDataset.source_metadata`. :returns: The name of the bounds suffix to use when creating dimensions. 
If no bounds are found in the source diff --git a/src/ocgis/constants.py b/src/ocgis/constants.py index 5ae618362..a0e73f942 100644 --- a/src/ocgis/constants.py +++ b/src/ocgis/constants.py @@ -125,3 +125,15 @@ class HEADERS(object): # Attributes to remove when a value is changed if they are present in the attributes dictionary. These attributes are # tuned to specific value ranges and will not apply when a value is changed. NETCDF_ATTRIBUTES_TO_REMOVE_ON_VALUE_CHANGE = ('scale_value', 'add_offset') + +NAME_DIMENSION_REALIZATION = 'rlz' +NAME_DIMENSION_TEMPORAL = 'time' +NAME_DIMENSION_LEVEL = 'level' + +NAME_BOUNDS_DIMENSION_LOWER = 'lb' +NAME_BOUNDS_DIMENSION_UPPER = 'ub' + +NAME_UID_DIMENSION_REALIZATION = 'rid' +NAME_UID_DIMENSION_TEMPORAL = 'tid' +NAME_UID_DIMENSION_LEVEL = 'lid' +NAME_UID_FIELD = 'fid' diff --git a/src/ocgis/interface/base/dimension/base.py b/src/ocgis/interface/base/dimension/base.py index c8a2beeb9..f84a39a2e 100644 --- a/src/ocgis/interface/base/dimension/base.py +++ b/src/ocgis/interface/base/dimension/base.py @@ -5,11 +5,11 @@ import numpy as np from ocgis import constants +from ocgis.constants import NAME_BOUNDS_DIMENSION_LOWER, NAME_BOUNDS_DIMENSION_UPPER, OCGIS_BOUNDS from ocgis.util.helpers import get_none_or_1d, get_none_or_2d, get_none_or_slice,\ get_formatted_slice, get_bounds_from_1d from ocgis.exc import EmptySubsetError, ResolutionError, BoundsAlreadyAvailableError -from ocgis.interface.base.variable import AbstractValueVariable,\ - AbstractSourcedVariable +from ocgis.interface.base.variable import AbstractValueVariable, AbstractSourcedVariable class AbstractDimension(object): @@ -162,10 +162,15 @@ def __init__(self, *args, **kwargs): msg = 'Without a "data" object, "value" is required.' 
raise ValueError(msg) + self._bounds = None + self._name_bounds = None + self._name_bounds_tuple = None + + self.name_bounds_dimension = kwargs.pop('name_bounds_dimension', OCGIS_BOUNDS) bounds = kwargs.pop('bounds', None) # used for creating name_bounds as well as the name of the bounds dimension in netCDF - self.name_bounds_suffix = kwargs.pop('name_bounds_suffix', None) or constants.OCGIS_BOUNDS - self._name_bounds = kwargs.pop('name_bounds', None) + self.name_bounds = kwargs.pop('name_bounds', None) + self.name_bounds_tuple = kwargs.pop('name_bounds_tuple', None) self.axis = kwargs.pop('axis', None) # if True, bounds were interpolated. if False, they were loaded from source data. used in conforming units. self._has_interpolated_bounds = False @@ -213,7 +218,7 @@ def extent(self): @property def name_bounds(self): if self._name_bounds is None: - ret = '{0}_{1}'.format(self.name, self.name_bounds_suffix) + ret = '{0}_{1}'.format(self.name, self.name_bounds_dimension) else: ret = self._name_bounds return ret @@ -221,22 +226,39 @@ def name_bounds(self): @name_bounds.setter def name_bounds(self, value): self._name_bounds = value + + @property + def name_bounds_tuple(self): + if self._name_bounds_tuple is None: + ret = tuple(['{0}_{1}'.format(prefix, self.name) for prefix in [NAME_BOUNDS_DIMENSION_LOWER, + NAME_BOUNDS_DIMENSION_UPPER]]) + else: + ret = self._name_bounds_tuple + return ret + + @name_bounds_tuple.setter + def name_bounds_tuple(self, value): + if value is not None: + value = tuple(value) + assert len(value) == 2 + self._name_bounds_tuple = value @property def resolution(self): if self.bounds is None and self.value.shape[0] < 2: - raise(ResolutionError('With no bounds and a single coordinate, approximate resolution may not be determined.')) + msg = 'With no bounds and a single coordinate, approximate resolution may not be determined.' 
+ raise ResolutionError(msg) elif self.bounds is None: res_array = np.diff(self.value[0:constants.RESOLUTION_LIMIT]) else: res_bounds = self.bounds[0:constants.RESOLUTION_LIMIT] - res_array = res_bounds[:,1] - res_bounds[:,0] + res_array = res_bounds[:, 1] - res_bounds[:, 0] ret = np.abs(res_array).mean() - return(ret) + return ret @property def shape(self): - return(self.uid.shape) + return self.uid.shape def cfunits_conform(self, to_units): """ @@ -330,7 +352,7 @@ def get_between(self,lower,upper,return_indices=False,closed=False,use_bounds=Tr return(ret) - def get_iter(self): + def get_iter(self, with_bounds=True): ref_value, ref_bounds = self._get_iter_value_bounds_() if ref_bounds is None: @@ -346,21 +368,19 @@ def get_iter(self): raise ValueError(msg) ref_name_uid = self.name_uid - ref_name_bounds_lower = '{0}_lower'.format(self.name_bounds) - ref_name_bounds_upper = '{0}_upper'.format(self.name_bounds) + ref_name_bounds_lower, ref_name_bounds_upper = self.name_bounds_tuple for ii in range(self.value.shape[0]): - # yld = {ref_name_value: ref_value[ii], ref_name_uid: ref_uid[ii]} - yld = OrderedDict([(ref_name_value, ref_value[ii]), (ref_name_uid, ref_uid[ii])]) - if has_bounds: - ref_name_bounds_lower_value = ref_bounds[ii, 0] - ref_name_bounds_upper_value = ref_bounds[ii, 1] - - else: - ref_name_bounds_lower_value = None - ref_name_bounds_upper_value = None - yld[ref_name_bounds_lower] = ref_name_bounds_lower_value - yld[ref_name_bounds_upper] = ref_name_bounds_upper_value + yld = OrderedDict([(ref_name_uid, ref_uid[ii]), (ref_name_value, ref_value[ii])]) + if with_bounds: + if has_bounds: + ref_name_bounds_lower_value = ref_bounds[ii, 0] + ref_name_bounds_upper_value = ref_bounds[ii, 1] + else: + ref_name_bounds_lower_value = None + ref_name_bounds_upper_value = None + yld[ref_name_bounds_lower] = ref_name_bounds_lower_value + yld[ref_name_bounds_upper] = ref_name_bounds_upper_value yield ii, yld def set_extrapolated_bounds(self): @@ -379,7 +399,8 @@ def 
write_to_netcdf_dataset(self, dataset, unlimited=False, bounds_dimension_nam :type dataset: :class:`netCDF4.Dataset` :param bool unlimited: If ``True``, create the dimension on the netCDF object with ``size=None``. See http://unidata.github.io/netcdf4-python/netCDF4.Dataset-class.html#createDimension. - :param str bounds_dimension_name: If ``None``, default to :attrs:`ocgis.constants.OCGIS_BOUNDS`. + :param str bounds_dimension_name: If ``None``, default to + :attr:`ocgis.interface.base.dimension.base.VectorDimension.name_bounds_dimension`. :param kwargs: Extra keyword arguments in addition to ``dimensions`` to pass to ``createVariable``. See http://unidata.github.io/netcdf4-python/netCDF4.Dataset-class.html#createVariable """ @@ -387,7 +408,7 @@ def write_to_netcdf_dataset(self, dataset, unlimited=False, bounds_dimension_nam if self.name is None: raise ValueError('Writing to netCDF requires a "name" be set to a string value. It is currently None.') - bounds_dimension_name = bounds_dimension_name or self.name_bounds_suffix + bounds_dimension_name = bounds_dimension_name or self.name_bounds_dimension if unlimited: size = None diff --git a/src/ocgis/interface/base/dimension/temporal.py b/src/ocgis/interface/base/dimension/temporal.py index 1f032df6c..5edcc09f6 100644 --- a/src/ocgis/interface/base/dimension/temporal.py +++ b/src/ocgis/interface/base/dimension/temporal.py @@ -35,8 +35,6 @@ def __init__(self, *args, **kwargs): self.format_time = kwargs.pop('format_time', True) kwargs['axis'] = kwargs.get('axis') or 'T' - kwargs['name'] = kwargs.get('name') or 'time' - kwargs['name_uid'] = kwargs.get('name_uid') or 'tid' kwargs['units'] = kwargs.get('units') or constants.DEFAULT_TEMPORAL_UNITS super(TemporalDimension, self).__init__(*args, **kwargs) diff --git a/src/ocgis/interface/base/field.py b/src/ocgis/interface/base/field.py index a1da1badc..0e4d6f851 100644 --- a/src/ocgis/interface/base/field.py +++ b/src/ocgis/interface/base/field.py @@ -4,14 +4,17 @@ import 
itertools import numpy as np +import fiona +from shapely.geometry import mapping from shapely.ops import cascaded_union from shapely.geometry.multipoint import MultiPoint from shapely.geometry.multipolygon import MultiPolygon from shapely.geometry.point import Point +from ocgis.constants import NAME_DIMENSION_REALIZATION, NAME_DIMENSION_LEVEL, NAME_DIMENSION_TEMPORAL, \ + NAME_UID_DIMENSION_TEMPORAL, NAME_UID_DIMENSION_LEVEL, NAME_UID_DIMENSION_REALIZATION, NAME_UID_FIELD from ocgis.interface.base.attributes import Attributes -from ocgis.util.helpers import get_default_or_apply, get_none_or_slice, get_formatted_slice, get_reduced_slice, \ - set_name_attributes +from ocgis.util.helpers import get_default_or_apply, get_none_or_slice, get_formatted_slice, get_reduced_slice from ocgis.interface.base.variable import Variable, VariableCollection from ocgis import SpatialCollection @@ -41,7 +44,7 @@ class Field(Attributes): _variables = None def __init__(self, variables=None, realization=None, temporal=None, level=None, spatial=None, meta=None, uid=None, - name=None, regrid_destination=False, attrs=None): + name=None, regrid_destination=False, attrs=None, name_uid=NAME_UID_FIELD): if spatial is None: msg = 'At least "spatial" is required.' 
@@ -49,6 +52,7 @@ def __init__(self, variables=None, realization=None, temporal=None, level=None, Attributes.__init__(self, attrs=attrs) + self.name_uid = name_uid self.realization = realization self.temporal = temporal self.uid = uid @@ -68,8 +72,7 @@ def __init__(self, variables=None, realization=None, temporal=None, level=None, self._has_assigned_coordinate_system = False # set default names for the dimensions - name_mapping = {self.realization: 'realization', self.level: 'level'} - set_name_attributes(name_mapping) + self._set_default_names_uids_() def __iter__(self): raise NotImplementedError @@ -196,7 +199,11 @@ def _get_dimension_iterator_1d_(target): if attr is None: ret = [(0, {})] else: - ret = attr.get_iter() + if target == 'realization': + with_bounds = False + else: + with_bounds = True + ret = attr.get_iter(with_bounds=with_bounds) return ret is_masked = np.ma.is_masked @@ -256,11 +263,10 @@ def _get_dimension_iterator_1d_(target): iters.append(self.spatial.get_geom_iter()) for [(ridx, rlz), (tidx, t), (lidx, l), (sridx, scidx, geom, gid)] in itertools.product(*iters): - yld = {} + yld = OrderedDict() for element in [rlz, t, l]: yld.update(element) yld['geom'] = geom - yld[r_gid_name] = gid for variable_alias, variable in self.variables.iteritems(): ref_idx = variable.value[ridx, tidx, lidx, sridx, scidx] # determine if the data is masked @@ -372,6 +378,49 @@ def iter(self): yld[variable.alias] = value yield yld + def write_fiona(self, path=None, driver='ESRI Shapefile', melted=False, fobject=None): + """ + Write a ``fiona``-enabled format. This may go to a newly created location specified by ``path`` or an open + collection object set by ``fobject``. + + :param str path: Path to the location to write. + :param str driver: The ``fiona`` driver to use for writing. + :param bool melted: If ``True``, use a melted iterator. + :param fobject: The collection object to write to. This will overload ``path``. 
+ :type fobject: :class:`fiona.collection.Collection` + """ + + build = True + geom_type = self.spatial.abstraction_geometry.geom_type + try: + for row in self.get_iter(melted=melted): + geom = row.pop('geom') + for k, v in row.iteritems(): + try: + row[k] = v.tolist() + except AttributeError: + continue + if build: + from ocgis.conv.fiona_ import FionaConverter + fproperties = OrderedDict() + fconvert = {} + for k, v in row.iteritems(): + ftype = FionaConverter.get_field_type(type(v)) + fproperties[k] = 'int' if ftype is None else ftype + if ftype == 'str': + fconvert[k] = str + if fobject is None: + schema = {'geometry': geom_type, 'properties': fproperties} + fobject = fiona.open(path, driver=driver, schema=schema, crs=self.spatial.crs.value, mode='w') + build = False + for k, v in fconvert.iteritems(): + row[k] = v(row[k]) + frow = {'properties': row, 'geometry': mapping(geom)} + fobject.write(frow) + finally: + if fobject is not None: + fobject.close() + def write_to_netcdf_dataset(self, dataset, file_only=False, **kwargs): """ Write the field object to an open netCDF dataset object. 
@@ -512,6 +561,20 @@ def _set_new_value_mask_(field,mask): ref = v[idx_r,idx_t,idx_l] ref.mask = ref_logical_or(ref.mask,mask) + def _set_default_names_uids_(self): + + def _set_(dim, name, name_uid): + dim = getattr(self, dim) + if dim is not None: + if dim._name_uid is None: + dim.name_uid = name_uid + if dim.name is None: + dim.name = name + + _set_('realization', NAME_DIMENSION_REALIZATION, NAME_UID_DIMENSION_REALIZATION) + _set_('temporal', NAME_DIMENSION_TEMPORAL, NAME_UID_DIMENSION_TEMPORAL) + _set_('level', NAME_DIMENSION_LEVEL, NAME_UID_DIMENSION_LEVEL) + class DerivedField(Field): diff --git a/src/ocgis/test/base.py b/src/ocgis/test/base.py index ab341a09e..4581ee2b7 100644 --- a/src/ocgis/test/base.py +++ b/src/ocgis/test/base.py @@ -2,7 +2,6 @@ import unittest import abc import tempfile -import datetime import subprocess import itertools import shutil @@ -10,9 +9,9 @@ import os from collections import OrderedDict import netCDF4 as nc - import numpy as np +import datetime from ocgis.api.collection import SpatialCollection from ocgis.interface.base.field import Field from ocgis.interface.base.dimension.spatial import SpatialGridDimension, SpatialDimension @@ -95,6 +94,10 @@ def assertDictEqual(self, d1, d2, msg=None): self.assertEqual(v, d2[k], msg=msg) self.assertEqual(set(d1.keys()), set(d2.keys())) + def assertIsInstances(self, obj, klasses): + for klass in klasses: + self.assertIsInstance(obj, klass) + def assertNumpyAll(self, arr1, arr2, check_fill_value_dtype=True, check_arr_dtype=True, check_arr_type=True): """ Asserts arrays are equal according to the test criteria. @@ -377,6 +380,15 @@ def get_netcdf_path_no_row_column(self): field.write_to_netcdf_dataset(ds) return path + def get_temporary_file_path(self, name): + """ + :param str name: The name to append to the current temporary output directory. + :returns: Temporary path in the current output directory. 
+ :rtype: str + """ + + return os.path.join(self.current_dir_output, name) + def get_temporary_output_directory(self): """ :returns: A path to a temporary directory with an appropriate prefix. diff --git a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py index 790edbdd6..b99561053 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py +++ b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py @@ -483,21 +483,10 @@ def test_get_field_without_row_column_vectors(self): self.assertNcEqual(path, path2, ignore_attributes={'foo': ['grid_mapping']}, ignore_variables=['latitude_longitude']) - def test_get_vector_dimension(self): - # test exception raised with no row and column - path = self.get_netcdf_path_no_row_column() - rd = RequestDataset(path) - driver = DriverNetcdf(rd) - k = 'row' - v = {'name_uid': 'yc_id', 'axis': 'Y', 'adds': {'interpolate_bounds': False}, 'name': 'yc', 'cls': VectorDimension} - source_metadata = rd.source_metadata - res = driver._get_vector_dimension_(k, v, source_metadata) - self.assertEqual(res['name'], 'yc') - - def test_get_name_bounds_suffix(self): + def test_get_name_bounds_dimension(self): rd = self.test_data.get_rd('cancm4_tas') source_metadata = rd.source_metadata - res = DriverNetcdf._get_name_bounds_suffix_(source_metadata) + res = DriverNetcdf._get_name_bounds_dimension_(source_metadata) self.assertEqual(res, 'bnds') # remove any mention of bounds from the dimension map and try again @@ -508,7 +497,7 @@ def test_get_name_bounds_suffix(self): # likely a nonetype if value is not None: raise - res = DriverNetcdf._get_name_bounds_suffix_(source_metadata) + res = DriverNetcdf._get_name_bounds_dimension_(source_metadata) self.assertIsNone(res) # now remove the bounds key completely @@ -518,9 +507,20 @@ def test_get_name_bounds_suffix(self): except AttributeError: if value is not None: raise - res = 
DriverNetcdf._get_name_bounds_suffix_(source_metadata) + res = DriverNetcdf._get_name_bounds_dimension_(source_metadata) self.assertIsNone(res) + def test_get_vector_dimension(self): + # test exception raised with no row and column + path = self.get_netcdf_path_no_row_column() + rd = RequestDataset(path) + driver = DriverNetcdf(rd) + k = 'row' + v = {'name_uid': 'yc_id', 'axis': 'Y', 'adds': {'interpolate_bounds': False}, 'name': 'yc', 'cls': VectorDimension} + source_metadata = rd.source_metadata + res = driver._get_vector_dimension_(k, v, source_metadata) + self.assertEqual(res['name'], 'yc') + def test_open(self): # test a multifile dataset where the variable does not appear in all datasets uri1 = self.test_data.get_uri('cancm4_tas') diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_base.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_base.py index 8b4c011bb..c7e8c766c 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_base.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_base.py @@ -5,6 +5,7 @@ from cfunits.cfunits import Units +from ocgis.constants import OCGIS_BOUNDS from ocgis.interface.base.variable import AbstractSourcedVariable, AbstractValueVariable from ocgis import constants from ocgis.exc import EmptySubsetError, ResolutionError, BoundsAlreadyAvailableError @@ -100,26 +101,31 @@ def test_name_value(self): class TestVectorDimension(TestBase): def test_init(self): - self.assertEqual(VectorDimension.__bases__, (AbstractSourcedVariable, AbstractUidValueDimension)) - vd = VectorDimension(value=[4, 5]) + self.assertIsInstances(vd, (AbstractSourcedVariable, AbstractUidValueDimension)) self.assertIsInstance(vd.attrs, OrderedDict) self.assertIsNone(vd.name) self.assertIsNone(vd.name_value) self.assertEqual(vd.name_uid, 'None_uid') - self.assertEqual(vd.name_bounds, 'None_bounds') - self.assertEqual(vd.name_bounds_suffix, 
constants.OCGIS_BOUNDS) + self.assertEqual(vd.name_bounds, 'None_{0}'.format(OCGIS_BOUNDS)) self.assertIsNone(vd.axis) + self.assertEqual(vd.name_bounds_dimension, OCGIS_BOUNDS) # test passing attributes to the constructor attrs = {'something': 'underground'} - vd = VectorDimension(value=[4, 5], attrs=attrs, axis='D') + vd = VectorDimension(value=[4, 5], attrs=attrs, axis='D', name_bounds_dimension='vds') self.assertEqual(vd.attrs, attrs) self.assertEqual(vd.axis, 'D') + self.assertEqual(vd.name_bounds_dimension, 'vds') + # empty dimensions are not allowed with self.assertRaises(ValueError): VectorDimension() + # test passing the name bounds + vd = VectorDimension(value=[5, 6], name_bounds='lat_bnds') + self.assertEqual(vd.name_bounds, 'lat_bnds') + def test_init_conform_units_to(self): target = np.array([4, 5, 6]) target_copy = target.copy() @@ -178,13 +184,18 @@ def test_get_iter(self): vdim = VectorDimension(value=[10, 20, 30, 40, 50], name='foo') tt = list(vdim.get_iter()) - self.assertEqual(tt[3], (3, {'foo_uid': 4, 'foo': 40, 'foo_bounds_lower': None, 'foo_bounds_upper': None})) + self.assertEqual(tt[3], (3, {'foo_uid': 4, 'foo': 40, 'lb_foo': None, 'ub_foo': None})) self.assertIsInstance(tt[0][1], OrderedDict) vdim = VectorDimension(value=[10, 20, 30, 40, 50], bounds=[(ii - 5, ii + 5) for ii in [10, 20, 30, 40, 50]], name='foo', name_uid='hi') tt = list(vdim.get_iter()) - self.assertEqual(tt[3], (3, {'hi': 4, 'foo': 40, 'foo_bounds_lower': 35, 'foo_bounds_upper': 45})) + self.assertEqual(tt[3], (3, {'hi': 4, 'foo': 40, 'lb_foo': 35, 'ub_foo': 45})) + + vdim = VectorDimension(value=[4, 5, 6, 7, 8, 9, 10], name='new') + for slc, row in vdim.get_iter(with_bounds=False): + for k in row.iterkeys(): + self.assertFalse(OCGIS_BOUNDS in k) def test_interpolate_bounds(self): value = [10, 20, 30, 40, 50] @@ -212,14 +223,30 @@ def test_name_bounds(self): self.assertEqual(vd.name_bounds, 'hello_bounds') self.assertIsNone(vd._name_bounds) - vd = VectorDimension(value=[5, 
6], name='hello', name_bounds_suffix='suffit') - self.assertEqual(vd.name_bounds, 'hello_suffit') - vd = VectorDimension(value=[5, 6], name_bounds='hello') self.assertEqual(vd.name_bounds, 'hello') self.assertEqual(vd._name_bounds, 'hello') self.assertIsNone(vd.name) + vd = VectorDimension(value=[5, 6], name_bounds='hello', name='hi') + self.assertEqual(vd.name_bounds, 'hello') + self.assertEqual(vd._name_bounds, 'hello') + self.assertEqual(vd.name, 'hi') + + vd = VectorDimension(value=[5, 6], name='hello', name_bounds_dimension='whatever') + self.assertEqual(vd.name_bounds, 'hello_whatever') + + def test_name_bounds_tuple(self): + vd = VectorDimension(value=[4, 5]) + self.assertEqual(vd.name_bounds_tuple, ('lb_None', 'ub_None')) + self.assertIsNone(vd._name_bounds_tuple) + + vd = VectorDimension(value=[4, 5], name='never') + self.assertEqual(vd.name_bounds_tuple, ('lb_never', 'ub_never')) + + vd = VectorDimension(value=[4, 5], name_bounds_tuple=('a', 'b')) + self.assertEqual(vd.name_bounds_tuple, ('a', 'b')) + def test_one_value(self): """Test passing a single value.""" @@ -323,7 +350,6 @@ def test_write_to_netcdf_dataset(self): unlimited=[False, True], kwargs=[{}, {'zlib': True}], bounds_dimension_name=[None, other_bounds_name], - name_bounds_suffix=[None, 'asuffix'], axis=[None, 'GG'], name=[None, 'temporal'], name_bounds=[None, 'time_bounds'], @@ -336,7 +362,7 @@ def test_write_to_netcdf_dataset(self): else: attrs = None vd = VectorDimension(value=[2., 4.], attrs=attrs, name=k.name, name_bounds=k.name_bounds, - name_value=k.name_value, name_bounds_suffix=k.name_bounds_suffix, axis=k.axis) + name_value=k.name_value, axis=k.axis) if k.with_bounds: vd.set_extrapolated_bounds() with nc_scope(path, 'w') as ds: diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_temporal.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_temporal.py index 1e3917624..3c4de6f8d 100644 --- 
a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_temporal.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_temporal.py @@ -135,8 +135,9 @@ def get_temporal_dimension(self, add_bounds=True, start=None, stop=None, days=1, def test_init(self): td = TemporalDimension(value=[datetime.datetime(2000, 1, 1)]) self.assertEqual(td.axis, 'T') - self.assertEqual(td.name, 'time') - self.assertEqual(td.name_uid, 'tid') + self.assertIsNone(td.name) + self.assertEqual(td.name_uid, 'None_uid') + self.assertIsNone(td._name_uid) self.assertEqual(td.calendar, constants.DEFAULT_TEMPORAL_CALENDAR) self.assertEqual(td.units, constants.DEFAULT_TEMPORAL_UNITS) self.assertIsInstance(td, VectorDimension) diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py index c8beecc2a..9e8da2de2 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py @@ -4,13 +4,16 @@ from collections import OrderedDict import numpy as np +import fiona from shapely import wkt +from shapely.geometry import shape from shapely.ops import cascaded_union from datetime import datetime as dt import datetime -from ocgis import constants, SpatialCollection +from ocgis import constants, SpatialCollection, ShpCabinet from ocgis import RequestDataset +from ocgis.constants import NAME_UID_FIELD, NAME_UID_DIMENSION_LEVEL from ocgis.interface.base.attributes import Attributes from ocgis.interface.base.crs import WGS84, Spherical from ocgis.util.helpers import get_date_list, make_poly @@ -123,8 +126,9 @@ class TestField(AbstractTestField): def test_init(self): for b, wv in itertools.product([True, False], [True, False]): field = self.get_field(with_bounds=b, with_value=wv, with_dimension_names=False) + self.assertEqual(field.name_uid, NAME_UID_FIELD) self.assertEqual(field.level.name, 'level') - 
self.assertEqual(field.level.name_uid, 'level_uid') + self.assertEqual(field.level.name_uid, NAME_UID_DIMENSION_LEVEL) self.assertEqual(field.spatial.grid.row.name, 'yc') with self.assertRaises(NotImplementedError): list(field) @@ -254,12 +258,13 @@ def test_get_iter(self): rows = list(field.get_iter()) self.assertEqual(len(rows), 2 * 31 * 2 * 3 * 4) rows[100]['geom'] = rows[100]['geom'].bounds - real = {'realization_bounds_lower': None, 'vid': 1, 'time_bounds_upper': datetime.datetime(2000, 1, 6, 0, 0), - 'realization_bounds_upper': None, 'year': 2000, 'gid': 5, 'level_bounds_upper': 100, - 'realization_uid': 1, 'realization': 1, 'geom': (-100.5, 38.5, -99.5, 39.5), 'level_bounds_lower': 0, - 'variable': 'tmax', 'month': 1, 'time_bounds_lower': datetime.datetime(2000, 1, 5, 0, 0), 'day': 5, - 'level': 50, 'did': None, 'value': 0.32664490177209615, 'alias': 'tmax', 'level_uid': 1, + real = {'vid': 1, 'ub_time': datetime.datetime(2000, 1, 6, 0, 0), + 'year': 2000, 'gid': 5, 'ub_level': 100, + 'rid': 1, 'realization': 1, 'geom': (-100.5, 38.5, -99.5, 39.5), 'lb_level': 0, + 'variable': 'tmax', 'month': 1, 'lb_time': datetime.datetime(2000, 1, 5, 0, 0), 'day': 5, + 'level': 50, 'did': None, 'value': 0.32664490177209615, 'alias': 'tmax', 'lid': 1, 'time': datetime.datetime(2000, 1, 5, 12, 0), 'tid': 5, 'name': 'tmax'} + self.assertAsSetEqual(rows[100].keys(), real.keys()) for k, v in rows[100].iteritems(): self.assertEqual(real[k], v) self.assertEqual(set(real.keys()), set(rows[100].keys())) @@ -268,7 +273,7 @@ def test_get_iter(self): # test without names field = self.get_field(with_value=True, with_dimension_names=False) rows = list(field.get_iter()) - self.assertAsSetEqual(rows[10].keys(), ['vid', 'gid', 'month', 'year', 'alias', 'geom', 'realization', 'realization_uid', 'time_bounds_lower', 'level_bounds_upper', 'variable', 'day', 'realization_bounds_lower', 'name', 'level', 'did', 'level_bounds_lower', 'value', 'realization_bounds_upper', 'level_uid', 'time', 
'tid', 'time_bounds_upper']) + self.assertAsSetEqual(rows[10].keys(), ['lid', 'name', 'vid', 'ub_time', 'did', 'lb_level', 'time', 'year', 'value', 'month', 'alias', 'tid', 'geom', 'ub_level', 'rlz', 'variable', 'gid', 'rid', 'level', 'lb_time', 'day']) # test not melted field = self.get_field(with_value=True) @@ -516,6 +521,88 @@ def test_variables(self): with self.assertRaises(ValueError): field.variables = 'foo' + def test_write_fiona(self): + + keywords = dict(with_realization=[True, False], + with_level=[True, False], + with_temporal=[True, False], + driver=['ESRI Shapefile', 'GeoJSON'], + melted=[False, True]) + + for ii, k in enumerate(self.iter_product_keywords(keywords)): + path = os.path.join(self.current_dir_output, '{0}'.format(ii)) + field = self.get_field(with_value=True, crs=WGS84(), with_dimension_names=False, + with_realization=k.with_realization, with_level=k.with_level, + with_temporal=k.with_temporal) + newvar = deepcopy(field.variables.first()) + newvar.alias = 'newvar' + newvar.value += 10 + field.variables.add_variable(newvar, assign_new_uid=True) + field = field[:, 0:2, :, 0:2, 0:2] + + field.write_fiona(path, driver=k.driver, melted=k.melted) + + with fiona.open(path) as source: + records = list(source) + + if k.melted: + dd = {a: [] for a in field.variables.keys()} + for r in records: + dd[r['properties']['alias']].append(r['properties']['value']) + for kk, v in dd.iteritems(): + self.assertAlmostEqual(np.mean(v), field.variables[kk].value.mean(), places=6) + else: + for alias in field.variables.keys(): + values = [r['properties'][alias] for r in records] + self.assertAlmostEqual(np.mean(values), field.variables[alias].value.mean(), places=6) + + n = reduce(lambda x, y: x*y, field.shape) + if k.melted: + n *= len(field.variables) + self.assertEqual(n, len(records)) + + # test with a point abstraction + field = self.get_field(with_value=True, crs=WGS84()) + field = field[0, 0, 0, 0, 0] + field.spatial.abstraction = 'point' + path = 
self.get_temporary_file_path('foo.shp') + field.write_fiona(path) + with fiona.open(path) as source: + gtype = source.meta['schema']['geometry'] + self.assertEqual(gtype, 'Point') + + # test with a fake object passed in as a fiona object. this should raise an exception as the method will attempt + # to use the object instead of creating a new collection. + + class DontHateMe(Exception): + pass + + class Nothing(object): + def close(self): + raise DontHateMe() + + with self.assertRaises(DontHateMe): + field.write_fiona(path, fobject=Nothing()) + + # test all geometries are accounted for as well as properties + path = ShpCabinet().get_shp_path('state_boundaries') + rd = RequestDataset(path) + field = rd.get() + out = self.get_temporary_file_path('foo.shp') + field.write_fiona(out) + + with fiona.open(out, 'r') as source: + for record in source: + target = shape(record['geometry']) + self.assertEqual(record['properties'].keys(), + [u'UGID', u'STATE_FIPS', u'ID', u'STATE_NAME', u'STATE_ABBR']) + found = False + for geom in field.spatial.abstraction_geometry.value.flat: + if target.almost_equals(geom): + found = True + break + self.assertTrue(found) + def test_write_to_netcdf_dataset(self): keywords = dict(file_only=[False, True], second_variable_alias=[None, 'tmin_alias'], diff --git a/src/ocgis/util/addict/__init__.py b/src/ocgis/util/addict/__init__.py new file mode 100644 index 000000000..6140b4a1a --- /dev/null +++ b/src/ocgis/util/addict/__init__.py @@ -0,0 +1,9 @@ +from .addict import Dict + + +__title__ = 'addict' +__version__ = '0.2.7' +__author__ = 'Mats Julian Olsen' +__license__ = 'MIT' +__copyright__ = 'Copyright 2014 Mats Julian Olsen' +__all__ = ['Dict'] diff --git a/src/ocgis/util/addict/addict.py b/src/ocgis/util/addict/addict.py new file mode 100644 index 000000000..927506655 --- /dev/null +++ b/src/ocgis/util/addict/addict.py @@ -0,0 +1,231 @@ +from inspect import isgenerator + + +class Dict(dict): + """ + Dict is a subclass of dict, which allows you 
to get AND SET(!!) + items in the dict using the attribute syntax! + + When you previously had to write: + + my_dict = {'a': {'b': {'c': [1, 2, 3]}}} + + you can now do the same simply by: + + my_Dict = Dict() + my_Dict.a.b.c = [1, 2, 3] + + Or for instance, if you'd like to add some additional stuff, + where you'd with the normal dict would write + + my_dict['a']['b']['d'] = [4, 5, 6], + + you may now do the AWESOME + + my_Dict.a.b.d = [4, 5, 6] + + instead. But hey, you can always use the same syntax as a regular dict, + however, this will not raise TypeErrors or AttributeErrors at any time + while you try to get an item. A lot like a defaultdict. + + """ + def __init__(self, *args, **kwargs): + """ + If we're initialized with a dict, make sure we turn all the + subdicts into Dicts as well. + + """ + for arg in args: + if not arg: + continue + elif isinstance(arg, dict): + for key, val in arg.items(): + self[key] = val + elif isinstance(arg, tuple) and (not isinstance(arg[0], tuple)): + self[arg[0]] = arg[1] + elif isinstance(arg, (list, tuple)) or isgenerator(arg): + for key, val in arg: + self[key] = val + else: + raise TypeError("Dict does not understand " + "{0} types".format(type(arg))) + + for key, val in kwargs.items(): + self[key] = val + + def __setattr__(self, name, value): + """ + setattr is called when the syntax a.b = 2 is used to set a value. + + """ + if hasattr(Dict, name): + raise AttributeError("'Dict' object attribute " + "'{0}' is read-only".format(name)) + else: + self[name] = value + + def __setitem__(self, name, value): + """ + This is called when trying to set a value of the Dict using []. + E.g. some_instance_of_Dict['b'] = val. If 'val + + """ + value = self._hook(value) + super(Dict, self).__setitem__(name, value) + + @classmethod + def _hook(cls, item): + """ + Called to ensure that each dict-instance that are being set + is a addict Dict. Recurses. 
+ + """ + if isinstance(item, dict): + return cls(item) + elif isinstance(item, (list, tuple)): + return type(item)(cls._hook(elem) for elem in item) + return item + + def __getattr__(self, item): + return self.__getitem__(item) + + def __getitem__(self, name): + """ + This is called when the Dict is accessed by []. E.g. + some_instance_of_Dict['a']; + If the name is in the dict, we return it. Otherwise we set both + the attr and item to a new instance of Dict. + + """ + if name not in self: + self[name] = {} + return super(Dict, self).__getitem__(name) + + def __delattr__(self, name): + """ + Is invoked when del some_instance_of_Dict.b is called. + + """ + del self[name] + + def __dir__(self): + """ + Is invoked on a Dict instance causes __getitem__() to get invoked + which in this module will trigger the creation of the following + properties: `__members__` and `__methods__` + + To avoid these keys from being added, we simply return an explicit + call to dir for the Dict object + """ + return dir(Dict) + + def _ipython_display_(self): + print(str(self)) # pragma: no cover + + def _repr_html_(self): + return str(self) + + def get(self, k, default=None, protected=False): + """ + If ``protected`` is ``True``, check the key exist in the dictionary. If it does not, raise ``KeyError``. Behaves + like the superclass ``get`` method otherwise. + + >>> a = Dict() + >>> a['foo'] = 6 + >>> a.get('not_there') + {} + >>> a.get('not_there', protected=True) + KeyError + + """ + if protected and default is None: + if k not in self: + raise KeyError(k) + return super(Dict, self).get(k, default) + + def prune(self, prune_zero=False, prune_empty_list=True): + """ + Removes all empty Dicts and falsy stuff inside the Dict. 
+ E.g + >>> a = Dict() + >>> a.b.c.d + {} + >>> a.a = 2 + >>> a + {'a': 2, 'b': {'c': {'d': {}}}} + >>> a.prune() + >>> a + {'a': 2} + + Set prune_zero=True to remove 0 values + E.g + >>> a = Dict() + >>> a.b.c.d = 0 + >>> a.prune(prune_zero=True) + >>> a + {} + + Set prune_empty_list=False to have them persist + E.g + >>> a = Dict({'a': []}) + >>> a.prune() + >>> a + {} + >>> a = Dict({'a': []}) + >>> a.prune(prune_empty_list=False) + >>> a + {'a': []} + """ + for key, val in list(self.items()): + if ((not val) and ((val != 0) or prune_zero) and + not isinstance(val, list)): + del self[key] + elif isinstance(val, Dict): + val.prune(prune_zero, prune_empty_list) + if not val: + del self[key] + elif isinstance(val, (list, tuple)): + new_iter = self._prune_iter(val, prune_zero, prune_empty_list) + if (not new_iter) and prune_empty_list: + del self[key] + else: + if isinstance(val, tuple): + new_iter = tuple(new_iter) + self[key] = new_iter + + @classmethod + def _prune_iter(cls, some_iter, prune_zero=False, prune_empty_list=True): + new_iter = [] + for item in some_iter: + if item == 0 and prune_zero: + continue + elif isinstance(item, Dict): + item.prune(prune_zero, prune_empty_list) + if item: + new_iter.append(item) + elif isinstance(item, (list, tuple)): + new_item = type(item)( + cls._prune_iter(item, prune_zero, prune_empty_list)) + if new_item or not prune_empty_list: + new_iter.append(new_item) + else: + new_iter.append(item) + return new_iter + + def to_dict(self): + """ + Recursively turn your addict Dicts into dicts. 
+ + """ + base = dict() + for key, value in self.items(): + if isinstance(value, type(self)): + base[key] = value.to_dict() + elif isinstance(value, (list, tuple)): + base[key] = type(value)( + item.to_dict() if isinstance(item, type(self)) else + item for item in value) + else: + base[key] = value + return base + From dd817e118e191e5e3448b7040604bc9c136b5d3e Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Mon, 26 Jan 2015 11:22:34 -0700 Subject: [PATCH 62/71] support unmelted tabular output format #353 The default iteration method for creating tabular output formats is now unmelted. Variables aliases are each given their own column. An environment variable was added to allow the melted parameter to be globally set. Multiple request datasets still require a melted form which will be adjusted automatically with a warning. --- doc/api.rst | 23 +- src/ocgis/api/collection.py | 45 +- src/ocgis/api/interpreter.py | 171 +++--- src/ocgis/api/operations.py | 91 +-- src/ocgis/api/parms/definition.py | 559 ++++++++++-------- src/ocgis/api/request/driver/vector.py | 8 +- src/ocgis/api/subset.py | 144 ++--- src/ocgis/calc/base.py | 14 +- src/ocgis/calc/library/index/duration.py | 136 ++--- src/ocgis/conv/base.py | 55 +- src/ocgis/conv/csv_.py | 80 +-- src/ocgis/conv/fiona_.py | 94 +-- src/ocgis/interface/base/dimension/base.py | 118 ++-- src/ocgis/interface/base/dimension/spatial.py | 156 ++--- src/ocgis/interface/base/field.py | 105 +++- src/ocgis/interface/base/variable.py | 44 +- src/ocgis/test/base.py | 112 ++-- src/ocgis/test/test_misc/test_conversion.py | 41 +- .../test_ocgis/test_api/test_collection.py | 108 ++-- .../test_ocgis/test_api/test_interpreter.py | 27 +- .../test_ocgis/test_api/test_operations.py | 272 +++++---- .../test_api/test_parms/test_definition.py | 270 +++++---- .../test_request/test_driver/test_vector.py | 9 +- .../test/test_ocgis/test_api/test_subset.py | 169 +++++- .../test_library/test_index/test_duration.py | 114 ++-- 
.../test/test_ocgis/test_conv/test_base.py | 114 ++-- .../test/test_ocgis/test_conv/test_csv_.py | 54 ++ .../test/test_ocgis/test_conv/test_fiona_.py | 76 ++- .../test_interface/test_base/test_field.py | 461 +++++++++------ .../test_interface/test_base/test_variable.py | 39 +- .../test_ocgis/test_util/test_environment.py | 16 +- .../test_real_data/test_random_datasets.py | 6 +- src/ocgis/test/test_simple/test_simple.py | 194 +++--- src/ocgis/test/test_work.py | 34 ++ src/ocgis/util/environment.py | 131 ++-- src/ocgis/util/logging_ocgis.py | 92 ++- 36 files changed, 2503 insertions(+), 1679 deletions(-) create mode 100644 src/ocgis/test/test_work.py diff --git a/doc/api.rst b/doc/api.rst index 441190d64..4a535f258 100644 --- a/doc/api.rst +++ b/doc/api.rst @@ -6,8 +6,8 @@ Python API Additional information on keyword arguments can be found below the initial documentation in the `Detailed Argument Information`_ section. -:mod:`ocgis.env` -================ +Environment/Global Parameters +============================= These are global parameters used by OpenClimateGIS. For those familiar with :mod:`arcpy` programming, this behaves similarly to the :mod:`arcpy.env` module. Any :mod:`ocgis.env` variable be overloaded with system environment variables by setting `OCGIS_`. @@ -22,6 +22,9 @@ These are global parameters used by OpenClimateGIS. For those familiar with :mod :attr:`env.DIR_SHPCABINET` = Location of the shapefile directory for use by :class:`~ocgis.ShpCabinet`. +:attr:`env.MELTED` = ``False`` + If ``True``, use a melted tabular format with all variable values collected in a single column. + :attr:`env.OVERWRITE` = ``False`` .. warning:: Use with caution. @@ -39,11 +42,11 @@ These are global parameters used by OpenClimateGIS. For those familiar with :mod :attr:`env.VERBOSE` = ``False`` Indicate if additional output information should be printed to terminal. 
-:class:`ocgis.OcgOperations` -============================ +Operations API (:class:`ocgis.OcgOperations`) +============================================= .. autoclass:: ocgis.OcgOperations - :members: execute, get_base_request_size +:members: execute, get_base_request_size Detailed Argument Information ----------------------------- @@ -59,10 +62,10 @@ A ``dataset`` is the target file(s) or object(s) containing data to process. A ` 3. An OpenClimateGIS field object (use :class:`~Field` or :class:`~ocgis.RequestDatasetCollection`). If a :class:`~ocgis.Field` object is used, be aware operations may modify the object inplace. .. autoclass:: ocgis.RequestDataset - :members: inspect, inspect_as_dct +:members: inspect, inspect_as_dct .. autoclass:: ocgis.RequestDatasetCollection - :members: update +:members: update dir_output ~~~~~~~~~~ @@ -391,10 +394,10 @@ A dictionary with regridding options. Please see the documentation for :meth:`~o ========================= .. autoclass:: ocgis.ShpCabinet - :members: keys, iter_geoms +:members: keys, iter_geoms .. autoclass:: ocgis.ShpCabinetIterator - :members: __iter__ +:members: __iter__ Adding Additional Shapefile Data -------------------------------- @@ -409,7 +412,7 @@ The shapefile's "`geom key`_" is the name of the shapefile. It must have an alph ========================= .. 
autoclass:: ocgis.Inspect - :members: +:members: Data Collections ================ diff --git a/src/ocgis/api/collection.py b/src/ocgis/api/collection.py index 23135e58d..46d4be568 100644 --- a/src/ocgis/api/collection.py +++ b/src/ocgis/api/collection.py @@ -6,7 +6,6 @@ from shapely.geometry.base import BaseMultipartGeometry from ocgis.interface.base.crs import CFWGS84 -from ocgis import constants from ocgis.util.helpers import get_ordered_dicts_from_records_array @@ -92,7 +91,6 @@ def values(self): class SpatialCollection(AbstractCollection): - _default_headers = constants.HEADERS_RAW _multi_cast = {'Point': MultiPoint, 'Polygon': MultiPolygon} def __init__(self, meta=None, key=None, crs=None, headers=None, value_keys=None): @@ -101,7 +99,7 @@ def __init__(self, meta=None, key=None, crs=None, headers=None, value_keys=None) self.meta = meta self.key = key self.crs = crs or CFWGS84() - self.headers = headers or self._default_headers + self.headers = headers self.value_keys = value_keys self.geoms = OrderedDict() @@ -143,22 +141,30 @@ def add_field(self, ugid, geom, field, properties=None, name=None): assert (name not in self[ugid]) self[ugid].update({name: field}) - def get_iter_dict(self, use_upper_keys=False, conversion_map=None): - r_headers = self.headers - id_selection_geometry = constants.HEADERS.ID_SELECTION_GEOMETRY - default_geometry_key = constants.DEFAULT_GEOMETRY_KEY - use_conversion = False if conversion_map is None else True + def get_iter_dict(self, use_upper_keys=False, conversion_map=None, melted=False): + """ + :param bool use_upper_keys: If ``True``, capitalize all keys in the yielded data dictionary. + :param dict conversion_map: If present, keys correspond to headers with values being the type to convert to. + :param bool melted: If ``True``, yield in melted form with variables collected under the value header. + :returns: A generator yielding tuples. 
If headers on the collection are not ``None``, these headers will be used + to limit keys in the yielded data dictionary. + :rtype: tuple(:class:`shapely.geometry.base.BaseGeometry`, dict) + """ + + if conversion_map is None or melted is False: + use_conversion = False + else: + use_conversion = True for ugid, field_dict in self.iteritems(): + ugid = ugid if melted else None for field in field_dict.itervalues(): - for row in field.get_iter(value_keys=self.value_keys): - row[id_selection_geometry] = ugid - yld_row = {k: row.get(k) for k in r_headers} - if use_conversion: - for k, v in conversion_map.iteritems(): - yld_row[k] = v(yld_row[k]) - if use_upper_keys: - yld_row = {k.upper(): v for k, v in yld_row.iteritems()} - yield row[default_geometry_key], yld_row + for yld_geom, row in field.get_iter(value_keys=self.value_keys, melted=melted, + use_upper_keys=use_upper_keys, headers=self.headers, ugid=ugid): + if melted: + if use_conversion: + for k, v in conversion_map.iteritems(): + row[k] = v(row[k]) + yield yld_geom, row def get_iter_elements(self): for ugid, fields in self.iteritems(): @@ -192,7 +198,7 @@ def write_ugeom(self, path=None, driver='ESRI Shapefile', fobject=None): :type fobject: :class:`fiona.collection.Collection` """ - from ocgis.conv.fiona_ import FionaConverter + from ocgis.conv.fiona_ import AbstractFionaConverter build = True if fobject is None else False is_open = False @@ -217,7 +223,8 @@ def write_ugeom(self, path=None, driver='ESRI Shapefile', fobject=None): fiona_properties = OrderedDict() archetype_properties = self.properties[ugid] for name in archetype_properties.dtype.names: - fiona_properties[name] = FionaConverter.get_field_type(type(archetype_properties[name][0])) + fiona_properties[name] = AbstractFionaConverter.get_field_type( + type(archetype_properties[name][0])) fiona_schema = {'geometry': geometry, 'properties': fiona_properties} fiona_kwds = {'schema': fiona_schema, 'driver': driver, 'mode': 'w'} if self.crs is not None: diff 
--git a/src/ocgis/api/interpreter.py b/src/ocgis/api/interpreter.py index dc9f0f46d..3303eb3e8 100644 --- a/src/ocgis/api/interpreter.py +++ b/src/ocgis/api/interpreter.py @@ -1,54 +1,62 @@ -from ocgis.util.logging_ocgis import ocgis_lh, ProgressOcgOperations import logging +import os +import shutil + +from ocgis.util.logging_ocgis import ocgis_lh, ProgressOcgOperations from ocgis import exc, env from ocgis.conv.meta import MetaConverter from subset import SubsetOperation -import os -import shutil -from ocgis.conv.base import AbstractConverter +from ocgis.conv.base import AbstractConverter, AbstractTabularConverter class Interpreter(object): - '''Superclass for custom interpreter frameworks. - - ops :: OcgOperations''' - - def __init__(self,ops): + """Superclass for custom interpreter frameworks. + + :param ops: The input operations object to interpret. + :type ops: :class:`ocgis.OcgOperations` + """ + + def __init__(self, ops): self.ops = ops - + @classmethod - def get_interpreter(cls,ops): - '''Select interpreter class.''' - - imap = {'ocg':OcgInterpreter} + def get_interpreter(cls, ops): + """Select interpreter class.""" + + imap = {'ocg': OcgInterpreter} try: - return(imap[ops.backend](ops)) + return imap[ops.backend](ops) except KeyError: - raise(exc.InterpreterNotRecognized) - + raise exc.InterpreterNotRecognized + def check(self): - '''Validate operation definition dictionary.''' - raise(NotImplementedError) - + """Validate operation definition dictionary.""" + + raise NotImplementedError + def execute(self): - '''Run requested operations and return a path to the output file or a - NumPy-based output object depending on specification.''' - raise(NotImplementedError) + """ + Run requested operations and return a path to the output file or a NumPy-based output object depending on + specification. 
+ """ + + raise NotImplementedError class OcgInterpreter(Interpreter): - '''The OCGIS interpreter and execution framework.''' - + """The OCGIS interpreter and execution framework.""" + def check(self): pass - + def execute(self): - ## check for a user-supplied output prefix + # check for a user-supplied output prefix prefix = self.ops.prefix - + # do directory management # - # flag to indicate a directory is made. mostly a precaution to make sure the appropriate directory is is removed. + # flag to indicate a directory is made. mostly a precaution to make sure the appropriate directory is is + # removed. made_output_directory = False if self.ops.output_format in ['numpy', 'esmpy', 'meta']: @@ -58,12 +66,12 @@ def execute(self): # directories or a single output file(s) is created for the other cases if self.ops.add_auxiliary_files: # auxiliary files require that a directory be created - outdir = os.path.join(self.ops.dir_output,prefix) + outdir = os.path.join(self.ops.dir_output, prefix) if os.path.exists(outdir): if env.OVERWRITE: shutil.rmtree(outdir) else: - raise(IOError('The output directory exists but env.OVERWRITE is False: {0}'.format(outdir))) + raise (IOError('The output directory exists but env.OVERWRITE is False: {0}'.format(outdir))) os.mkdir(outdir) # on an exception, the output directory needs to be removed made_output_directory = True @@ -72,74 +80,69 @@ def execute(self): outdir = self.ops.dir_output try: - ## configure logging ################################################### - - ## if file logging is enable, perform some logic based on the operational - ## parameters. - if env.ENABLE_FILE_LOGGING and self.ops.add_auxiliary_files == True: + # # configure logging ################################################### + + # if file logging is enable, perform some logic based on the operational parameters. 
+ if env.ENABLE_FILE_LOGGING and self.ops.add_auxiliary_files is True: if self.ops.output_format in ['numpy', 'esmpy', 'meta']: to_file = None else: - to_file = os.path.join(outdir, prefix+'.log') + to_file = os.path.join(outdir, prefix + '.log') else: to_file = None - - ## flags to determine streaming to console + + # flags to determine streaming to console if env.VERBOSE: to_stream = True else: to_stream = False - - ## configure the logger + + # configure the logger if env.DEBUG: level = logging.DEBUG else: level = logging.INFO - ## this wraps the callback function with methods to capture the - ## completion of major operations. + # this wraps the callback function with methods to capture the completion of major operations. progress = ProgressOcgOperations(callback=self.ops.callback) - ocgis_lh.configure(to_file=to_file,to_stream=to_stream,level=level, - callback=progress,callback_level=level) - - ## create local logger + ocgis_lh.configure(to_file=to_file, to_stream=to_stream, level=level, callback=progress, + callback_level=level) + + # create local logger interpreter_log = ocgis_lh.get_logger('interpreter') - - ocgis_lh('Initializing...',interpreter_log) - - ## set up environment ############################################## - - self.check() ## run validation - doesn't do much now - - ## do not perform vector wrapping for NetCDF output + + ocgis_lh('Initializing...', interpreter_log) + + # # set up environment ############################################## + + # run validation - doesn't do much now + self.check() + + # do not perform vector wrapping for NetCDF output if self.ops.output_format == 'nc': ocgis_lh('"vector_wrap" set to False for netCDF output', - interpreter_log,level=logging.WARN) + interpreter_log, level=logging.WARN) self.ops.vector_wrap = False - - ## if the requested output format is "meta" then no operations are run - ## and only the operations dictionary is required to generate output. 
+ + # if the requested output format is "meta" then no operations are run and only the operations dictionary is + # required to generate output. if self.ops.output_format == 'meta': ret = MetaConverter(self.ops).write() - ## this is the standard request for other output types. + # this is the standard request for other output types. else: - ## the operations object performs subsetting and calculations - ocgis_lh('initializing subset',interpreter_log,level=logging.DEBUG) - so = SubsetOperation(self.ops,progress=progress) - ## if there is no grouping on the output files, a singe converter is - ## is needed + # the operations object performs subsetting and calculations + ocgis_lh('initializing subset', interpreter_log, level=logging.DEBUG) + so = SubsetOperation(self.ops, progress=progress) + # if there is no grouping on the output files, a singe converter is is needed if self.ops.output_grouping is None: - Conv = AbstractConverter.get_converter(self.ops.output_format) - ocgis_lh('initializing converter',interpreter_log, - level=logging.DEBUG) - conv = Conv(so,outdir,prefix,ops=self.ops,add_auxiliary_files=self.ops.add_auxiliary_files, - overwrite=env.OVERWRITE) - ocgis_lh('starting converter write loop: {0}'.format(self.ops.output_format),interpreter_log, + ocgis_lh('initializing converter', interpreter_log, level=logging.DEBUG) + conv = self._get_converter_(outdir, prefix, so) + ocgis_lh('starting converter write loop: {0}'.format(self.ops.output_format), interpreter_log, level=logging.DEBUG) ret = conv.write() else: - raise(NotImplementedError) - - ocgis_lh('Operations successful.'.format(self.ops.prefix),interpreter_log) + raise NotImplementedError + + ocgis_lh('Operations successful.'.format(self.ops.prefix), interpreter_log) return ret except: @@ -149,5 +152,23 @@ def execute(self): shutil.rmtree(outdir) raise finally: - ## shut down logging + # shut down logging ocgis_lh.shutdown() + + def _get_converter_(self, outdir, prefix, so): + """ + :param str outdir: 
The output directory to contain converted files. + :param str prefix: The file prefix for the file outputs. + :param so: The subset operation object doing all the work. + :type so: :class:`ocgis.api.subset.SubsetOperation` + :returns: A converter object. + :rtype: :class:`ocgis.conv.base.AbstractConverter` + """ + + conv_klass = AbstractConverter.get_converter(self.ops.output_format) + kwargs = dict(outdir=outdir, prefix=prefix, ops=self.ops, add_auxiliary_files=self.ops.add_auxiliary_files, + overwrite=env.OVERWRITE) + if issubclass(conv_klass, AbstractTabularConverter): + kwargs['melted'] = self.ops.melted + conv = conv_klass(so, **kwargs) + return conv diff --git a/src/ocgis/api/operations.py b/src/ocgis/api/operations.py index 7be2f9d35..2524bd05f 100644 --- a/src/ocgis/api/operations.py +++ b/src/ocgis/api/operations.py @@ -1,4 +1,3 @@ -from ocgis.conv.base import AbstractConverter from ocgis.api.parms.definition import * from ocgis.api.interpreter import OcgInterpreter from ocgis import env @@ -114,19 +113,23 @@ class OcgOperations(object): :param dict regrid_options: Overload the default keywords for regridding. Dictionary elements must map to the names of keyword arguments for :meth:`~ocgis.regrid.base.iter_regridded_fields`. If this is left as ``None``, then the default keyword values are used. Please see :ref:`esmpy-regridding` for an overview. + :param bool melted: If ``None``, default to :attr:`ocgis.env.MELTED`. If ``False`` (the default), variable names are + individual columns in tabular output formats (i.e. ``'csv'``). If ``True``, all variable values will be collected + under a single value column. 
""" - + def __init__(self, dataset=None, spatial_operation='intersects', geom=None, aggregate=False, calc=None, calc_grouping=None, calc_raw=False, abstraction=None, snippet=False, backend='ocg', prefix=None, output_format='numpy', agg_selection=False, select_ugid=None, vector_wrap=True, allow_empty=False, dir_output=None, slice=None, file_only=False, headers=None, format_time=True, calc_sample_size=False, search_radius_mult=2.0, output_crs=None, interpolate_spatial_bounds=False, add_auxiliary_files=True, optimizations=None, callback=None, time_range=None, time_region=None, level_range=None, - conform_units_to=None, select_nearest=False, regrid_destination=None, regrid_options=None): - + conform_units_to=None, select_nearest=False, regrid_destination=None, regrid_options=None, + melted=False): + # tells "__setattr__" to not perform global validation until all values are set initially self._is_init = True - + self.dataset = Dataset(dataset) self.spatial_operation = SpatialOperation(spatial_operation) self.aggregate = Aggregate(aggregate) @@ -162,15 +165,17 @@ def __init__(self, dataset=None, spatial_operation='intersects', geom=None, aggr self.select_nearest = SelectNearest(select_nearest) self.regrid_destination = RegridDestination(init_value=regrid_destination, dataset=self._get_object_('dataset')) self.regrid_options = RegridOptions(regrid_options) - + self.melted = Melted(init_value=env.MELTED or melted, dataset=self._get_object_('dataset'), + output_format=self._get_object_('output_format')) + # these values are left in to perhaps be added back in at a later date. self.output_grouping = None - + # Initial values have been set and global validation should now occur when any parameters are updated. 
self._is_init = False self._update_dependents_() self._validate_() - + def __str__(self): msg = ['{0}('.format(self.__class__.__name__)] for key, value in self.as_dict().iteritems(): @@ -179,16 +184,16 @@ def __str__(self): msg.append('{0}, '.format(self._get_object_(key))) msg.append(')') msg = ''.join(msg) - return(msg) - + return msg + def __getattribute__(self, name): attr = object.__getattribute__(self, name) if isinstance(attr, OcgParameter): ret = attr.value else: ret = attr - return(ret) - + return ret + def __setattr__(self, name, value): if isinstance(value, OcgParameter): object.__setattr__(self, name, value) @@ -201,17 +206,17 @@ def __setattr__(self, name, value): if self._is_init is False: self._update_dependents_() self._validate_() - + def get_base_request_size(self): - ''' + """ Return the estimated request size in kilobytes. This is the estimated size of the requested data not the returned data product. - + :returns: Dictionary with keys ``'total'`` and ``'variables'``. The ``'variables'`` key maps a variable's alias to its estimated value and dimension sizes, shapes, and data types. :return type: dict - + >>> ret = ops.get_base_request_size() {'total':555., 'variables':{ @@ -225,7 +230,7 @@ def get_base_request_size(self): } } } - ''' + """ if self.regrid_destination is not None: msg = 'Base request size not supported with a regrid destination.' 
@@ -272,72 +277,72 @@ def _get_zero_or_kb_(dimension): total += float(v3['kb']) ret['total'] = total return ret - + def get_meta(self): meta_converter = MetaConverter(self) rows = meta_converter.get_rows() - return('\n'.join(rows)) - + return '\n'.join(rows) + @classmethod def parse_query(cls, query): parms = [SpatialOperation, Geom, Aggregate, Calc, CalcGrouping, CalcRaw, Abstraction, Snippet, Backend, Prefix, OutputFormat, AggregateSelection, SelectUgid, VectorWrap, AllowEmpty] - + kwds = {} ds = Dataset.parse_query(query) - kwds.update({ds.name:ds.value}) - + kwds.update({ds.name: ds.value}) + for parm in parms: obj = parm() obj.parse_query(query) - kwds.update({obj.name:obj.value}) - + kwds.update({obj.name: obj.value}) + ops = OcgOperations(**kwds) - return(ops) - + return ops + def as_dict(self): - """:rtype: dictionary""" + """:rtype: dict""" + ret = {} for value in self.__dict__.itervalues(): try: - ret.update({value.name:value.value}) + ret.update({value.name: value.value}) except AttributeError: pass - return(ret) - + return ret + def execute(self): """Execute the request using the selected backend. :rtype: Path to an output file/folder or dictionary composed of :class:`ocgis.api.collection.AbstractCollection` objects. """ interp = OcgInterpreter(self) - return(interp.execute()) - + return interp.execute() + def _get_object_(self, name): - return(object.__getattribute__(self, name)) - + return object.__getattribute__(self, name) + def _update_dependents_(self): - ## the select_ugid parameter must always connect to the geometry selection + # the select_ugid parameter must always connect to the geometry selection geom = self._get_object_('geom') svalue = self._get_object_('select_ugid')._value geom.select_ugid = svalue - ## time and/or level subsets must be applied to the request datasets - ## individually. if they are not none. + # time and/or level subsets must be applied to the request datasets individually. if they are not none. 
for attr in ['time_range', 'time_region', 'level_range']: if getattr(self, attr) is not None: for rd in self.dataset.itervalues(): setattr(rd, attr, getattr(self, attr)) - ## unit conforms are tied to request dataset objects + # unit conforms are tied to request dataset objects if self.conform_units_to is not None: for rd in self.dataset.itervalues(): try: rd.conform_units_to = self.conform_units_to except ValueError as e: msg = '"{0}: {1}"'.format(e.__class__.__name__, e.message) - raise(DefinitionValidationError(Dataset, msg)) + raise (DefinitionValidationError(Dataset, msg)) def _validate_(self): ocgis_lh(logger='operations', msg='validating operations') @@ -383,7 +388,7 @@ def _raise_(msg, obj=OutputFormat): _raise_(msg, obj=OutputCRS) if self.aggregate or self.spatial_operation == 'clip': msg = ( - '{0} data if clipped or spatially averaged must be written to ' '{1}. The "output_crs" is being updated to {2}.').format( + '{0} data if clipped or spatially averaged must be written to ' '{1}. The "output_crs" is being updated to {2}.').format( CFRotatedPole.__name__, CFWGS84.__name__, CFWGS84.__name__) ocgis_lh(level=logging.WARN, msg=msg, logger='operations') self._get_object_('output_crs')._value = CFWGS84() @@ -399,7 +404,9 @@ def _raise_(msg, obj=OutputFormat): # snippet only relevant for subsetting not operations with a calculation or time region if self.snippet: if self.calc is not None: - _raise_('Snippets are not implemented for calculations. Apply a limiting time range for faster responses.',obj=Snippet) + _raise_( + 'Snippets are not implemented for calculations. 
Apply a limiting time range for faster responses.', + obj=Snippet) for rd in self.dataset.iter_request_datasets(): if rd.time_region is not None: _raise_('Snippets are not implemented for time regions.', obj=Snippet) diff --git a/src/ocgis/api/parms/definition.py b/src/ocgis/api/parms/definition.py index 7d897b458..97c3b0d9f 100644 --- a/src/ocgis/api/parms/definition.py +++ b/src/ocgis/api/parms/definition.py @@ -14,6 +14,7 @@ from shapely.geometry.multipolygon import MultiPolygon from shapely.geometry.point import Point +from ocgis.conv.base import AbstractConverter, AbstractTabularConverter import datetime from ocgis.api.parms import base from ocgis.exc import DefinitionValidationError @@ -32,17 +33,17 @@ class Abstraction(base.StringOptionParameter): name = 'abstraction' default = None - valid = ('point','polygon') + valid = ('point', 'polygon') nullable = True - + def _get_meta_(self): if self.value is None: msg = 'Highest order geometry available used for spatial output.' else: msg = 'Spatial dimension abstracted to {0}.'.format(self.value) - return(msg) - - + return (msg) + + class AddAuxiliaryFiles(base.BooleanParameter): name = 'add_auxiliary_files' default = True @@ -56,7 +57,7 @@ class AllowEmpty(base.BooleanParameter): meta_true = 'Empty returns are allowed. Selection geometries not overlapping with dataset geometries are excluded from a return. Empty output data may results for absolutely no overlap.' meta_false = 'Empty returns NOT allowed. If a selection geometry has no intersecting geometries from the target dataset, an exception is raised.' - + class Aggregate(base.BooleanParameter): name = 'aggregate' default = False @@ -65,8 +66,8 @@ class Aggregate(base.BooleanParameter): 'spatial operation. Weights are normalized using the maximum area ' 'of the geometry set.') meta_false = 'Selected geometries are not aggregated (unioned).' 
- - + + class AggregateSelection(base.BooleanParameter): name = 'agg_selection' default = False @@ -78,48 +79,48 @@ class Backend(base.StringOptionParameter): name = 'backend' default = 'ocg' valid = ('ocg',) - + def _get_meta_(self): if self.value == 'ocg': ret = 'OpenClimateGIS backend used for processing.' else: - raise(NotImplementedError) - return(ret) - - + raise (NotImplementedError) + return (ret) + + class Callback(base.OcgParameter): input_types = [FunctionType] name = 'callback' nullable = True default = None return_type = [FunctionType] - + def _get_meta_(self): if self.value is None: msg = 'No callback function provided.' else: msg = 'Callback enabled.' - return(msg) - - -class Calc(base.IterableParameter,base.OcgParameter): + return (msg) + + +class Calc(base.IterableParameter, base.OcgParameter): name = 'calc' default = None nullable = True - input_types = [list,tuple] + input_types = [list, tuple] return_type = [list] - element_type = [dict,str] + element_type = [dict, str] unique = False - _possible = ['es=tas+4',['es=tas+4'],[{'func':'mean','name':'mean'}]] + _possible = ['es=tas+4', ['es=tas+4'], [{'func': 'mean', 'name': 'mean'}]] _required_keys = set(['ref', 'meta_attrs', 'name', 'func', 'kwds']) - - def __init__(self,*args,**kwargs): - ## this flag is used by the parser to determine if an eval function has - ## been passed. very simple test for this...if there is an equals sign + + def __init__(self, *args, **kwargs): + # # this flag is used by the parser to determine if an eval function has + # # been passed. 
very simple test for this...if there is an equals sign ## in the string then it is considered an eval function self._is_eval_function = False - base.OcgParameter.__init__(self,*args,**kwargs) - + base.OcgParameter.__init__(self, *args, **kwargs) + def __str__(self): if self.value is None: ret = base.OcgParameter.__str__(self) @@ -132,25 +133,27 @@ def __str__(self): ii['kwds'][k] = type(v) ret = '{0}={1}'.format(self.name, cb) return ret - + def get_url_string(self): - raise(NotImplementedError) -# if self.value is None: -# ret = 'none' -# else: -# elements = [] -# for element in self.value: -# strings = [] -# template = '{0}~{1}' -# if element['ref'] != library.SampleSize: -# strings.append(template.format(element['func'],element['name'])) -# for k,v in element['kwds'].iteritems(): -# strings.append(template.format(k,v)) -# if len(strings) > 0: -# elements.append('!'.join(strings)) -# ret = '|'.join(elements) -# return(ret) - + raise (NotImplementedError) + + # if self.value is None: + + # ret = 'none' + # else: + # elements = [] + # for element in self.value: + # strings = [] + # template = '{0}~{1}' + # if element['ref'] != library.SampleSize: + # strings.append(template.format(element['func'],element['name'])) + # for k,v in element['kwds'].iteritems(): + # strings.append(template.format(k,v)) + # if len(strings) > 0: + # elements.append('!'.join(strings)) + # ret = '|'.join(elements) + # return(ret) + def _get_meta_(self): if self.value is None: ret = 'No computations applied.' 
@@ -160,10 +163,10 @@ def _get_meta_(self): else: ret = ['The following computations were applied:'] for ii in self.value: - ret.append('{0}: {1}'.format(ii['name'],ii['ref'].description)) - return(ret) - - def _parse_(self,value): + ret.append('{0}: {1}'.format(ii['name'], ii['ref'].description)) + return (ret) + + def _parse_(self, value): # test if the value is an eval function and set internal flag if '=' in value: self._is_eval_function = True @@ -200,13 +203,13 @@ def _parse_(self,value): ## if it is not an eval function, then do the standard argument parsing else: fr = register.FunctionRegistry() - + ## get the function key string form the calculation definition dictionary function_key = value['func'] ## this is the message for the DefinitionValidationError if this key ## may not be found. dve_msg = 'The function key "{0}" is not available in the function registry.'.format(function_key) - + ## retrieve the calculation class reference from the function registry try: value['ref'] = fr[function_key] @@ -218,13 +221,13 @@ def _parse_(self,value): if function_key.startswith('{0}_'.format(constants.ICCLIM_PREFIX_FUNCTION_KEY)): register.register_icclim(fr) else: - raise(DefinitionValidationError(self,dve_msg)) + raise (DefinitionValidationError(self, dve_msg)) ## make another attempt to register the function try: value['ref'] = fr[function_key] except KeyError: - raise(DefinitionValidationError(self,dve_msg)) - + raise (DefinitionValidationError(self, dve_msg)) + ## parameters will be set to empty if none are present in the calculation ## dictionary. if 'kwds' not in value: @@ -232,7 +235,7 @@ def _parse_(self,value): ## make the keyword parameter definitions lowercase. 
else: value['kwds'] = OrderedDict(value['kwds']) - for k,v in value['kwds'].iteritems(): + for k, v in value['kwds'].iteritems(): try: value['kwds'][k] = v.lower() except AttributeError: @@ -243,135 +246,141 @@ def _parse_(self,value): value['meta_attrs'] = None return value - - def _parse_string_(self,value): + + def _parse_string_(self, value): try: - key,uname = value.split('~',1) + key, uname = value.split('~', 1) try: - uname,kwds_raw = uname.split('!',1) + uname, kwds_raw = uname.split('!', 1) kwds_raw = kwds_raw.split('!') kwds = OrderedDict() for kwd in kwds_raw: - kwd_name,kwd_value = kwd.split('~') + kwd_name, kwd_value = kwd.split('~') try: - kwds.update({kwd_name:float(kwd_value)}) + kwds.update({kwd_name: float(kwd_value)}) except ValueError: - kwds.update({kwd_name:str(kwd_value)}) + kwds.update({kwd_name: str(kwd_value)}) except ValueError: kwds = OrderedDict() - ret = {'func':key,'name':uname,'kwds':kwds} + ret = {'func': key, 'name': uname, 'kwds': kwds} except ValueError: ## likely a string to use for an eval function if '=' not in value: msg = 'String may not be parsed: "{0}".'.format(value) - raise(DefinitionValidationError(self,msg)) + raise (DefinitionValidationError(self, msg)) else: self._is_eval_function = True ret = value - - return(ret) - - def _validate_(self,value): + + return (ret) + + def _validate_(self, value): if not self._is_eval_function: # get the aliases of the calculations aliases = [ii['name'] for ii in value] if len(aliases) != len(set(aliases)): - raise(DefinitionValidationError(self,'User-provided calculation aliases must be unique: {0}'.format(aliases))) + raise (DefinitionValidationError(self, 'User-provided calculation aliases must be unique: {0}'.format( + aliases))) for v in value: if set(v.keys()) != self._required_keys: msg = 'Required keys are: {0}'.format(self._required_keys) - raise(DefinitionValidationError(self, msg)) + raise (DefinitionValidationError(self, msg)) + - -class 
CalcGrouping(base.IterableParameter,base.OcgParameter): +class CalcGrouping(base.IterableParameter, base.OcgParameter): name = 'calc_grouping' nullable = True - input_types = [list,tuple] + input_types = [list, tuple] return_type = tuple default = None - element_type = [str,list] + element_type = [str, list] unique = True - _flags = ('unique','year') - _standard_groups = ('day','month','year') - + _flags = ('unique', 'year') + _standard_groups = ('day', 'month', 'year') + @classmethod def iter_possible(cls): - standard_seasons = [[3,4,5],[6,7,8],[9,10,11],[12,1,2]] - for r in [1,2,3]: - for combo in itertools.combinations(cls._standard_groups,r): - yield(combo) + standard_seasons = [[3, 4, 5], [6, 7, 8], [9, 10, 11], [12, 1, 2]] + for r in [1, 2, 3]: + for combo in itertools.combinations(cls._standard_groups, r): + yield (combo) for one in ['all']: - yield(one) + yield (one) flags = list(cls._flags) + [None] for flag in flags: if flag is not None: yld = deepcopy(standard_seasons) - yld.insert(0,flag) + yld.insert(0, flag) else: yld = standard_seasons - yield(yld) - - def parse(self,value): + yield (yld) + + def parse(self, value): try: - ## not interested in looking for unique letters in the "_flags" + # # not interested in looking for unique letters in the "_flags" parse_value = list(deepcopy(value)) - ## if we do remove a flag, be sure and append it back + # # if we do remove a flag, be sure and append it back add_back = None for flag in self._flags: if flag in parse_value: parse_value.remove(flag) add_back = flag ## call superclass method to parse the value for iteration - ret = base.IterableParameter.parse(self,parse_value,check_basestrings=False) + ret = base.IterableParameter.parse(self, parse_value, check_basestrings=False) ## add the value back if it has been set if add_back is not None: ret.append(add_back) - ## value is likely a NoneType + # # value is likely a NoneType except TypeError as e: if value is None: ret = None else: - raise(e) - return(ret) - + 
raise (e) + return (ret) + def finalize(self): if self._value == ('all',): self._value = 'all' - + def _get_meta_(self): if self.value is None: msg = 'No temporal aggregation applied.' else: msg = 'Temporal aggregation determined by the following group(s): {0}'.format(self.value) - return(msg) - - def _validate_(self,value): - ## the 'all' parameter will be reduced to a string eventually + return (msg) + + def _validate_(self, value): + # # the 'all' parameter will be reduced to a string eventually if len(value) == 1 and value[0] == 'all': pass else: try: for val in value: if val not in self._standard_groups: - raise(DefinitionValidationError(self,'"{0}" is not a valid temporal group or is currently not supported. Supported groupings are combinations of day, month, and year.'.format(val))) - ## the grouping may not be a date part but a seasonal aggregation + raise (DefinitionValidationError(self, + '"{0}" is not a valid temporal group or is currently not supported. Supported groupings are combinations of day, month, and year.'.format( + val))) + # # the grouping may not be a date part but a seasonal aggregation except DefinitionValidationError: - months = range(1,13) + months = range(1, 13) for element in value: ## the keyword year and unique are okay for seasonal aggregations if element in self._flags: continue - elif isinstance(element,basestring): + elif isinstance(element, basestring): if element not in self._flags: - raise(DefinitionValidationError(self,'Seasonal flag not recognized: "{0}".'.format(element))) + raise ( + DefinitionValidationError(self, 'Seasonal flag not recognized: "{0}".'.format(element))) else: for month in element: if month not in months: - raise(DefinitionValidationError(self,'Month integer value is not recognized: {0}'.format(month))) + raise (DefinitionValidationError(self, + 'Month integer value is not recognized: {0}'.format( + month))) + - class CalcRaw(base.BooleanParameter): name = 'calc_raw' default = False @@ -394,9 +403,10 @@ 
class ConformUnitsTo(base.OcgParameter): input_types = [] def __init__(self, init_value=None): - ## cfunits is an optional installation. account for this on the import types. + # # cfunits is an optional installation. account for this on the import types. try: from cfunits import Units + self.input_types.append(Units) self.return_type = [self.return_type] + [Units] except ImportError: @@ -408,7 +418,7 @@ def _get_meta_(self): ret = 'Units were not conformed.' else: ret = 'Units of all requested datasets were conformed to: "{0}".'.format(self.value) - return(ret) + return (ret) class Dataset(base.OcgParameter): @@ -438,6 +448,7 @@ def __init__(self, init_value): else: if isinstance(init_value, ESMF.Field): from ocgis.regrid.base import get_ocgis_field_from_esmpy_field + field = get_ocgis_field_from_esmpy_field(init_value) itr = [field] should_raise = False @@ -452,15 +463,15 @@ def __init__(self, init_value): else: init_value = init_value super(Dataset, self).__init__(init_value) - - def parse_string(self,value): + + def parse_string(self, value): lowered = value.strip() if lowered == 'none': ret = None else: ret = self._parse_string_(lowered) - return(ret) - + return ret + def get_meta(self): try: ret = self.value._get_meta_rows_() @@ -468,14 +479,14 @@ def get_meta(self): # likely a field object ret = ['Field object with name: "{0}"'.format(self.value.name)] return ret - + def _get_meta_(self): pass - + def _parse_string_(self, lowered): raise NotImplementedError - - + + class DirOutput(base.StringParameter): _lower_string = False name = 'dir_output' @@ -483,14 +494,14 @@ class DirOutput(base.StringParameter): default = ocgis.env.DIR_OUTPUT return_type = str input_types = [] - + def _get_meta_(self): ret = 'At execution time, data was originally written to this processor-local location: {0}'.format(self.value) - return(ret) - - def _validate_(self,value): + return (ret) + + def _validate_(self, value): if not exists(value): - 
raise(DefinitionValidationError(self,'Output directory does not exist: {0}'.format(value))) + raise (DefinitionValidationError(self, 'Output directory does not exist: {0}'.format(value))) class FileOnly(base.BooleanParameter): @@ -516,54 +527,56 @@ class Geom(base.OcgParameter): _shp_key = None _bounds = None _ugid_key = 'UGID' - - def __init__(self,*args,**kwds): - self.select_ugid = kwds.pop('select_ugid',None) - ## just store the value if it is a parameter object - if isinstance(self.select_ugid,SelectUgid): + + def __init__(self, *args, **kwds): + self.select_ugid = kwds.pop('select_ugid', None) + # just store the value if it is a parameter object + if isinstance(self.select_ugid, SelectUgid): self.select_ugid = self.select_ugid._value - + args = [self] + list(args) - base.OcgParameter.__init__(*args,**kwds) - + base.OcgParameter.__init__(*args, **kwds) + def __str__(self): if self.value is None: value = None elif self._shp_key is not None: value = '"{0}"'.format(self._shp_key) elif self._bounds is not None: - value = '|'.join(map(str,self._bounds)) + value = '|'.join(map(str, self._bounds)) else: value = '<{0} geometry(s)>'.format(len(self.value)) - ret = '{0}={1}'.format(self.name,value) - return(ret) - + ret = '{0}={1}'.format(self.name, value) + return ret + def _get_value_(self): - if isinstance(self._value,ShpCabinetIterator): + if isinstance(self._value, ShpCabinetIterator): self._value.select_ugid = self.select_ugid - return(base.OcgParameter._get_value_(self)) - value = property(_get_value_,base.OcgParameter._set_value_) - + return base.OcgParameter._get_value_(self) + + value = property(_get_value_, base.OcgParameter._set_value_) + def parse(self, value): if type(value) in [list, tuple]: - if all([isinstance(element,dict) for element in value]): - for ii,element in enumerate(value,start=1): + if all([isinstance(element, dict) for element in value]): + for ii, element in enumerate(value, start=1): if 'geom' not in element: - 
ocgis_lh(exc=DefinitionValidationError(self,'Geometry dictionaries must have a "geom" key.')) + ocgis_lh(exc=DefinitionValidationError(self, 'Geometry dictionaries must have a "geom" key.')) if 'properties' not in element: - element['properties'] = {self._ugid_key:ii} + element['properties'] = {self._ugid_key: ii} crs = element.get('crs', CFWGS84()) if 'crs' not in element: - ocgis_lh(msg='No CRS in geometry dictionary - assuming WGS84.',level=logging.WARN,check_duplicate=True) + ocgis_lh(msg='No CRS in geometry dictionary - assuming WGS84.', level=logging.WARN, + check_duplicate=True) ret = SpatialDimension.from_records(value, crs=crs) else: if len(value) == 2: geom = Point(value[0], value[1]) elif len(value) == 4: minx, miny, maxx, maxy = value - geom = Polygon(((minx, miny), (minx, maxy), (maxx, maxy), (maxx, miny))) + geom = Polygon(((minx, miny), (minx, maxy), (maxx, maxy), (maxx, miny))) if not geom.is_valid: - raise(DefinitionValidationError(self, 'Parsed geometry is not valid.')) + raise (DefinitionValidationError(self, 'Parsed geometry is not valid.')) ret = [{'geom': geom, 'properties': {self._ugid_key: 1}}] ret = SpatialDimension.from_records(ret, crs=CFWGS84()) self._bounds = geom.bounds @@ -587,41 +600,39 @@ def parse(self, value): ret = tuple(self._iter_spatial_dimension_tuple(ret)) return ret - - def parse_string(self,value): + + def parse_string(self, value): elements = value.split('|') try: elements = [float(e) for e in elements] - ## switch geometry creation based on length. length of 2 is a point - ## otherwise a bounding box + # switch geometry creation based on length. 
length of 2 is a point otherwise a bounding box if len(elements) == 2: - geom = Point(elements[0],elements[1]) + geom = Point(elements[0], elements[1]) else: - minx,miny,maxx,maxy = elements - geom = Polygon(((minx,miny), - (minx,maxy), - (maxx,maxy), - (maxx,miny))) + minx, miny, maxx, maxy = elements + geom = Polygon(((minx, miny), + (minx, maxy), + (maxx, maxy), + (maxx, miny))) if not geom.is_valid: - raise(DefinitionValidationError(self,'Parsed geometry is not valid.')) - ret = [{'geom':geom,'properties':{'ugid':1}}] + raise (DefinitionValidationError(self, 'Parsed geometry is not valid.')) + ret = [{'geom': geom, 'properties': {'ugid': 1}}] self._bounds = elements - ## try the value as a key or path + # try the value as a key or path except ValueError: - ## if the path exists, then assume it is a path to a shapefile, - ## otherwise assume it is a key + # if the path exists, then assume it is a path to a shapefile, otherwise assume it is a key kwds = {} if os.path.exists(value): kwds['path'] = value else: kwds['key'] = value - ## this is saved for later use by the openclimategis metadata output - ## as the input value is inherently transformed + # this is saved for later use by the openclimategis metadata output as the input value is inherently + # transformed self._shp_key = value - ## get the select_ugid test value + # get the select_ugid test value try: test_value = self.select_ugid.value - ## it may not have been passed as a parameter object + # it may not have been passed as a parameter object except AttributeError: test_value = self.select_ugid if test_value is None: @@ -630,8 +641,8 @@ def parse_string(self,value): select_ugid = test_value kwds['select_ugid'] = select_ugid ret = ShpCabinetIterator(**kwds) - return(ret) - + return ret + def _get_meta_(self): if self.value is None: ret = 'No user-supplied geometry. All data returned.' 
@@ -641,7 +652,7 @@ def _get_meta_(self): ret = 'The bounding box coordinates used for subset are: {0}.'.format(self._bounds) else: ret = '{0} custom user geometries provided.'.format(len(self.value)) - return(ret) + return ret @staticmethod def _iter_spatial_dimension_tuple(spatial_dimension): @@ -656,44 +667,44 @@ def _iter_spatial_dimension_tuple(spatial_dimension): for row, col in itertools.product(row_range, col_range): yield spatial_dimension[row, col] - -class Headers(base.IterableParameter,base.OcgParameter): + +class Headers(base.IterableParameter, base.OcgParameter): name = 'headers' default = None return_type = tuple - valid = set(constants.HEADERS_RAW+constants.HEADERS_CALC+constants.HEADERS_MULTI) - input_types = [list,tuple] + valid = set(constants.HEADERS_RAW + constants.HEADERS_CALC + constants.HEADERS_MULTI) + input_types = [list, tuple] nullable = True element_type = str unique = True def __repr__(self): try: - msg = '{0}={1}'.format(self.name,self.split_string.join(self.value)) - ## likely a NoneType + msg = '{0}={1}'.format(self.name, self.split_string.join(self.value)) + # likely a NoneType except TypeError: if self.value is None: msg = '{0}=none'.format(self.name) else: raise - return(msg) - - def parse_all(self,value): + return msg + + def parse_all(self, value): for header in constants.HEADERS_REQUIRED: if header in value: value.remove(header) - return(constants.HEADERS_REQUIRED+value) - - def validate_all(self,values): + return constants.HEADERS_REQUIRED + value + + def validate_all(self, values): if len(values) == 0: msg = 'At least one header value must be passed.' 
- raise(DefinitionValidationError(self,msg)) + raise (DefinitionValidationError(self, msg)) if not self.valid.issuperset(values): msg = 'Valid headers are {0}.'.format(list(self.valid)) - raise(DefinitionValidationError(self,msg)) + raise (DefinitionValidationError(self, msg)) def _get_meta_(self): - return('The following headers were used for file creation: {0}'.format(self.value)) + return 'The following headers were used for file creation: {0}'.format(self.value) class InterpolateSpatialBounds(base.BooleanParameter): @@ -703,29 +714,56 @@ class InterpolateSpatialBounds(base.BooleanParameter): meta_false = 'If no bounds are present on the coordinate variables, no attempt will be made to interpolate boundary polygons.' -class LevelRange(base.IterableParameter,base.OcgParameter): +class LevelRange(base.IterableParameter, base.OcgParameter): name = 'level_range' - element_type = [int,float] + element_type = [int, float] nullable = True - input_types = [list,tuple] + input_types = [list, tuple] return_type = tuple unique = False default = None - - def validate_all(self,value): + + def validate_all(self, value): if len(value) != 2: msg = 'There must be two elements in the sequence.' - raise(DefinitionValidationError(self,msg)) + raise (DefinitionValidationError(self, msg)) if value[0] > value[1]: msg = 'The second element must be >= the first element.' - raise(DefinitionValidationError(self,msg)) - + raise (DefinitionValidationError(self, msg)) + def _get_meta_(self): - if self.value == None: + if self.value is None: msg = 'No level subset.' else: msg = 'The following level subset was applied to all request datasets: {0}'.format(self.value) - return(msg) + return msg + + +class Melted(base.BooleanParameter): + """ + .. note:: Accepts all parameters to :class:`ocgis.api.parms.base.BooleanParameter`. 
+ + :keyword dataset: + :type dataset: :class:`ocgis.api.parms.definition.Dataset` + :keyword output_format: + :type output_format: :class:`ocgis.api.parms.definition.OutputFormat` + """ + + name = 'melted' + default = False + meta_true = 'Melted tabular iteration requested.' + meta_false = 'Flat tabular iteration requested.' + + def __init__(self, **kwargs): + dataset = kwargs.pop('dataset') + output_format = kwargs.pop('output_format') + if len(dataset.value) > 1 and kwargs.get('init_value', False) is False: + converter_class = output_format.get_converter_class() + if issubclass(converter_class, AbstractTabularConverter): + kwargs['init_value'] = True + msg = 'Tabular output formats require "melted" is "False". Setting "melted" to "False".' + ocgis_lh(msg=msg, logger='operations', level=logging.WARNING, check_duplicate=True) + super(Melted, self).__init__(**kwargs) class Optimizations(base.OcgParameter): @@ -734,24 +772,24 @@ class Optimizations(base.OcgParameter): input_types = [dict] nullable = True return_type = [dict] - #: 'tgds' - dictionary mapping field aliases to TemporalGroupDimension objects - _allowed_keys = ['tgds','fields'] + # : 'tgds' - dictionary mapping field aliases to TemporalGroupDimension objects + _allowed_keys = ['tgds', 'fields'] _perform_deepcopy = False - + def _get_meta_(self): if self.value is None: ret = 'No optimizations were used.' else: ret = 'The following optimizations were used: {0}.'.format(self.value.keys()) - return(ret) - - def _validate_(self,value): + return ret + + def _validate_(self, value): if len(value) == 0: msg = 'Empty dictionaries are not allowed for optimizations. Use None instead.' 
- raise(DefinitionValidationError(self,msg)) + raise DefinitionValidationError(self, msg) if set(value.keys()).issubset(set(self._allowed_keys)) == False: msg = 'Allowed optimization keys are "{0}".'.format(self._allowed_keys) - raise(DefinitionValidationError(self,msg)) + raise DefinitionValidationError(self, msg) class OutputCRS(base.OcgParameter): @@ -760,13 +798,14 @@ class OutputCRS(base.OcgParameter): nullable = True return_type = [CoordinateReferenceSystem] default = None - + def _get_meta_(self): if self.value is None: ret = "No CRS associated with dataset. WGS84 Lat/Lon Geographic (EPSG:4326) assumed." else: - ret = 'The PROJ.4 definition of the coordinate reference system is: "{0}"'.format(self.value.sr.ExportToProj4()) - return(ret) + ret = 'The PROJ.4 definition of the coordinate reference system is: "{0}"'.format( + self.value.sr.ExportToProj4()) + return ret class OutputFormat(base.StringOptionParameter): @@ -786,11 +825,14 @@ def iter_possible(cls): for element in cls.valid: yield element + def get_converter_class(self): + return AbstractConverter.get_converter(self.value) + def _get_meta_(self): ret = 'The output format is "{0}".'.format(self.value) return ret - - + + class Prefix(base.StringParameter): name = 'prefix' nullable = False @@ -798,10 +840,10 @@ class Prefix(base.StringParameter): input_types = [str] return_type = str _lower_string = False - + def _get_meta_(self): msg = 'Data output given the following prefix: {0}.'.format(self.value) - return(msg) + return msg class RegridDestination(base.OcgParameter): @@ -822,7 +864,7 @@ def _get_meta_(self): msg = 'Input data was not regridded.' 
return msg - def _parse_(self,value): + def _parse_(self, value): # get the request dataset from the collection if the value is a string if isinstance(value, basestring): value = self.dataset.value[value] @@ -899,14 +941,15 @@ class SearchRadiusMultiplier(base.OcgParameter): nullable = False return_type = [float] default = 2.0 - + def _get_meta_(self): - msg = 'If point geometries were used for selection, a modifier of {0} times the data resolution was used to spatially select data.'.format(self.value) - return(msg) - - def _validate_(self,value): + msg = 'If point geometries were used for selection, a modifier of {0} times the data resolution was used to spatially select data.'.format( + self.value) + return msg + + def _validate_(self, value): if value <= 0: - raise(DefinitionValidationError(self,msg='must be >= 0')) + raise DefinitionValidationError(self, msg='must be >= 0') class SelectNearest(base.BooleanParameter): @@ -914,55 +957,55 @@ class SelectNearest(base.BooleanParameter): default = False meta_true = 'The nearest geometry to the centroid of the selection geometry was returned.' meta_false = 'All geometries returned regardless of distance.' - -class SelectUgid(base.IterableParameter,base.OcgParameter): + +class SelectUgid(base.IterableParameter, base.OcgParameter): name = 'select_ugid' return_type = tuple nullable = True default = None - input_types = [list,tuple] + input_types = [list, tuple] element_type = int unique = True - + def _get_meta_(self): if self.value is None: ret = 'No geometry selection by unique identifier.' 
else: ret = 'The following UGID values were used to select from the input geometries: {0}.'.format(self.value) - return(ret) - - -class Slice(base.IterableParameter,base.OcgParameter): + return (ret) + + +class Slice(base.IterableParameter, base.OcgParameter): name = 'slice' return_type = tuple nullable = True default = None - input_types = [list,tuple] - element_type = [NoneType,int,tuple,list,slice] + input_types = [list, tuple] + element_type = [NoneType, int, tuple, list, slice] unique = False - - def validate_all(self,values): + + def validate_all(self, values): if len(values) != 5: - raise(DefinitionValidationError(self,'Slices must have 5 values.')) - - def _parse_(self,value): + raise (DefinitionValidationError(self, 'Slices must have 5 values.')) + + def _parse_(self, value): if value is None: ret = slice(None) elif type(value) == int: - ret = slice(value,value+1) - elif type(value) in [list,tuple]: + ret = slice(value, value + 1) + elif type(value) in [list, tuple]: ret = slice(*value) else: - raise(DefinitionValidationError(self,'"{0}" cannot be converted to a slice object'.format(value))) - return(ret) - + raise (DefinitionValidationError(self, '"{0}" cannot be converted to a slice object'.format(value))) + return (ret) + def _get_meta_(self): if self.value is None: ret = 'No slice passed.' else: ret = 'A slice was used.' - return(ret) + return (ret) class Snippet(base.BooleanParameter): @@ -970,8 +1013,8 @@ class Snippet(base.BooleanParameter): default = False meta_true = 'First temporal slice or temporal group returned.' meta_false = 'All time points returned.' - - + + class SpatialOperation(base.StringOptionParameter): name = 'spatial_operation' default = 'intersects' @@ -980,39 +1023,39 @@ class SpatialOperation(base.StringOptionParameter): @classmethod def iter_possible(cls): for v in cls.valid: - yield(v) - + yield (v) + def _get_meta_(self): if self.value == 'intersects': ret = 'Geometries touching AND overlapping returned.' 
else: ret = 'A full geometric intersection occurred. Where geometries overlapped, a new geometry was created.' - return(ret) + return (ret) -class TimeRange(base.IterableParameter,base.OcgParameter): +class TimeRange(base.IterableParameter, base.OcgParameter): name = 'time_range' element_type = [datetime.datetime] nullable = True - input_types = [list,tuple] + input_types = [list, tuple] return_type = tuple unique = False default = None - - def validate_all(self,value): + + def validate_all(self, value): if len(value) != 2: msg = 'There must be two elements in the sequence.' - raise(DefinitionValidationError(self,msg)) + raise (DefinitionValidationError(self, msg)) if value[0] > value[1]: msg = 'The second element must be >= the first element.' - raise(DefinitionValidationError(self,msg)) - + raise (DefinitionValidationError(self, msg)) + def _get_meta_(self): if self.value == None: msg = 'No time range subset.' else: msg = 'The following time range subset was applied to all request datasets: {0}'.format(self.value) - return(msg) + return (msg) class TimeRegion(base.OcgParameter): @@ -1020,28 +1063,28 @@ class TimeRegion(base.OcgParameter): nullable = True default = None return_type = dict - input_types = [dict,OrderedDict] - - def _parse_(self,value): + input_types = [dict, OrderedDict] + + def _parse_(self, value): if value != None: - ## add missing keys - for add_key in ['month','year']: + # # add missing keys + for add_key in ['month', 'year']: if add_key not in value: - value.update({add_key:None}) - ## confirm only month and year keys are present + value.update({add_key: None}) + # # confirm only month and year keys are present for key in value.keys(): - if key not in ['month','year']: - raise(DefinitionValidationError(self,'Time region keys must be month and/or year.')) + if key not in ['month', 'year']: + raise (DefinitionValidationError(self, 'Time region keys must be month and/or year.')) if all([i is None for i in value.values()]): value = None - 
return(value) - + return (value) + def _get_meta_(self): if self.value == None: msg = 'No time region subset.' else: msg = 'The following time region subset was applied to all request datasets: {0}'.format(self.value) - return(msg) + return (msg) class VectorWrap(base.BooleanParameter): diff --git a/src/ocgis/api/request/driver/vector.py b/src/ocgis/api/request/driver/vector.py index 549e0b4c1..da6b17485 100644 --- a/src/ocgis/api/request/driver/vector.py +++ b/src/ocgis/api/request/driver/vector.py @@ -9,13 +9,15 @@ class DriverVector(AbstractDriver): extensions = ('.*\.shp',) key = 'vector' - output_formats = [constants.OUTPUT_FORMAT_NUMPY, constants.OUTPUT_FORMAT_NETCDF_UGRID_2D_FLEXIBLE_MESH] + output_formats = [constants.OUTPUT_FORMAT_NUMPY, constants.OUTPUT_FORMAT_NETCDF_UGRID_2D_FLEXIBLE_MESH, + constants.OUTPUT_FORMAT_SHAPEFILE] def close(self, obj): pass def get_crs(self): from ocgis import CoordinateReferenceSystem + return CoordinateReferenceSystem(self.rd.source_metadata['crs']) def get_dimensioned_variables(self): @@ -35,13 +37,15 @@ def inspect(self): def open(self): from ocgis import ShpCabinetIterator + return ShpCabinetIterator(path=self.rd.uri) def _get_field_(self, format_time=None): - #todo: option to pass select_ugid + # todo: option to pass select_ugid #todo: option for time dimension and time subsetting #todo: remove format_time option - there for compatibility with the netCDF driver from ocgis import SpatialDimension + ds = self.open() try: records = list(ds) diff --git a/src/ocgis/api/subset.py b/src/ocgis/api/subset.py index c9bc1bfc4..5285e9e31 100644 --- a/src/ocgis/api/subset.py +++ b/src/ocgis/api/subset.py @@ -26,26 +26,27 @@ class SubsetOperation(object): :param :class:`ocgis.util.logging_ocgis.ProgressOcgOperations` progress: """ - def __init__(self,ops,request_base_size_only=False,progress=None): + def __init__(self, ops, request_base_size_only=False, progress=None): self.ops = ops self._request_base_size_only = 
request_base_size_only self._subset_log = ocgis_lh.get_logger('subset') self._progress = progress or ProgressOcgOperations() - ## create the calculation engine + # # create the calculation engine if self.ops.calc == None or self._request_base_size_only == True: self.cengine = None self._has_multivariate_calculations = False else: - ocgis_lh('initializing calculation engine',self._subset_log,level=logging.DEBUG) + ocgis_lh('initializing calculation engine', self._subset_log, level=logging.DEBUG) self.cengine = OcgCalculationEngine(self.ops.calc_grouping, - self.ops.calc, - raw=self.ops.calc_raw, - agg=self.ops.aggregate, - calc_sample_size=self.ops.calc_sample_size, - progress=self._progress) - self._has_multivariate_calculations = any([self.cengine._check_calculation_members_(self.cengine.funcs,k) \ - for k in [AbstractMultivariateFunction,MultivariateEvalFunction]]) + self.ops.calc, + raw=self.ops.calc_raw, + agg=self.ops.aggregate, + calc_sample_size=self.ops.calc_sample_size, + progress=self._progress) + self._has_multivariate_calculations = any([self.cengine._check_calculation_members_(self.cengine.funcs, k) \ + for k in + [AbstractMultivariateFunction, MultivariateEvalFunction]]) # in the case of netcdf output, geometries must be unioned. this is also true for the case of the selection # geometry being requested as aggregated. 
@@ -66,21 +67,21 @@ def __init__(self,ops,request_base_size_only=False,progress=None): def __iter__(self): ''':rtype: AbstractCollection''' - ocgis_lh('beginning iteration',logger='conv.__iter__',level=logging.DEBUG) + ocgis_lh('beginning iteration', logger='conv.__iter__', level=logging.DEBUG) self._ugid_unique_store = [] self._geom_unique_store = [] - ## simple iterator for serial operations + # # simple iterator for serial operations for coll in self._iter_collections_(): - yield(coll) + yield (coll) def _iter_collections_(self): ''' :yields: :class:`~ocgis.SpatialCollection` ''' - ## multivariate calculations require datasets come in as a list with all - ## variable inputs part of the same sequence. + # # multivariate calculations require datasets come in as a list with all + # # variable inputs part of the same sequence. if self._has_multivariate_calculations: itr_rd = [[r for r in self.ops.dataset.itervalues()]] @@ -90,22 +91,23 @@ def _iter_collections_(self): ## configure the progress object self._progress.n_subsettables = len(itr_rd) - self._progress.n_geometries = get_default_or_apply(self.ops.geom,len,default=1) - self._progress.n_calculations = get_default_or_apply(self.ops.calc,len,default=0) + self._progress.n_geometries = get_default_or_apply(self.ops.geom, len, default=1) + self._progress.n_calculations = get_default_or_apply(self.ops.calc, len, default=0) ## send some messages msg = '{0} dataset collection(s) to process.'.format(self._progress.n_subsettables) - ocgis_lh(msg=msg,logger=self._subset_log) + ocgis_lh(msg=msg, logger=self._subset_log) if self.ops.geom is None: msg = 'Entire spatial domain returned. No selection geometries requested.' 
else: - msg = 'Each data collection will be subsetted by {0} selection geometries.'.format(self._progress.n_geometries) - ocgis_lh(msg=msg,logger=self._subset_log) + msg = 'Each data collection will be subsetted by {0} selection geometries.'.format( + self._progress.n_geometries) + ocgis_lh(msg=msg, logger=self._subset_log) if self._progress.n_calculations == 0: msg = 'No calculations requested.' else: - msg = 'The following calculations will be applied to each data collection: {0}.'.\ - format(', '.join([_['func'] for _ in self.ops.calc])) - ocgis_lh(msg=msg,logger=self._subset_log) + msg = 'The following calculations will be applied to each data collection: {0}.'. \ + format(', '.join([_['func'] for _ in self.ops.calc])) + ocgis_lh(msg=msg, logger=self._subset_log) ## process the data collections for rds in itr_rd: @@ -138,7 +140,7 @@ def _iter_collections_(self): else: tgds = self.ops.optimizations.get('tgds') ## execute the calculations - coll = self.cengine.execute(coll,file_only=self.ops.file_only, + coll = self.cengine.execute(coll, file_only=self.ops.file_only, tgds=tgds) else: ## if there are no calculations, mark progress to indicate @@ -147,10 +149,10 @@ def _iter_collections_(self): ## conversion of groups. 
if self.ops.output_grouping is not None: - raise(NotImplementedError) + raise (NotImplementedError) else: - ocgis_lh('subset yielding',self._subset_log,level=logging.DEBUG) - yield(coll) + ocgis_lh('subset yielding', self._subset_log, level=logging.DEBUG) + yield (coll) def _process_subsettables_(self, rds): """ @@ -159,42 +161,42 @@ def _process_subsettables_(self, rds): :rtype: :class:~`ocgis.SpatialCollection` """ - ocgis_lh(msg='entering _process_geometries_',logger=self._subset_log,level=logging.DEBUG) + ocgis_lh(msg='entering _process_geometries_', logger=self._subset_log, level=logging.DEBUG) - ## select headers + # # select headers and any value keys for keyed output functions + value_keys = None if self.ops.headers is not None: headers = self.ops.headers else: - if self.cengine is not None: - if self._has_multivariate_calculations: - headers = constants.HEADERS_MULTI + if self.ops.melted: + if self.cengine is not None: + if self._has_multivariate_calculations: + headers = constants.HEADERS_MULTI + else: + headers = constants.HEADERS_CALC else: - headers = constants.HEADERS_CALC - else: - headers = constants.HEADERS_RAW - - ## keyed output functions require appending headers regardless. there is - ## only one keyed output function allowed in a request. - if self.cengine is not None: - if self.cengine._check_calculation_members_(self.cengine.funcs,AbstractKeyedOutputFunction): - value_keys = self.cengine.funcs[0]['ref'].structure_dtype['names'] - headers = list(headers) + value_keys - ## remove the 'value' attribute headers as this is replaced by the - ## keyed output names. - try: - headers.remove('value') - ## it may not be in the list because of a user overload - except ValueError: - pass + headers = constants.HEADERS_RAW else: - value_keys = None - else: - value_keys = None + headers = None + + # keyed output functions require appending headers regardless. there is only one keyed output function + # allowed in a request. 
+ if headers is not None: + if self.cengine is not None: + if self.cengine._check_calculation_members_(self.cengine.funcs, AbstractKeyedOutputFunction): + value_keys = self.cengine.funcs[0]['ref'].structure_dtype['names'] + headers = list(headers) + value_keys + # remove the 'value' attribute headers as this is replaced by the keyed output names. + try: + headers.remove('value') + # it may not be in the list because of a user overload + except ValueError: + pass alias = '_'.join([r.name for r in rds]) - ocgis_lh('processing...',self._subset_log,alias=alias,level=logging.DEBUG) - ## return the field object + ocgis_lh('processing...', self._subset_log, alias=alias, level=logging.DEBUG) + # return the field object try: # look for field optimizations if self.ops.optimizations is not None and 'fields' in self.ops.optimizations: @@ -202,7 +204,7 @@ def _process_subsettables_(self, rds): # no field optimizations, extract the target data from the dataset collection else: len_rds = len(rds) - field = [None]*len_rds + field = [None] * len_rds for ii in range(len_rds): rds_element = rds[ii] try: @@ -235,14 +237,14 @@ def _process_subsettables_(self, rds): if len(field) > 1: try: - ## reset the variable uid and let the collection handle its assignment + # # reset the variable uid and let the collection handle its assignment variable_to_add = field[1].variables.first() variable_to_add.uid = None field[0].variables.add_variable(variable_to_add) ## reset the field names and let these be auto-generated for f in field: f._name = None - ## this will fail for optimizations as the fields are already joined + # # this will fail for optimizations as the fields are already joined except VariableInCollectionError: if self.ops.optimizations is not None and 'fields' in self.ops.optimizations: pass @@ -257,19 +259,19 @@ def _process_subsettables_(self, rds): coll = SpatialCollection(headers=headers) coll.add_field(1, None, None, name='_'.join([rd.name for rd in rds])) try: - yield(coll) + 
yield (coll) finally: return else: ocgis_lh(exc=ExtentError(message=str(e)), alias=str([rd.name for rd in rds]), logger=self._subset_log) - ## set iterator based on presence of slice. slice always overrides geometry. + # # set iterator based on presence of slice. slice always overrides geometry. if self.ops.slice is not None: itr = [None] else: itr = [None] if self.ops.geom is None else self.ops.geom - for coll in self._process_geometries_(itr,field,headers,value_keys,alias): - yield(coll) + for coll in self._process_geometries_(itr, field, headers, value_keys, alias): + yield (coll) def _get_initialized_collection_(self, field, headers, value_keys): """ @@ -361,7 +363,7 @@ def _get_spatially_subsetted_field_(self, alias, field, subset_sdim, subset_ugid :raises: AssertionError, ExtentError """ - assert(subset_sdim is not None) + assert (subset_sdim is not None) subset_geom = subset_sdim.single.geom @@ -413,7 +415,8 @@ def _get_spatially_subsetted_field_(self, alias, field, subset_sdim, subset_ugid level=logging.WARN) sfield = None else: - msg = str(e) + ' This typically means the selection geometry falls outside the spatial domain of the target dataset.' + msg = str( + e) + ' This typically means the selection geometry falls outside the spatial domain of the target dataset.' ocgis_lh(exc=ExtentError(message=msg), alias=alias, logger=self._subset_log) # if the subset geometry is unwrapped and the vector wrap option is true, wrap the subset geometry. 
@@ -437,15 +440,15 @@ def _update_subset_geometry_if_point_(self, field, subset_sdim, subset_ugid): """ if type(subset_sdim.single.geom) in [Point, MultiPoint]: - assert(subset_sdim.abstraction == 'point') + assert (subset_sdim.abstraction == 'point') ocgis_lh(logger=self._subset_log, msg='buffering point geometry', level=logging.DEBUG) - subset_geom = subset_sdim.single.geom.buffer(self.ops.search_radius_mult*field.spatial.grid.resolution) + subset_geom = subset_sdim.single.geom.buffer(self.ops.search_radius_mult * field.spatial.grid.resolution) value = np.ma.array([[None]]) value[0, 0] = subset_geom subset_sdim.geom._polygon = SpatialGeometryPolygonDimension(value=value, uid=subset_ugid) # the polygon should be used for subsetting, update the spatial dimension to use this abstraction subset_sdim.abstraction = 'polygon' - assert(subset_sdim.abstraction == 'polygon') + assert (subset_sdim.abstraction == 'polygon') def _check_masking_(self, alias, sfield, subset_ugid): """ @@ -461,7 +464,7 @@ def _check_masking_(self, alias, sfield, subset_ugid): if variable.value.mask.all(): # masked data may be okay... if self.ops.snippet or self.ops.allow_empty or ( - self.ops.output_format == 'numpy' and self.ops.allow_empty): + self.ops.output_format == 'numpy' and self.ops.allow_empty): if self.ops.snippet: ocgis_lh('all masked data encountered but allowed for snippet', self._subset_log, alias=alias, ugid=subset_ugid, level=logging.WARN) @@ -491,7 +494,7 @@ def _get_regridded_field_with_subset_(self, sfield, subset_sdim_for_regridding=N :param bool with_buffer: If ``True``, buffer the geometry used to subset the destination grid. 
""" - #todo: cache spatial operations on regrid destination field + # todo: cache spatial operations on regrid destination field from ocgis.regrid.base import iter_regridded_fields from ocgis.util.spatial.spatial_subset import SpatialSubsetOperation @@ -579,7 +582,8 @@ def _get_regridded_field_with_subset_(self, sfield, subset_sdim_for_regridding=N # subset the output from the regrid operation as masked values may be introduced on the edges if subset_sdim_for_regridding is not None: ss = SpatialSubsetOperation(sfield) - sfield = ss.get_spatial_subset('intersects', subset_sdim_for_regridding, use_spatial_index=env.USE_SPATIAL_INDEX, + sfield = ss.get_spatial_subset('intersects', subset_sdim_for_regridding, + use_spatial_index=env.USE_SPATIAL_INDEX, select_nearest=False) return sfield @@ -663,7 +667,7 @@ def _process_geometries_(self, itr, field, headers, value_keys, alias): # if empty returns are allowed, there be an empty field if sfield is not None: - ## aggregate if requested + # # aggregate if requested if self.ops.aggregate: ocgis_lh('executing spatial average', self._subset_log, alias=alias, ugid=subset_ugid) sfield = sfield.get_spatially_aggregated(new_spatial_uid=subset_ugid) diff --git a/src/ocgis/calc/base.py b/src/ocgis/calc/base.py index 25e7d6d1b..bfd740fc5 100644 --- a/src/ocgis/calc/base.py +++ b/src/ocgis/calc/base.py @@ -70,7 +70,7 @@ def dtype(self): def key(self): str - #: The calculation's long name. + # : The calculation's long name. @abc.abstractproperty def long_name(self): str @@ -225,7 +225,7 @@ def set_variable_metadata(self, variable): pass @classmethod - def validate(self, ops): + def validate(cls, ops): """ Optional method to overload that validates the input :class:`ocgis.OcgOperations`. 
@@ -233,13 +233,9 @@ def validate(self, ops): :raises: :class:`ocgis.exc.DefinitionValidationError` """ - pass - def validate_units(self, *args, **kwargs): """Optional method to overload for units validation at the calculation level.""" - pass - def _add_to_collection_(self, units=None, value=None, parent_variables=None, alias=None, dtype=None, fill_value=None): """ @@ -441,7 +437,7 @@ class AbstractUnivariateFunction(AbstractFunction): """ __metaclass__ = abc.ABCMeta - #: Optional sequence of acceptable string units defintions for input variables. If this is set to ``None``, no unit + # : Optional sequence of acceptable string units defintions for input variables. If this is set to ``None``, no unit #: validation will occur. required_units = None @@ -501,7 +497,7 @@ def parms_definition(self): >>> {'threshold':float,'operation':str,'basis':None} """ dict - + def _format_parms_(self, values): """ :param values: A dictionary containing the parameter values to check. @@ -582,7 +578,7 @@ class AbstractMultivariateFunction(AbstractFunction): """ __metaclass__ = abc.ABCMeta - #: Optional dictionary mapping unit definitions for required variables. + # : Optional dictionary mapping unit definitions for required variables. 
#: For example: required_units = {'tas':'fahrenheit','rhs':'percent'} required_units = None #: If True, time aggregation is external to the calculation and will require running the standard time aggregation diff --git a/src/ocgis/calc/library/index/duration.py b/src/ocgis/calc/library/index/duration.py index fdda1e438..e9e719609 100644 --- a/src/ocgis/calc/library/index/duration.py +++ b/src/ocgis/calc/library/index/duration.py @@ -7,57 +7,57 @@ from ocgis import constants -class Duration(base.AbstractUnivariateSetFunction,base.AbstractParameterizedFunction): +class Duration(base.AbstractUnivariateSetFunction, base.AbstractParameterizedFunction): key = 'duration' - parms_definition = {'threshold':float,'operation':str,'summary':str} - ## output data type will vary by the summary operation (e.g. float for mean, - ## int for max) + parms_definition = {'threshold': float, 'operation': str, 'summary': str} + # output data type will vary by the summary operation (e.g. float for mean, int for max) dtype = constants.NP_FLOAT description = 'Summarizes consecutive occurrences in a sequence where the logical operation returns TRUE. The summary operation is applied to the sequences within a temporal aggregation.' standard_name = 'duration' long_name = 'Duration' - def calculate(self,values,threshold=None,operation=None,summary='mean'): - ''' + def calculate(self, values, threshold=None, operation=None, summary='mean'): + """ :param threshold: The threshold value to use for the logical operation. :type threshold: float :param operation: The logical operation. One of 'gt','gte','lt', or 'lte'. :type operation: str :param summary: The summary operation to apply the durations. One of 'mean','median','std','max', or 'min'. 
:type summary: str - ''' - assert(len(values.shape) == 3) - ## storage array for counts + """ + + assert (len(values.shape) == 3) + # storage array for counts shp_out = values.shape[-2:] - store = np.zeros(shp_out,dtype=self.dtype).flatten() - ## get the summary operation from the numpy library - summary_operation = getattr(np,summary) + store = np.zeros(shp_out, dtype=self.dtype).flatten() + # get the summary operation from the numpy library + summary_operation = getattr(np, summary) - ## find longest sequence for each geometry across time dimension - for ii,fill in enumerate(self._iter_consecutive_(values,threshold,operation)): - ## case of only a singular occurrence + # find longest sequence for each geometry across time dimension + for ii, fill in enumerate(self._iter_consecutive_(values, threshold, operation)): + # case of only a singular occurrence if len(fill) > 1: fill = summary_operation(fill) else: try: fill = fill[0] - ## value is likely masked + # value is likely masked except IndexError: fill = 0 store[ii] = fill - + store.resize(shp_out) - - ## update the output mask. this only applies to geometries so pick the - ## first masked time field - store = np.ma.array(store,mask=values.mask[0,:,:]) - - return(store) - - def _iter_consecutive_(self,values,threshold,operation): - ## time index reference - ref = np.arange(0,values.shape[0]) - ## perform requested logical operation + + # update the output mask. 
this only applies to geometries so pick the + # first masked time field + store = np.ma.array(store, mask=values.mask[0, :, :]) + + return store + + def _iter_consecutive_(self, values, threshold, operation): + # time index reference + ref = np.arange(0, values.shape[0]) + # perform requested logical operation if operation == 'gt': arr = values > threshold elif operation == 'lt': @@ -67,65 +67,66 @@ def _iter_consecutive_(self,values,threshold,operation): elif operation == 'lte': arr = values <= threshold - ## find longest sequence for each geometry across time dimension - for rowidx,colidx in iter_array(values[0,:,:],use_mask=False): - vec = arr[:,rowidx,colidx].reshape(-1) - ## check first if there is a longer series than 1 + # find longest sequence for each geometry across time dimension + for rowidx, colidx in iter_array(values[0, :, :], use_mask=False): + vec = arr[:, rowidx, colidx].reshape(-1) + # check first if there is a longer series than 1 if np.any(np.diff(ref[vec]) == 1): - ## find locations where the values switch + # find locations where the values switch diff_idx = np.diff(vec) if diff_idx.shape != ref.shape: - diff_idx = np.append(diff_idx,[False]) + diff_idx = np.append(diff_idx, [False]) split_idx = ref[diff_idx] + 1 - splits = np.array_split(vec,split_idx) + splits = np.array_split(vec, split_idx) fill = [a.sum() for a in splits if np.all(a)] - ## case of only a singular occurrence + # case of only a singular occurrence elif np.any(vec): fill = [1] - ## case for no occurrence + # case for no occurrence else: fill = [0] - - yield(fill) - - @classmethod - def validate(cls,ops): + + yield (fill) + + @classmethod + def validate(cls, ops): if 'year' not in ops.calc_grouping: msg = 'Calculation grouping must include "year" for duration calculations.' 
- raise(DefinitionValidationError('calc',msg)) + raise DefinitionValidationError('calc', msg) -class FrequencyDuration(base.AbstractKeyedOutputFunction,Duration): +class FrequencyDuration(base.AbstractKeyedOutputFunction, Duration): key = 'freq_duration' description = 'Count the frequency of spell durations within the temporal aggregation.' dtype = object - structure_dtype = OrderedDict([['names',['duration','count']],['formats',[constants.NP_INT,constants.NP_INT]]]) - parms_definition = {'threshold':float,'operation':str} + structure_dtype = OrderedDict([['names', ['duration', 'count']], ['formats', [constants.NP_INT, constants.NP_INT]]]) + parms_definition = {'threshold': float, 'operation': str} standard_name = 'frequency_duration' long_name = 'Frequency Duration' - - def calculate(self,values,threshold=None,operation=None): - ''' + + def calculate(self, values, threshold=None, operation=None): + """ :param threshold: The threshold value to use for the logical operation. :type threshold: float :param operation: The logical operation. One of 'gt','gte','lt', or 'lte'. :type operation: str - ''' + """ + shp_out = values.shape[-2:] - store = np.zeros(shp_out,dtype=object).flatten() - for ii,duration in enumerate(self._iter_consecutive_(values,threshold,operation)): + store = np.zeros(shp_out, dtype=object).flatten() + for ii, duration in enumerate(self._iter_consecutive_(values, threshold, operation)): summary = self._get_summary_(duration) store[ii] = summary store.resize(shp_out) - - ## update the output mask. this only applies to geometries so pick the - ## first masked time field - store = np.ma.array(store,mask=values.mask[0,:,:]) - - return(store) - - def _get_summary_(self,duration): - ''' + + # update the output mask. 
this only applies to geometries so pick the + # first masked time field + store = np.ma.array(store, mask=values.mask[0, :, :]) + + return store + + def _get_summary_(self, duration): + """ :param duration: List of duration elements for frequency target. :type duration: list @@ -133,16 +134,17 @@ def _get_summary_(self,duration): :returns: NumPy structure with dimension equal to the count of unique elements in the `duration` sequence. - ''' + """ + set_duration = set(duration) - ret = np.empty(len(set_duration),dtype=self.structure_dtype) - for ii,sd in enumerate(set_duration): + ret = np.empty(len(set_duration), dtype=self.structure_dtype) + for ii, sd in enumerate(set_duration): idx = np.array(duration) == sd count = idx.sum() ret[ii]['duration'] = sd ret[ii]['count'] = count - return(ret) - + return ret + @classmethod - def validate(cls,ops): + def validate(cls, ops): Duration.validate(ops) diff --git a/src/ocgis/conv/base.py b/src/ocgis/conv/base.py index 2f6173f24..9bb117daa 100644 --- a/src/ocgis/conv/base.py +++ b/src/ocgis/conv/base.py @@ -23,7 +23,8 @@ class AbstractConverter(object): :type colls: sequence of :class:`~ocgis.SpatialCollection` :param str outdir: Path to the output directory. :param str prefix: The string prepended to the output file or directory. - :param :class:~`ocgis.OcgOperations ops: Optional operations definition. This is required for some converters. + :param ops: Optional operations definition. This is required for some converters. + :type ops: :class:`~ocgis.OcgOperations` :param bool add_meta: If False, do not add a source and OCGIS metadata file. :param bool add_auxiliary_files: If False, do not create an output folder. Write only the target ouput file. :param bool overwrite: If True, attempt to overwrite any existing output files. 
@@ -35,6 +36,7 @@ class AbstractConverter(object): _add_ugeom = False # added user geometry in the output folder _add_ugeom_nest = True # nest the user geometry in a shp folder _add_source_meta = True # add a source metadata file + _use_upper_keys = True # if headers should be capitalized def __init__(self, colls, outdir=None, prefix=None, ops=None, add_meta=True, add_auxiliary_files=True, overwrite=False): @@ -59,6 +61,28 @@ def __init__(self, colls, outdir=None, prefix=None, ops=None, add_meta=True, add ocgis_lh('converter initialized', level=logging.DEBUG, logger=self._log) + def get_headers(self, coll): + """ + :type coll: :class:`ocgis.SpatialCollection` + :returns: A list of headers from the first element return from the collection iterator. + :rtype: [str, ...] + """ + + ret = self.get_iter_from_spatial_collection(coll) + ret = ret.next() + ret = ret[1].keys() + return ret + + def get_iter_from_spatial_collection(self, coll): + """ + :type coll: :class:`ocgis.SpatialCollection` + :returns: A generator from the input collection. 
+ :rtype: generator + """ + + itr = coll.get_iter_dict(use_upper_keys=self._use_upper_keys) + return itr + def _build_(self, *args, **kwargs): raise NotImplementedError @@ -129,12 +153,12 @@ def write(self): if coll.meta is None: # convert the collection properties to fiona properties - from fiona_ import FionaConverter + from fiona_ import AbstractFionaConverter fiona_properties = {} archetype_properties = coll.properties.values()[0] for name in archetype_properties.dtype.names: - fiona_properties[name] = FionaConverter.get_field_type( + fiona_properties[name] = AbstractFionaConverter.get_field_type( type(archetype_properties[name][0])) fiona_schema = {'geometry': 'MultiPolygon', 'properties': fiona_properties} @@ -159,7 +183,7 @@ def write(self): r_geom = coll.geoms.values()[0] if isinstance(r_geom, Polygon): r_geom = MultiPolygon([r_geom]) - #see if this geometry is in the unique geometry store + # see if this geometry is in the unique geometry store should_append = self._get_should_append_to_unique_geometry_store_( unique_geometry_store, r_geom, @@ -309,3 +333,26 @@ def validate_ops(cls, ops): :type ops: :class:`ocgis.OcgOperations` :raises: DefinitionValidationError """ + + +class AbstractTabularConverter(AbstractConverter): + """ + .. note:: Accepts all parameters to :class:`~ocgis.conv.base.AbstractConverter`. + + :keyword bool melted: (``=False``) If ``True``, use a melted tabular output format with variable values collected in + a single column. + """ + + def __init__(self, *args, **kwargs): + self.melted = kwargs.pop('melted', None) or False + super(AbstractTabularConverter, self).__init__(*args, **kwargs) + + def get_iter_from_spatial_collection(self, coll): + """ + :type coll: :class:`ocgis.SpatialCollection` + :returns: A generator from the input collection. 
+ :rtype: generator + """ + + itr = coll.get_iter_dict(use_upper_keys=self._use_upper_keys, melted=self.melted) + return itr diff --git a/src/ocgis/conv/csv_.py b/src/ocgis/conv/csv_.py index 8f25c22de..b728fe022 100644 --- a/src/ocgis/conv/csv_.py +++ b/src/ocgis/conv/csv_.py @@ -7,7 +7,7 @@ import fiona from shapely.geometry.geo import mapping -from ocgis.conv.base import AbstractConverter +from ocgis.conv.base import AbstractTabularConverter from ocgis.util.logging_ocgis import ocgis_lh @@ -15,11 +15,11 @@ class OcgDialect(excel): lineterminator = '\n' -class CsvConverter(AbstractConverter): +class CsvConverter(AbstractTabularConverter): _ext = 'csv' def _build_(self, coll): - headers = [h.upper() for h in coll.headers] + headers = self.get_headers(coll) f = open(self.path, 'w') writer = csv.DictWriter(f, headers, dialect=OcgDialect) writer.writeheader() @@ -29,7 +29,7 @@ def _build_(self, coll): def _write_coll_(self, f, coll): writer = f['csv_writer'] - for geom, row in coll.get_iter_dict(use_upper_keys=True): + for geom, row in self.get_iter_from_spatial_collection(coll): writer.writerow(row) def _finalize_(self, f): @@ -84,37 +84,43 @@ def _write_coll_(self, f, coll): rstore = self._ugid_gid_store is_aggregated = self.ops.aggregate - for geom, row in coll.get_iter_dict(use_upper_keys=True): + for geom, row in self.get_iter_from_spatial_collection(coll): writer.writerow(row) - if not is_aggregated: - did, gid, ugid = row['DID'], row['GID'], row['UGID'] - try: - if gid in rstore[did][ugid]: - continue - else: - raise KeyError - except KeyError: - if did not in rstore: - rstore[did] = {} - if ugid not in rstore[did]: - rstore[did][ugid] = [] - if gid not in rstore[did][ugid]: - rstore[did][ugid].append(gid) - - # for multivariate calculation outputs the dataset identifier is None. 
- try: - converted_did = int(did) - except TypeError: - converted_did = None - - feature = {'properties': {'GID': int(gid), 'UGID': int(ugid), 'DID': converted_did}, - 'geometry': mapping(geom)} - try: - file_fiona.write(feature) - except ValueError as e: - if feature['geometry']['type'] != file_fiona.meta['schema']['geometry']: - msg = 'Spatial abstractions do not match. You may need to override "abstraction" and/or "s_abstraction"' - msg = '{0}. Original error message from Fiona is "ValueError({1})".'.format(msg, e.message) - raise ValueError(msg) - else: - raise + + if not is_aggregated: + for ugid, field_dict in coll.iteritems(): + for field in field_dict.itervalues(): + did = field.uid + for _, _, geom, gid in field.spatial.get_geom_iter(): + # did, gid, ugid = row['DID'], row['GID'], row['UGID'] + try: + if gid in rstore[did][ugid]: + continue + else: + raise KeyError + except KeyError: + if did not in rstore: + rstore[did] = {} + if ugid not in rstore[did]: + rstore[did][ugid] = [] + if gid not in rstore[did][ugid]: + rstore[did][ugid].append(gid) + + # for multivariate calculation outputs the dataset identifier is None. + try: + converted_did = int(did) + except TypeError: + converted_did = None + + feature = {'properties': {'GID': int(gid), 'UGID': int(ugid), 'DID': converted_did}, + 'geometry': mapping(geom)} + try: + file_fiona.write(feature) + except ValueError as e: + if feature['geometry']['type'] != file_fiona.meta['schema']['geometry']: + msg = 'Spatial abstractions do not match. You may need to override "abstraction" and/or "s_abstraction"' + msg = '{0}. 
Original error message from Fiona is "ValueError({1})".'.format(msg, + e.message) + raise ValueError(msg) + else: + raise diff --git a/src/ocgis/conv/fiona_.py b/src/ocgis/conv/fiona_.py index 1e37c97bd..158271c16 100644 --- a/src/ocgis/conv/fiona_.py +++ b/src/ocgis/conv/fiona_.py @@ -1,19 +1,16 @@ import numpy as np from types import NoneType -from collections import OrderedDict import abc import fiona -from shapely.geometry.geo import mapping -from ocgis.conv.base import AbstractConverter +from ocgis.conv.base import AbstractTabularConverter import datetime -from ocgis.util.logging_ocgis import ocgis_lh -class FionaConverter(AbstractConverter): +class AbstractFionaConverter(AbstractTabularConverter): __metaclass__ = abc.ABCMeta - + _add_ugeom = True _add_ugeom_nest = False _fiona_conversion = {np.int32: int, @@ -93,8 +90,8 @@ def _finalize_(self, f): :param dict f: A dictionary containing file-level metadata and potentially the file object itself. """ - f['fiona_object'].close() - + f['fobject'].close() + def _build_(self, coll): """ :param coll: An archetypical spatial collection that will be written to file. @@ -103,66 +100,15 @@ def _build_(self, coll): :rtype: dict """ - fiona_conversion = {} - - # pull the fiona schema properties together by mapping fiona types to the data types of the first row of the - # output data file - archetype_field = coll._archetype_field - - try: - crs = archetype_field.spatial.crs - fiona_crs = crs.value - except AttributeError: - if crs is None: - raise ValueError('"crs" is None. 
A coordinate system is required for writing to Fiona output.') - else: - raise + field = coll.first().values()[0] + ugid = 1 if self.melted is True else None + arch = field.get_iter(melted=self.melted, use_upper_keys=self._use_upper_keys, headers=coll.headers, + ugid=ugid).next() + fdict = field.get_fiona_dict(field, arch[1]) + fdict['fobject'] = fiona.open(self.path, driver=self._driver, schema=fdict['schema'], crs=fdict['crs'], + mode='w') + return fdict - geom, arch_row = coll.get_iter_dict().next() - fiona_properties = OrderedDict() - for header in coll.headers: - fiona_field_type = self.get_field_type(type(arch_row[header]), key=header, - fiona_conversion=fiona_conversion) - fiona_properties.update({header.upper(): fiona_field_type}) - - # we always want to convert the value. if the data is masked, it comes through as a float when unmasked data is - # in fact a numpy data type. however, this should only occur if 'value' is in the output headers! - if 'value' in coll.headers and 'value' not in fiona_conversion: - value_dtype = archetype_field.variables.values()[0].value.dtype - try: - to_update = self._fiona_conversion[value_dtype] - # may have to do type comparisons - except KeyError as e: - to_update = None - for k, v in self._fiona_conversion.iteritems(): - if value_dtype == k: - to_update = v - break - if to_update is None: - ocgis_lh(exc=e, logger='fiona_') - fiona_conversion.update({'value': to_update}) - - # polygon geometry types are always converted to multipolygons to avoid later collections having multipolygon - # geometries. - geometry_type = archetype_field.spatial.abstraction_geometry.geom_type - if geometry_type == 'Polygon': - geometry_type = 'MultiPolygon' - - fiona_schema = {'geometry': geometry_type, - 'properties': fiona_properties} - - # if there is no data for a header, it may be empty. in this case, the value comes through as none and it should - # be replaced with bool. 
- for k, v in fiona_schema['properties'].iteritems(): - if v is None: - fiona_schema['properties'][k] = 'str:1' - - fiona_object = fiona.open(self.path, 'w', driver=self._driver, crs=fiona_crs, schema=fiona_schema) - - ret = {'fiona_object': fiona_object, 'fiona_conversion': fiona_conversion} - - return ret - def _write_coll_(self, f, coll): """ Write a spatial collection using file information from ``f``. @@ -173,17 +119,19 @@ def _write_coll_(self, f, coll): :type coll: :class:`~ocgis.SpatialCollection` """ - fiona_object = f['fiona_object'] - for geom, properties in coll.get_iter_dict(use_upper_keys=True, conversion_map=f['fiona_conversion']): - to_write = {'geometry': mapping(geom), 'properties': properties} - fiona_object.write(to_write) + for ugid, field_dict in coll.iteritems(): + ugid = ugid if self.melted is True else None + for field in field_dict.itervalues(): + fobject = f['fobject'] + field.write_fiona(melted=self.melted, fobject=fobject, use_upper_keys=self._use_upper_keys, + headers=coll.headers, ugid=ugid) -class ShpConverter(FionaConverter): +class ShpConverter(AbstractFionaConverter): _ext = 'shp' _driver = 'ESRI Shapefile' -class GeoJsonConverter(FionaConverter): +class GeoJsonConverter(AbstractFionaConverter): _ext = 'json' _driver = 'GeoJSON' diff --git a/src/ocgis/interface/base/dimension/base.py b/src/ocgis/interface/base/dimension/base.py index f84a39a2e..cbd6e15f3 100644 --- a/src/ocgis/interface/base/dimension/base.py +++ b/src/ocgis/interface/base/dimension/base.py @@ -6,7 +6,7 @@ from ocgis import constants from ocgis.constants import NAME_BOUNDS_DIMENSION_LOWER, NAME_BOUNDS_DIMENSION_UPPER, OCGIS_BOUNDS -from ocgis.util.helpers import get_none_or_1d, get_none_or_2d, get_none_or_slice,\ +from ocgis.util.helpers import get_none_or_1d, get_none_or_2d, get_none_or_slice, \ get_formatted_slice, get_bounds_from_1d from ocgis.exc import EmptySubsetError, ResolutionError, BoundsAlreadyAvailableError from ocgis.interface.base.variable import 
AbstractValueVariable, AbstractSourcedVariable @@ -95,7 +95,6 @@ def name_value(self, value): class AbstractUidDimension(AbstractDimension): - def __init__(self, *args, **kwargs): self.uid = kwargs.pop('uid', None) self.name_uid = kwargs.pop('name_uid', None) @@ -144,7 +143,8 @@ def __init__(self, *args, **kwds): try: assert (key in kwds_all) except AssertionError: - raise ValueError('"{0}" is not a valid keyword argument for "{1}".'.format(key, self.__class__.__name__)) + raise ValueError( + '"{0}" is not a valid keyword argument for "{1}".'.format(key, self.__class__.__name__)) kwds_value = {key: kwds.get(key, None) for key in kwds_value} kwds_uid = {key: kwds.get(key, None) for key in kwds_uid} @@ -156,7 +156,7 @@ def __init__(self, *args, **kwds): class VectorDimension(AbstractSourcedVariable, AbstractUidValueDimension): _attrs_slice = ('uid', '_value', '_src_idx') _ndims = 1 - + def __init__(self, *args, **kwargs): if kwargs.get('value') is None and kwargs.get('data') is None: msg = 'Without a "data" object, "value" is required.' @@ -185,16 +185,16 @@ def __init__(self, *args, **kwargs): if self.conform_units_to is not None: if not self.conform_units_to.equals(self.cfunits): self.cfunits_conform(self.conform_units_to) - + def __len__(self): return self.shape[0] - + @property def bounds(self): # always load the value first. any bounds read from source are set during this process. bounds without values # are meaningless! self.value - + # if no error is encountered, then the bounds should have been set during loading from source. simply return the # value. it will be none, if no bounds were present in the source data. 
return self._bounds @@ -206,7 +206,7 @@ def bounds(self, value): # validate the value if value is not None: self._validate_bounds_() - + @property def extent(self): if self.bounds is None: @@ -242,7 +242,7 @@ def name_bounds_tuple(self, value): value = tuple(value) assert len(value) == 2 self._name_bounds_tuple = value - + @property def resolution(self): if self.bounds is None and self.value.shape[0] < 2: @@ -255,11 +255,11 @@ def resolution(self): res_array = res_bounds[:, 1] - res_bounds[:, 0] ret = np.abs(res_array).mean() return ret - + @property def shape(self): return self.uid.shape - + def cfunits_conform(self, to_units): """ Convert and set value and bounds for the dimension object to new units. @@ -287,15 +287,15 @@ def cfunits_conform(self, to_units): # conform the bounds value AbstractValueVariable.cfunits_conform(self, to_units, value=self.bounds, from_units=from_units) - def get_between(self,lower,upper,return_indices=False,closed=False,use_bounds=True): - assert(lower <= upper) - - ## determine if data bounds are contiguous (if bounds exists for the - ## data). bounds must also have more than one row + def get_between(self, lower, upper, return_indices=False, closed=False, use_bounds=True): + assert (lower <= upper) + + # # determine if data bounds are contiguous (if bounds exists for the + # # data). 
bounds must also have more than one row is_contiguous = False if self.bounds is not None: try: - if len(set(self.bounds[0,:]).intersection(set(self.bounds[1,:]))) > 0: + if len(set(self.bounds[0, :]).intersection(set(self.bounds[1, :]))) > 0: is_contiguous = True except IndexError: ## there is likely not a second row @@ -303,54 +303,54 @@ def get_between(self,lower,upper,return_indices=False,closed=False,use_bounds=Tr pass else: raise - + ## subset operation when bounds are not present if self.bounds is None or use_bounds == False: if closed: - select = np.logical_and(self.value > lower,self.value < upper) + select = np.logical_and(self.value > lower, self.value < upper) else: - select = np.logical_and(self.value >= lower,self.value <= upper) + select = np.logical_and(self.value >= lower, self.value <= upper) ## subset operation in the presence of bounds else: ## determine which bound column contains the minimum - if self.bounds[0,0] <= self.bounds[0,1]: + if self.bounds[0, 0] <= self.bounds[0, 1]: lower_index = 0 upper_index = 1 else: lower_index = 1 upper_index = 0 ## reference the minimum and maximum bounds - bounds_min = self.bounds[:,lower_index] - bounds_max = self.bounds[:,upper_index] - + bounds_min = self.bounds[:, lower_index] + bounds_max = self.bounds[:, upper_index] + ## if closed is True, then we are working on a closed interval and ## are not concerned if the values at the bounds are equivalent. it ## does not matter if the bounds are contiguous. 
if closed: - select_lower = np.logical_or(bounds_min > lower,bounds_max > lower) - select_upper = np.logical_or(bounds_min < upper,bounds_max < upper) + select_lower = np.logical_or(bounds_min > lower, bounds_max > lower) + select_upper = np.logical_or(bounds_min < upper, bounds_max < upper) else: ## if the bounds are contiguous, then preference is given to the ## lower bound to avoid duplicate containers (contiguous bounds ## share a coordinate) if is_contiguous: - select_lower = np.logical_or(bounds_min >= lower,bounds_max > lower) - select_upper = np.logical_or(bounds_min <= upper,bounds_max < upper) + select_lower = np.logical_or(bounds_min >= lower, bounds_max > lower) + select_upper = np.logical_or(bounds_min <= upper, bounds_max < upper) else: - select_lower = np.logical_or(bounds_min >= lower,bounds_max >= lower) - select_upper = np.logical_or(bounds_min <= upper,bounds_max <= upper) - select = np.logical_and(select_lower,select_upper) - + select_lower = np.logical_or(bounds_min >= lower, bounds_max >= lower) + select_upper = np.logical_or(bounds_min <= upper, bounds_max <= upper) + select = np.logical_and(select_lower, select_upper) + if select.any() == False: - raise(EmptySubsetError(origin=self.name)) - + raise (EmptySubsetError(origin=self.name)) + ret = self[select] - + if return_indices: indices = np.arange(select.shape[0]) - ret = (ret,indices[select]) - - return(ret) + ret = (ret, indices[select]) + + return ret def get_iter(self, with_bounds=True): ref_value, ref_bounds = self._get_iter_value_bounds_() @@ -438,42 +438,42 @@ def write_to_netcdf_dataset(self, dataset, unlimited=False, bounds_dimension_nam def _format_private_value_(self, value): value = self._get_none_or_array_(value, masked=False) return value - - def _format_slice_state_(self,state,slc): - state.bounds = get_none_or_slice(state._bounds,(slc,slice(None))) - return(state) - - def _format_src_idx_(self,value): - return(self._get_none_or_array_(value)) - + + def 
_format_slice_state_(self, state, slc): + state.bounds = get_none_or_slice(state._bounds, (slc, slice(None))) + return (state) + + def _format_src_idx_(self, value): + return (self._get_none_or_array_(value)) + def _get_iter_value_bounds_(self): - return(self.value,self.bounds) - + return (self.value, self.bounds) + def _get_uid_(self): if self._value is not None: shp = self._value.shape[0] else: shp = self._src_idx.shape[0] - ret = np.arange(1,shp+1,dtype=constants.NP_INT) + ret = np.arange(1, shp + 1, dtype=constants.NP_INT) ret = np.atleast_1d(ret) - return(ret) - + return (ret) + def _set_value_from_source_(self): if self._value is None: - raise(NotImplementedError) + raise (NotImplementedError) else: self._value = self._value - + def _validate_bounds_(self): - ## bounds must be two-dimensional + # # bounds must be two-dimensional if self._bounds.shape[1] != 2: - raise(ValueError('Bounds array must be two-dimensional.')) - ## bounds and value arrays must have matching data types. if they do + raise (ValueError('Bounds array must be two-dimensional.')) + # # bounds and value arrays must have matching data types. if they do ## not match, attempt to cast the bounds. 
try: - assert(self._bounds.dtype == self._value.dtype) + assert (self._bounds.dtype == self._value.dtype) except AssertionError: try: - self._bounds = np.array(self._bounds,dtype=self._value.dtype) + self._bounds = np.array(self._bounds, dtype=self._value.dtype) except: - raise(ValueError('Value and bounds data types do not match and types could not be casted.')) + raise (ValueError('Value and bounds data types do not match and types could not be casted.')) diff --git a/src/ocgis/interface/base/dimension/spatial.py b/src/ocgis/interface/base/dimension/spatial.py index f04969dcf..609316329 100644 --- a/src/ocgis/interface/base/dimension/spatial.py +++ b/src/ocgis/interface/base/dimension/spatial.py @@ -15,7 +15,7 @@ import base from ocgis.interface.base.crs import CFWGS84, CoordinateReferenceSystem, WGS84 -from ocgis.util.helpers import iter_array, get_formatted_slice, get_reduced_slice, get_trimmed_array_by_mask,\ +from ocgis.util.helpers import iter_array, get_formatted_slice, get_reduced_slice, get_trimmed_array_by_mask, \ get_added_slice, make_poly, set_name_attributes, get_extrapolated_corners_esmf, get_ocgis_corners_from_esmf_corners, \ get_none_or_2d from ocgis import constants, env @@ -45,7 +45,7 @@ class SingleElementRetriever(object): def __init__(self, sdim): try: - assert(sdim.shape == (1, 1)) + assert (sdim.shape == (1, 1)) except AssertionError: raise MultipleElementsFound(sdim) self.sdim = sdim @@ -96,7 +96,7 @@ def __init__(self, *args, **kwargs): kwargs['name'] = kwargs.get('name') or 'spatial' kwargs['name_uid'] = kwargs.get('name_uid') or 'gid' - ## attempt to build the geometry dimension + # # attempt to build the geometry dimension point = kwargs.pop('point', None) polygon = kwargs.pop('polygon', None) geom_kwds = dict(point=point, polygon=polygon) @@ -142,7 +142,7 @@ def grid(self): @grid.setter def grid(self, value): if value is not None: - assert(isinstance(value, SpatialGridDimension)) + assert (isinstance(value, SpatialGridDimension)) 
self._grid = value @property @@ -304,11 +304,12 @@ def from_records(cls, records, crs=None): return sdim def get_clip(self, polygon, return_indices=False, use_spatial_index=True, select_nearest=False): - assert(type(polygon) in (Polygon, MultiPolygon)) + assert (type(polygon) in (Polygon, MultiPolygon)) - ret, slc = self.get_intersects(polygon, return_indices=True, use_spatial_index=use_spatial_index, select_nearest=select_nearest) + ret, slc = self.get_intersects(polygon, return_indices=True, use_spatial_index=use_spatial_index, + select_nearest=select_nearest) - ## clipping with points is okay... + # # clipping with points is okay... if ret.geom.polygon is not None: ref_value = ret.geom.polygon.value else: @@ -319,7 +320,7 @@ def get_clip(self, polygon, return_indices=False, use_spatial_index=True, select if return_indices: ret = (ret, slc) - return(ret) + return (ret) def get_fiona_schema(self): """ @@ -329,11 +330,11 @@ def get_fiona_schema(self): fproperties = OrderedDict() if self.properties is not None: - from ocgis.conv.fiona_ import FionaConverter + from ocgis.conv.fiona_ import AbstractFionaConverter dtype = self.properties.dtype for idx, name in enumerate(dtype.names): - fproperties[name] = FionaConverter.get_field_type(dtype[idx]) + fproperties[name] = AbstractFionaConverter.get_field_type(dtype[idx]) schema = {'geometry': self.abstraction_geometry.geom_type, 'properties': fproperties} return schema @@ -423,7 +424,8 @@ def get_intersects(self, polygon, return_indices=False, use_spatial_index=True, use_spatial_index=use_spatial_index) grid_mask = ret.geom.polygon.value.mask else: - ret._geom._point = ret.geom.point.get_intersects_masked(polygon, use_spatial_index=use_spatial_index) + ret._geom._point = ret.geom.point.get_intersects_masked(polygon, + use_spatial_index=use_spatial_index) grid_mask = ret.geom.point.value.mask assert not self.uid.mask.any() ret.grid.value.unshare_mask() @@ -520,7 +522,7 @@ def unwrap(self): except AttributeError: if self.crs 
is None or self.crs != WGS84(): msg = 'Only WGS84 coordinate systems may be unwrapped.' - raise(SpatialWrappingError(msg)) + raise (SpatialWrappingError(msg)) def update_crs(self, to_crs): """ @@ -584,23 +586,23 @@ def wrap(self): except AttributeError: if self.crs is None or self.crs != WGS84(): msg = 'Only WGS84 coordinate systems may be wrapped.' - raise(SpatialWrappingError(msg)) + raise (SpatialWrappingError(msg)) - def write_fiona(self,path,target='polygon',driver='ESRI Shapefile'): - attr = getattr(self.geom,target) - attr.write_fiona(path,self.crs.value,driver=driver) - return(path) + def write_fiona(self, path, target='polygon', driver='ESRI Shapefile'): + attr = getattr(self.geom, target) + attr.write_fiona(path, self.crs.value, driver=driver) + return (path) - def _format_uid_(self,value): - return(np.atleast_2d(value)) + def _format_uid_(self, value): + return (np.atleast_2d(value)) - def _get_sliced_properties_(self,slc): + def _get_sliced_properties_(self, slc): if self.properties is not None: - ## determine major axis + # # determine major axis major = self.shape.index(max(self.shape)) - return(self.properties[slc[major]]) + return (self.properties[slc[major]]) else: - return(None) + return (None) def _get_uid_(self): if self._geom is not None: @@ -661,13 +663,13 @@ def __init__(self, *args, **kwargs): name_mapping = {self.row: 'yc', self.col: 'xc'} set_name_attributes(name_mapping) - def __getitem__(self,slc): - slc = get_formatted_slice(slc,2) + def __getitem__(self, slc): + slc = get_formatted_slice(slc, 2) uid = self.uid[slc] if self._value is not None: - value = self._value[:,slc[0],slc[1]] + value = self._value[:, slc[0], slc[1]] else: value = None @@ -688,7 +690,7 @@ def __getitem__(self,slc): ret.row = row ret.col = col - return(ret) + return (ret) @property def corners(self): @@ -707,7 +709,7 @@ def corners(self): elif self.row.bounds is None or self.col.bounds is None: pass else: - fill = np.zeros([2]+list(self.shape)+[4], 
dtype=self.row.value.dtype) + fill = np.zeros([2] + list(self.shape) + [4], dtype=self.row.value.dtype) col_bounds = self.col.bounds row_bounds = self.row.bounds for ii, jj in itertools.product(range(self.shape[0]), range(self.shape[1])): @@ -743,7 +745,7 @@ def corners_esmf(self): range_col = range(self.shape[1]) _corners = self.corners for ii, jj in itertools.product(range_row, range_col): - ref = fill[:, ii:ii+2, jj:jj+2] + ref = fill[:, ii:ii + 2, jj:jj + 2] ref[:, 0, 0] = _corners[:, ii, jj, 0] ref[:, 0, 1] = _corners[:, ii, jj, 1] ref[:, 1, 1] = _corners[:, ii, jj, 2] @@ -759,10 +761,10 @@ def extent(self): maxx = self.corners[1].max() maxy = self.corners[0].max() else: - minx = self.value[1,:,:].min() - miny = self.value[0,:,:].min() - maxx = self.value[1,:,:].max() - maxy = self.value[0,:,:].max() + minx = self.value[1, :, :].min() + miny = self.value[0, :, :].min() + maxx = self.value[1, :, :].max() + maxy = self.value[0, :, :].max() else: if self.row.bounds is None: minx = self.col.value.min() @@ -784,14 +786,14 @@ def extent_polygon(self): @property def resolution(self): try: - ret = np.mean([self.row.resolution,self.col.resolution]) + ret = np.mean([self.row.resolution, self.col.resolution]) except AttributeError: - resolution_limit = int(constants.RESOLUTION_LIMIT)/2 - r_value = self.value[:,0:resolution_limit,0:resolution_limit] - rows = np.mean(np.diff(r_value[0,:,:],axis=0)) - cols = np.mean(np.diff(r_value[1,:,:],axis=1)) - ret = np.mean([rows,cols]) - return(ret) + resolution_limit = int(constants.RESOLUTION_LIMIT) / 2 + r_value = self.value[:, 0:resolution_limit, 0:resolution_limit] + rows = np.mean(np.diff(r_value[0, :, :], axis=0)) + cols = np.mean(np.diff(r_value[1, :, :], axis=1)) + ret = np.mean([rows, cols]) + return (ret) @property def shape(self): @@ -802,16 +804,16 @@ def shape(self): ret = (self.uid.shape[0], self.uid.shape[1]) return ret - def get_subset_bbox(self,min_col,min_row,max_col,max_row,return_indices=False,closed=True, + def 
get_subset_bbox(self, min_col, min_row, max_col, max_row, return_indices=False, closed=True, use_bounds=True): - assert(min_row <= max_row) - assert(min_col <= max_col) + assert (min_row <= max_row) + assert (min_col <= max_col) if self.row is None: - r_row = self.value[0,:,:] - real_idx_row = np.arange(0,r_row.shape[0]) - r_col = self.value[1,:,:] - real_idx_col = np.arange(0,r_col.shape[1]) + r_row = self.value[0, :, :] + real_idx_row = np.arange(0, r_row.shape[0]) + r_col = self.value[1, :, :] + real_idx_col = np.arange(0, r_col.shape[1]) if closed: lower_row = r_row > min_row @@ -824,44 +826,46 @@ def get_subset_bbox(self,min_col,min_row,max_col,max_row,return_indices=False,cl lower_col = r_col >= min_col upper_col = r_col <= max_col - idx_row = np.logical_and(lower_row,upper_row) - idx_col = np.logical_and(lower_col,upper_col) + idx_row = np.logical_and(lower_row, upper_row) + idx_col = np.logical_and(lower_col, upper_col) - keep_row = np.any(idx_row,axis=1) - keep_col = np.any(idx_col,axis=0) + keep_row = np.any(idx_row, axis=1) + keep_col = np.any(idx_col, axis=0) - ## slice reduction may fail due to empty bounding box returns. catch - ## these value errors and repurpose as subset errors. + # # slice reduction may fail due to empty bounding box returns. catch + # # these value errors and repurpose as subset errors. 
try: row_slc = get_reduced_slice(real_idx_row[keep_row]) except ValueError: if real_idx_row[keep_row].shape[0] == 0: - raise(EmptySubsetError(origin='Y')) + raise (EmptySubsetError(origin='Y')) else: raise try: col_slc = get_reduced_slice(real_idx_col[keep_col]) except ValueError: if real_idx_col[keep_col].shape[0] == 0: - raise(EmptySubsetError(origin='X')) + raise (EmptySubsetError(origin='X')) else: raise - new_mask = np.invert(np.logical_or(idx_row,idx_col)[row_slc,col_slc]) + new_mask = np.invert(np.logical_or(idx_row, idx_col)[row_slc, col_slc]) else: - new_row,row_indices = self.row.get_between(min_row,max_row,return_indices=True,closed=closed,use_bounds=use_bounds) - new_col,col_indices = self.col.get_between(min_col,max_col,return_indices=True,closed=closed,use_bounds=use_bounds) + new_row, row_indices = self.row.get_between(min_row, max_row, return_indices=True, closed=closed, + use_bounds=use_bounds) + new_col, col_indices = self.col.get_between(min_col, max_col, return_indices=True, closed=closed, + use_bounds=use_bounds) row_slc = get_reduced_slice(row_indices) col_slc = get_reduced_slice(col_indices) - ret = self[row_slc,col_slc] + ret = self[row_slc, col_slc] try: - grid_mask = np.zeros((2,new_mask.shape[0],new_mask.shape[1]),dtype=bool) - grid_mask[:,:,:] = new_mask - ret._value = np.ma.array(ret._value,mask=grid_mask) - ret.uid = np.ma.array(ret.uid,mask=new_mask) + grid_mask = np.zeros((2, new_mask.shape[0], new_mask.shape[1]), dtype=bool) + grid_mask[:, :, :] = new_mask + ret._value = np.ma.array(ret._value, mask=grid_mask) + ret.uid = np.ma.array(ret.uid, mask=new_mask) except UnboundLocalError: if self.row is not None: pass @@ -869,9 +873,9 @@ def get_subset_bbox(self,min_col,min_row,max_col,max_row,return_indices=False,cl raise if return_indices: - ret = (ret,(row_slc,col_slc)) + ret = (ret, (row_slc, col_slc)) - return(ret) + return (ret) def set_extrapolated_corners(self): """ @@ -885,7 +889,7 @@ def set_extrapolated_corners(self): else: 
data = self.value.data corners_esmf = get_extrapolated_corners_esmf(data[0]) - corners_esmf.resize(*list([2]+list(corners_esmf.shape))) + corners_esmf.resize(*list([2] + list(corners_esmf.shape))) corners_esmf[1, :, :] = get_extrapolated_corners_esmf(data[1]) corners = get_ocgis_corners_from_esmf_corners(corners_esmf) @@ -1032,7 +1036,7 @@ def shape(self): ret = self.polygon.shape else: ret = self.point.shape - return(ret) + return (ret) def get_highest_order_abstraction(self): """ @@ -1192,20 +1196,20 @@ def update_crs(self, to_crs, from_crs): ogr_geom.TransformTo(to_sr) r_value[idx_row, idx_col] = r_loads(ogr_geom.ExportToWkb()) - def write_fiona(self,path,crs,driver='ESRI Shapefile'): - schema = {'geometry':self.geom_type, - 'properties':{'UGID':'int'}} + def write_fiona(self, path, crs, driver='ESRI Shapefile'): + schema = {'geometry': self.geom_type, + 'properties': {'UGID': 'int'}} ref_prep = self._write_fiona_prep_geom_ ref_uid = self.uid - with fiona.open(path,'w',driver=driver,crs=crs,schema=schema) as f: - for (ii,jj),geom in iter_array(self.value,return_value=True): + with fiona.open(path, 'w', driver=driver, crs=crs, schema=schema) as f: + for (ii, jj), geom in iter_array(self.value, return_value=True): geom = ref_prep(geom) - uid = int(ref_uid[ii,jj]) - feature = {'properties':{'UGID':uid},'geometry':mapping(geom)} + uid = int(ref_uid[ii, jj]) + feature = {'properties': {'UGID': uid}, 'geometry': mapping(geom)} f.write(feature) - return(path) + return (path) @staticmethod def _write_fiona_prep_geom_(geom): @@ -1290,7 +1294,7 @@ def area(self): @property def weights(self): - return self.area/self.area.max() + return self.area / self.area.max() def write_to_netcdf_dataset_ugrid(self, dataset): """ diff --git a/src/ocgis/interface/base/field.py b/src/ocgis/interface/base/field.py index 0e4d6f851..8cc298959 100644 --- a/src/ocgis/interface/base/field.py +++ b/src/ocgis/interface/base/field.py @@ -11,6 +11,7 @@ from shapely.geometry.multipolygon import 
MultiPolygon from shapely.geometry.point import Point +from ocgis import constants from ocgis.constants import NAME_DIMENSION_REALIZATION, NAME_DIMENSION_LEVEL, NAME_DIMENSION_TEMPORAL, \ NAME_UID_DIMENSION_TEMPORAL, NAME_UID_DIMENSION_LEVEL, NAME_UID_DIMENSION_REALIZATION, NAME_UID_FIELD from ocgis.interface.base.attributes import Attributes @@ -155,13 +156,14 @@ def variables(self, value): if v._value is not None: assert v._value.shape == self.shape - def as_spatial_collection(self): + def as_spatial_collection(self, **kwargs): """ + :param kwargs: Keyword arguments for creating the :class:`~ocgis.SpatialCollection`. :returns: A spatial collection containing the field. :rtype: :class:`~ocgis.SpatialCollection` """ - coll = SpatialCollection() + coll = SpatialCollection(**kwargs) # if there are no vector dimensions, there is no need for a melted representation coll.add_field(1, None, self, properties=self.spatial.properties, name=self.name) return coll @@ -184,16 +186,24 @@ def get_intersects(self, polygon, use_spatial_index=True, select_nearest=False): return(self._get_spatial_operation_('get_intersects', polygon, use_spatial_index=use_spatial_index, select_nearest=select_nearest)) - def get_iter(self, add_masked_value=True, value_keys=None, melted=True): + def get_iter(self, add_masked_value=True, value_keys=None, melted=True, use_upper_keys=False, headers=None, + ugid=None): """ :param bool add_masked_value: If ``False``, do not yield masked variable values. :param value_keys: A sequence of keys if the variable is a structure array. :type value_keys: [str, ...] :param bool melted: If ``True``, do not use a melted format but place variable values as columns. - :returns: A dictionary containing variable values. - :rtype: dict + :param bool use_upper_keys: If ``True``, capitalize the keys of the yielded dictionary. + :param headers: A sequence of strings to limit the output data dictionary. + :type headers: [str, ...] 
+ :param int ugid: If provided, insert a unique key for the selection geometry. + :returns: A tuple with the first element being a shapely geometry object and the second element a + dictionary. + :rtype: tuple(:class:`shapely.geometry.base.BaseGeometry`, dict) """ + id_selection_geometry = constants.HEADERS.ID_SELECTION_GEOMETRY + def _get_dimension_iterator_1d_(target): attr = getattr(self, target) if attr is None: @@ -206,6 +216,15 @@ def _get_dimension_iterator_1d_(target): ret = attr.get_iter(with_bounds=with_bounds) return ret + def _process_yield_(g, dict_to_yld): + if ugid is not None: + dict_to_yld[id_selection_geometry] = ugid + if headers is not None: + dict_to_yld = OrderedDict([(k, dict_to_yld.get(k)) for k in headers]) + if use_upper_keys: + dict_to_yld = OrderedDict([(k.upper(), v) for k, v in dict_to_yld.iteritems()]) + return g, dict_to_yld + is_masked = np.ma.is_masked # value keys occur when the value array is in fact a structured array with field definitions. this occurs with @@ -240,7 +259,6 @@ def _get_dimension_iterator_1d_(target): to_yld.update(l) # add geometries to the output - to_yld['geom'] = geom to_yld[r_gid_name] = gid # the target value is a structure array, multiple value elements need to be added. these outputs do @@ -254,10 +272,10 @@ def _get_dimension_iterator_1d_(target): # attempt to access the data directly. masked determination is done above. 
except ValueError: to_yld[vk] = ref_idx.data[vk][ii] - yield (to_yld) + yield _process_yield_(geom, to_yld) else: to_yld['value'] = ref_idx - yield to_yld + yield _process_yield_(geom, to_yld) else: iters = map(_get_dimension_iterator_1d_, ['realization', 'temporal', 'level']) iters.append(self.spatial.get_geom_iter()) @@ -266,14 +284,13 @@ def _get_dimension_iterator_1d_(target): yld = OrderedDict() for element in [rlz, t, l]: yld.update(element) - yld['geom'] = geom for variable_alias, variable in self.variables.iteritems(): ref_idx = variable.value[ridx, tidx, lidx, sridx, scidx] # determine if the data is masked if is_masked(ref_idx): ref_idx = variable.value.fill_value yld[variable_alias] = ref_idx - yield yld + yield _process_yield_(geom, yld) def get_shallow_copy(self): return copy(self) @@ -378,7 +395,8 @@ def iter(self): yld[variable.alias] = value yield yld - def write_fiona(self, path=None, driver='ESRI Shapefile', melted=False, fobject=None): + def write_fiona(self, path=None, driver='ESRI Shapefile', melted=False, fobject=None, use_upper_keys=False, + headers=None, ugid=None): """ Write a ``fiona``-enabled format. This may go to a newly created location specified by ``path`` or an open collection object set by ``fobject``. @@ -388,39 +406,76 @@ def write_fiona(self, path=None, driver='ESRI Shapefile', melted=False, fobject= :param bool melted: If ``True``, use a melted iterator. :param fobject: The collection object to write to. This will overload ``path``. :type fobject: :class:`fiona.collection.Collection` + :param use_upper_keys: See :meth:`ocgis.interface.base.field.Field.get_iter`. + :param headers: See :meth:`ocgis.interface.base.field.Field.get_iter`. + :param ugid: See :meth:`ocgis.interface.base.field.Field.get_iter`. 
""" + # if a collection is passed in, do not close it when done writing + should_close = True if fobject is None else False + build = True - geom_type = self.spatial.abstraction_geometry.geom_type try: - for row in self.get_iter(melted=melted): - geom = row.pop('geom') + for geom, row in self.get_iter(melted=melted, use_upper_keys=use_upper_keys, headers=headers, ugid=ugid): for k, v in row.iteritems(): try: row[k] = v.tolist() except AttributeError: continue if build: - from ocgis.conv.fiona_ import FionaConverter - fproperties = OrderedDict() - fconvert = {} - for k, v in row.iteritems(): - ftype = FionaConverter.get_field_type(type(v)) - fproperties[k] = 'int' if ftype is None else ftype - if ftype == 'str': - fconvert[k] = str + fdict = self.get_fiona_dict(self, row) + fconvert = fdict['fconvert'] if fobject is None: - schema = {'geometry': geom_type, 'properties': fproperties} - fobject = fiona.open(path, driver=driver, schema=schema, crs=self.spatial.crs.value, mode='w') + fobject = fiona.open(path, driver=driver, schema=fdict['schema'], crs=fdict['crs'], mode='w') build = False for k, v in fconvert.iteritems(): row[k] = v(row[k]) frow = {'properties': row, 'geometry': mapping(geom)} fobject.write(frow) finally: - if fobject is not None: + if should_close and fobject is not None: fobject.close() + @staticmethod + def get_fiona_dict(field, arch): + """ + :param field: The field object. + :type field: :class:`ocgis.Field` + :param dict arch: An archetype data dictionary. + :returns: A dictionary with fiona types and conversion mappings. 
+ :rtype: dict + """ + + arch = arch.copy() + for k, v in arch.iteritems(): + try: + arch[k] = v.tolist() + except AttributeError: + continue + + from ocgis.conv.fiona_ import AbstractFionaConverter + + ret = {} + fproperties = OrderedDict() + fconvert = {} + for k, v in arch.iteritems(): + ftype = AbstractFionaConverter.get_field_type(type(v)) + fproperties[k] = 'int' if ftype is None else ftype + if ftype == 'str': + fconvert[k] = str + schema = {'geometry': field.spatial.abstraction_geometry.geom_type, 'properties': fproperties} + + try: + ret['crs'] = field.spatial.crs.value + except AttributeError: + if field.spatial.crs is None: + msg = 'A coordinate system is required when writing to fiona formats.' + raise ValueError(msg) + + ret['schema'] = schema + ret['fconvert'] = fconvert + return ret + def write_to_netcdf_dataset(self, dataset, file_only=False, **kwargs): """ Write the field object to an open netCDF dataset object. diff --git a/src/ocgis/interface/base/variable.py b/src/ocgis/interface/base/variable.py index 9adb087fb..d4f258aa8 100644 --- a/src/ocgis/interface/base/variable.py +++ b/src/ocgis/interface/base/variable.py @@ -28,7 +28,7 @@ class AbstractValueVariable(Attributes): __metaclass__ = abc.ABCMeta _value = None _conform_units_to = None - + def __init__(self, value=None, units=None, dtype=None, fill_value=None, name=None, conform_units_to=None, alias=None, attrs=None): self.name = name @@ -48,6 +48,7 @@ def __init__(self, value=None, units=None, dtype=None, fill_value=None, name=Non def cfunits(self): # the cfunits-python module is not a dependency of ocgis and should be imported on demand from cfunits import Units + return Units(self.units) def _conform_units_to_getter_(self): @@ -56,6 +57,7 @@ def _conform_units_to_getter_(self): def _conform_units_to_setter_(self, value): if value is not None: from cfunits import Units + if not isinstance(value, Units): value = Units(value) self._conform_units_to = value @@ -72,22 +74,22 @@ def dtype(self): 
else: ret = self._dtype return ret - + @property def fill_value(self): if self._fill_value is None: if self._value is None: - raise(ValueError('fill_value not specified at object initialization and value has not been loaded.')) + raise (ValueError('fill_value not specified at object initialization and value has not been loaded.')) else: ret = self.value.fill_value else: ret = self._fill_value return ret - + @property def shape(self): return self.value.shape - + @property def value(self): if self._value is None: @@ -158,7 +160,7 @@ def cfunits_conform(self, to_units, value=None, from_units=None): class AbstractSourcedVariable(object): __metaclass__ = abc.ABCMeta - + def __init__(self, data, src_idx): self._data = data self._src_idx = src_idx @@ -170,7 +172,7 @@ def _src_idx(self): @_src_idx.setter def _src_idx(self, value): self.__src_idx = self._format_src_idx_(value) - + def _format_src_idx_(self, value): return np.array(value) @@ -178,7 +180,7 @@ def _get_value_(self): if self._value is None: self._set_value_from_source_() return self._value - + @abc.abstractmethod def _set_value_from_source_(self): """Should set ``_value`` using the data source and index.""" @@ -225,8 +227,17 @@ def __init__(self, name=None, alias=None, units=None, meta=None, uid=None, value def __getitem__(self, slc): ret = copy(self) if ret._value is not None: + # store the previous number of dimension to ensure this does not change following a slice + prev_ndim = ret._value.ndim ret._value = self._value[slc] - return (ret) + if prev_ndim != ret._value.ndim: + # if the number of dimensions has changed but they are all singleton, add one back in. + if all([xx == 1 for xx in ret._value.shape]): + ret._value = ret._value.reshape(*[1] * prev_ndim) + else: + msg = 'Array has changed shaped following slicing.' 
+ raise IndexError(msg) + return ret def __str__(self): units = '{0}' if self.units is None else '"{0}"' @@ -300,26 +311,25 @@ def _set_value_from_source_(self): self._value = self._field._get_value_from_source_(self._data, self.name) # ensure the new value has the geometry masked applied self._field._set_new_value_mask_(self._field, self._field.spatial.get_mask()) - - + + class VariableCollection(AbstractCollection): - def __init__(self, variables=None): super(VariableCollection, self).__init__() if variables is not None: for variable in get_iter(variables, dtype=Variable): self.add_variable(variable) - + def add_variable(self, variable, assign_new_uid=False): """ :param :class:`ocgis.interface.base.variable.Variable` : :param bool assign_new_uid: If ``True``, assign a new unique identifier to the incoming variable. This will modify the variable inplace. """ - assert(isinstance(variable, Variable)) + assert (isinstance(variable, Variable)) try: - assert(variable.alias not in self) + assert (variable.alias not in self) except AssertionError: raise VariableInCollectionError(variable) @@ -329,10 +339,10 @@ def add_variable(self, variable, assign_new_uid=False): if variable.uid is None: variable.uid = self._storage_id_next else: - assert(variable.uid not in self._storage_id) + assert (variable.uid not in self._storage_id) self._storage_id.append(variable.uid) self.update({variable.alias: variable}) - + def get_sliced_variables(self, slc): variables = [v.__getitem__(slc) for v in self.itervalues()] ret = VariableCollection(variables=variables) diff --git a/src/ocgis/test/base.py b/src/ocgis/test/base.py index 4581ee2b7..7f72706b1 100644 --- a/src/ocgis/test/base.py +++ b/src/ocgis/test/base.py @@ -90,7 +90,7 @@ def assertDictEqual(self, d1, d2, msg=None): msg = 'Issue with key "{0}". 
Values are {1}.'.format(k, (v, d2[k])) except KeyError: msg = 'The key "{0}" was not found in the second dictionary.'.format(k) - raise KeyError(msg) + raise AssertionError(msg) self.assertEqual(v, d2[k], msg=msg) self.assertEqual(set(d1.keys()), set(d2.keys())) @@ -198,7 +198,7 @@ def assertNcEqual(self, uri_src, uri_dest, check_types=True, close=False, metada continue # notify if an attribute is missing - msg = 'The attribute "{0}" is not found on the variable "{1}" for URI "{2}".'\ + msg = 'The attribute "{0}" is not found on the variable "{1}" for URI "{2}".' \ .format(k, dvar._name, uri_dest) raise AttributeError(msg) try: @@ -217,7 +217,7 @@ def assertNcEqual(self, uri_src, uri_dest, check_types=True, close=False, metada continue # notify if an attribute is missing - msg = 'The attribute "{0}" is not found on the variable "{1}" for URI "{2}".'\ + msg = 'The attribute "{0}" is not found on the variable "{1}" for URI "{2}".' \ .format(k, var._name, uri_src) raise AttributeError(msg) try: @@ -345,13 +345,13 @@ def get_field(self, nlevel=None, nrlz=None, crs=None): nlevel = 1 level = None else: - level = VectorDimension(value=range(1, nlevel+1), name='level') + level = VectorDimension(value=range(1, nlevel + 1), name='level') if nrlz is None: nrlz = 1 realization = None else: - realization = VectorDimension(value=range(1, nrlz+1), name='realization') + realization = VectorDimension(value=range(1, nrlz + 1), name='realization') variable = Variable(name='foo', value=np.random.rand(nrlz, 2, nlevel, 2, 2)) field = Field(spatial=sdim, temporal=temporal, variables=variable, level=level, realization=realization) @@ -424,30 +424,58 @@ def get_tst_data(): test_data = TestData() test_data.update(['nc', 'CMIP3'], 'Tavg', 'Extraction_Tavg.nc', key='cmip3_extraction') - test_data.update(['nc', 'CanCM4'], 'rhs', 'rhs_day_CanCM4_decadal2010_r2i1p1_20110101-20201231.nc', key='cancm4_rhs') - test_data.update(['nc', 'CanCM4'], 'rhsmax', 
'rhsmax_day_CanCM4_decadal2010_r2i1p1_20110101-20201231.nc', key='cancm4_rhsmax') - test_data.update(['nc', 'CanCM4'], 'tas', 'tas_day_CanCM4_decadal2000_r2i1p1_20010101-20101231.nc', key='cancm4_tas') - test_data.update(['nc', 'CanCM4'], 'tasmax', 'tasmax_day_CanCM4_decadal2000_r2i1p1_20010101-20101231.nc', key='cancm4_tasmax_2001') - test_data.update(['nc', 'CanCM4'], 'tasmax', 'tasmax_day_CanCM4_decadal2010_r2i1p1_20110101-20201231.nc', key='cancm4_tasmax_2011') - test_data.update(['nc', 'CanCM4'], 'tasmin', 'tasmin_day_CanCM4_decadal2000_r2i1p1_20010101-20101231.nc', key='cancm4_tasmin_2001') + test_data.update(['nc', 'CanCM4'], 'rhs', 'rhs_day_CanCM4_decadal2010_r2i1p1_20110101-20201231.nc', + key='cancm4_rhs') + test_data.update(['nc', 'CanCM4'], 'rhsmax', 'rhsmax_day_CanCM4_decadal2010_r2i1p1_20110101-20201231.nc', + key='cancm4_rhsmax') + test_data.update(['nc', 'CanCM4'], 'tas', 'tas_day_CanCM4_decadal2000_r2i1p1_20010101-20101231.nc', + key='cancm4_tas') + test_data.update(['nc', 'CanCM4'], 'tasmax', 'tasmax_day_CanCM4_decadal2000_r2i1p1_20010101-20101231.nc', + key='cancm4_tasmax_2001') + test_data.update(['nc', 'CanCM4'], 'tasmax', 'tasmax_day_CanCM4_decadal2010_r2i1p1_20110101-20201231.nc', + key='cancm4_tasmax_2011') + test_data.update(['nc', 'CanCM4'], 'tasmin', 'tasmin_day_CanCM4_decadal2000_r2i1p1_20010101-20101231.nc', + key='cancm4_tasmin_2001') test_data.update(['nc', 'daymet'], 'tmax', 'tmax.nc', key='daymet_tmax') - test_data.update(['nc', 'maurer', '2010'], 'pr', ['nldas_met_update.obs.daily.pr.1990.nc', 'nldas_met_update.obs.daily.pr.1991.nc'], key='maurer_2010_pr') - test_data.update(['nc', 'maurer', '2010'], 'tas', ['nldas_met_update.obs.daily.tas.1990.nc', 'nldas_met_update.obs.daily.tas.1991.nc'], key='maurer_2010_tas') - test_data.update(['nc', 'maurer', '2010'], 'tasmax', ['nldas_met_update.obs.daily.tasmax.1990.nc', 'nldas_met_update.obs.daily.tasmax.1991.nc'], key='maurer_2010_tasmax') - test_data.update(['nc', 'maurer', '2010'], 
'tasmin', ['nldas_met_update.obs.daily.tasmin.1990.nc', 'nldas_met_update.obs.daily.tasmin.1991.nc'], key='maurer_2010_tasmin') - test_data.update(['nc', 'maurer', '2010-concatenated'], 'tasmax', 'Maurer02new_OBS_tasmax_daily.1971-2000.nc', key='maurer_2010_concatenated_tasmax') - test_data.update(['nc', 'maurer', '2010-concatenated'], 'tasmin', 'Maurer02new_OBS_tasmin_daily.1971-2000.nc', key='maurer_2010_concatenated_tasmin') - test_data.update(['nc', 'maurer', '2010-concatenated'], 'tas', 'Maurer02new_OBS_tas_daily.1971-2000.nc', key='maurer_2010_concatenated_tas') - test_data.update(['nc', 'maurer', '2010-concatenated'], 'pr', 'Maurer02new_OBS_pr_daily.1971-2000.nc', key='maurer_2010_concatenated_pr') - test_data.update(['nc', 'maurer', 'bcca'], 'tasmax', 'gridded_obs.tasmax.OBS_125deg.daily.1991.nc', key='maurer_bcca_1991') - test_data.update(['nc', 'maurer', 'bccr'], 'Prcp', 'bccr_bcm2_0.1.sresa1b.monthly.Prcp.1950.nc', key='maurer_bccr_1950') + test_data.update(['nc', 'maurer', '2010'], 'pr', + ['nldas_met_update.obs.daily.pr.1990.nc', 'nldas_met_update.obs.daily.pr.1991.nc'], + key='maurer_2010_pr') + test_data.update(['nc', 'maurer', '2010'], 'tas', + ['nldas_met_update.obs.daily.tas.1990.nc', 'nldas_met_update.obs.daily.tas.1991.nc'], + key='maurer_2010_tas') + test_data.update(['nc', 'maurer', '2010'], 'tasmax', + ['nldas_met_update.obs.daily.tasmax.1990.nc', 'nldas_met_update.obs.daily.tasmax.1991.nc'], + key='maurer_2010_tasmax') + test_data.update(['nc', 'maurer', '2010'], 'tasmin', + ['nldas_met_update.obs.daily.tasmin.1990.nc', 'nldas_met_update.obs.daily.tasmin.1991.nc'], + key='maurer_2010_tasmin') + test_data.update(['nc', 'maurer', '2010-concatenated'], 'tasmax', 'Maurer02new_OBS_tasmax_daily.1971-2000.nc', + key='maurer_2010_concatenated_tasmax') + test_data.update(['nc', 'maurer', '2010-concatenated'], 'tasmin', 'Maurer02new_OBS_tasmin_daily.1971-2000.nc', + key='maurer_2010_concatenated_tasmin') + test_data.update(['nc', 'maurer', 
'2010-concatenated'], 'tas', 'Maurer02new_OBS_tas_daily.1971-2000.nc', + key='maurer_2010_concatenated_tas') + test_data.update(['nc', 'maurer', '2010-concatenated'], 'pr', 'Maurer02new_OBS_pr_daily.1971-2000.nc', + key='maurer_2010_concatenated_pr') + test_data.update(['nc', 'maurer', 'bcca'], 'tasmax', 'gridded_obs.tasmax.OBS_125deg.daily.1991.nc', + key='maurer_bcca_1991') + test_data.update(['nc', 'maurer', 'bccr'], 'Prcp', 'bccr_bcm2_0.1.sresa1b.monthly.Prcp.1950.nc', + key='maurer_bccr_1950') test_data.update(['nc', 'misc', 'month_in_time_units'], 'clt', 'clt.nc', key='clt_month_units') - test_data.update(['nc', 'misc', 'rotated_pole'], 'pr', 'pr_EUR-11_CNRM-CERFACS-CNRM-CM5_historical_r1i1p1_CLMcom-CCLM4-8-17_v1_mon_198101-199012.nc', key='rotated_pole_cnrm_cerfacs') - test_data.update(['nc', 'misc', 'rotated_pole'], 'tas', 'tas_EUR-44_CCCma-CanESM2_rcp85_r1i1p1_SMHI-RCA4_v1_sem_209012-210011.nc', key='rotated_pole_cccma') - test_data.update(['nc', 'misc', 'rotated_pole'], 'tas', 'tas_EUR-44_ICHEC-EC-EARTH_historical_r12i1p1_SMHI-RCA4_v1_day_19710101-19751231.nc', key='rotated_pole_ichec') - test_data.update(['nc', 'misc', 'subset_test'], 'Prcp', 'sresa2.ncar_pcm1.3.monthly.Prcp.RAW.1950-2099.nc', key='subset_test_Prcp') + test_data.update(['nc', 'misc', 'rotated_pole'], 'pr', + 'pr_EUR-11_CNRM-CERFACS-CNRM-CM5_historical_r1i1p1_CLMcom-CCLM4-8-17_v1_mon_198101-199012.nc', + key='rotated_pole_cnrm_cerfacs') + test_data.update(['nc', 'misc', 'rotated_pole'], 'tas', + 'tas_EUR-44_CCCma-CanESM2_rcp85_r1i1p1_SMHI-RCA4_v1_sem_209012-210011.nc', + key='rotated_pole_cccma') + test_data.update(['nc', 'misc', 'rotated_pole'], 'tas', + 'tas_EUR-44_ICHEC-EC-EARTH_historical_r12i1p1_SMHI-RCA4_v1_day_19710101-19751231.nc', + key='rotated_pole_ichec') + test_data.update(['nc', 'misc', 'subset_test'], 'Prcp', 'sresa2.ncar_pcm1.3.monthly.Prcp.RAW.1950-2099.nc', + key='subset_test_Prcp') test_data.update(['nc', 'misc', 'subset_test'], 'Tavg', 'Tavg_bccr_bcm2_0.1.sresa2.nc', 
key='subset_test_Tavg') - test_data.update(['nc', 'misc', 'subset_test'], 'Tavg', 'sresa2.bccr_bcm2_0.1.monthly.Tavg.RAW.1950-2099.nc', key='subset_test_Tavg_sresa2') + test_data.update(['nc', 'misc', 'subset_test'], 'Tavg', 'sresa2.bccr_bcm2_0.1.monthly.Tavg.RAW.1950-2099.nc', + key='subset_test_Tavg_sresa2') test_data.update(['nc', 'misc', 'subset_test'], 'slp', 'slp.1955.nc', key='subset_test_slp') test_data.update(['nc', 'narccap'], 'pr', 'pr_CRCM_ccsm_1981010103.nc', key='narccap_crcm') test_data.update(['nc', 'narccap'], 'pr', 'pr_CRCM_ccsm_1981010103.nc', key='narccap_polar_stereographic') @@ -455,17 +483,29 @@ def get_tst_data(): test_data.update(['nc', 'narccap'], 'pr', 'pr_RCM3_gfdl_1981010103.nc', key='narccap_rcm3') test_data.update(['nc', 'narccap'], 'pr', 'pr_WRFG_ccsm_1986010103.nc', key='narccap_lambert_conformal') test_data.update(['nc', 'narccap'], 'pr', 'pr_WRFG_ccsm_1986010103.nc', key='narccap_wrfg') - test_data.update(['nc', 'narccap'], 'pr', ['pr_WRFG_ncep_1981010103.nc', 'pr_WRFG_ncep_1986010103.nc'], key='narccap_pr_wrfg_ncep') + test_data.update(['nc', 'narccap'], 'pr', ['pr_WRFG_ncep_1981010103.nc', 'pr_WRFG_ncep_1986010103.nc'], + key='narccap_pr_wrfg_ncep') test_data.update(['nc', 'narccap'], 'tas', 'tas_HRM3_gfdl_1981010103.nc', key='narccap_rotated_pole') test_data.update(['nc', 'narccap'], 'tas', 'tas_RCM3_gfdl_1981010103.nc', key='narccap_tas_rcm3_gfdl') - test_data.update(['nc', 'QED-2013'], 'climatology_TNn_monthly_max', 'climatology_TNn_monthly_max.nc', key='qed_2013_TNn_monthly_max') - test_data.update(['nc', 'QED-2013'], 'climatology_TNn_annual_min', 'climatology_TNn_annual_min.nc', key='qed_2013_TNn_annual_min') - test_data.update(['nc', 'QED-2013'], 'climatology_TasMin_seasonal_max_of_seasonal_means', 'climatology_TasMin_seasonal_max_of_seasonal_means.nc', key='qed_2013_TasMin_seasonal_max_of_seasonal_means') - test_data.update(['nc', 'QED-2013'], 'climatology_Tas_annual_max_of_annual_means', 
'climatology_Tas_annual_max_of_annual_means.nc', key='qed_2013_climatology_Tas_annual_max_of_annual_means') - test_data.update(['nc', 'QED-2013', 'multifile'], 'txxmmedm', 'maurer02v2_median_txxmmedm_january_1971-2000.nc', key='qed_2013_maurer02v2_median_txxmmedm_january_1971-2000') - test_data.update(['nc', 'QED-2013', 'multifile'], 'txxmmedm', 'maurer02v2_median_txxmmedm_february_1971-2000.nc', key='qed_2013_maurer02v2_median_txxmmedm_february_1971-2000') - test_data.update(['nc', 'QED-2013', 'multifile'], 'txxmmedm', 'maurer02v2_median_txxmmedm_march_1971-2000.nc', key='qed_2013_maurer02v2_median_txxmmedm_march_1971-2000') - test_data.update(['nc', 'snippets'], 'dtr', 'snippet_Maurer02new_OBS_dtr_daily.1971-2000.nc', key='snippet_maurer_dtr') + test_data.update(['nc', 'QED-2013'], 'climatology_TNn_monthly_max', 'climatology_TNn_monthly_max.nc', + key='qed_2013_TNn_monthly_max') + test_data.update(['nc', 'QED-2013'], 'climatology_TNn_annual_min', 'climatology_TNn_annual_min.nc', + key='qed_2013_TNn_annual_min') + test_data.update(['nc', 'QED-2013'], 'climatology_TasMin_seasonal_max_of_seasonal_means', + 'climatology_TasMin_seasonal_max_of_seasonal_means.nc', + key='qed_2013_TasMin_seasonal_max_of_seasonal_means') + test_data.update(['nc', 'QED-2013'], 'climatology_Tas_annual_max_of_annual_means', + 'climatology_Tas_annual_max_of_annual_means.nc', + key='qed_2013_climatology_Tas_annual_max_of_annual_means') + test_data.update(['nc', 'QED-2013', 'multifile'], 'txxmmedm', 'maurer02v2_median_txxmmedm_january_1971-2000.nc', + key='qed_2013_maurer02v2_median_txxmmedm_january_1971-2000') + test_data.update(['nc', 'QED-2013', 'multifile'], 'txxmmedm', + 'maurer02v2_median_txxmmedm_february_1971-2000.nc', + key='qed_2013_maurer02v2_median_txxmmedm_february_1971-2000') + test_data.update(['nc', 'QED-2013', 'multifile'], 'txxmmedm', 'maurer02v2_median_txxmmedm_march_1971-2000.nc', + key='qed_2013_maurer02v2_median_txxmmedm_march_1971-2000') + test_data.update(['nc', 
'snippets'], 'dtr', 'snippet_Maurer02new_OBS_dtr_daily.1971-2000.nc', + key='snippet_maurer_dtr') test_data.update(['nc', 'snippets'], 'bias', 'seasonalbias.nc', key='snippet_seasonalbias') # test_data.update(['shp', 'state_boundaries'], None, 'state_boundaries.shp', key='state_boundaries') @@ -474,6 +514,7 @@ def get_tst_data(): def inspect(self, uri, variable=None): from ocgis.util.inspect import Inspect + print Inspect(uri, variable=None) def iter_product_keywords(self, keywords, as_namedtuple=True): @@ -512,7 +553,6 @@ def tearDown(self): class TestData(OrderedDict): - @property def size(self): """ diff --git a/src/ocgis/test/test_misc/test_conversion.py b/src/ocgis/test/test_misc/test_conversion.py index 29e73d04d..cc6a97084 100644 --- a/src/ocgis/test/test_misc/test_conversion.py +++ b/src/ocgis/test/test_misc/test_conversion.py @@ -1,4 +1,3 @@ -import unittest import netCDF4 as nc import os @@ -9,10 +8,9 @@ class Test(TestBase): - def test_nc_projection_writing(self): rd = self.test_data.get_rd('daymet_tmax') - ops = ocgis.OcgOperations(dataset=rd,snippet=True,output_format='nc') + ops = ocgis.OcgOperations(dataset=rd, snippet=True, output_format='nc') ret = ops.execute() ds = nc.Dataset(ret) self.assertTrue('lambert_conformal_conic' in ds.variables) @@ -41,36 +39,31 @@ def test_csv_shp_custom_headers(self): line = f.readline() fheaders = [h.strip() for h in line.split(',')] self.assertEqual(fheaders, [h.upper() for h in headers]) - + def test_shp_custom_headers(self): rd1 = self.test_data.get_rd('cancm4_tasmax_2011') rd2 = self.test_data.get_rd('maurer_bccr_1950') - headers = ['did','ugid','gid','alias','value','time'] - ops = ocgis.OcgOperations(dataset=[rd1,rd2],snippet=True,output_format='shp', - geom='state_boundaries',agg_selection=True, - select_ugid=[32],headers=headers) + headers = ['did', 'ugid', 'gid', 'alias', 'value', 'time'] + ops = ocgis.OcgOperations(dataset=[rd1, rd2], snippet=True, output_format='shp', geom='state_boundaries', + 
agg_selection=True, select_ugid=[32], headers=headers) ret = ops.execute() - + with fiona.open(ret) as f: - self.assertEqual(f.meta['schema']['properties'].keys(),[h.upper() for h in headers]) - + self.assertEqual(f.meta['schema']['properties'].keys(), [h.upper() for h in headers]) + def test_meta(self): rd = self.test_data.get_rd('cancm4_tasmax_2011') - ops = ocgis.OcgOperations(dataset=rd,snippet=True,output_format='meta', - geom='state_boundaries',agg_selection=True) + ops = ocgis.OcgOperations(dataset=rd, snippet=True, output_format='meta', geom='state_boundaries', + agg_selection=True) ret = ops.execute() - self.assertTrue(isinstance(ret,basestring)) - + self.assertTrue(isinstance(ret, basestring)) + def test_meta_with_source(self): rd = self.test_data.get_rd('cancm4_tasmax_2011') - ops = ocgis.OcgOperations(dataset=rd,snippet=True,output_format='csv', - geom='state_boundaries',agg_selection=True) + ops = ocgis.OcgOperations(dataset=rd, snippet=True, output_format='csv', geom='state_boundaries', + agg_selection=True) ret = ops.execute() - with open(os.path.join(os.path.split(ret)[0],'ocgis_output_metadata.txt')) as f: + with open(os.path.join(os.path.split(ret)[0], 'ocgis_output_metadata.txt')) as f: lines = f.readlines() - self.assertEqual(lines[3],'This is OpenClimateGIS-related metadata. Data-level metadata may be found in the file named: ocgis_output_source_metadata.txt\n') - - -if __name__ == "__main__": - #import sys;sys.argv = ['', 'Test.testName'] - unittest.main() \ No newline at end of file + msg = 'This is OpenClimateGIS-related metadata. 
Data-level metadata may be found in the file named: ocgis_output_source_metadata.txt\n' + self.assertEqual(lines[3], msg) diff --git a/src/ocgis/test/test_ocgis/test_api/test_collection.py b/src/ocgis/test/test_ocgis/test_api/test_collection.py index 800a5cd78..af21c116c 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_collection.py +++ b/src/ocgis/test/test_ocgis/test_api/test_collection.py @@ -1,20 +1,23 @@ +from collections import OrderedDict import os from copy import copy, deepcopy import numpy as np import fiona from shapely.geometry import Point, shape, MultiPoint +from shapely.geometry.base import BaseGeometry from shapely.geometry.multipolygon import MultiPolygon import datetime from ocgis.api.collection import SpatialCollection, AbstractCollection from ocgis.interface.base.crs import CoordinateReferenceSystem, Spherical from ocgis.test.base import TestBase +from ocgis.util.addict import Dict from ocgis.util.shp_cabinet import ShpCabinet from ocgis import constants from ocgis.calc.library.statistics import Mean from ocgis.interface.base.variable import Variable -from ocgis.interface.base.field import DerivedField, DerivedMultivariateField,\ +from ocgis.interface.base.field import DerivedField, DerivedMultivariateField, \ Field from ocgis.calc.library.math import Divide from ocgis.test.test_ocgis.test_interface.test_base.test_field import AbstractTestField @@ -92,12 +95,11 @@ def test_iter(self): class TestSpatialCollection(AbstractTestField): - def get_collection(self): field = self.get_field(with_value=True) sc = ShpCabinet() meta = sc.get_meta('state_boundaries') - sp = SpatialCollection(meta=meta,key='state_boundaries') + sp = SpatialCollection(meta=meta, key='state_boundaries') for row in sc.iter_geoms('state_boundaries'): sp.add_field(row['properties']['UGID'], row['geom'], field, properties=row['properties']) return sp @@ -121,10 +123,12 @@ def get_collection_for_write_ugeom(self, crs): def test_init(self): sp = self.get_collection() - 
self.assertEqual(len(sp),51) - self.assertIsInstance(sp.geoms[25],MultiPolygon) - self.assertIsInstance(sp.properties[25],dict) - self.assertEqual(sp[25]['tmax'].variables['tmax'].value.shape,(2, 31, 2, 3, 4)) + self.assertIsInstance(sp, AbstractCollection) + self.assertIsNone(sp.headers) + self.assertEqual(len(sp), 51) + self.assertIsInstance(sp.geoms[25], MultiPolygon) + self.assertIsInstance(sp.properties[25], dict) + self.assertEqual(sp[25]['tmax'].variables['tmax'].value.shape, (2, 31, 2, 3, 4)) def test_calculation_iteration(self): field = self.get_field(with_value=True, month_count=2) @@ -157,7 +161,7 @@ def test_calculation_iteration(self): sp = SpatialCollection(meta=meta, key='state_boundaries', headers=constants.HEADERS_CALC) for row in sc.iter_geoms('state_boundaries'): sp.add_field(row['properties']['UGID'], row['geom'], cfield, properties=row['properties']) - for ii, row in enumerate(sp.get_iter_dict()): + for ii, row in enumerate(sp.get_iter_dict(melted=True)): if ii == 0: self.assertEqual(row[0].bounds, (-100.5, 39.5, -99.5, 40.5)) self.assertDictEqual(row[1], {'lid': 1, 'ugid': 1, 'vid': 1, 'cid': 1, 'did': 1, 'year': 2000, @@ -204,7 +208,7 @@ def test_calculation_iteration_two_calculations(self): sp.add_field(row['properties']['UGID'], row['geom'], cfield, properties=row['properties']) cids = set() - for ii, row in enumerate(sp.get_iter_dict()): + for ii, row in enumerate(sp.get_iter_dict(melted=True)): cids.update([row[1]['cid']]) if ii == 0: self.assertEqual(row[0].bounds, (-100.5, 39.5, -99.5, 40.5)) @@ -218,15 +222,46 @@ def test_calculation_iteration_two_calculations(self): self.assertEqual(ii + 1, 2 * 2 * 2 * 3 * 4 * 51 * 4) self.assertEqual(len(cids), 4) + def test_get_iter_dict(self): + field = self.get_field(with_value=True) + new_var = deepcopy(field.variables.first()) + new_var.alias = 'hi' + field.variables.add_variable(new_var, assign_new_uid=True) + coll = field.as_spatial_collection() + rows = list(coll.get_iter_dict()) + 
self.assertEqual(len(rows[4]), 2) + self.assertIsInstance(rows[5], tuple) + self.assertEqual(len(rows), 1488) + self.assertEqual(len(list(coll.get_iter_dict(melted=True))), 1488 * 2) + + # test headers applied for non-melted iteration + keywords = dict(melted=[False, True], + use_upper_keys=['NULL', False, True]) + for k in self.iter_product_keywords(keywords): + headers = ['time', 'tmax'] + coll = field.as_spatial_collection(headers=headers) + if k.use_upper_keys is True: + headers = [xx.upper() for xx in headers] + kwargs = Dict(melted=k.melted) + if k.use_upper_keys != 'NULL': + kwargs.use_upper_keys = k.use_upper_keys + itr = coll.get_iter_dict(**kwargs) + for ii, row in enumerate(itr): + if ii == 3: + break + self.assertIsInstance(row[0], BaseGeometry) + self.assertIsInstance(row[1], OrderedDict) + self.assertEqual(row[1].keys(), headers) + def test_get_iter_melted(self): sp = self.get_collection() for row in sp.get_iter_melted(): - self.assertEqual(set(['ugid','field_alias','field','variable_alias','variable']),set(row.keys())) - self.assertIsInstance(row['ugid'],int) - self.assertIsInstance(row['field_alias'],basestring) - self.assertIsInstance(row['field'],Field) - self.assertIsInstance(row['variable_alias'],basestring) - self.assertIsInstance(row['variable'],Variable) + self.assertEqual(set(['ugid', 'field_alias', 'field', 'variable_alias', 'variable']), set(row.keys())) + self.assertIsInstance(row['ugid'], int) + self.assertIsInstance(row['field_alias'], basestring) + self.assertIsInstance(row['field'], Field) + self.assertIsInstance(row['variable_alias'], basestring) + self.assertIsInstance(row['variable'], Variable) def test_iteration_methods(self): field = self.get_field(with_value=True) @@ -238,42 +273,49 @@ def test_iteration_methods(self): sc = ShpCabinet() meta = sc.get_meta('state_boundaries') - sp = SpatialCollection(meta=meta,key='state_boundaries') + sp = SpatialCollection(meta=meta, key='state_boundaries', headers=constants.HEADERS_RAW) for 
row in sc.iter_geoms('state_boundaries'): - sp.add_field(row['properties']['UGID'],row['geom'],field,properties=row['properties']) - for ii,row in enumerate(sp.get_iter_dict()): + sp.add_field(row['properties']['UGID'], row['geom'], field, properties=row['properties']) + for ii, row in enumerate(sp.get_iter_dict(melted=True)): if ii == 1: - self.assertDictEqual(row[1],{'lid': 1, 'ugid': 1, 'vid': 1, 'alias': 'tmax', 'did': 1, 'year': 2000, 'value': 0.7203244934421581, 'month': 1, 'variable': 'tmax', 'gid': 2, 'time': datetime.datetime(2000, 1, 1, 12, 0), 'tid': 1, 'level': 50, 'day': 1}) - self.assertIsInstance(row[0],MultiPolygon) - self.assertEqual(len(row),2) - self.assertEqual(len(row[1]),len(constants.HEADERS_RAW)) + self.assertDictEqual(row[1], {'lid': 1, 'ugid': 1, 'vid': 1, 'alias': 'tmax', 'did': 1, 'year': 2000, + 'value': 0.7203244934421581, 'month': 1, 'variable': 'tmax', 'gid': 2, + 'time': datetime.datetime(2000, 1, 1, 12, 0), 'tid': 1, 'level': 50, + 'day': 1}) + self.assertIsInstance(row[0], MultiPolygon) + self.assertEqual(len(row), 2) + self.assertEqual(len(row[1]), len(constants.HEADERS_RAW)) def test_multivariate_iteration(self): - field = self.get_field(with_value=True,month_count=1) - field.variables.add_variable(Variable(value=field.variables['tmax'].value+5, - name='tmin',alias='tmin')) + field = self.get_field(with_value=True, month_count=1) + field.variables.add_variable(Variable(value=field.variables['tmax'].value + 5, + name='tmin', alias='tmin')) field.temporal.name_uid = 'tid' field.level.name_uid = 'lid' field.spatial.geom.name_uid = 'gid' - div = Divide(field=field,parms={'arr1':'tmin','arr2':'tmax'},alias='some_division', + div = Divide(field=field, parms={'arr1': 'tmin', 'arr2': 'tmax'}, alias='some_division', dtype=np.float64) ret = div.execute() - cfield = DerivedMultivariateField(variables=ret,realization=field.realization,temporal=field.temporal,level=field.level, - spatial=field.spatial,meta=field.meta,uid=field.uid) + cfield 
= DerivedMultivariateField(variables=ret, realization=field.realization, temporal=field.temporal, + level=field.level, + spatial=field.spatial, meta=field.meta, uid=field.uid) cfield.spatial.name_uid = 'gid' sc = ShpCabinet() meta = sc.get_meta('state_boundaries') - sp = SpatialCollection(meta=meta,key='state_boundaries',headers=constants.HEADERS_MULTI) + sp = SpatialCollection(meta=meta, key='state_boundaries', headers=constants.HEADERS_MULTI) for row in sc.iter_geoms('state_boundaries'): - sp.add_field(row['properties']['UGID'],row['geom'],cfield,properties=row['properties']) + sp.add_field(row['properties']['UGID'], row['geom'], cfield, properties=row['properties']) - for ii,row in enumerate(sp.get_iter_dict()): + for ii, row in enumerate(sp.get_iter_dict(melted=True)): if ii == 0: - self.assertDictEqual(row[1],{'lid': 1, 'ugid': 1, 'cid': 1, 'did': None, 'year': 2000, 'time': datetime.datetime(2000, 1, 1, 12, 0), 'calc_alias': 'some_division', 'value': 12.989774984574424, 'month': 1, 'gid': 1, 'calc_key': 'divide', 'tid': 1, 'level': 50, 'day': 1}) - self.assertEqual(ii+1,2*31*2*3*4*51) + self.assertDictEqual(row[1], {'lid': 1, 'ugid': 1, 'cid': 1, 'did': None, 'year': 2000, + 'time': datetime.datetime(2000, 1, 1, 12, 0), + 'calc_alias': 'some_division', 'value': 12.989774984574424, 'month': 1, + 'gid': 1, 'calc_key': 'divide', 'tid': 1, 'level': 50, 'day': 1}) + self.assertEqual(ii + 1, 2 * 31 * 2 * 3 * 4 * 51) def test_write_ugeom(self): keywords = dict(crs=[None, Spherical()], diff --git a/src/ocgis/test/test_ocgis/test_api/test_interpreter.py b/src/ocgis/test/test_ocgis/test_api/test_interpreter.py index 1a38b6df2..1f8b2b851 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_interpreter.py +++ b/src/ocgis/test/test_ocgis/test_api/test_interpreter.py @@ -1,25 +1,46 @@ import os + +from ocgis import constants +from ocgis.api.interpreter import OcgInterpreter from ocgis import OcgOperations +from ocgis.api.subset import SubsetOperation +from 
ocgis.conv.fiona_ import ShpConverter +from ocgis.conv.numpy_ import NumpyConverter from ocgis.exc import ExtentError from ocgis.test.base import TestBase from ocgis.util.itester import itr_products_keywords class TestOcgInterpreter(TestBase): - def test_execute_directory(self): """Test that the output directory is removed appropriately following an operations failure.""" kwds = dict(add_auxiliary_files=[True, False]) rd = self.test_data.get_rd('cancm4_tas') - ## this geometry is outside the domain and will result in an exception + # this geometry is outside the domain and will result in an exception geom = [1000, 1000, 1100, 1100] for k in itr_products_keywords(kwds, as_namedtuple=True): ops = OcgOperations(dataset=rd, output_format='csv', add_auxiliary_files=k.add_auxiliary_files, geom=geom) try: - ret = ops.execute() + ops.execute() except ExtentError: contents = os.listdir(self.current_dir_output) self.assertEqual(len(contents), 0) + + def test_get_converter(self): + rd = self.test_data.get_rd('cancm4_tas') + ops = OcgOperations(dataset=rd) + outdir = self.current_dir_output + prefix = 'foo' + interp = OcgInterpreter(ops) + so = SubsetOperation(ops) + ret = interp._get_converter_(outdir, prefix, so) + self.assertIsInstance(ret, NumpyConverter) + + ops = OcgOperations(dataset=rd, melted=True, output_format=constants.OUTPUT_FORMAT_SHAPEFILE) + interp = OcgInterpreter(ops) + ret = interp._get_converter_(outdir, prefix, so) + self.assertIsInstance(ret, ShpConverter) + self.assertTrue(ret.melted) diff --git a/src/ocgis/test/test_ocgis/test_api/test_operations.py b/src/ocgis/test/test_ocgis/test_api/test_operations.py index 4132b5a61..963341b62 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_operations.py +++ b/src/ocgis/test/test_ocgis/test_api/test_operations.py @@ -6,6 +6,7 @@ import ESMF +from ocgis import env from datetime import datetime as dt import datetime from ocgis.api.request.base import RequestDataset @@ -22,7 +23,6 @@ class 
TestOcgOperations(TestBase): - def setUp(self): TestBase.setUp(self) @@ -44,6 +44,12 @@ def test_init(self): self.assertEqual(ops.regrid_destination, None) self.assertDictEqual(ops.regrid_options, RegridOptions.default) + self.assertFalse(ops.melted) + env.MELTED = True + ops = OcgOperations(dataset=self.datasets) + self.assertEqual(ops.melted, env.MELTED) + self.assertTrue(ops.melted) + def test_str(self): rd = self.test_data.get_rd('cancm4_tas') ops = OcgOperations(dataset=rd) @@ -55,7 +61,15 @@ def test_get_base_request_size(self): rd = self.test_data.get_rd('cancm4_tas') ops = OcgOperations(dataset=rd) size = ops.get_base_request_size() - self.assertEqual(size,{'variables': {'tas': {'level': {'kb': 0.0, 'shape': None, 'dtype': None}, 'temporal': {'kb': 28.515625, 'shape': (3650,), 'dtype': dtype('float64')}, 'value': {'kb': 116800.0, 'shape': (1, 3650, 1, 64, 128), 'dtype': dtype('float32')}, 'realization': {'kb': 0.0, 'shape': None, 'dtype': None}, 'col': {'kb': 1.0, 'shape': (128,), 'dtype': dtype('float64')}, 'row': {'kb': 0.5, 'shape': (64,), 'dtype': dtype('float64')}}}, 'total': 116830.015625}) + self.assertEqual(size, {'variables': {'tas': {'level': {'kb': 0.0, 'shape': None, 'dtype': None}, + 'temporal': {'kb': 28.515625, 'shape': (3650,), + 'dtype': dtype('float64')}, + 'value': {'kb': 116800.0, 'shape': (1, 3650, 1, 64, 128), + 'dtype': dtype('float32')}, + 'realization': {'kb': 0.0, 'shape': None, 'dtype': None}, + 'col': {'kb': 1.0, 'shape': (128,), 'dtype': dtype('float64')}, + 'row': {'kb': 0.5, 'shape': (64,), 'dtype': dtype('float64')}}}, + 'total': 116830.015625}) with self.assertRaises(DefinitionValidationError): OcgOperations(dataset=rd, regrid_destination=rd).get_base_request_size() @@ -63,18 +77,52 @@ def test_get_base_request_size(self): def test_get_base_request_size_multifile(self): rd1 = self.test_data.get_rd('cancm4_tas') rd2 = self.test_data.get_rd('narccap_pr_wrfg_ncep') - rds = [rd1,rd2] + rds = [rd1, rd2] ops = 
OcgOperations(dataset=rds) size = ops.get_base_request_size() - self.assertEqual({'variables': {'pr': {'level': {'kb': 0.0, 'shape': None, 'dtype': None}, 'temporal': {'kb': 228.25, 'shape': (29216,), 'dtype': dtype('float64')}, 'value': {'kb': 1666909.75, 'shape': (1, 29216, 1, 109, 134), 'dtype': dtype('float32')}, 'realization': {'kb': 0.0, 'shape': None, 'dtype': None}, 'col': {'kb': 1.046875, 'shape': (134,), 'dtype': dtype('float64')}, 'row': {'kb': 0.8515625, 'shape': (109,), 'dtype': dtype('float64')}}, 'tas': {'level': {'kb': 0.0, 'shape': None, 'dtype': None}, 'temporal': {'kb': 28.515625, 'shape': (3650,), 'dtype': dtype('float64')}, 'value': {'kb': 116800.0, 'shape': (1, 3650, 1, 64, 128), 'dtype': dtype('float32')}, 'realization': {'kb': 0.0, 'shape': None, 'dtype': None}, 'col': {'kb': 1.0, 'shape': (128,), 'dtype': dtype('float64')}, 'row': {'kb': 0.5, 'shape': (64,), 'dtype': dtype('float64')}}}, 'total': 1783969.9140625},size) + self.assertEqual({'variables': {'pr': {'level': {'kb': 0.0, 'shape': None, 'dtype': None}, + 'temporal': {'kb': 228.25, 'shape': (29216,), 'dtype': dtype('float64')}, + 'value': {'kb': 1666909.75, 'shape': (1, 29216, 1, 109, 134), + 'dtype': dtype('float32')}, + 'realization': {'kb': 0.0, 'shape': None, 'dtype': None}, + 'col': {'kb': 1.046875, 'shape': (134,), 'dtype': dtype('float64')}, + 'row': {'kb': 0.8515625, 'shape': (109,), 'dtype': dtype('float64')}}, + 'tas': {'level': {'kb': 0.0, 'shape': None, 'dtype': None}, + 'temporal': {'kb': 28.515625, 'shape': (3650,), + 'dtype': dtype('float64')}, + 'value': {'kb': 116800.0, 'shape': (1, 3650, 1, 64, 128), + 'dtype': dtype('float32')}, + 'realization': {'kb': 0.0, 'shape': None, 'dtype': None}, + 'col': {'kb': 1.0, 'shape': (128,), 'dtype': dtype('float64')}, + 'row': {'kb': 0.5, 'shape': (64,), 'dtype': dtype('float64')}}}, + 'total': 1783969.9140625}, size) def test_get_base_request_size_multifile_with_geom(self): rd1 = self.test_data.get_rd('cancm4_tas') rd2 = 
self.test_data.get_rd('narccap_pr_wrfg_ncep') - rds = [rd1,rd2] - ops = OcgOperations(dataset=rds,geom='state_boundaries',select_ugid=[23]) + rds = [rd1, rd2] + ops = OcgOperations(dataset=rds, geom='state_boundaries', select_ugid=[23]) size = ops.get_base_request_size() - self.assertEqual(size,{'variables': {'pr': {'level': {'kb': 0.0, 'shape': None, 'dtype': None}, 'temporal': {'kb': 228.25, 'shape': (29216,), 'dtype': dtype('float64')}, 'value': {'kb': 21341.375, 'shape': (1, 29216, 1, 17, 11), 'dtype': dtype('float32')}, 'realization': {'kb': 0.0, 'shape': None, 'dtype': None}, 'col': {'kb': 0.0859375, 'shape': (11,), 'dtype': dtype('float64')}, 'row': {'kb': 0.1328125, 'shape': (17,), 'dtype': dtype('float64')}}, 'tas': {'level': {'kb': 0.0, 'shape': None, 'dtype': None}, 'temporal': {'kb': 28.515625, 'shape': (3650,), 'dtype': dtype('float64')}, 'value': {'kb': 171.09375, 'shape': (1, 3650, 1, 4, 3), 'dtype': dtype('float32')}, 'realization': {'kb': 0.0, 'shape': None, 'dtype': None}, 'col': {'kb': 0.0234375, 'shape': (3,), 'dtype': dtype('float64')}, 'row': {'kb': 0.03125, 'shape': (4,), 'dtype': dtype('float64')}}}, 'total': 21769.5078125}) + self.assertEqual(size, {'variables': {'pr': {'level': {'kb': 0.0, 'shape': None, 'dtype': None}, + 'temporal': {'kb': 228.25, 'shape': (29216,), + 'dtype': dtype('float64')}, + 'value': {'kb': 21341.375, 'shape': (1, 29216, 1, 17, 11), + 'dtype': dtype('float32')}, + 'realization': {'kb': 0.0, 'shape': None, 'dtype': None}, + 'col': {'kb': 0.0859375, 'shape': (11,), + 'dtype': dtype('float64')}, + 'row': {'kb': 0.1328125, 'shape': (17,), + 'dtype': dtype('float64')}}, + 'tas': {'level': {'kb': 0.0, 'shape': None, 'dtype': None}, + 'temporal': {'kb': 28.515625, 'shape': (3650,), + 'dtype': dtype('float64')}, + 'value': {'kb': 171.09375, 'shape': (1, 3650, 1, 4, 3), + 'dtype': dtype('float32')}, + 'realization': {'kb': 0.0, 'shape': None, 'dtype': None}, + 'col': {'kb': 0.0234375, 'shape': (3,), + 'dtype': 
dtype('float64')}, + 'row': {'kb': 0.03125, 'shape': (4,), + 'dtype': dtype('float64')}}}, 'total': 21769.5078125}) def test_get_base_request_size_test_data(self): for key in self.test_data.keys(): @@ -91,16 +139,25 @@ def test_get_base_request_size_test_data(self): def test_get_base_request_size_with_calculation(self): rd = self.test_data.get_rd('cancm4_tas') - ops = OcgOperations(dataset=rd,calc=[{'func':'mean','name':'mean'}], + ops = OcgOperations(dataset=rd, calc=[{'func': 'mean', 'name': 'mean'}], calc_grouping=['month']) size = ops.get_base_request_size() - self.assertEqual(size['variables']['tas']['temporal']['shape'][0],3650) + self.assertEqual(size['variables']['tas']['temporal']['shape'][0], 3650) def test_get_base_request_size_with_geom(self): rd = self.test_data.get_rd('cancm4_tas') - ops = OcgOperations(dataset=rd,geom='state_boundaries',select_ugid=[23]) + ops = OcgOperations(dataset=rd, geom='state_boundaries', select_ugid=[23]) size = ops.get_base_request_size() - self.assertEqual(size,{'variables': {'tas': {'level': {'kb': 0.0, 'shape': None, 'dtype': None}, 'temporal': {'kb': 28.515625, 'shape': (3650,), 'dtype': dtype('float64')}, 'value': {'kb': 171.09375, 'shape': (1, 3650, 1, 4, 3), 'dtype': dtype('float32')}, 'realization': {'kb': 0.0, 'shape': None, 'dtype': None}, 'col': {'kb': 0.0234375, 'shape': (3,), 'dtype': dtype('float64')}, 'row': {'kb': 0.03125, 'shape': (4,), 'dtype': dtype('float64')}}}, 'total': 199.6640625}) + self.assertEqual(size, {'variables': {'tas': {'level': {'kb': 0.0, 'shape': None, 'dtype': None}, + 'temporal': {'kb': 28.515625, 'shape': (3650,), + 'dtype': dtype('float64')}, + 'value': {'kb': 171.09375, 'shape': (1, 3650, 1, 4, 3), + 'dtype': dtype('float32')}, + 'realization': {'kb': 0.0, 'shape': None, 'dtype': None}, + 'col': {'kb': 0.0234375, 'shape': (3,), + 'dtype': dtype('float64')}, + 'row': {'kb': 0.03125, 'shape': (4,), + 'dtype': dtype('float64')}}}, 'total': 199.6640625}) def test_get_meta(self): ops = 
OcgOperations(dataset=self.datasets) @@ -108,24 +165,24 @@ def test_get_meta(self): self.assertTrue(len(meta) > 100) self.assertTrue('\n' in meta) - ops = OcgOperations(dataset=self.datasets,calc=[{'func':'mean','name':'my_mean'}], + ops = OcgOperations(dataset=self.datasets, calc=[{'func': 'mean', 'name': 'my_mean'}], calc_grouping=['month']) meta = ops.get_meta() self.assertTrue(len(meta) > 100) self.assertTrue('\n' in meta) def test_keyword_abstraction(self): - K = definition.Abstraction + kk = definition.Abstraction - k = K() - self.assertEqual(k.value,None) - self.assertEqual(str(k),'abstraction="None"') + k = kk() + self.assertEqual(k.value, None) + self.assertEqual(str(k), 'abstraction="None"') - k = K('point') - self.assertEqual(k.value,'point') + k = kk('point') + self.assertEqual(k.value, 'point') with self.assertRaises(DefinitionValidationError): - K('pt') + kk('pt') def test_keyword_aggregate(self): rd = self.test_data.get_rd('rotated_pole_cnrm_cerfacs') @@ -136,7 +193,7 @@ def test_keyword_aggregate(self): slc = [None, [0, 10], None, [0, 10], [0, 10]] for output_format in ['numpy', 'csv']: - ops = OcgOperations(dataset=rd, output_format=output_format, aggregate=True, slice=slc) + ops = OcgOperations(dataset=rd, output_format=output_format, aggregate=True, slice=slc, melted=True) # spatial operations on rotated pole require the output crs be wgs84 self.assertEqual(ops.output_crs, CFWGS84()) ret = ops.execute() @@ -153,11 +210,7 @@ def test_keyword_aggregate(self): self.assertAlmostEqual(float(rows[4]['VALUE']), manual_mean) def test_keyword_calc_grouping_none_date_parts(self): - _cg = [ - None, - ['day','month'], - 'day' - ] + _cg = [None, ['day', 'month'], 'day'] for cg in _cg: if cg is not None: @@ -166,19 +219,19 @@ def test_keyword_calc_grouping_none_date_parts(self): eq = cg obj = definition.CalcGrouping(cg) try: - self.assertEqual(obj.value,eq) + self.assertEqual(obj.value, eq) except AssertionError: - self.assertEqual(obj.value,('day',)) + 
self.assertEqual(obj.value, ('day',)) - ## only month, year, and day combinations are currently supported + # # only month, year, and day combinations are currently supported rd = self.test_data.get_rd('cancm4_tas') - calcs = [None,[{'func':'mean','name':'mean'}]] - acceptable = ['day','month','year'] + calcs = [None, [{'func': 'mean', 'name': 'mean'}]] + acceptable = ['day', 'month', 'year'] for calc in calcs: - for length in [1,2,3,4,5]: - for combo in itertools.combinations(['day','month','year','hour','minute'],length): + for length in [1, 2, 3, 4, 5]: + for combo in itertools.combinations(['day', 'month', 'year', 'hour', 'minute'], length): try: - ops = OcgOperations(dataset=rd,calc=calc,calc_grouping=combo) + OcgOperations(dataset=rd, calc=calc, calc_grouping=combo) except DefinitionValidationError: reraise = True for c in combo: @@ -199,17 +252,26 @@ def test_keyword_calc_grouping_seasonal_with_unique(self): rd2 = ocgis.RequestDataset(uri=ret, variable='mean') field = rd2.get() self.assertNotEqual(field.temporal.bounds, None) - self.assertEqual(field.temporal.bounds_datetime.tolist(), [[datetime.datetime(2001, 12, 1, 12, 0), datetime.datetime(2002, 2, 28, 12, 0)], [datetime.datetime(2002, 12, 1, 12, 0), datetime.datetime(2003, 2, 28, 12, 0)], [datetime.datetime(2003, 12, 1, 12, 0), datetime.datetime(2004, 2, 28, 12, 0)], [datetime.datetime(2004, 12, 1, 12, 0), datetime.datetime(2005, 2, 28, 12, 0)], [datetime.datetime(2005, 12, 1, 12, 0), datetime.datetime(2006, 2, 28, 12, 0)], [datetime.datetime(2006, 12, 1, 12, 0), datetime.datetime(2007, 2, 28, 12, 0)], [datetime.datetime(2007, 12, 1, 12, 0), datetime.datetime(2008, 2, 28, 12, 0)], [datetime.datetime(2008, 12, 1, 12, 0), datetime.datetime(2009, 2, 28, 12, 0)], [datetime.datetime(2009, 12, 1, 12, 0), datetime.datetime(2010, 2, 28, 12, 0)]]) - self.assertEqual(field.shape,(1, 9, 1, 3, 3)) + self.assertEqual(field.temporal.bounds_datetime.tolist(), + [[datetime.datetime(2001, 12, 1, 12, 0), 
datetime.datetime(2002, 2, 28, 12, 0)], + [datetime.datetime(2002, 12, 1, 12, 0), datetime.datetime(2003, 2, 28, 12, 0)], + [datetime.datetime(2003, 12, 1, 12, 0), datetime.datetime(2004, 2, 28, 12, 0)], + [datetime.datetime(2004, 12, 1, 12, 0), datetime.datetime(2005, 2, 28, 12, 0)], + [datetime.datetime(2005, 12, 1, 12, 0), datetime.datetime(2006, 2, 28, 12, 0)], + [datetime.datetime(2006, 12, 1, 12, 0), datetime.datetime(2007, 2, 28, 12, 0)], + [datetime.datetime(2007, 12, 1, 12, 0), datetime.datetime(2008, 2, 28, 12, 0)], + [datetime.datetime(2008, 12, 1, 12, 0), datetime.datetime(2009, 2, 28, 12, 0)], + [datetime.datetime(2009, 12, 1, 12, 0), datetime.datetime(2010, 2, 28, 12, 0)]]) + self.assertEqual(field.shape, (1, 9, 1, 3, 3)) def test_keyword_calc_grouping_seasonal_with_year(self): - calc_grouping = [[1,2,3],'year'] - calc = [{'func':'mean','name':'mean'}] + calc_grouping = [[1, 2, 3], 'year'] + calc = [{'func': 'mean', 'name': 'mean'}] rd = self.test_data.get_rd('cancm4_tas') - ops = OcgOperations(dataset=rd,calc=calc,calc_grouping=calc_grouping, - geom='state_boundaries',select_ugid=[25]) + ops = OcgOperations(dataset=rd, calc=calc, calc_grouping=calc_grouping, + geom='state_boundaries', select_ugid=[25]) ret = ops.execute() - self.assertEqual(ret[25]['tas'].shape,(1,10,1,5,4)) + self.assertEqual(ret[25]['tas'].shape, (1, 10, 1, 5, 4)) def test_keyword_calc_grouping_with_string_expression(self): """Test that no calculation grouping is allowed with a string expression.""" @@ -223,22 +285,25 @@ def test_keyword_calc_grouping_with_string_expression(self): def test_keyword_callback(self): app = [] - def callback(perc,msg,app=app): - app.append((perc,msg)) - # print(perc,msg) + + def callback(perc, msg, append=app): + append.append((perc, msg)) + + # print(perc,msg) rd = self.test_data.get_rd('cancm4_tas') rd2 = self.test_data.get_rd('cancm4_tasmax_2011') - dataset = [rd,rd2] + dataset = [rd, rd2] for ds in dataset: - ds.time_region = {'month':[6]} - ops = 
ocgis.OcgOperations(dataset=dataset,geom='state_boundaries',select_ugid=[16,17], - calc_grouping=['month'],calc=[{'func':'mean','name':'mean'},{'func':'median','name':'median'}], + ds.time_region = {'month': [6]} + ops = ocgis.OcgOperations(dataset=dataset, geom='state_boundaries', select_ugid=[16, 17], + calc_grouping=['month'], + calc=[{'func': 'mean', 'name': 'mean'}, {'func': 'median', 'name': 'median'}], callback=callback) ops.execute() self.assertTrue(len(app) > 15) - self.assertEqual(app[-1][0],100.0) + self.assertEqual(app[-1][0], 100.0) def test_keyword_conform_units_to(self): rd1 = self.test_data.get_rd('cancm4_tas') @@ -248,7 +313,7 @@ def test_keyword_conform_units_to(self): for ds in ops.dataset.itervalues(): self.assertEqual(ds.conform_units_to, 'celsius') - ## test that the conform argument is updated + # # test that the conform argument is updated ops.conform_units_to = 'fahrenheit' for ds in ops.dataset.itervalues(): self.assertEqual(ds.conform_units_to, 'fahrenheit') @@ -266,76 +331,75 @@ def test_keyword_dataset_esmf(self): output_format = OutputFormat.iter_possible() for kk in output_format: ops = OcgOperations(dataset=efield, output_format=kk, prefix=kk) - ret = ops.execute() + ops.execute() # self.inspect(ret) raise - import ipdb;ipdb.set_trace() def test_keyword_geom(self): - geom = make_poly((37.762,38.222),(-102.281,-101.754)) + geom = make_poly((37.762, 38.222), (-102.281, -101.754)) g = definition.Geom(geom) - self.assertEqual(type(g.value),tuple) - self.assertEqual(g.value[0].single.geom.bounds,(-102.281, 37.762, -101.754, 38.222)) + self.assertEqual(type(g.value), tuple) + self.assertEqual(g.value[0].single.geom.bounds, (-102.281, 37.762, -101.754, 38.222)) g = definition.Geom(None) - self.assertEqual(g.value,None) - self.assertEqual(str(g),'geom=None') + self.assertEqual(g.value, None) + self.assertEqual(str(g), 'geom=None') g = definition.Geom('mi_watersheds') - self.assertEqual(str(g),'geom="mi_watersheds"') + 
self.assertEqual(str(g), 'geom="mi_watersheds"') geoms = ShpCabinetIterator('mi_watersheds') g = definition.Geom(geoms) - self.assertEqual(len(list(g.value)),60) - self.assertEqual(g._shp_key,'mi_watersheds') + self.assertEqual(len(list(g.value)), 60) + self.assertEqual(g._shp_key, 'mi_watersheds') def test_keyword_geom_having_changed_select_ugid(self): ops = OcgOperations(dataset=self.test_data.get_rd('cancm4_tas'), geom='state_boundaries') - self.assertEqual(len(list(ops.geom)),51) - ops.select_ugid = [16,17] - self.assertEqual(len(list(ops.geom)),2) + self.assertEqual(len(list(ops.geom)), 51) + ops.select_ugid = [16, 17] + self.assertEqual(len(list(ops.geom)), 2) def test_keyword_geom_string(self): - ops = OcgOperations(dataset=self.datasets,geom='state_boundaries') - self.assertEqual(len(list(ops.geom)),51) + ops = OcgOperations(dataset=self.datasets, geom='state_boundaries') + self.assertEqual(len(list(ops.geom)), 51) ops.geom = None - self.assertEqual(ops.geom,None) + self.assertEqual(ops.geom, None) ops.geom = 'mi_watersheds' - self.assertEqual(len(list(ops.geom)),60) - ops.geom = [-120,40,-110,50] - self.assertEqual(ops.geom[0].single.geom.bounds,(-120.0,40.0,-110.0,50.0)) + self.assertEqual(len(list(ops.geom)), 60) + ops.geom = [-120, 40, -110, 50] + self.assertEqual(ops.geom[0].single.geom.bounds, (-120.0, 40.0, -110.0, 50.0)) def test_keyword_headers(self): - headers = ['did','value'] - for htype in [list,tuple]: + headers = ['did', 'value'] + for htype in [list, tuple]: hvalue = htype(headers) hh = definition.Headers(hvalue) - self.assertEqual(hh.value,tuple(constants.HEADERS_REQUIRED+['value'])) + self.assertEqual(hh.value, tuple(constants.HEADERS_REQUIRED + ['value'])) headers = ['foo'] with self.assertRaises(DefinitionValidationError): - hh = definition.Headers(headers) + definition.Headers(headers) headers = [] hh = definition.Headers(headers) - self.assertEqual(hh.value,tuple(constants.HEADERS_REQUIRED)) + self.assertEqual(hh.value, 
tuple(constants.HEADERS_REQUIRED)) def test_keyword_level_range(self): rd = self.test_data.get_rd('cancm4_tas') rd2 = self.test_data.get_rd('cancm4_tas') rd.alias = 'foo' - lr = [1,2] - ops = ocgis.OcgOperations(dataset=[rd,rd2],level_range=lr) - for r in [rd,rd2]: - self.assertEqual(r.level_range,None) + lr = [1, 2] + ops = ocgis.OcgOperations(dataset=[rd, rd2], level_range=lr) + for r in [rd, rd2]: + self.assertEqual(r.level_range, None) for r in ops.dataset.itervalues(): - self.assertEqual(r.level_range,tuple(lr)) + self.assertEqual(r.level_range, tuple(lr)) - lr = [2,3] + lr = [2, 3] ops.level_range = lr for r in ops.dataset.itervalues(): - self.assertEqual(r.level_range,tuple(lr)) + self.assertEqual(r.level_range, tuple(lr)) def test_keyword_prefix(self): # the meta output format should not create an output directory @@ -348,7 +412,7 @@ def test_keyword_prefix(self): def test_keyword_output_format_esmpy(self): """Test with the ESMPy output format.""" - #todo: test spatial subsetting + # todo: test spatial subsetting #todo: test calculations slc = [None, None, None, [0, 10], [0, 10]] kwds = dict(as_field=[False, True], @@ -417,7 +481,7 @@ def test_keyword_regrid_destination_to_shp_vector_wrap(self): for vector_wrap in [True, False]: ops = OcgOperations(dataset=rd1, regrid_destination=rd2, output_format='shp', snippet=True, geom='state_boundaries', select_ugid=[25], vector_wrap=vector_wrap, - prefix=str(vector_wrap)) + prefix=str(vector_wrap), melted=True) ret = ops.execute() sci = ShpCabinetIterator(path=ret) geoms = [element['geom'] for element in sci] @@ -428,13 +492,13 @@ def test_keyword_regrid_destination_to_shp_vector_wrap(self): self.assertGreater(geom.bounds[0], 0) def test_keyword_spatial_operation(self): - values = (None,'clip','intersects') - ast = ('intersects','clip','intersects') + values = (None, 'clip', 'intersects') + ast = ('intersects', 'clip', 'intersects') klass = definition.SpatialOperation - for v,a in zip(values,ast): + for v, a in 
zip(values, ast): obj = klass(v) - self.assertEqual(obj.value,a) + self.assertEqual(obj.value, a) def test_keyword_spatial_operations_bounding_box(self): geom = [-80, 22.5, 50, 70.0] @@ -450,42 +514,42 @@ def test_keyword_time_range(self): rd = self.test_data.get_rd('cancm4_tas') rd2 = self.test_data.get_rd('cancm4_tas') rd.alias = 'foo' - tr = [datetime.datetime(2002,1,1),datetime.datetime(2002,3,1)] - ops = ocgis.OcgOperations(dataset=[rd,rd2],time_range=tr) - for r in [rd,rd2]: - self.assertEqual(r.time_range,None) + tr = [datetime.datetime(2002, 1, 1), datetime.datetime(2002, 3, 1)] + ops = ocgis.OcgOperations(dataset=[rd, rd2], time_range=tr) + for r in [rd, rd2]: + self.assertEqual(r.time_range, None) for r in ops.dataset.itervalues(): - self.assertEqual(r.time_range,tuple(tr)) + self.assertEqual(r.time_range, tuple(tr)) - tr = [datetime.datetime(2002,1,1),datetime.datetime(2003,3,1)] + tr = [datetime.datetime(2002, 1, 1), datetime.datetime(2003, 3, 1)] ops.time_range = tr for r in ops.dataset.itervalues(): - self.assertEqual(r.time_range,tuple(tr)) + self.assertEqual(r.time_range, tuple(tr)) def test_keyword_time_range_and_time_region_null_parms(self): ops = OcgOperations(dataset=self.datasets_no_range) - self.assertEqual(ops.geom,None) - self.assertEqual(len(ops.dataset),3) + self.assertEqual(ops.geom, None) + self.assertEqual(len(ops.dataset), 3) for ds in ops.dataset.itervalues(): - self.assertEqual(ds.time_range,None) - self.assertEqual(ds.level_range,None) + self.assertEqual(ds.time_range, None) + self.assertEqual(ds.level_range, None) ops.__repr__() def test_keyword_time_region(self): rd = self.test_data.get_rd('cancm4_tas') rd2 = self.test_data.get_rd('cancm4_tas') rd.alias = 'foo' - tr = {'month':[6],'year':[2005]} - ops = ocgis.OcgOperations(dataset=[rd,rd2],time_region=tr) - for r in [rd,rd2]: - self.assertEqual(r.time_region,None) + tr = {'month': [6], 'year': [2005]} + ops = ocgis.OcgOperations(dataset=[rd, rd2], time_region=tr) + for r in [rd, 
rd2]: + self.assertEqual(r.time_region, None) for r in ops.dataset.itervalues(): - self.assertEqual(r.time_region,tr) + self.assertEqual(r.time_region, tr) - tr = {'month':[6],'year':[2006]} + tr = {'month': [6], 'year': [2006]} ops.time_region = tr for r in ops.dataset.itervalues(): - self.assertEqual(r.time_region,tr) + self.assertEqual(r.time_region, tr) def test_validate(self): # snippets should be allowed for field objects diff --git a/src/ocgis/test/test_ocgis/test_api/test_parms/test_definition.py b/src/ocgis/test/test_ocgis/test_api/test_parms/test_definition.py index 7c37d3f97..66fccab57 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_parms/test_definition.py +++ b/src/ocgis/test/test_ocgis/test_api/test_parms/test_definition.py @@ -4,6 +4,8 @@ from cfunits import Units +from ocgis.conv.numpy_ import NumpyConverter +from ocgis.api.parms.base import BooleanParameter from ocgis import env from ocgis.api.parms.definition import * from ocgis.interface.base.dimension.spatial import SpatialDimension, SpatialGeometryPointDimension @@ -16,78 +18,77 @@ class Test(TestBase): - def test_callback(self): c = Callback() - self.assertEqual(c.value,None) + self.assertEqual(c.value, None) with self.assertRaises(DefinitionValidationError): Callback('foo') - def callback(percent,message): + def callback(percent, message): pass c = Callback(callback) - self.assertEqual(callback,c.value) + self.assertEqual(callback, c.value) def test_optimizations(self): o = Optimizations() - self.assertEqual(o.value,None) + self.assertEqual(o.value, None) with self.assertRaises(DefinitionValidationError): Optimizations({}) with self.assertRaises(DefinitionValidationError): - Optimizations({'foo':'foo'}) - o = Optimizations({'tgds':{'tas':'TemporalGroupDimension'}}) - self.assertEqual(o.value,{'tgds':{'tas':'TemporalGroupDimension'}}) + Optimizations({'foo': 'foo'}) + o = Optimizations({'tgds': {'tas': 'TemporalGroupDimension'}}) + self.assertEqual(o.value, {'tgds': {'tas': 
'TemporalGroupDimension'}}) def test_optimizations_deepcopy(self): - ## we should not deepcopy optimizations - arr = np.array([1,2,3,4]) - value = {'tgds':{'tas':arr}} + # # we should not deepcopy optimizations + arr = np.array([1, 2, 3, 4]) + value = {'tgds': {'tas': arr}} o = Optimizations(value) - self.assertTrue(np.may_share_memory(o.value['tgds']['tas'],arr)) + self.assertTrue(np.may_share_memory(o.value['tgds']['tas'], arr)) def test_add_auxiliary_files(self): - for val in [True,False]: + for val in [True, False]: p = AddAuxiliaryFiles(val) - self.assertEqual(p.value,val) + self.assertEqual(p.value, val) p = AddAuxiliaryFiles() - self.assertEqual(p.value,True) + self.assertEqual(p.value, True) def test_dir_output(self): - ## raise an exception if the directory does not exist + # # raise an exception if the directory does not exist do = '/does/not/exist' with self.assertRaises(DefinitionValidationError): DirOutput(do) ## make sure directory name does not change case do = 'Some' - new_dir = os.path.join(tempfile.gettempdir(),do) + new_dir = os.path.join(tempfile.gettempdir(), do) os.mkdir(new_dir) try: dd = DirOutput(new_dir) - self.assertEqual(new_dir,dd.value) + self.assertEqual(new_dir, dd.value) finally: os.rmdir(new_dir) def test_slice(self): slc = Slice(None) - self.assertEqual(slc.value,None) + self.assertEqual(slc.value, None) - slc = Slice([None,0,0,0,0]) - self.assertEqual(slc.value,(slice(None),slice(0,1),slice(0, 1),slice(0, 1),slice(0, 1))) + slc = Slice([None, 0, 0, 0, 0]) + self.assertEqual(slc.value, (slice(None), slice(0, 1), slice(0, 1), slice(0, 1), slice(0, 1))) - slc = Slice([None,0,None,[0,1],[0,100]]) - self.assertEqual(slc.value,(slice(None),slice(0,1),slice(None),slice(0,1),slice(0,100))) + slc = Slice([None, 0, None, [0, 1], [0, 100]]) + self.assertEqual(slc.value, (slice(None), slice(0, 1), slice(None), slice(0, 1), slice(0, 100))) with self.assertRaises(DefinitionValidationError): slc.value = 4 with 
self.assertRaises(DefinitionValidationError): - slc.value = [None,None] + slc.value = [None, None] def test_snippet(self): self.assertFalse(Snippet().value) - for ii in ['t','TRUE','tRue',1,'1',' 1 ']: + for ii in ['t', 'TRUE', 'tRue', 1, '1', ' 1 ']: self.assertTrue(Snippet(ii).value) s = Snippet() s.value = False @@ -101,42 +102,42 @@ def test_snippet(self): def test_spatial_operation(self): so = SpatialOperation() - self.assertEqual(so.value,'intersects') + self.assertEqual(so.value, 'intersects') with self.assertRaises(DefinitionValidationError): so.value = 'clips' so.value = 'clip' def test_output_format(self): so = OutputFormat('csv') - self.assertEqual(so.value,'csv') + self.assertEqual(so.value, 'csv') so.value = 'NUMPY' - self.assertEqual(so.value,'numpy') + self.assertEqual(so.value, 'numpy') def test_select_ugid(self): so = SelectUgid() - self.assertEqual(so.value,None) + self.assertEqual(so.value, None) with self.assertRaises(DefinitionValidationError): so.value = 98.5 so.value = 'none' - self.assertEqual(so.value,None) + self.assertEqual(so.value, None) with self.assertRaises(DefinitionValidationError): so.value = 1 so = SelectUgid('10') - self.assertEqual(so.value,(10,)) + self.assertEqual(so.value, (10,)) with self.assertRaises(DefinitionValidationError): so.value = ('1|1|2') with self.assertRaises(DefinitionValidationError): so.value = '22.5' so = SelectUgid('22|23|24') - self.assertEqual(so.value,(22,23,24)) + self.assertEqual(so.value, (22, 23, 24)) with self.assertRaises(DefinitionValidationError): so.value = '22|23.5|24' def test_prefix(self): pp = Prefix() - self.assertEqual(pp.value,'ocgis_output') + self.assertEqual(pp.value, 'ocgis_output') pp.value = ' Old__man ' - self.assertEqual(pp.value,'Old__man') + self.assertEqual(pp.value, 'Old__man') class TestAbstraction(TestBase): @@ -146,11 +147,11 @@ def test_init_(self): K = Abstraction k = K() - self.assertEqual(k.value,None) - self.assertEqual(str(k),'abstraction="None"') + 
self.assertEqual(k.value, None) + self.assertEqual(str(k), 'abstraction="None"') k = K('point') - self.assertEqual(k.value,'point') + self.assertEqual(k.value, 'point') with self.assertRaises(DefinitionValidationError): K('pt') @@ -163,13 +164,13 @@ def test_init(self): A = Aggregate a = A(True) - self.assertEqual(a.value,True) + self.assertEqual(a.value, True) a = A(False) - self.assertEqual(a.value,False) + self.assertEqual(a.value, False) a = A('True') - self.assertEqual(a.value,True) + self.assertEqual(a.value, True) class TestCalc(TestBase): @@ -180,7 +181,8 @@ def test_meta_attrs(self): kwds = dict( meta_attr=[None, {}, {'something_else': 'is_here with us'}], - calc=[{'func': 'mean', 'name': 'my_mean'}, 'foo=tas+4', {'func': 'foo=tas+4', 'meta_attrs': {'something': 'special'}}], + calc=[{'func': 'mean', 'name': 'my_mean'}, 'foo=tas+4', + {'func': 'foo=tas+4', 'meta_attrs': {'something': 'special'}}], add_meta_attrs_if_none=[True, False] ) for k in itr_products_keywords(kwds, as_namedtuple=True): @@ -200,33 +202,35 @@ def test_meta_attrs(self): self.assertEqual(set(calc.value[0].keys()), set(['ref', 'meta_attrs', 'name', 'func', 'kwds'])) def test_init(self): - calc = [{'func':'mean','name':'my_mean'}] + calc = [{'func': 'mean', 'name': 'my_mean'}] cc = Calc(calc) - eq = [{'ref':Mean,'name':'my_mean','func':'mean','kwds':{}, 'meta_attrs': None}] + eq = [{'ref': Mean, 'name': 'my_mean', 'func': 'mean', 'kwds': {}, 'meta_attrs': None}] - self.assertEqual(cc.value,eq) + self.assertEqual(cc.value, eq) cc.value = 'mean~my_mean' - self.assertEqual(cc.value,eq) + self.assertEqual(cc.value, eq) cc.value = 'mean~my_mean|max~my_max|between~between5_10!lower~5!upper~10' with self.assertRaises(NotImplementedError): - self.assertEqual(cc.get_url_string(),'mean~my_mean|max~my_max|between~between5_10!lower~5.0!upper~10.0') + self.assertEqual(cc.get_url_string(), 'mean~my_mean|max~my_max|between~between5_10!lower~5.0!upper~10.0') def test_bad_key(self): - calc = 
[{'func':'bad_mean','name':'my_mean'}] + calc = [{'func': 'bad_mean', 'name': 'my_mean'}] with self.assertRaises(DefinitionValidationError): Calc(calc) def test_str(self): calc = [{'func': 'mean', 'name': 'my_mean'}] cc = Calc(calc) - self.assertEqual(str(cc), "calc=[{'meta_attrs': None, 'name': 'my_mean', 'func': 'mean', 'kwds': OrderedDict()}]") + self.assertEqual(str(cc), + "calc=[{'meta_attrs': None, 'name': 'my_mean', 'func': 'mean', 'kwds': OrderedDict()}]") cc = Calc(None) self.assertEqual(str(cc), 'calc=None') calc = [{'func': 'mean', 'name': 'my_mean', 'kwds': {'a': np.zeros(1000)}}] cc = Calc(calc) - self.assertEqual(str(cc), "calc=[{'meta_attrs': None, 'name': 'my_mean', 'func': 'mean', 'kwds': OrderedDict([('a', )])}]") + self.assertEqual(str(cc), + "calc=[{'meta_attrs': None, 'name': 'my_mean', 'func': 'mean', 'kwds': OrderedDict([('a', )])}]") def test_get_meta(self): for poss in Calc._possible: @@ -235,27 +239,30 @@ def test_get_meta(self): def test_eval_underscores_in_variable_names(self): value = 'tas_4=tasmin_2+tasmin' - self.assertEqual(Calc(value).value,[{'func':value,'ref':MultivariateEvalFunction, 'meta_attrs': None, 'name': None, 'kwds': OrderedDict()}]) + self.assertEqual(Calc(value).value, [ + {'func': value, 'ref': MultivariateEvalFunction, 'meta_attrs': None, 'name': None, 'kwds': OrderedDict()}]) def test_eval_string(self): value = [ - 'es=tas+4', - ['es=tas+4'] - ] - actual = [{'func':'es=tas+4','ref':EvalFunction, 'meta_attrs': None, 'name': None, 'kwds': OrderedDict()}] + 'es=tas+4', + ['es=tas+4'] + ] + actual = [{'func': 'es=tas+4', 'ref': EvalFunction, 'meta_attrs': None, 'name': None, 'kwds': OrderedDict()}] for v in value: cc = Calc(v) - self.assertEqual(cc.value,actual) + self.assertEqual(cc.value, actual) def test_eval_string_multivariate(self): value = [ - 'es=exp(tas)+tasmax+log(4)', - ['es=exp(tas)+tasmax+log(4)'] - ] - actual = [{'func':'es=exp(tas)+tasmax+log(4)','ref':MultivariateEvalFunction, 'meta_attrs': None, 'name': 
None, 'kwds': OrderedDict()}] + 'es=exp(tas)+tasmax+log(4)', + ['es=exp(tas)+tasmax+log(4)'] + ] + actual = [ + {'func': 'es=exp(tas)+tasmax+log(4)', 'ref': MultivariateEvalFunction, 'meta_attrs': None, 'name': None, + 'kwds': OrderedDict()}] for v in value: cc = Calc(v) - self.assertEqual(cc.value,actual) + self.assertEqual(cc.value, actual) def test_eval_string_number_after_variable_alias(self): value = 'tas2=tas1+tas2' @@ -269,19 +276,19 @@ def test_eval_string_malformed(self): Calc('estas+4') def test(self): - calc = [{'func':'mean','name':'my_mean'}] + calc = [{'func': 'mean', 'name': 'my_mean'}] cc = Calc(calc) - eq = [{'ref':Mean,'name':'my_mean','func':'mean','kwds':{},'meta_attrs': None}] + eq = [{'ref': Mean, 'name': 'my_mean', 'func': 'mean', 'kwds': {}, 'meta_attrs': None}] - self.assertEqual(cc.value,eq) + self.assertEqual(cc.value, eq) cc.value = 'mean~my_mean' - self.assertEqual(cc.value,eq) + self.assertEqual(cc.value, eq) cc.value = 'mean~my_mean|max~my_max|between~between5_10!lower~5!upper~10' with self.assertRaises(NotImplementedError): - self.assertEqual(cc.get_url_string(),'mean~my_mean|max~my_max|between~between5_10!lower~5.0!upper~10.0') + self.assertEqual(cc.get_url_string(), 'mean~my_mean|max~my_max|between~between5_10!lower~5.0!upper~10.0') def test_bad_key(self): - calc = [{'func':'bad_mean','name':'my_mean'}] + calc = [{'func': 'bad_mean', 'name': 'my_mean'}] with self.assertRaises(DefinitionValidationError): Calc(calc) @@ -307,7 +314,7 @@ def test_seasonal_aggregation(self): with self.assertRaises(DefinitionValidationError): CalcGrouping([[1, 2, 3], [4, 4, 6]]) - ## element groups must have an empty intersection + # # element groups must have an empty intersection with self.assertRaises(DefinitionValidationError): CalcGrouping([[1, 2, 3], [1, 4, 6]]) @@ -366,6 +373,27 @@ def test_init(self): self.assertEqual(hh.value, tuple(constants.HEADERS_REQUIRED)) +class TestMelted(TestBase): + create_dir = False + + def test_init(self): + rd = 
self.test_data.get_rd('cancm4_tas') + dataset = Dataset(rd) + mm = Melted(dataset=dataset, output_format=constants.OUTPUT_FORMAT_NUMPY) + self.assertIsInstance(mm, BooleanParameter) + self.assertFalse(mm.value) + + # test with multiple request dataset + ocgis_lh.configure(to_stream=True) + rd2 = self.test_data.get_rd('cancm4_tasmax_2011') + dataset = Dataset([rd, rd2]) + of = OutputFormat(constants.OUTPUT_FORMAT_SHAPEFILE) + mm = Melted(dataset=dataset, output_format=of) + self.assertTrue(mm.value) + self.assertTrue(len(ocgis_lh.duplicates) > 0) + ocgis_lh.shutdown() + + class TestDataset(TestBase): create_dir = False @@ -446,7 +474,7 @@ def test_init(self): @attr('esmpy7') def test_init_esmf(self): - #todo: what to do about time values, units, etc. + # todo: what to do about time values, units, etc. efield = self.get_esmf_field() dd = Dataset(efield) self.assertIsInstance(dd.value, RequestDatasetCollection) @@ -470,15 +498,15 @@ def test_get_meta(self): def test_unfiled(self): env.DIR_DATA = ocgis.env.DIR_TEST_DATA reference_rd = self.test_data.get_rd('cancm4_tas') - rd = RequestDataset(reference_rd.uri,reference_rd.variable) + rd = RequestDataset(reference_rd.uri, reference_rd.variable) ds = Dataset(rd) - self.assertEqual(ds.value,RequestDatasetCollection([rd])) + self.assertEqual(ds.value, RequestDatasetCollection([rd])) - dsa = {'uri':reference_rd.uri,'variable':reference_rd.variable} + dsa = {'uri': reference_rd.uri, 'variable': reference_rd.variable} Dataset(dsa) reference_rd2 = self.test_data.get_rd('narccap_crcm') - dsb = [dsa,{'uri':reference_rd2.uri,'variable':reference_rd2.variable,'alias':'knight'}] + dsb = [dsa, {'uri': reference_rd2.uri, 'variable': reference_rd2.variable, 'alias': 'knight'}] Dataset(dsb) @@ -486,28 +514,28 @@ class TestGeom(TestBase): create_dir = False def test_init(self): - geom = make_poly((37.762,38.222),(-102.281,-101.754)) + geom = make_poly((37.762, 38.222), (-102.281, -101.754)) g = Geom(geom) self.assertEqual(type(g.value), 
tuple) self.assertIsInstance(g.value[0], SpatialDimension) g.value = None - self.assertEqual(None,g.value) + self.assertEqual(None, g.value) g = Geom(None) - self.assertEqual(g.value,None) - self.assertEqual(str(g),'geom=None') + self.assertEqual(g.value, None) + self.assertEqual(str(g), 'geom=None') g = Geom('-120|40|-110|50') - self.assertEqual(g.value[0].geom.polygon.value[0, 0].bounds,(-120.0, 40.0, -110.0, 50.0)) - self.assertEqual(str(g),'geom=-120.0|40.0|-110.0|50.0') + self.assertEqual(g.value[0].geom.polygon.value[0, 0].bounds, (-120.0, 40.0, -110.0, 50.0)) + self.assertEqual(str(g), 'geom=-120.0|40.0|-110.0|50.0') g = Geom('state_boundaries') - self.assertEqual(str(g),'geom="state_boundaries"') + self.assertEqual(str(g), 'geom="state_boundaries"') geoms = list(ShpCabinetIterator('state_boundaries')) g = Geom('state_boundaries') - self.assertEqual(len(list(g.value)),len(geoms)) + self.assertEqual(len(list(g.value)), len(geoms)) sci = ShpCabinetIterator(key='state_boundaries') self.assertFalse(sci.as_spatial_dimension) @@ -517,16 +545,16 @@ def test_init(self): self.assertIsInstance(element, SpatialDimension) self.assertGreater(ii, 10) - su = SelectUgid([1,2,3]) - g = Geom('state_boundaries',select_ugid=su) - self.assertEqual(len(list(g.value)),3) + su = SelectUgid([1, 2, 3]) + g = Geom('state_boundaries', select_ugid=su) + self.assertEqual(len(list(g.value)), 3) - geoms = [{'geom':geom,'properties':{'UGID':1}},{'geom':geom,'properties':{'UGID':2}}] + geoms = [{'geom': geom, 'properties': {'UGID': 1}}, {'geom': geom, 'properties': {'UGID': 2}}] g = Geom(geoms) - bbox = [-120,40,-110,50] + bbox = [-120, 40, -110, 50] g = Geom(bbox) - self.assertEqual(g.value[0].geom.polygon.value[0, 0].bounds,tuple(map(float,bbox))) + self.assertEqual(g.value[0].geom.polygon.value[0, 0].bounds, tuple(map(float, bbox))) def test_spatial_dimension(self): """Test using a SpatialDimension as input value.""" @@ -540,27 +568,27 @@ def test_spatial_dimension(self): 
self.assertEqual(sdim.shape, (1, 1)) def test_using_shp_path(self): - ## pass a path to a shapefile as opposed to a key + # # pass a path to a shapefile as opposed to a key path = ShpCabinet().get_shp_path('state_boundaries') ocgis.env.DIR_SHPCABINET = None ## make sure there is path associated with the ShpCabinet with self.assertRaises(ValueError): ShpCabinet().keys() g = Geom(path) - self.assertEqual(g._shp_key,path) - self.assertEqual(len(list(g.value)),51) + self.assertEqual(g._shp_key, path) + self.assertEqual(len(list(g.value)), 51) def test_with_changing_select_ugid(self): - select_ugid = [16,17] - g = Geom('state_boundaries',select_ugid=select_ugid) - self.assertEqual(len(list(g.value)),2) + select_ugid = [16, 17] + g = Geom('state_boundaries', select_ugid=select_ugid) + self.assertEqual(len(list(g.value)), 2) select_ugid.append(22) - self.assertEqual(len(list(g.value)),3) + self.assertEqual(len(list(g.value)), 3) g = Geom('state_boundaries') - self.assertEqual(len(list(g.value)),51) - g.select_ugid = [16,17] - self.assertEqual(len(list(g.value)),2) + self.assertEqual(len(list(g.value)), 51) + g.select_ugid = [16, 17] + self.assertEqual(len(list(g.value)), 2) @staticmethod def get_geometry_dictionaries(): @@ -603,21 +631,21 @@ def test_constructor(self): LevelRange() def test_normal_int(self): - lr = LevelRange([5,10]) - self.assertEqual(lr.value,(5,10)) + lr = LevelRange([5, 10]) + self.assertEqual(lr.value, (5, 10)) def test_normal_float(self): - value = [4.5,6.5] + value = [4.5, 6.5] lr = LevelRange(value) - self.assertEqual(tuple(value),lr.value) + self.assertEqual(tuple(value), lr.value) def test_bad_length(self): with self.assertRaises(DefinitionValidationError): - LevelRange([5,6,7,8]) + LevelRange([5, 6, 7, 8]) def test_bad_ordination(self): with self.assertRaises(DefinitionValidationError): - LevelRange([11,10]) + LevelRange([11, 10]) class TestOutputFormat(TestBase): @@ -632,12 +660,16 @@ def test_init_esmpy(self): oo = 
OutputFormat(constants.OUTPUT_FORMAT_ESMPY_GRID) self.assertEqual(oo.value, constants.OUTPUT_FORMAT_ESMPY_GRID) + def test_get_converter_class(self): + of = OutputFormat(constants.OUTPUT_FORMAT_NUMPY) + self.assertEqual(of.get_converter_class(), NumpyConverter) + def test_valid(self): - self.assertAsSetEqual(OutputFormat.valid, ['csv', 'csv-shp', 'geojson', 'meta', 'nc', 'numpy', 'shp', constants.OUTPUT_FORMAT_NETCDF_UGRID_2D_FLEXIBLE_MESH]) + self.assertAsSetEqual(OutputFormat.valid, ['csv', 'csv-shp', 'geojson', 'meta', 'nc', 'numpy', 'shp', + constants.OUTPUT_FORMAT_NETCDF_UGRID_2D_FLEXIBLE_MESH]) class TestRegridDestination(TestBase): - @property def possible_datasets(self): # one dataset to be regridded @@ -714,7 +746,6 @@ def test_init(self): class TestRegridOptions(TestBase): - def test_init(self): ro = RegridOptions() self.assertDictEqual(ro.value, RegridOptions.default) @@ -747,7 +778,6 @@ def test_get_meta(self): class TestSpatialOperation(TestBase): - def test_init(self): values = (None, 'clip', 'intersects') ast = ('intersects', 'clip', 'intersects') @@ -765,17 +795,17 @@ def test_constructor(self): TimeRange() def test_range(self): - dt = [datetime.datetime(2000,1,1),datetime.datetime(2001,1,1)] + dt = [datetime.datetime(2000, 1, 1), datetime.datetime(2001, 1, 1)] tr = TimeRange(dt) - self.assertEqual(tr.value,tuple(dt)) + self.assertEqual(tr.value, tuple(dt)) def test_bad_ordination(self): - dt = [datetime.datetime(2000,1,1),datetime.datetime(1999,1,1)] + dt = [datetime.datetime(2000, 1, 1), datetime.datetime(1999, 1, 1)] with self.assertRaises(DefinitionValidationError): TimeRange(dt) def test_incorrect_number_of_values(self): - dt = [datetime.datetime(2000,1,1),datetime.datetime(1999,1,1),datetime.datetime(1999,1,1)] + dt = [datetime.datetime(2000, 1, 1), datetime.datetime(1999, 1, 1), datetime.datetime(1999, 1, 1)] with self.assertRaises(DefinitionValidationError): TimeRange(dt) @@ -787,31 +817,31 @@ def test_constructor(self): TimeRegion() def 
test_normal(self): - value = {'month':[6,7,8],'year':[4,5,6]} + value = {'month': [6, 7, 8], 'year': [4, 5, 6]} tr = TimeRegion(value) - self.assertEqual(value,tr.value) + self.assertEqual(value, tr.value) def test_month_only(self): - value = {'month':[6]} + value = {'month': [6]} tr = TimeRegion(value) - self.assertEqual(tr.value,{'month':[6],'year':None}) + self.assertEqual(tr.value, {'month': [6], 'year': None}) def test_year_only(self): - value = {'year':[6]} + value = {'year': [6]} tr = TimeRegion(value) - self.assertEqual(tr.value,{'month':None,'year':[6]}) + self.assertEqual(tr.value, {'month': None, 'year': [6]}) def test_both_none(self): - value = {'year':None,'month':None} + value = {'year': None, 'month': None} tr = TimeRegion(value) - self.assertEqual(tr.value,None) + self.assertEqual(tr.value, None) def test_bad_keys(self): - value = {'mnth':[4]} + value = {'mnth': [4]} with self.assertRaises(DefinitionValidationError): TimeRegion(value) if __name__ == "__main__": - #import sys;sys.argv = ['', 'Test.testName'] + # import sys;sys.argv = ['', 'Test.testName'] unittest.main() diff --git a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_vector.py b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_vector.py index 24c0f1bd9..247610274 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_vector.py +++ b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_vector.py @@ -1,3 +1,4 @@ +from ocgis import constants from ocgis import RequestDataset, ShpCabinet, ShpCabinetIterator from ocgis.api.request.driver.base import AbstractDriver from ocgis.api.request.driver.vector import DriverVector @@ -6,7 +7,6 @@ class TestDriverVector(TestBase): - def get_driver(self, **kwargs): rd = self.get_rd(**kwargs) driver = DriverVector(rd) @@ -18,8 +18,11 @@ def get_rd(self, variable=None): return rd def test_init(self): - self.assertEqual(DriverVector.__bases__, (AbstractDriver,)) - 
self.assertIsInstance(self.get_driver(), DriverVector) + self.assertIsInstances(self.get_driver(), (DriverVector, AbstractDriver)) + + actual = [constants.OUTPUT_FORMAT_NUMPY, constants.OUTPUT_FORMAT_NETCDF_UGRID_2D_FLEXIBLE_MESH, + constants.OUTPUT_FORMAT_SHAPEFILE] + self.assertAsSetEqual(actual, DriverVector.output_formats) def test_close(self): driver = self.get_driver() diff --git a/src/ocgis/test/test_ocgis/test_api/test_subset.py b/src/ocgis/test/test_ocgis/test_api/test_subset.py index 5c32c7bec..1505cc83b 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_subset.py +++ b/src/ocgis/test/test_ocgis/test_api/test_subset.py @@ -7,6 +7,7 @@ import ESMF +from ocgis.calc.library.index.duration import FrequencyDuration from ocgis.api.parms.definition import OutputFormat from ocgis.interface.base.field import Field from ocgis.api.operations import OcgOperations @@ -24,7 +25,6 @@ class TestSubsetOperation(TestBase): - def get_operations(self): rd = self.test_data.get_rd('cancm4_tas') slc = [None, [0, 100], None, [0, 10], [0, 10]] @@ -66,6 +66,39 @@ def test_process_subsettables(self): coll = res[0] self.assertIsNotNone(coll[1][field.name].spatial.grid.corners) + # test header assignment + rd = self.test_data.get_rd('cancm4_tas') + for melted in [False, True]: + ops = OcgOperations(dataset=rd, slice=[0, 0, 0, 0, 0], melted=melted) + rds = ops.dataset.values() + so = SubsetOperation(ops) + ret = so._process_subsettables_(rds) + for coll in ret: + if melted: + self.assertEqual(coll.headers, constants.HEADERS_RAW) + else: + self.assertIsNone(coll.headers) + + # test with value keys + calc = [{'func': 'freq_duration', 'name': 'freq_duration', 'kwds': {'operation': 'gt', 'threshold': 280}}] + rd = self.test_data.get_rd('cancm4_tas') + slc = [0, [0, 600], 0, [10, 20], [10, 20]] + ops = OcgOperations(dataset=rd, slice=slc, calc=calc, calc_grouping=['month', 'year']) + rds = ops.dataset.values() + so = SubsetOperation(ops) + ret = so._process_subsettables_(rds) + for coll 
in ret: + self.assertIsNone(coll.headers) + ops = OcgOperations(dataset=rd, slice=slc, calc=calc, calc_grouping=['month', 'year'], + output_format=constants.OUTPUT_FORMAT_CSV, melted=True) + rds = ops.dataset.values() + so = SubsetOperation(ops) + ret = so._process_subsettables_(rds) + for coll in ret: + self.assertTrue(len(coll.value_keys) == 2) + for key in FrequencyDuration.structure_dtype['names']: + self.assertIn(key, coll.headers) + def test_abstraction_not_available(self): """Test appropriate exception is raised when a selected abstraction is not available.""" @@ -201,7 +234,7 @@ def test_regridding_bounding_box_wrapped(self): def test_regridding_same_field(self): """Test regridding operations with same field used to regrid the source.""" - #todo: what happens with multivariate calculations + # todo: what happens with multivariate calculations #todo: test with all masked values rd_dest = self.test_data.get_rd('cancm4_tas') @@ -357,7 +390,8 @@ def test_regridding_update_crs(self): else: raise NotImplementedError - ops = ocgis.OcgOperations(dataset=rd1, regrid_destination=destination, geom=geom, select_ugid=select_ugid, snippet=True) + ops = ocgis.OcgOperations(dataset=rd1, regrid_destination=destination, geom=geom, select_ugid=select_ugid, + snippet=True) subset = SubsetOperation(ops) colls = list(subset) @@ -373,8 +407,133 @@ def test_regridding_update_crs(self): rd1 = self.test_data.get_rd('cancm4_tas') rd2 = self.test_data.get_rd('narccap_lambert_conformal') - actual = np.ma.array([[[[[0.0, 0.0, 0.0, 0.0, 289.309326171875, 288.7110290527344, 287.92108154296875, 287.1899108886719, 286.51715087890625, 285.9024658203125, 0.0, 0.0, 0.0], [0.0, 288.77825927734375, 288.62823486328125, 288.3404541015625, 287.9151611328125, 287.32000732421875, 286.633544921875, 286.0067138671875, 285.43914794921875, 284.93060302734375, 284.48077392578125, 0.0, 0.0], [288.4192199707031, 288.18804931640625, 287.8165588378906, 287.30499267578125, 286.65362548828125, 
285.86676025390625, 285.28515625, 284.7640686035156, 284.30316162109375, 283.90216064453125, 283.560791015625, 0.0, 0.0], [288.19488525390625, 287.74169921875, 287.14593505859375, 286.4078063964844, 285.52752685546875, 284.5051574707031, 283.87457275390625, 283.4606628417969, 283.1078186035156, 282.8158264160156, 282.58441162109375, 0.0, 0.0], [288.023193359375, 287.4422607421875, 286.6193542480469, 285.65179443359375, 284.5396728515625, 283.2830505371094, 282.4002685546875, 282.09503173828125, 281.8517761230469, 281.6702575683594, 281.55029296875, 0.0, 0.0], [287.8075866699219, 287.2928771972656, 286.2398986816406, 285.0399475097656, 283.6930236816406, 282.19915771484375, 280.86077880859375, 280.66571044921875, 280.5335388183594, 280.4640808105469, 280.4613952636719, 280.4708251953125, 0.0], [287.591552734375, 287.296875, 286.0108337402344, 284.5754089355469, 282.99066162109375, 281.2564392089844, 279.47003173828125, 279.34307861328125, 279.3382263183594, 279.3432922363281, 279.3581848144531, 279.3829040527344, 0.0], [287.3750305175781, 287.322265625, 285.8916931152344, 284.12139892578125, 282.3462829589844, 280.566162109375, 278.7807922363281, 278.1846618652344, 278.1950988769531, 278.2154846191406, 278.24578857421875, 278.2860107421875, 0.0], [286.864013671875, 286.48724365234375, 285.2509460449219, 283.4699401855469, 281.6840515136719, 279.8930358886719, 278.0966796875, 277.01617431640625, 277.0421142578125, 277.07806396484375, 277.1240234375, 277.1799621582031, 0.0], [286.0535583496094, 285.5471496582031, 284.6158752441406, 282.8240661621094, 281.0272521972656, 279.2252197265625, 277.4177551269531, 275.8373107910156, 275.8789367675781, 275.9306945800781, 275.9925231933594, 276.0644226074219, 0.0], [285.3349609375, 284.69732666015625, 283.9648132324219, 282.183837890625, 280.3759765625, 278.56280517578125, 276.74407958984375, 274.91961669921875, 274.7053527832031, 274.77313232421875, 274.85107421875, 274.9391784667969, 275.0374450683594], [284.7100830078125, 
283.93963623046875, 283.07275390625, 281.54925537109375, 279.730224609375, 277.90576171875, 276.07568359375, 274.2397155761719, 273.5210266113281, 273.6050720214844, 273.69940185546875, 273.9654235839844, 274.24139404296875], [284.1809387207031, 283.27606201171875, 282.2731018066406, 280.9204406738281, 279.090087890625, 277.25421142578125, 275.41259765625, 273.7033996582031, 272.687744140625, 272.9641418457031, 273.2394104003906, 273.5135498046875, 273.78662109375], [283.7496337890625, 282.70855712890625, 281.56787109375, 280.3042907714844, 278.54541015625, 276.8524169921875, 275.22515869140625, 273.6634826660156, 272.24554443359375, 272.5191955566406, 272.7915954589844, 273.0628662109375, 273.3330078125], [283.39312744140625, 282.1578369140625, 280.91937255859375, 279.67755126953125, 278.1316223144531, 276.5411071777344, 275.017333984375, 273.56024169921875, 272.16973876953125, 272.07550048828125, 272.3450622558594, 272.6134338378906, 272.8805847167969], [282.7581481933594, 281.516845703125, 280.27227783203125, 279.0242614746094, 277.64892578125, 276.16229248046875, 274.743408203125, 273.3922424316406, 272.1087646484375, 271.63311767578125, 271.8998107910156, 272.1651916503906, 272.4293518066406], [282.1268615722656, 280.87945556640625, 279.6286926269531, 278.3744201660156, 277.095703125, 275.7143249511719, 274.4017639160156, 273.157958984375, 271.98297119140625, 271.1920471191406, 271.4557800292969, 271.71820068359375, 271.97930908203125], [281.499267578125, 280.24566650390625, 278.9886779785156, 277.7280578613281, 276.4637145996094, 275.19561767578125, 273.9908142089844, 272.85589599609375, 271.7908630371094, 270.79583740234375, 271.0130615234375, 271.26873779296875, 271.4607238769531], [280.8753662109375, 279.6155090332031, 278.3522033691406, 277.085205078125, 275.81439208984375, 274.6044921875, 273.50897216796875, 272.4844055175781, 271.58203125, 270.6971130371094, 270.5581359863281, 270.7032165527344, 270.7939453125], [280.25518798828125, 278.989013671875, 
277.71929931640625, 276.4458312988281, 275.1974182128906, 274.173828125, 273.29473876953125, 272.4113464355469, 271.5235290527344, 270.63116455078125, 270.1499938964844, 270.1932678222656, 270.1813049316406], [0.0, 278.4078063964844, 277.3578186035156, 276.3003234863281, 275.2351379394531, 274.162109375, 273.2556457519531, 272.36480712890625, 271.4695129394531, 270.569580078125, 269.8001403808594, 269.7401428222656, 269.6240234375], [0.0, 278.4853820800781, 277.42474365234375, 276.3564758300781, 275.2804260253906, 274.1964416503906, 273.2213134765625, 272.3229675292969, 271.4200744628906, 270.5124816894531, 269.6001281738281, 269.3451232910156, 269.1233825683594], [0.0, 278.5711669921875, 277.49969482421875, 276.4205017089844, 275.33343505859375, 274.23834228515625, 273.1918640136719, 272.2858581542969, 271.3752746582031, 270.4599304199219, 269.53973388671875, 269.00958251953125, 268.6806335449219], [0.0, 0.0, 277.5827941894531, 276.4925537109375, 275.3943176269531, 274.2879638671875, 273.1732482910156, 272.25360107421875, 271.335205078125, 270.4120178222656, 269.48388671875, 268.73492431640625, 0.0]]]]], - mask=[[[[[True, True, True, True, True, True, True, True, True, False, False, False, True], [True, True, True, True, True, True, True, False, False, False, False, False, True], [True, True, True, True, True, True, True, False, False, False, False, False, True], [True, True, True, True, True, True, False, False, False, False, False, False, False], [True, True, True, True, True, False, False, False, False, False, False, False, False], [True, True, True, True, False, False, False, False, False, False, False, False, True], [True, True, False, False, False, False, False, False, False, False, False, False, True], [True, True, False, False, False, False, False, False, False, False, False, True, True], [True, True, False, False, False, False, False, False, False, False, True, True, True], [True, True, False, False, False, False, False, False, False, False, True, True, 
True], [True, False, False, False, False, False, False, False, False, True, True, True, True], [True, False, False, False, False, False, False, False, True, True, True, True, True], [True, False, False, False, False, False, False, False, True, True, True, True, True], [True, False, False, False, False, False, False, True, True, True, True, True, True], [True, False, False, False, False, False, False, True, True, True, True, True, True], [False, False, False, False, False, False, True, True, True, True, True, True, True], [False, False, False, False, False, False, True, True, True, True, True, True, True], [False, False, False, False, False, False, True, True, True, True, True, True, True], [False, False, False, False, False, False, False, True, True, True, True, True, True], [False, False, False, False, False, False, False, True, True, True, True, True, True], [False, False, False, False, False, False, False, True, True, True, True, True, True], [False, False, False, False, False, False, False, False, True, True, True, True, True], [True, False, False, False, False, False, True, True, True, True, True, True, True], [True, False, False, True, True, True, True, True, True, True, True, True, True]]]]], + actual = np.ma.array([[[[[0.0, 0.0, 0.0, 0.0, 289.309326171875, 288.7110290527344, 287.92108154296875, + 287.1899108886719, 286.51715087890625, 285.9024658203125, 0.0, 0.0, 0.0], + [0.0, 288.77825927734375, 288.62823486328125, 288.3404541015625, 287.9151611328125, + 287.32000732421875, 286.633544921875, 286.0067138671875, 285.43914794921875, + 284.93060302734375, 284.48077392578125, 0.0, 0.0], + [288.4192199707031, 288.18804931640625, 287.8165588378906, 287.30499267578125, + 286.65362548828125, 285.86676025390625, 285.28515625, 284.7640686035156, + 284.30316162109375, 283.90216064453125, 283.560791015625, 0.0, 0.0], + [288.19488525390625, 287.74169921875, 287.14593505859375, 286.4078063964844, + 285.52752685546875, 284.5051574707031, 283.87457275390625, 
283.4606628417969, + 283.1078186035156, 282.8158264160156, 282.58441162109375, 0.0, 0.0], + [288.023193359375, 287.4422607421875, 286.6193542480469, 285.65179443359375, + 284.5396728515625, 283.2830505371094, 282.4002685546875, 282.09503173828125, + 281.8517761230469, 281.6702575683594, 281.55029296875, 0.0, 0.0], + [287.8075866699219, 287.2928771972656, 286.2398986816406, 285.0399475097656, + 283.6930236816406, 282.19915771484375, 280.86077880859375, 280.66571044921875, + 280.5335388183594, 280.4640808105469, 280.4613952636719, 280.4708251953125, 0.0], + [287.591552734375, 287.296875, 286.0108337402344, 284.5754089355469, + 282.99066162109375, 281.2564392089844, 279.47003173828125, 279.34307861328125, + 279.3382263183594, 279.3432922363281, 279.3581848144531, 279.3829040527344, 0.0], + [287.3750305175781, 287.322265625, 285.8916931152344, 284.12139892578125, + 282.3462829589844, 280.566162109375, 278.7807922363281, 278.1846618652344, + 278.1950988769531, 278.2154846191406, 278.24578857421875, 278.2860107421875, 0.0], + [286.864013671875, 286.48724365234375, 285.2509460449219, 283.4699401855469, + 281.6840515136719, 279.8930358886719, 278.0966796875, 277.01617431640625, + 277.0421142578125, 277.07806396484375, 277.1240234375, 277.1799621582031, 0.0], + [286.0535583496094, 285.5471496582031, 284.6158752441406, 282.8240661621094, + 281.0272521972656, 279.2252197265625, 277.4177551269531, 275.8373107910156, + 275.8789367675781, 275.9306945800781, 275.9925231933594, 276.0644226074219, 0.0], + [285.3349609375, 284.69732666015625, 283.9648132324219, 282.183837890625, + 280.3759765625, 278.56280517578125, 276.74407958984375, 274.91961669921875, + 274.7053527832031, 274.77313232421875, 274.85107421875, 274.9391784667969, + 275.0374450683594], + [284.7100830078125, 283.93963623046875, 283.07275390625, 281.54925537109375, + 279.730224609375, 277.90576171875, 276.07568359375, 274.2397155761719, + 273.5210266113281, 273.6050720214844, 273.69940185546875, 273.9654235839844, + 
274.24139404296875], + [284.1809387207031, 283.27606201171875, 282.2731018066406, 280.9204406738281, + 279.090087890625, 277.25421142578125, 275.41259765625, 273.7033996582031, + 272.687744140625, 272.9641418457031, 273.2394104003906, 273.5135498046875, + 273.78662109375], + [283.7496337890625, 282.70855712890625, 281.56787109375, 280.3042907714844, + 278.54541015625, 276.8524169921875, 275.22515869140625, 273.6634826660156, + 272.24554443359375, 272.5191955566406, 272.7915954589844, 273.0628662109375, + 273.3330078125], + [283.39312744140625, 282.1578369140625, 280.91937255859375, 279.67755126953125, + 278.1316223144531, 276.5411071777344, 275.017333984375, 273.56024169921875, + 272.16973876953125, 272.07550048828125, 272.3450622558594, 272.6134338378906, + 272.8805847167969], + [282.7581481933594, 281.516845703125, 280.27227783203125, 279.0242614746094, + 277.64892578125, 276.16229248046875, 274.743408203125, 273.3922424316406, + 272.1087646484375, 271.63311767578125, 271.8998107910156, 272.1651916503906, + 272.4293518066406], + [282.1268615722656, 280.87945556640625, 279.6286926269531, 278.3744201660156, + 277.095703125, 275.7143249511719, 274.4017639160156, 273.157958984375, + 271.98297119140625, 271.1920471191406, 271.4557800292969, 271.71820068359375, + 271.97930908203125], + [281.499267578125, 280.24566650390625, 278.9886779785156, 277.7280578613281, + 276.4637145996094, 275.19561767578125, 273.9908142089844, 272.85589599609375, + 271.7908630371094, 270.79583740234375, 271.0130615234375, 271.26873779296875, + 271.4607238769531], + [280.8753662109375, 279.6155090332031, 278.3522033691406, 277.085205078125, + 275.81439208984375, 274.6044921875, 273.50897216796875, 272.4844055175781, + 271.58203125, 270.6971130371094, 270.5581359863281, 270.7032165527344, + 270.7939453125], + [280.25518798828125, 278.989013671875, 277.71929931640625, 276.4458312988281, + 275.1974182128906, 274.173828125, 273.29473876953125, 272.4113464355469, + 271.5235290527344, 
270.63116455078125, 270.1499938964844, 270.1932678222656, + 270.1813049316406], + [0.0, 278.4078063964844, 277.3578186035156, 276.3003234863281, 275.2351379394531, + 274.162109375, 273.2556457519531, 272.36480712890625, 271.4695129394531, + 270.569580078125, 269.8001403808594, 269.7401428222656, 269.6240234375], + [0.0, 278.4853820800781, 277.42474365234375, 276.3564758300781, 275.2804260253906, + 274.1964416503906, 273.2213134765625, 272.3229675292969, 271.4200744628906, + 270.5124816894531, 269.6001281738281, 269.3451232910156, 269.1233825683594], + [0.0, 278.5711669921875, 277.49969482421875, 276.4205017089844, 275.33343505859375, + 274.23834228515625, 273.1918640136719, 272.2858581542969, 271.3752746582031, + 270.4599304199219, 269.53973388671875, 269.00958251953125, 268.6806335449219], + [0.0, 0.0, 277.5827941894531, 276.4925537109375, 275.3943176269531, 274.2879638671875, + 273.1732482910156, 272.25360107421875, 271.335205078125, 270.4120178222656, + 269.48388671875, 268.73492431640625, 0.0]]]]], + mask=[[[[[True, True, True, True, True, True, True, True, True, False, False, False, True], + [True, True, True, True, True, True, True, False, False, False, False, False, + True], + [True, True, True, True, True, True, True, False, False, False, False, False, + True], + [True, True, True, True, True, True, False, False, False, False, False, False, + False], + [True, True, True, True, True, False, False, False, False, False, False, False, + False], + [True, True, True, True, False, False, False, False, False, False, False, False, + True], + [True, True, False, False, False, False, False, False, False, False, False, False, + True], + [True, True, False, False, False, False, False, False, False, False, False, True, + True], + [True, True, False, False, False, False, False, False, False, False, True, True, + True], + [True, True, False, False, False, False, False, False, False, False, True, True, + True], + [True, False, False, False, False, False, False, False, 
False, True, True, True, + True], + [True, False, False, False, False, False, False, False, True, True, True, True, + True], + [True, False, False, False, False, False, False, False, True, True, True, True, + True], + [True, False, False, False, False, False, False, True, True, True, True, True, + True], + [True, False, False, False, False, False, False, True, True, True, True, True, + True], + [False, False, False, False, False, False, True, True, True, True, True, True, + True], + [False, False, False, False, False, False, True, True, True, True, True, True, + True], + [False, False, False, False, False, False, True, True, True, True, True, True, + True], + [False, False, False, False, False, False, False, True, True, True, True, True, + True], + [False, False, False, False, False, False, False, True, True, True, True, True, + True], + [False, False, False, False, False, False, False, True, True, True, True, True, + True], + [False, False, False, False, False, False, False, False, True, True, True, True, + True], + [True, False, False, False, False, False, True, True, True, True, True, True, + True], [True, False, False, True, True, True, True, True, True, True, True, True, + True]]]]], dtype=np.float32, fill_value=np.float32(1e20)) diff --git a/src/ocgis/test/test_ocgis/test_calc/test_library/test_index/test_duration.py b/src/ocgis/test/test_ocgis/test_calc/test_library/test_index/test_duration.py index ae33dbcd9..79f063b1b 100644 --- a/src/ocgis/test/test_ocgis/test_calc/test_library/test_index/test_duration.py +++ b/src/ocgis/test/test_ocgis/test_calc/test_library/test_index/test_duration.py @@ -10,88 +10,87 @@ class TestDuration(AbstractCalcBase): - def test_duration(self): duration = Duration() - - ## three consecutive days over 3 - values = np.array([1,2,3,3,3,1,1],dtype=float) + + # # three consecutive days over 3 + values = np.array([1, 2, 3, 3, 3, 1, 1], dtype=float) values = self.get_reshaped(values) - ret = 
duration.calculate(values,2,operation='gt',summary='max') - self.assertEqual(3.0,ret.flatten()[0]) - - ## no duration over the threshold - values = np.array([1,2,1,2,1,2,1],dtype=float) + ret = duration.calculate(values, 2, operation='gt', summary='max') + self.assertEqual(3.0, ret.flatten()[0]) + + # # no duration over the threshold + values = np.array([1, 2, 1, 2, 1, 2, 1], dtype=float) values = self.get_reshaped(values) - ret = duration.calculate(values,2,operation='gt',summary='max') - self.assertEqual(0.,ret.flatten()[0]) - + ret = duration.calculate(values, 2, operation='gt', summary='max') + self.assertEqual(0., ret.flatten()[0]) + ## no duration over the threshold - values = np.array([1,2,1,2,1,2,1],dtype=float) + values = np.array([1, 2, 1, 2, 1, 2, 1], dtype=float) values = self.get_reshaped(values) - ret = duration.calculate(values,2,operation='gte',summary='max') - self.assertEqual(1.,ret.flatten()[0]) - + ret = duration.calculate(values, 2, operation='gte', summary='max') + self.assertEqual(1., ret.flatten()[0]) + ## average duration - values = np.array([1,5,5,2,5,5,5],dtype=float) + values = np.array([1, 5, 5, 2, 5, 5, 5], dtype=float) values = self.get_reshaped(values) - ret = duration.calculate(values,4,operation='gte',summary='mean') - self.assertEqual(2.5,ret.flatten()[0]) - + ret = duration.calculate(values, 4, operation='gte', summary='mean') + self.assertEqual(2.5, ret.flatten()[0]) + ## add some masked values - values = np.array([1,5,5,2,5,5,5],dtype=float) - mask = [0,0,0,0,0,1,0] - values = np.ma.array(values,mask=mask) + values = np.array([1, 5, 5, 2, 5, 5, 5], dtype=float) + mask = [0, 0, 0, 0, 0, 1, 0] + values = np.ma.array(values, mask=mask) values = self.get_reshaped(values) - ret = duration.calculate(values,4,operation='gte',summary='max') - self.assertEqual(2.,ret.flatten()[0]) - + ret = duration.calculate(values, 4, operation='gte', summary='max') + self.assertEqual(2., ret.flatten()[0]) + ## test with an actual matrix - values = 
np.array([1,5,5,2,5,5,5,4,4,0,2,4,4,4,3,3,5,5,6,9],dtype=float) - values = values.reshape(5,2,2) - values = np.ma.array(values,mask=False) - ret = duration.calculate(values,4,operation='gte',summary='mean') - self.assertNumpyAll(np.ma.array([ 4. , 2. , 1.5, 1.5],dtype=ret.dtype),ret.flatten()) - + values = np.array([1, 5, 5, 2, 5, 5, 5, 4, 4, 0, 2, 4, 4, 4, 3, 3, 5, 5, 6, 9], dtype=float) + values = values.reshape(5, 2, 2) + values = np.ma.array(values, mask=False) + ret = duration.calculate(values, 4, operation='gte', summary='mean') + self.assertNumpyAll(np.ma.array([4., 2., 1.5, 1.5], dtype=ret.dtype), ret.flatten()) + def test_standard_operations(self): ret = self.run_standard_operations( - [{'func':'duration','name':'max_duration','kwds':{'operation':'gt','threshold':2,'summary':'max'}}], - capture=True) + [{'func': 'duration', 'name': 'max_duration', + 'kwds': {'operation': 'gt', 'threshold': 2, 'summary': 'max'}}], + capture=True) for cap in ret: reraise = True - if isinstance(cap['exception'],DefinitionValidationError): - if cap['parms']['calc_grouping'] in [['month'],'all']: + if isinstance(cap['exception'], DefinitionValidationError): + if cap['parms']['calc_grouping'] in [['month'], 'all']: reraise = False if reraise: - raise(cap['exception']) - - + raise (cap['exception']) + + class TestFrequencyDuration(AbstractCalcBase): - def test_constructor(self): FrequencyDuration() - - def test_calculate(self): + + def test_calculate(self): fduration = FrequencyDuration() - - values = np.array([1,2,3,3,3,1,1,3,3,3,4,4,1,4,4,1,10,10],dtype=float) + + values = np.array([1, 2, 3, 3, 3, 1, 1, 3, 3, 3, 4, 4, 1, 4, 4, 1, 10, 10], dtype=float) values = self.get_reshaped(values) - ret = fduration.calculate(values,threshold=2,operation='gt') - self.assertEqual(ret.flatten()[0].dtype.names,('duration','count')) - self.assertNumpyAll(np.ma.array([2,3,5],dtype=np.int32),ret.flatten()[0]['duration']) - 
self.assertNumpyAll(np.ma.array([2,1,1],dtype=np.int32),ret.flatten()[0]['count']) - - calc = [{'func':'freq_duration','name':'freq_duration','kwds':{'operation':'gt','threshold':280}}] - ret = self.run_standard_operations(calc,capture=True,output_format=None) + ret = fduration.calculate(values, threshold=2, operation='gt') + self.assertEqual(ret.flatten()[0].dtype.names, ('duration', 'count')) + self.assertNumpyAll(np.ma.array([2, 3, 5], dtype=np.int32), ret.flatten()[0]['duration']) + self.assertNumpyAll(np.ma.array([2, 1, 1], dtype=np.int32), ret.flatten()[0]['count']) + + calc = [{'func': 'freq_duration', 'name': 'freq_duration', 'kwds': {'operation': 'gt', 'threshold': 280}}] + ret = self.run_standard_operations(calc, capture=True, output_format=None) for dct in ret: - if isinstance(dct['exception'],NotImplementedError) and dct['parms']['aggregate']: + if isinstance(dct['exception'], NotImplementedError) and dct['parms']['aggregate']: pass - elif isinstance(dct['exception'],DefinitionValidationError): + elif isinstance(dct['exception'], DefinitionValidationError): if dct['parms']['output_format'] == 'nc' or dct['parms']['calc_grouping'] == ['month']: pass else: - raise(dct['exception']) - + raise (dct['exception']) + @attr('slow') def test_real_data_multiple_datasets(self): kwds = {'time_region': {'year': [1991], 'month': [7]}} @@ -112,7 +111,7 @@ def test_real_data_multiple_datasets(self): reader = csv.DictReader(f) variables = [row['VARIABLE'] for row in reader] self.assertEqual(set(variables), set(['tasmax', 'tasmin'])) - + def test_real_data(self): """Test calculations on real data.""" @@ -129,7 +128,8 @@ def test_real_data(self): geom='us_counties', select_ugid=[2778], aggregate=True, calc_raw=False, spatial_operation='clip', headers=['did', 'ugid', 'gid', 'year', 'month', 'day', 'variable', 'calc_key', - 'value'], ) + 'value'], + melted=True) ret = ops.execute() if output_format == 'numpy': diff --git a/src/ocgis/test/test_ocgis/test_conv/test_base.py 
b/src/ocgis/test/test_ocgis/test_conv/test_base.py index f8f78953f..86a172467 100644 --- a/src/ocgis/test/test_ocgis/test_conv/test_base.py +++ b/src/ocgis/test/test_ocgis/test_conv/test_base.py @@ -5,6 +5,7 @@ import tempfile import numpy as np +from ocgis.conv.base import AbstractTabularConverter from ocgis import constants from ocgis.conv.esmpy import AbstractConverter from ocgis.test.base import TestBase, nc_scope @@ -16,7 +17,6 @@ class AbstractTestConverter(TestBase): - def get_spatial_collection(self, field=None): rd = self.test_data.get_rd('cancm4_tas') field = field or rd.get()[:, 0, :, 0, 0] @@ -27,51 +27,51 @@ def get_spatial_collection(self, field=None): class TestAbstractConverter(AbstractTestConverter): _auxiliary_file_list = ['ocgis_output_metadata.txt', 'ocgis_output_source_metadata.txt', 'ocgis_output_did.csv'] - - def run_auxiliary_file_tst(self,Converter,file_list,auxiliary_file_list=None): + + def run_auxiliary_file_tst(self, Converter, file_list, auxiliary_file_list=None): auxiliary_file_list = auxiliary_file_list or self._auxiliary_file_list rd = self.test_data.get_rd('cancm4_tas') - ops = ocgis.OcgOperations(dataset=rd,output_format='numpy',slice=[None,0,None,[0,10],[0,10]]) + ops = ocgis.OcgOperations(dataset=rd, output_format='numpy', slice=[None, 0, None, [0, 10], [0, 10]]) coll = ops.execute() - - _ops = [None,ops] - _add_auxiliary_files = [True,False] - for ops_arg,add_auxiliary_files in itertools.product(_ops,_add_auxiliary_files): - ## make a new output directory as to not deal with overwrites + + _ops = [None, ops] + _add_auxiliary_files = [True, False] + for ops_arg, add_auxiliary_files in itertools.product(_ops, _add_auxiliary_files): + # # make a new output directory as to not deal with overwrites outdir = tempfile.mkdtemp(dir=self.current_dir_output) try: - conv = Converter([coll],outdir,'ocgis_output',add_auxiliary_files=add_auxiliary_files,ops=ops_arg) - ## CsvShapefileConverter requires an operations argument + conv = 
Converter([coll], outdir, 'ocgis_output', add_auxiliary_files=add_auxiliary_files, ops=ops_arg) + # # CsvShapefileConverter requires an operations argument except ValueError as e: if Converter == CsvShapefileConverter and ops_arg is None: continue else: - raise(e) + raise (e) conv.write() files = os.listdir(outdir) - ## auxiliary files require an operations argument + # # auxiliary files require an operations argument if add_auxiliary_files == True and ops_arg is not None: to_test = deepcopy(file_list) to_test.extend(auxiliary_file_list) else: to_test = file_list - self.assertEqual(set(files),set(to_test)) - - def run_overwrite_true_tst(self,Converter,include_ops=False): + self.assertEqual(set(files), set(to_test)) + + def run_overwrite_true_tst(self, Converter, include_ops=False): rd = self.test_data.get_rd('cancm4_tas') - _ops = ocgis.OcgOperations(dataset=rd,output_format='numpy',slice=[None,0,None,[0,10],[0,10]]) + _ops = ocgis.OcgOperations(dataset=rd, output_format='numpy', slice=[None, 0, None, [0, 10], [0, 10]]) coll = _ops.execute() - + ops = _ops if include_ops else None outdir = tempfile.mkdtemp(dir=self.current_dir_output) - conv = Converter([coll],outdir,'ocgis_output',ops=ops) + conv = Converter([coll], outdir, 'ocgis_output', ops=ops) conv.write() - mtimes = [os.path.getmtime(os.path.join(outdir,f)) for f in os.listdir(outdir)] + mtimes = [os.path.getmtime(os.path.join(outdir, f)) for f in os.listdir(outdir)] - Converter([coll],outdir,'ocgis_output',overwrite=True,ops=ops).write() - mtimes2 = [os.path.getmtime(os.path.join(outdir,f)) for f in os.listdir(outdir)] - ## if the file is overwritten the modification time will be more recent! - self.assertTrue(all([m2 > m for m2,m in zip(mtimes2,mtimes)])) + Converter([coll], outdir, 'ocgis_output', overwrite=True, ops=ops).write() + mtimes2 = [os.path.getmtime(os.path.join(outdir, f)) for f in os.listdir(outdir)] + # # if the file is overwritten the modification time will be more recent! 
+ self.assertTrue(all([m2 > m for m2, m in zip(mtimes2, mtimes)])) def test_get_converter_map(self): self.assertEqual(AbstractConverter.get_converter_map()[constants.OUTPUT_FORMAT_NETCDF_UGRID_2D_FLEXIBLE_MESH], @@ -87,7 +87,11 @@ def test_multiple_variables(self): field = field[:, 0:2, :, 0:5, 0:5] coll = self.get_spatial_collection(field=field) for conv_klass in conv_klasses: - conv = conv_klass([coll], self.current_dir_output, 'ocgis_output_{0}'.format(conv_klass.__name__)) + if conv_klass == CsvConverter: + kwds = {'melted': True} + else: + kwds = {} + conv = conv_klass([coll], self.current_dir_output, 'ocgis_output_{0}'.format(conv_klass.__name__), **kwds) ret = conv.write() if conv_klass == CsvConverter: with open(ret, 'r') as f: @@ -103,37 +107,53 @@ def test_overwrite_false_csv(self): rd = self.test_data.get_rd('cancm4_tas') ops = ocgis.OcgOperations(dataset=rd, output_format='numpy', slice=[None, 0, None, [0, 10], [0, 10]]) coll = ops.execute() - + outdir = tempfile.mkdtemp(dir=self.current_dir_output) conv = CsvConverter([coll], outdir, 'ocgis_output') conv.write() - + with self.assertRaises(IOError): - CsvConverter([coll],outdir,'ocgis_output') - + CsvConverter([coll], outdir, 'ocgis_output') + def test_overwrite_true_csv(self): self.run_overwrite_true_tst(CsvConverter) - + def test_overwrite_true_nc(self): self.run_overwrite_true_tst(NcConverter) - + def test_overwrite_true_shp(self): self.run_overwrite_true_tst(ShpConverter) - + def test_overwrite_true_csv_shp(self): - self.run_overwrite_true_tst(CsvShapefileConverter,include_ops=True) - - def test_add_auxiliary_files_csv(self): - self.run_auxiliary_file_tst(CsvConverter,['ocgis_output.csv']) - - def test_add_auxiliary_files_geojson(self): - self.run_auxiliary_file_tst(GeoJsonConverter,['ocgis_output.json']) - - def test_add_auxiliary_files_nc(self): - self.run_auxiliary_file_tst(NcConverter,['ocgis_output.nc']) - + self.run_overwrite_true_tst(CsvShapefileConverter, include_ops=True) + + def 
test_add_auxiliary_files_csv(self): + self.run_auxiliary_file_tst(CsvConverter, ['ocgis_output.csv']) + + def test_add_auxiliary_files_geojson(self): + self.run_auxiliary_file_tst(GeoJsonConverter, ['ocgis_output.json']) + + def test_add_auxiliary_files_nc(self): + self.run_auxiliary_file_tst(NcConverter, ['ocgis_output.nc']) + def test_add_auxiliary_files_csv_shp(self): - self.run_auxiliary_file_tst(CsvShapefileConverter,['ocgis_output.csv', 'shp']) - - def test_add_auxiliary_files_shp(self): - self.run_auxiliary_file_tst(ShpConverter,['ocgis_output.dbf', 'ocgis_output.shx', 'ocgis_output.shp', 'ocgis_output.cpg', 'ocgis_output.prj']) + self.run_auxiliary_file_tst(CsvShapefileConverter, ['ocgis_output.csv', 'shp']) + + def test_add_auxiliary_files_shp(self): + self.run_auxiliary_file_tst(ShpConverter, + ['ocgis_output.dbf', 'ocgis_output.shx', 'ocgis_output.shp', 'ocgis_output.cpg', + 'ocgis_output.prj']) + + +class FakeAbstractTabularConverter(AbstractTabularConverter): + pass + + +class TestAbstractTabularConverter(AbstractTestConverter): + def test_init(self): + ff = FakeAbstractTabularConverter(None) + self.assertIsInstance(ff, AbstractConverter) + self.assertFalse(ff.melted) + + ff = FakeAbstractTabularConverter(None, melted=True) + self.assertTrue(ff.melted) diff --git a/src/ocgis/test/test_ocgis/test_conv/test_csv_.py b/src/ocgis/test/test_ocgis/test_conv/test_csv_.py index f61af2e8a..6b9ed8649 100644 --- a/src/ocgis/test/test_ocgis/test_conv/test_csv_.py +++ b/src/ocgis/test/test_ocgis/test_conv/test_csv_.py @@ -1,5 +1,59 @@ +import os +import tempfile + +from ocgis.conv.csv_ import CsvShapefileConverter, CsvConverter +from ocgis import OcgOperations, RequestDataset +from ocgis.api.subset import SubsetOperation from ocgis.test.test_ocgis.test_conv.test_base import AbstractTestConverter +from ocgis.util.addict import Dict class TestCsvConverter(AbstractTestConverter): pass + + +class TestCsvShpConverter(AbstractTestConverter): + def get(self, 
kwargs_conv=None, kwargs_ops=None): + rd = self.test_data.get_rd('cancm4_tas') + + kwds_ops = Dict(dataset=rd, geom='state_boundaries', select_ugid=[15, 18], snippet=True) + if kwargs_ops is not None: + kwds_ops.update(kwargs_ops) + + ops = OcgOperations(**kwds_ops) + so = SubsetOperation(ops) + + kwds_conv = Dict() + kwds_conv.outdir = self.current_dir_output + kwds_conv.prefix = 'foo' + kwds_conv.ops = ops + if kwargs_conv is not None: + kwds_conv.update(kwargs_conv) + + conv = CsvShapefileConverter(so, **kwds_conv) + + return conv + + def test_init(self): + conv = self.get() + self.assertIsInstance(conv, CsvConverter) + + def test(self): + for melted in [False, True]: + kwargs_ops = dict(melted=melted) + kwargs_conv = dict(outdir=tempfile.mkdtemp(dir=self.current_dir_output)) + + conv = self.get(kwargs_ops=kwargs_ops, kwargs_conv=kwargs_conv) + csv_path = conv.write() + self.assertTrue(os.path.exists(csv_path)) + self.assertEqual(conv._ugid_gid_store, + {1: {18: [5988, 5989, 5990, 6116, 6117, 6118], 15: [5992, 6119, 6120]}}) + + shp_path = os.path.split(csv_path)[0] + shp_path = os.path.join(shp_path, 'shp') + shp_path_gid = os.path.join(shp_path, 'foo_gid.shp') + target = RequestDataset(shp_path_gid).get() + self.assertEqual(target.shape[-1], 9) + shp_path_ugid = os.path.join(shp_path, 'foo_ugid.shp') + target = RequestDataset(shp_path_ugid).get() + self.assertEqual(target.shape[-1], 2) \ No newline at end of file diff --git a/src/ocgis/test/test_ocgis/test_conv/test_fiona_.py b/src/ocgis/test/test_ocgis/test_conv/test_fiona_.py index e2e2e1455..ee3e411d0 100644 --- a/src/ocgis/test/test_ocgis/test_conv/test_fiona_.py +++ b/src/ocgis/test/test_ocgis/test_conv/test_fiona_.py @@ -1,21 +1,19 @@ from collections import OrderedDict import os -import datetime +import numpy as np import fiona -from ocgis.api.request.base import RequestDataset -from ocgis.util.shp_cabinet import ShpCabinet +import datetime +from ocgis.interface.base.crs import WGS84 import ocgis from 
ocgis.api.subset import SubsetOperation -from ocgis.conv.fiona_ import ShpConverter +from ocgis.conv.fiona_ import ShpConverter, AbstractFionaConverter from ocgis.test.base import TestBase from ocgis.test.test_ocgis.test_api.test_parms.test_definition import TestGeom -import numpy as np class TestShpConverter(TestBase): - def get_subset_operation(self): geom = TestGeom.get_geometry_dictionaries() rd = self.test_data.get_rd('cancm4_tas') @@ -23,6 +21,16 @@ def get_subset_operation(self): subset = SubsetOperation(ops) return subset + def test_init(self): + field = self.get_field() + coll = field.as_spatial_collection() + conv = ShpConverter([coll], outdir=self.current_dir_output, prefix='foo') + self.assertIsInstance(conv, AbstractFionaConverter) + self.assertFalse(conv.melted) + + conv = ShpConverter([coll], outdir=self.current_dir_output, prefix='foo', melted=True) + self.assertTrue(conv.melted) + def test_attributes_copied(self): """Test attributes in geometry dictionaries are properly accounted for in the converter.""" @@ -30,11 +38,35 @@ def test_attributes_copied(self): conv = ShpConverter(subset, self.current_dir_output, prefix='shpconv') ret = conv.write() - path_ugid = os.path.join(self.current_dir_output, conv.prefix+'_ugid.shp') + path_ugid = os.path.join(self.current_dir_output, conv.prefix + '_ugid.shp') with fiona.open(path_ugid) as source: self.assertEqual(source.schema['properties'], OrderedDict([(u'COUNTRY', 'str:80'), (u'UGID', 'int:10')])) + def test_build(self): + field = self.get_field() + coll = field.as_spatial_collection() + conv = ShpConverter([coll], outdir=self.current_dir_output, prefix='foo') + # no coordinate system... 
+ with self.assertRaises(ValueError): + conv._build_(coll) + + field = self.get_field(crs=WGS84()) + coll = field.as_spatial_collection() + conv = ShpConverter([coll], outdir=self.current_dir_output, prefix='foo') + self.assertTrue(conv._use_upper_keys) + ret = conv._build_(coll) + schema_keys = ret['fobject'].meta['schema']['properties'].keys() + for key in schema_keys: + self.assertFalse(key.islower()) + self.assertNotIn('VALUE', ret['schema']['properties']) + + field = self.get_field(crs=WGS84()) + coll = field.as_spatial_collection() + conv = ShpConverter([coll], outdir=self.current_dir_output, prefix='foo2', melted=True) + ret = conv._build_(coll) + self.assertIn('VALUE', ret['schema']['properties']) + def test_get_field_type(self): target = ShpConverter.get_field_type(np.int32) self.assertEqual(target, 'int') @@ -65,6 +97,34 @@ def test_none_geom(self): ops = ocgis.OcgOperations(dataset=rd, slice=slc) subset = SubsetOperation(ops) conv = ShpConverter(subset, self.current_dir_output, prefix='shpconv') - ret = conv.write() + conv.write() contents = os.listdir(self.current_dir_output) self.assertEqual(len(contents), 5) + + def test_write_coll(self): + + def _test_key_case_(path, upper=True): + with fiona.open(path, 'r') as source: + for row in source: + keys = row['properties'].keys() + for key in keys: + if upper: + self.assertTrue(key.isupper(), key) + else: + self.assertFalse(key.isupper(), key) + + field = self.get_field(crs=WGS84()) + coll = field.as_spatial_collection() + conv = ShpConverter([coll], outdir=self.current_dir_output, prefix='foo') + f = conv._build_(coll) + conv._write_coll_(f, coll) + conv._finalize_(f) + _test_key_case_(conv.path, upper=True) + + field = self.get_field(crs=WGS84()) + coll = field.as_spatial_collection() + conv = ShpConverter([coll], outdir=self.current_dir_output, prefix='foo2', melted=True) + f = conv._build_(coll) + conv._write_coll_(f, coll) + conv._finalize_(f) + _test_key_case_(conv.path, upper=True) \ No newline at 
end of file diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py index 9e8da2de2..418a9025a 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py @@ -4,6 +4,7 @@ from collections import OrderedDict import numpy as np +from shapely import wkb import fiona from shapely import wkt from shapely.geometry import shape @@ -28,10 +29,9 @@ class AbstractTestField(TestBase): - def setUp(self): np.random.seed(1) - super(AbstractTestField,self).setUp() + super(AbstractTestField, self).setUp() def get_col(self, bounds=True, with_name=True): value = [-100., -99., -98., -97.] @@ -122,7 +122,6 @@ def get_field(self, with_bounds=True, with_value=False, with_level=True, with_te class TestField(AbstractTestField): - def test_init(self): for b, wv in itertools.product([True, False], [True, False]): field = self.get_field(with_bounds=b, with_value=wv, with_dimension_names=False) @@ -179,20 +178,98 @@ def test_deepcopy(self): def test_fancy_indexing(self): field = self.get_field(with_value=True) - sub = field[:,(3,5,10,15),:,:,:] - self.assertEqual(sub.shape,(2,4,2,3,4)) - self.assertNumpyAll(sub.variables['tmax'].value,field.variables['tmax'].value[:,(3,5,10,15),:,:,:]) + sub = field[:, (3, 5, 10, 15), :, :, :] + self.assertEqual(sub.shape, (2, 4, 2, 3, 4)) + self.assertNumpyAll(sub.variables['tmax'].value, field.variables['tmax'].value[:, (3, 5, 10, 15), :, :, :]) + + sub = field[:, (3, 15), :, :, :] + self.assertEqual(sub.shape, (2, 2, 2, 3, 4)) + self.assertNumpyAll(sub.variables['tmax'].value, field.variables['tmax'].value[:, (3, 15), :, :, :]) + + sub = field[:, 3:15, :, :, :] + self.assertEqual(sub.shape, (2, 12, 2, 3, 4)) + self.assertNumpyAll(sub.variables['tmax'].value, field.variables['tmax'].value[:, 3:15, :, :, :]) + + def test_getitem(self): + field = self.get_field(with_value=True) + with 
self.assertRaises(IndexError): + field[0] + sub = field[0, 0, 0, 0, 0] + self.assertEqual(sub.shape, (1, 1, 1, 1, 1)) + self.assertEqual(sub.variables['tmax'].value.shape, (1, 1, 1, 1, 1)) + + # test with a one-dimensional variable + field = self.get_field(with_value=True) + sub = field[0, 0, 0, 0, :] + for variable in sub.variables.itervalues(): + self.assertEqual(variable.value.shape, sub.shape) + sub2 = field[0, 0, 0, 0, 2] + for variable in sub2.variables.itervalues(): + self.assertEqual(variable.value.shape, sub2.shape) + + def test_getitem_general(self): + """Test slicing on different types of fields.""" + + ibounds = [True, False] + ivalue = [True, False] + ilevel = [True, False] + itemporal = [True, False] + irealization = [True, False] + for ib, iv, il, it, ir in itertools.product(ibounds, ivalue, ilevel, itemporal, irealization): + field = self.get_field(with_bounds=ib, with_value=iv, with_level=il, with_temporal=it, with_realization=ir) - sub = field[:,(3,15),:,:,:] - self.assertEqual(sub.shape,(2,2,2,3,4)) - self.assertNumpyAll(sub.variables['tmax'].value,field.variables['tmax'].value[:,(3,15),:,:,:]) + if il: + self.assertEqual(field.shape[2], 2) + else: + self.assertEqual(field.shape[2], 1) + + # # try a bad slice + with self.assertRaises(IndexError): + field[0] - sub = field[:,3:15,:,:,:] - self.assertEqual(sub.shape,(2,12,2,3,4)) - self.assertNumpyAll(sub.variables['tmax'].value,field.variables['tmax'].value[:,3:15,:,:,:]) + # # now good slices + + # # if data is loaded prior to slicing then memory is shared + field.spatial.geom.point.value + field_slc = field[:, :, :, :, :] + self.assertTrue(np.may_share_memory(field.spatial.grid.value, field_slc.spatial.grid.value)) + self.assertTrue(np.may_share_memory(field.spatial.geom.point.value, field_slc.spatial.geom.point.value)) + + field_value = field.variables['tmax']._value + field_slc_value = field_slc.variables['tmax']._value + try: + self.assertNumpyAll(field_value, field_slc_value) + except 
AttributeError: + # with no attached value to the field, the private value will be nones + if iv is None: + self.assertIsNone(field_value) + self.assertIsNone(field_slc_value) + + if iv == True: + self.assertTrue(np.may_share_memory(field_value, field_slc_value)) + else: + self.assertEqual(field_slc_value, None) + + field_slc = field[0, 0, 0, 0, 0] + self.assertEqual(field_slc.shape, (1, 1, 1, 1, 1)) + if iv: + self.assertEqual(field_slc.variables['tmax'].value.shape, (1, 1, 1, 1, 1)) + self.assertNumpyAll(field_slc.variables['tmax'].value, + np.ma.array(field.variables['tmax'].value[0, 0, 0, 0, 0]).reshape(1, 1, 1, 1, 1)) + else: + self.assertEqual(field_slc.variables['tmax']._value, None) + self.assertEqual(field_slc.variables['tmax']._value, field.variables['tmax']._value) + + def test_getitem_specific(self): + field = self.get_field(with_value=True) + field_slc = field[:, 0:2, 0, :, :] + self.assertEqual(field_slc.shape, (2, 2, 1, 3, 4)) + self.assertEqual(field_slc.variables['tmax'].value.shape, (2, 2, 1, 3, 4)) + ref_field_real_slc = field.variables['tmax'].value[:, 0:2, 0, :, :] + self.assertNumpyAll(ref_field_real_slc.flatten(), field_slc.variables['tmax'].value.flatten()) def test_get_aggregated_all(self): - for wv in [True,False]: + for wv in [True, False]: field = self.get_field(with_value=wv) try: agg = field.get_spatially_aggregated() @@ -201,79 +278,90 @@ def test_get_aggregated_all(self): continue else: raise - self.assertNotEqual(field.spatial.grid,None) - self.assertEqual(agg.spatial.grid,None) - self.assertEqual(agg.shape,(2,31,2,1,1)) - self.assertNumpyAll(field.variables['tmax'].value,agg._raw.variables['tmax'].value) - self.assertTrue(np.may_share_memory(field.variables['tmax'].value,agg._raw.variables['tmax'].value)) + self.assertNotEqual(field.spatial.grid, None) + self.assertEqual(agg.spatial.grid, None) + self.assertEqual(agg.shape, (2, 31, 2, 1, 1)) + self.assertNumpyAll(field.variables['tmax'].value, agg._raw.variables['tmax'].value) + 
self.assertTrue(np.may_share_memory(field.variables['tmax'].value, agg._raw.variables['tmax'].value)) - to_test = field.variables['tmax'].value[0,0,0,:,:].mean() - self.assertNumpyAll(to_test,agg.variables['tmax'].value[0,0,0,0,0]) + to_test = field.variables['tmax'].value[0, 0, 0, :, :].mean() + self.assertNumpyAll(to_test, agg.variables['tmax'].value[0, 0, 0, 0, 0]) def test_get_aggregated_irregular(self): - single = wkt.loads('POLYGON((-99.894355 40.230645,-98.725806 40.196774,-97.726613 40.027419,-97.032258 39.942742,-97.681452 39.626613,-97.850806 39.299194,-98.178226 39.643548,-98.844355 39.920161,-99.894355 40.230645))') + single = wkt.loads( + 'POLYGON((-99.894355 40.230645,-98.725806 40.196774,-97.726613 40.027419,-97.032258 39.942742,-97.681452 39.626613,-97.850806 39.299194,-98.178226 39.643548,-98.844355 39.920161,-99.894355 40.230645))') field = self.get_field(with_value=True) - for b in [True,False]: - ret = field.get_clip(single,use_spatial_index=b) + for b in [True, False]: + ret = field.get_clip(single, use_spatial_index=b) agg = ret.get_spatially_aggregated() - to_test = agg.spatial.geom.polygon.value[0,0] - self.assertAlmostEqual(to_test.area,single.area) - self.assertAlmostEqual(to_test.bounds,single.bounds) - self.assertAlmostEqual(to_test.exterior.length,single.exterior.length) + to_test = agg.spatial.geom.polygon.value[0, 0] + self.assertAlmostEqual(to_test.area, single.area) + self.assertAlmostEqual(to_test.bounds, single.bounds) + self.assertAlmostEqual(to_test.exterior.length, single.exterior.length) def test_get_clip_single_cell(self): - single = wkt.loads('POLYGON((-97.997731 39.339322,-97.709012 39.292322,-97.742584 38.996888,-97.668726 38.641026,-98.158876 38.708170,-98.340165 38.916316,-98.273021 39.218463,-97.997731 39.339322))') + single = wkt.loads( + 'POLYGON((-97.997731 39.339322,-97.709012 39.292322,-97.742584 38.996888,-97.668726 38.641026,-98.158876 38.708170,-98.340165 38.916316,-98.273021 39.218463,-97.997731 39.339322))') 
field = self.get_field(with_value=True) - for b in [True,False]: - ret = field.get_clip(single,use_spatial_index=b) - self.assertEqual(ret.shape,(2,31,2,1,1)) - self.assertEqual(ret.spatial.grid._value.sum(),-59.0) - self.assertTrue(ret.spatial.geom.polygon.value[0,0].almost_equals(single)) - self.assertEqual(ret.spatial.uid,np.array([[7]])) - - self.assertEqual(ret.spatial.geom.point.value.shape,ret.spatial.geom.polygon.shape) - ref_pt = ret.spatial.geom.point.value[0,0] - ref_poly = ret.spatial.geom.polygon.value[0,0] + for b in [True, False]: + ret = field.get_clip(single, use_spatial_index=b) + self.assertEqual(ret.shape, (2, 31, 2, 1, 1)) + self.assertEqual(ret.spatial.grid._value.sum(), -59.0) + self.assertTrue(ret.spatial.geom.polygon.value[0, 0].almost_equals(single)) + self.assertEqual(ret.spatial.uid, np.array([[7]])) + + self.assertEqual(ret.spatial.geom.point.value.shape, ret.spatial.geom.polygon.shape) + ref_pt = ret.spatial.geom.point.value[0, 0] + ref_poly = ret.spatial.geom.polygon.value[0, 0] self.assertTrue(ref_poly.intersects(ref_pt)) def test_get_clip_irregular(self): - for wv in [True,False]: - single = wkt.loads('POLYGON((-99.894355 40.230645,-98.725806 40.196774,-97.726613 40.027419,-97.032258 39.942742,-97.681452 39.626613,-97.850806 39.299194,-98.178226 39.643548,-98.844355 39.920161,-99.894355 40.230645))') + for wv in [True, False]: + single = wkt.loads( + 'POLYGON((-99.894355 40.230645,-98.725806 40.196774,-97.726613 40.027419,-97.032258 39.942742,-97.681452 39.626613,-97.850806 39.299194,-98.178226 39.643548,-98.844355 39.920161,-99.894355 40.230645))') field = self.get_field(with_value=wv) - for b in [True,False]: - ret = field.get_clip(single,use_spatial_index=b) - self.assertEqual(ret.shape,(2,31,2,2,4)) + for b in [True, False]: + ret = field.get_clip(single, use_spatial_index=b) + self.assertEqual(ret.shape, (2, 31, 2, 2, 4)) unioned = cascaded_union([geom for geom in ret.spatial.geom.polygon.value.compressed().flat]) - 
self.assertAlmostEqual(unioned.area,single.area) - self.assertAlmostEqual(unioned.bounds,single.bounds) - self.assertAlmostEqual(unioned.exterior.length,single.exterior.length) - self.assertAlmostEqual(ret.spatial.weights[1,2],0.064016424) - self.assertAlmostEqual(ret.spatial.weights.sum(),1.776435) + self.assertAlmostEqual(unioned.area, single.area) + self.assertAlmostEqual(unioned.bounds, single.bounds) + self.assertAlmostEqual(unioned.exterior.length, single.exterior.length) + self.assertAlmostEqual(ret.spatial.weights[1, 2], 0.064016424) + self.assertAlmostEqual(ret.spatial.weights.sum(), 1.776435) if not wv: with self.assertRaises(NotImplementedError): ret.variables['tmax'].value + def test_get_fiona_dict(self): + field = self.get_field(with_value=True, crs=WGS84()) + _, arch = field.get_iter().next() + target = Field.get_fiona_dict(field, arch) + self.assertAsSetEqual(target.keys(), ['crs', 'fconvert', 'schema']) + def test_get_iter(self): field = self.get_field(with_value=True) rows = list(field.get_iter()) self.assertEqual(len(rows), 2 * 31 * 2 * 3 * 4) - rows[100]['geom'] = rows[100]['geom'].bounds + self.assertEqual(len(rows[0]), 2) + self.assertEqual(rows[100][0].bounds, (-100.5, 38.5, -99.5, 39.5)) real = {'vid': 1, 'ub_time': datetime.datetime(2000, 1, 6, 0, 0), 'year': 2000, 'gid': 5, 'ub_level': 100, - 'rid': 1, 'realization': 1, 'geom': (-100.5, 38.5, -99.5, 39.5), 'lb_level': 0, + 'rid': 1, 'realization': 1, 'lb_level': 0, 'variable': 'tmax', 'month': 1, 'lb_time': datetime.datetime(2000, 1, 5, 0, 0), 'day': 5, 'level': 50, 'did': None, 'value': 0.32664490177209615, 'alias': 'tmax', 'lid': 1, 'time': datetime.datetime(2000, 1, 5, 12, 0), 'tid': 5, 'name': 'tmax'} - self.assertAsSetEqual(rows[100].keys(), real.keys()) - for k, v in rows[100].iteritems(): + self.assertAsSetEqual(rows[100][1].keys(), real.keys()) + for k, v in rows[100][1].iteritems(): self.assertEqual(real[k], v) - self.assertEqual(set(real.keys()), set(rows[100].keys())) - 
self.assertEqual(set(field.variables['tmax'].value.flatten().tolist()), set([r['value'] for r in rows])) + self.assertEqual(set(field.variables['tmax'].value.flatten().tolist()), set([r[1]['value'] for r in rows])) # test without names field = self.get_field(with_value=True, with_dimension_names=False) rows = list(field.get_iter()) - self.assertAsSetEqual(rows[10].keys(), ['lid', 'name', 'vid', 'ub_time', 'did', 'lb_level', 'time', 'year', 'value', 'month', 'alias', 'tid', 'geom', 'ub_level', 'rlz', 'variable', 'gid', 'rid', 'level', 'lb_time', 'day']) + self.assertAsSetEqual(rows[10][1].keys(), + ['lid', 'name', 'vid', 'ub_time', 'did', 'lb_level', 'time', 'year', 'value', 'month', + 'alias', 'tid', 'ub_level', 'rlz', 'variable', 'gid', 'rid', 'level', 'lb_time', 'day']) # test not melted field = self.get_field(with_value=True) @@ -285,7 +373,35 @@ def test_get_iter(self): self.assertEqual(len(rows), 1488) for row in rows: for variable in field.variables.itervalues(): - self.assertIn(variable.alias, row) + self.assertIn(variable.alias, row[1]) + + # test for upper keys + field = self.get_field(with_value=True)[0, 0, 0, 0, 0] + for row in field.get_iter(use_upper_keys=True): + for key in row[1].keys(): + self.assertTrue(key.isupper()) + + # test passing limiting headers + field = self.get_field(with_value=True) + headers = ['time', 'tid'] + for _, row in field.get_iter(headers=headers): + self.assertEqual(row.keys(), headers) + + # test passing a ugid + field = self.get_field(with_value=True)[0, 0, 0, 0, 0] + _, row = field.get_iter(ugid=5).next() + self.assertEqual(row[constants.HEADERS.ID_SELECTION_GEOMETRY], 5) + + # test value_keys + field = self.get_field(with_value=True)[0, 0, 0, 0, 0] + fill = np.ma.array(np.zeros(2, dtype=[('a', float), ('b', float)])) + value = np.ma.array(np.zeros(field.shape, dtype=object), mask=False) + value.data[0, 0, 0, 0, 0] = fill + field.variables['tmax']._value = value + value_keys = ['a', 'b'] + _, row = 
field.get_iter(value_keys=value_keys, melted=True) + for vk in value_keys: + self.assertIn(vk, row[1]) def test_get_iter_spatial_only(self): """Test with only a spatial dimension.""" @@ -294,39 +410,56 @@ def test_get_iter_spatial_only(self): self.assertIsNone(field.level) rows = list(field.get_iter()) for xx in ['lid', 'level']: - self.assertNotIn(xx, rows[0].keys()) + self.assertNotIn(xx, rows[0][1].keys()) self.assertEqual(len(rows), 12) + def test_get_intersects(self): + # test with vector geometries + path = ShpCabinet().get_shp_path('state_boundaries') + rd = RequestDataset(path) + field = rd.get() + polygon = wkb.loads( + '\x01\x06\x00\x00\x00\x03\x00\x00\x00\x01\x03\x00\x00\x00\x01\x00\x00\x00Y\x00\x00\x00\x06]\xcf\xdfo\xd4Q\xc0AV:L\xd7\xe2D@i\x856A\xbf\xd5Q\xc0^`\xe7\x0ch\xe4D@\xd8\xcb\xd4e\x1c\xd6Q\xc0\xf7\x8a\xf1\xab\x15\xe8D@*}\xf3#i\xd5Q\xc0\xda\x02\x0b\xc7\xcf\xedD@P\xbd\xdch\xeb\xd5Q\xc0R\xacZ\xab\x19\xf0D@Hd\x0bIQ\xd5Q\xc0K\x9e\xe3\'\xb1\xf2D@\xd4\xcd\xb4\xb0\x92\xd8Q\xc0^\xbf\x93a\xb8\xf1D@{u\xecSy\xd8Q\xc0o\xc6\x82\x80X\xfdD@\x80t%\xb5;\xd8Q\xc0\x84\x84\x0e\\\xc1\x01E@)\xe1\xbd\xe5\xd5\xdfQ\xc0Jp\xdd6/\x01E@\xf2\xd9\xdb\xaa\x0f\xf3Q\xc0dX\xfc\x0f\x8c\x00E@\x83\xbd\xf9\x8aY\xf3Q\xc0\x1e\xcd\x14\x15M\x02E@[\xbbY\x02\x14\x06R\xc0Z\xe9\xc5dM\x03E@\xc1\xbd\xad\xe5\xb9\x08R\xc0p\x8d\x1a\'a\x03E@\xfbw`\x10| 
R\xc0z3\xfd&\xf0\x03E@\xe4\x98\x9a\xf8\x8e$R\xc0r.\xe4%\xdb\x03E@\xb2\x07\xf7\xf7=%R\xc0cs\xba\x07\xc4\x02E@\x81\xa6\xef\x9b\xe6&R\xc0\xf8qV\x1f\xeb\x02E@\xa6~rz\x02\'R\xc0*\xf6\x9b\x9d\xe8\x03E@\xb3\xefU\x92`0R\xc0\xa4SJ\x1cU\x04E@\xb41\x00\xf4\x1f1R\xc0tK0\x05G\x00E@\r\x98h\xdbT4R\xc0\x1c\xc0$\xc5\xa3\xffD@\x03\xa2\xcd\xbc@4R\xc0\xc0\xe2)\xf8I\x04E@\x0cKd\xddc@R\xc0`\xfau\xf4\x9b\x04E@\x03\xd4\x96\xa3\xebBR\xc0\x82\x8en\xd1\xa5\x04E@\x1e\xca\xef\xa0\xfd^R\xc0\x02\xc7\xf9!\x12\x06E@P\xda\xb7\xff\xec_R\xc0\x12\xc1\xa68\xea\tE@2\xe2\x9d\xe7sVR\xc0\x1e\xfb\xe8\xd2\x9b@E@\xb2:3\x0f\x84PR\xc0H-0\xd7~_E@\xf8\x1c\xed\xafBAR\xc0&\xc0\xe3N\xc5^E@\x8c\x1e\x1ec\x12;R\xc0\xe4R\xa1\xf4a^E@5s\nW+\x1dR\xc0\x84\x8b\xf9\xba\xe8\\E@^L\x19*\xea\x11R\xc0\xf8\x16RF8\\E@\x9eZ\xcb\xa9\x88\xfbQ\xc0\xa8\xdb\'\xd6\x85ZE@?+\xbd\t\xa9\xf9Q\xc0~K\x9d\xd6IZE@\x0f\x8f\x0bda\xd2Q\xc0\xee\x90\xcb\xd5kYE@\xec\xa8\x91\x81\'\xd0Q\xc0?\x7fM\xd7\xef\\E@64"\x03d\xcfQ\xc0\xd6\x99\x80\xd2,_E@\xa9\xbb\x11\x1d\xed\xcbQ\xc0\xca\x7f \xb3\x8f^E@4r\xfa\x81\x96\xcbQ\xc0\x9c\x17\xed,VgE@\x9d\xee\xf0\xfa\xb7\xc7Q\xc0\xb2\xd4\x9fq\xbdhE@S\xe2r42\xc4Q\xc0\n\x1c\xe1\xef\xf3fE@!\x83y\x95\xa0\xc1Q\xc0O\x14\xf1.\xf3lE@\xb0\xf2^,\xf7\xbaQ\xc0\xfe\x89\x10\x93LqE@\xf6\x0f\xa9\xa7z\xb9Q\xc0FI\x942\x85qE@\xca>\xfb$b\xb6Q\xc0:\xa9\x7f\xda\x84nE@\x99f=\x9d\x16\xb4Q\xc0b6z\xff\xfbnE@\xf8\x1c\xcc*W\xafQ\xc0\xc8\x1fmU\xeeTE@\xcc\xb7\t\xfa\xf6\xa5Q\xc0p\xa3_"\xbaRE@Qf[{\x8a\xa8Q\xc0J\x07l\x06\x94JE@b\x8c\x1fK\n\xb4Q\xc0 
\xffz\xa0\xf1EE@6\xa8\xee\xcf0\xb9Q\xc0:P\xe3MZ9E@\xf1H\xcc\xd5z\xbdQ\xc0\x82\x19u\xaaX7E@n\x18\xea\xb6/\xc2Q\xc0\x175gx\x8f$E@U\xadT\xc7\x15\xbbQ\xc0J_B\xaa\x04\x1eE@\xcb\x88\xa5\x86!\xb9Q\xc0z\xf3\xde\xa1\x04"E@\x1f\x04\x08@\xc7\xb4Q\xc0\x8co.NX!E@?\xce\x01\xf8\x92\xb1Q\xc0\x10\xf6\x91r\xd3\x1fE@nD\xd5\x08\xe8\xabQ\xc0\n\xf6\x9b\xf4\x9a\x13E@\x069\x91\xd5\x98\xa7Q\xc0\xc4\xd5\x10\xa1\xed\xfbD@L\x1b\xef\xe6\x94\xa2Q\xc0$\xad\x14j)\xf7D@\xa4\xe1T\xc3i\xa2Q\xc09\x04\xa28#\xe7D@\xd8\xea\xfa\xce\x1a\x9bQ\xc0g\x01\x88\x04/\xdfD@\xde\xc6#\x80\x86\x91Q\xc0\xff\x88\x16w_\xdcD@\x14hp\x07\xd5\x95Q\xc0(\x95L\xb3\x1c\xdbD@\xa1 \xbe\xf7"\x8dQ\xc0\x19(\xa4\x9a5\xdbD@\x9bKt\xce:\x81Q\xc0\xb8\xe3\x9b\xd3\x08\xe4D@\x175%X\x07\x80Q\xc0\x16\xe7\x88\xe3\x9c\xedD@\x8b\x0e\x11\x8cn\x86Q\xc0pZ\xae\xe7G\x00E@\xef\x08`YT\x90Q\xc0\x90\xc7\xcc\xfd\xb1\x07E@P\x02\xa0Q\xa5\x88Q\xc0\xc2\xf2\xd2~G\tE@\xf4\x84\xd0\xeb:\x83Q\xc0\xbd@\xb0\xbe]\x03E@\xb0G/\xf7\xb4}Q\xc0\xcdI<]\xb9\xf3D@\xae\x04l\xe9\xbczQ\xc0@\x99+wB\xe2D@5\xb6ME\x15}Q\xc0,\xc8f\x8f\xf3\xd5D@\xe6[z\x8br\x99Q\xc0x0\x10\xbdh\xceD@\xc8\x01\xfe\xf2\xb4\x9bQ\xc0\xf0\xb6\xcf\xc6\xed\xc8D@*\x82\xc1\xe3\xc6\xa8Q\xc0\x06n9O\x18\xc5D@\x0ei\x7f\x87\x8d\xaaQ\xc0z\x0cy./\xc7D@\xd8\x12$+\xaa\xa7Q\xc0\x9b\x93\x1bU)\xdeD@\x8f\x07\xb59\xb9\xb5Q\xc0\x9c\xcf\x98t7\xd0D@/\xb9#\xa1\x18\xb9Q\xc0\x94\xc3X\n$\xd1D@\xb2\xeeYk\x13\xc0Q\xc0\xa22ko\x93\xc2D@\xd0PQ\x18\x7f\xc7Q\xc0W\xa9\xe8\xaa\x1c\xbfD@0v(\x9f\t\xc9Q\xc0\xfa\x02g\xff\xdf\xd3D@H\x9dJF\xb9\xccQ\xc0\x0c\xc0\x99\x19\xd9\xd6D@\xb1\xfd\r\x8c\xa7\xceQ\xc0\xa4m\x9f\xba\x95\xdaD@\x96/\xfdo\x10\xd1Q\xc0Xm3\x97\xf7\xdfD@\x06]\xcf\xdfo\xd4Q\xc0AV:L\xd7\xe2D@\x01\x03\x00\x00\x00\x01\x00\x00\x00\x0f\x00\x00\x00\xc0\xb6\x07]\xad\xa6Q\xc02\xbc\x8c5\xff\xb6D@\xa9xP\x1aU\xa4Q\xc02\x92"\xe9v\xbbD@\xf2\xb9_\x96a\xa3Q\xc0o\xeeb\xfbl\xb5D@"\xfdj\xd8\xda\xa4Q\xc0\xc0\x90\x1a;\x84\xb4D@\x186*V\xf8\xa0Q\xc0Z"\x89M\x07\xb3D@\xc7j=\xf0\x1c\x9fQ\xc0\xed3hH\xb8\xabD@\x881\xcdxF\xafQ\xc0zJ`\x9a\xc5\xaaD@L7j\xfbB\xb1Q\xc0h>
\xfc?*\xa6D@\xe3\xd2!\xca\x02\xb6Q\xc0\xac!n\xe7\x9e\xacD@p\xbc\xa4\xe0\x14\xb2Q\xc0\x7f\x12\xffI\x1f\xadD@\xbb\x0c\x1b\xdbV\xb1Q\xc0\\\xcd\xe5\xf4\x98\xa9D@\x80\xe8\xd2\xfc\x1c\xb0Q\xc0\x14C\x00\xed\xea\xb0D@\xc7\'\xb0 \xb8\xaaQ\xc0W\x81:c;\xbaD@\x86\x9c\x9f\x1e\xc6\xa6Q\xc02\xfc\xe8\xc4\xc1\xbcD@\xc0\xb6\x07]\xad\xa6Q\xc02\xbc\x8c5\xff\xb6D@\x01\x03\x00\x00\x00\x01\x00\x00\x00\r\x00\x00\x00k\x97\xa4\xa3\x07\x82Q\xc0@\xa7\xf3]\xed\xa7D@\xb8\xaa\xa0\xa1j\x80Q\xc0\x98+\xd84\x92\xa9D@T@x%\xb4\x81Q\xc0x\xd7\x92\xb5)\xabD@R8\x8a\xc8\x9b\x85Q\xc0\xa8\xc2\x93 \xff\xa5D@]r\xdd\x055\x82Q\xc0\xfcUH\x92\xc3\xacD@\xf7"J%\'\x83Q\xc0\xb0)@\xca+\xb2D@\xb8\xfb\xdf\x9e\xd2}Q\xc0\xe1A\x12\x00\xbf\xa5D@\x0f\xd7\xa3\xfd\xfa}Q\xc0\x99\x08\xc8\x84;\xa0D@\x85\xbc\xcfF\x99\x86Q\xc0\x10\xdd1\xf0\x7f\x9eD@eg\xec/\xa6\x8dQ\xc0\x99\xdf\xe4\x16\x96\xa2D@\'\xdc\xad\x10A\x8dQ\xc0\x1ag\xa1\xa7\xa4\xa5D@$\xc4\x04\x8aC\x86Q\xc0Uu\xb2l\x89\xa3D@k\x97\xa4\xa3\x07\x82Q\xc0@\xa7\xf3]\xed\xa7D@') + ret = field.get_intersects(polygon) + self.assertTrue(polygon.almost_equals(ret.spatial.geom.polygon.value.compressed()[0])) + def test_get_intersects_domain_polygon(self): - regular = make_poly((36.61,41.39),(-101.41,-95.47)) + regular = make_poly((36.61, 41.39), (-101.41, -95.47)) field = self.get_field(with_value=True) - for b in [True,False]: - ret = field.get_intersects(regular,use_spatial_index=b) - self.assertNumpyAll(ret.variables['tmax'].value,field.variables['tmax'].value) - self.assertNumpyAll(field.spatial.grid.value,ret.spatial.grid.value) + for b in [True, False]: + ret = field.get_intersects(regular, use_spatial_index=b) + self.assertNumpyAll(ret.variables['tmax'].value, field.variables['tmax'].value) + self.assertNumpyAll(field.spatial.grid.value, ret.spatial.grid.value) def test_get_intersects_irregular_polygon(self): - irregular = wkt.loads('POLYGON((-100.106049 38.211305,-99.286894 38.251591,-99.286894 38.258306,-99.286894 38.258306,-99.260036 39.252035,-98.769886 39.252035,-98.722885 
37.734583,-100.092620 37.714440,-100.106049 38.211305))') + irregular = wkt.loads( + 'POLYGON((-100.106049 38.211305,-99.286894 38.251591,-99.286894 38.258306,-99.286894 38.258306,-99.260036 39.252035,-98.769886 39.252035,-98.722885 37.734583,-100.092620 37.714440,-100.106049 38.211305))') keywords = dict(b=[True, False], with_corners=[True, False]) for k in itr_products_keywords(keywords, as_namedtuple=True): field = self.get_field(with_value=True) if k.with_corners: field.spatial.grid.corners - ret = field.get_intersects(irregular,use_spatial_index=k.b) - self.assertEqual(ret.shape,(2,31,2,2,2)) - self.assertNumpyAll(ret.variables['tmax'].value.mask[0,2,1,:,:],np.array([[True,False],[False,False]])) - self.assertEqual(ret.spatial.uid.data[ret.spatial.get_mask()][0],5) + ret = field.get_intersects(irregular, use_spatial_index=k.b) + self.assertEqual(ret.shape, (2, 31, 2, 2, 2)) + self.assertNumpyAll(ret.variables['tmax'].value.mask[0, 2, 1, :, :], + np.array([[True, False], [False, False]])) + self.assertEqual(ret.spatial.uid.data[ret.spatial.get_mask()][0], 5) if k.with_corners: - self.assertNumpyAll(ret.spatial.grid.corners.mask, np.array([[[[True, True, True, True], [False, False, False, False]], [[False, False, False, False], [False, False, False, False]]], [[[True, True, True, True], [False, False, False, False]], [[False, False, False, False], [False, False, False, False]]]])) + self.assertNumpyAll(ret.spatial.grid.corners.mask, np.array([ + [[[True, True, True, True], [False, False, False, False]], + [[False, False, False, False], [False, False, False, False]]], + [[[True, True, True, True], [False, False, False, False]], + [[False, False, False, False], [False, False, False, False]]]])) else: self.assertIsNone(ret.spatial.grid._corners) def test_get_intersects_single_bounds_row(self): field = self.get_field(with_value=True) - sub = field[:,0,:,0,0] - irregular = wkt.loads('POLYGON((-100.106049 38.211305,-99.286894 38.251591,-99.286894 38.258306,-99.286894 
38.258306,-99.260036 39.252035,-98.769886 39.252035,-98.722885 37.734583,-100.092620 37.714440,-100.106049 38.211305))') - ## the intersects operations is empty. this was testing that contiguous + sub = field[:, 0, :, 0, 0] + irregular = wkt.loads( + 'POLYGON((-100.106049 38.211305,-99.286894 38.251591,-99.286894 38.258306,-99.286894 38.258306,-99.260036 39.252035,-98.769886 39.252035,-98.722885 37.734583,-100.092620 37.714440,-100.106049 38.211305))') + # # the intersects operations is empty. this was testing that contiguous ## bounds check fails appropriately with a single bounds row. with self.assertRaises(EmptySubsetError): sub.get_intersects(irregular) @@ -338,14 +471,14 @@ def test_get_iter_two_variables(self): var2.alias = 'tmax2' var2._value = var2._value + 3 field.variables.add_variable(deepcopy(var2), assign_new_uid=True) - aliases = set([row['alias'] for row in field.get_iter()]) + aliases = set([row[1]['alias'] for row in field.get_iter(melted=True)]) self.assertEqual(set(['tmax', 'tmax2']), aliases) vids = [] for row in field.get_iter(): - vids.append(row['vid']) - if row['alias'] == 'tmax2': - self.assertTrue(row['value'] > 3) + vids.append(row[1]['vid']) + if row[1]['alias'] == 'tmax2': + self.assertTrue(row[1]['value'] > 3) self.assertEqual(set(vids), set([1, 2])) def test_iter(self): @@ -402,111 +535,42 @@ def test_shape_as_dict(self): field = self.get_field(with_value=False) to_test = field.shape_as_dict for variable in field.variables.values(): - self.assertEqual(variable._value,None) - self.assertEqual(to_test,{'Y': 3, 'X': 4, 'Z': 2, 'R': 2, 'T': 31}) - - def test_slicing(self): - field = self.get_field(with_value=True) - with self.assertRaises(IndexError): - field[0] - sub = field[0,0,0,0,0] - self.assertEqual(sub.shape,(1,1,1,1,1)) - self.assertEqual(sub.variables['tmax'].value.shape,(1,1,1,1,1)) - - def test_slicing_general(self): - """Test slicing on different types of fields.""" - - ibounds = [True, False] - ivalue = [True, False] - ilevel 
= [True, False] - itemporal = [True, False] - irealization = [True, False] - for ib, iv, il, it, ir in itertools.product(ibounds, ivalue, ilevel, itemporal, irealization): - field = self.get_field(with_bounds=ib, with_value=iv, with_level=il, with_temporal=it, with_realization=ir) - - if il: - self.assertEqual(field.shape[2], 2) - else: - self.assertEqual(field.shape[2], 1) - - # # try a bad slice - with self.assertRaises(IndexError): - field[0] - - ## now good slices - - ## if data is loaded prior to slicing then memory is shared - field.spatial.geom.point.value - field_slc = field[:, :, :, :, :] - self.assertTrue(np.may_share_memory(field.spatial.grid.value, field_slc.spatial.grid.value)) - self.assertTrue(np.may_share_memory(field.spatial.geom.point.value, field_slc.spatial.geom.point.value)) - - field_value = field.variables['tmax']._value - field_slc_value = field_slc.variables['tmax']._value - try: - self.assertNumpyAll(field_value, field_slc_value) - except AttributeError: - # with no attached value to the field, the private value will be nones - if iv is None: - self.assertIsNone(field_value) - self.assertIsNone(field_slc_value) - - if iv == True: - self.assertTrue(np.may_share_memory(field_value, field_slc_value)) - else: - self.assertEqual(field_slc_value, None) - - field_slc = field[0, 0, 0, 0, 0] - self.assertEqual(field_slc.shape, (1, 1, 1, 1, 1)) - if iv: - self.assertEqual(field_slc.variables['tmax'].value.shape, (1, 1, 1, 1, 1)) - self.assertNumpyAll(field_slc.variables['tmax'].value, - np.ma.array(field.variables['tmax'].value[0, 0, 0, 0, 0]).reshape(1, 1, 1, 1, 1)) - else: - self.assertEqual(field_slc.variables['tmax']._value, None) - self.assertEqual(field_slc.variables['tmax']._value, field.variables['tmax']._value) - - def test_slicing_specific(self): - field = self.get_field(with_value=True) - field_slc = field[:,0:2,0,:,:] - self.assertEqual(field_slc.shape,(2,2,1,3,4)) - self.assertEqual(field_slc.variables['tmax'].value.shape,(2,2,1,3,4)) - 
ref_field_real_slc = field.variables['tmax'].value[:,0:2,0,:,:] - self.assertNumpyAll(ref_field_real_slc.flatten(),field_slc.variables['tmax'].value.flatten()) + self.assertEqual(variable._value, None) + self.assertEqual(to_test, {'Y': 3, 'X': 4, 'Z': 2, 'R': 2, 'T': 31}) def test_subsetting(self): - for wv in [True,False]: + for wv in [True, False]: field = self.get_field(with_value=wv) - self.assertNotIsInstance(field.temporal.value,np.ma.MaskedArray) - - temporal_start = dt(2000,1,1,12) - temporal_stop = dt(2000,1,31,12) - ret = field.temporal.get_between(temporal_start,temporal_stop) - self.assertIsInstance(ret,VectorDimension) - self.assertNumpyAll(ret.value,field.temporal.value) - self.assertNumpyAll(ret.bounds,field.temporal.bounds) - - ret = field.get_between('temporal',temporal_start,temporal_stop) - self.assertIsInstance(ret,Field) - self.assertEqual(ret.shape,field.shape) + self.assertNotIsInstance(field.temporal.value, np.ma.MaskedArray) + + temporal_start = dt(2000, 1, 1, 12) + temporal_stop = dt(2000, 1, 31, 12) + ret = field.temporal.get_between(temporal_start, temporal_stop) + self.assertIsInstance(ret, VectorDimension) + self.assertNumpyAll(ret.value, field.temporal.value) + self.assertNumpyAll(ret.bounds, field.temporal.bounds) + + ret = field.get_between('temporal', temporal_start, temporal_stop) + self.assertIsInstance(ret, Field) + self.assertEqual(ret.shape, field.shape) if wv: - self.assertNumpyAll(field.variables['tmax'].value,ret.variables['tmax'].value) + self.assertNumpyAll(field.variables['tmax'].value, ret.variables['tmax'].value) else: with self.assertRaises(NotImplementedError): ret.variables['tmax'].value - ## try empty subset + # # try empty subset with self.assertRaises(EmptySubsetError): - field.get_between('level',100000,2000000000) + field.get_between('level', 100000, 2000000000) - ret = field.get_between('realization',1,1) - self.assertEqual(ret.shape,(1, 31, 2, 3, 4)) + ret = field.get_between('realization', 1, 1) + 
self.assertEqual(ret.shape, (1, 31, 2, 3, 4)) if wv: - self.assertNumpyAll(ret.variables['tmax'].value,field.variables['tmax'].value[0:1,:,:,:,:]) + self.assertNumpyAll(ret.variables['tmax'].value, field.variables['tmax'].value[0:1, :, :, :, :]) - ret = field.get_between('temporal',dt(2000,1,15),dt(2000,1,30)) - self.assertEqual(ret.temporal.value[0],dt(2000,1,15,12)) - self.assertEqual(ret.temporal.value[-1],dt(2000,1,30,12)) + ret = field.get_between('temporal', dt(2000, 1, 15), dt(2000, 1, 30)) + self.assertEqual(ret.temporal.value[0], dt(2000, 1, 15, 12)) + self.assertEqual(ret.temporal.value[-1], dt(2000, 1, 30, 12)) def test_variables(self): row = VectorDimension(value=[5, 6]) @@ -556,7 +620,7 @@ def test_write_fiona(self): values = [r['properties'][alias] for r in records] self.assertAlmostEqual(np.mean(values), field.variables[alias].value.mean(), places=6) - n = reduce(lambda x, y: x*y, field.shape) + n = reduce(lambda x, y: x * y, field.shape) if k.melted: n *= len(field.variables) self.assertEqual(n, len(records)) @@ -572,16 +636,22 @@ def test_write_fiona(self): self.assertEqual(gtype, 'Point') # test with a fake object passed in as a fiona object. this should raise an exception as the method will attempt - # to use the object instead of creating a new collection. + # to use the object instead of creating a new collection. the object should not be closed when done. 
class DontHateMe(Exception): pass + class WriteMe(Exception): + pass + class Nothing(object): def close(self): raise DontHateMe() - with self.assertRaises(DontHateMe): + def write(self, *args, **kwargs): + raise WriteMe() + + with self.assertRaises(WriteMe): field.write_fiona(path, fobject=Nothing()) # test all geometries are accounted for as well as properties @@ -603,6 +673,31 @@ def close(self): break self.assertTrue(found) + # test with upper keys + field = self.get_field(with_value=True, crs=WGS84())[0, 0, 0, 0, 0] + path = self.get_temporary_file_path('what.shp') + field.write_fiona(path=path, use_upper_keys=True) + with fiona.open(path) as source: + for row in source: + for key in row['properties']: + self.assertTrue(key.isupper()) + + # test with upper keys + field = self.get_field(with_value=True, crs=WGS84())[0, 0, 0, 0, 0] + path = self.get_temporary_file_path('what2.shp') + headers = ['time', 'tid'] + field.write_fiona(path=path, headers=headers) + with fiona.open(path) as source: + self.assertEqual(source.meta['schema']['properties'].keys(), headers) + + # test passing a ugid + field = self.get_field(with_value=True, crs=WGS84())[0, 0, 0, 0, 0] + path = self.get_temporary_file_path('what3.shp') + field.write_fiona(path=path, ugid=10) + with fiona.open(path) as source: + for row in source: + self.assertEqual(row['properties'][constants.HEADERS.ID_SELECTION_GEOMETRY], 10) + def test_write_to_netcdf_dataset(self): keywords = dict(file_only=[False, True], second_variable_alias=[None, 'tmin_alias'], @@ -648,7 +743,8 @@ def test_write_to_netcdf_dataset(self): with nc_scope(path) as ds: self.assertEqual(ds.another, 'some more information') try: - variable_names = ['time', 'time_bounds', 'latitude', 'latitude_bounds', 'longitude', 'longitude_bounds', 'tmax', second_variable_alias] + variable_names = ['time', 'time_bounds', 'latitude', 'latitude_bounds', 'longitude', + 'longitude_bounds', 'tmax', second_variable_alias] dimension_names = ['time', 'bounds', 
'latitude', 'longitude'] if k.crs is not None: variable_names.append(k.crs.name) @@ -659,7 +755,8 @@ def test_write_to_netcdf_dataset(self): self.assertEqual(set(ds.dimensions.keys()), set(dimension_names)) except AssertionError: self.assertTrue(k.remove_dimension_names) - variable_names = ['time', 'time_bounds', 'yc', 'yc_bounds', 'xc', 'xc_bounds', 'tmax', second_variable_alias] + variable_names = ['time', 'time_bounds', 'yc', 'yc_bounds', 'xc', 'xc_bounds', 'tmax', + second_variable_alias] dimension_names = ['time', 'bounds', 'yc', 'xc'] if k.crs is not None: variable_names.append(k.crs.name) @@ -754,15 +851,15 @@ def test_write_to_netcdf_dataset_without_temporal(self): class TestDerivedField(AbstractTestField): - def test_init(self): - field = self.get_field(with_value=True,month_count=2) + field = self.get_field(with_value=True, month_count=2) tgd = field.temporal.get_grouping(['month']) - new_data = np.random.rand(2,2,2,3,4) - mu = Variable(name='mu',value=new_data) - df = DerivedField(variables=mu,temporal=tgd,spatial=field.spatial, - level=field.level,realization=field.realization) + new_data = np.random.rand(2, 2, 2, 3, 4) + mu = Variable(name='mu', value=new_data) + df = DerivedField(variables=mu, temporal=tgd, spatial=field.spatial, + level=field.level, realization=field.realization) self.assertIsInstance(df, Field) - self.assertIsInstance(df.temporal.value[0],datetime.datetime) - self.assertEqual(df.temporal.value.tolist(),[datetime.datetime(2000, 1, 16, 0, 0),datetime.datetime(2000, 2, 16, 0, 0)]) - self.assertEqual(df.temporal.bounds[1,1],datetime.datetime(2000, 3, 1, 0, 0)) + self.assertIsInstance(df.temporal.value[0], datetime.datetime) + self.assertEqual(df.temporal.value.tolist(), + [datetime.datetime(2000, 1, 16, 0, 0), datetime.datetime(2000, 2, 16, 0, 0)]) + self.assertEqual(df.temporal.bounds[1, 1], datetime.datetime(2000, 3, 1, 0, 0)) diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_variable.py 
b/src/ocgis/test/test_ocgis/test_interface/test_base/test_variable.py index 10d5ebdce..02f8a05d5 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_variable.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_variable.py @@ -15,13 +15,11 @@ class FakeAbstractSourcedVariable(AbstractSourcedVariable): - def _set_value_from_source_(self): - self._value = self._src_idx*2 + self._value = self._src_idx * 2 class TestAbstractSourcedVariable(TestBase): - def iter(self): src_idx = [1, 2] data = 'foo' @@ -44,16 +42,14 @@ def test_format_src_idx(self): def test_get_value(self): aa = FakeAbstractSourcedVariable('foo', src_idx=[1, 2]) aa._value = None - self.assertNumpyAll(aa._get_value_(), np.array([1, 2])*2) + self.assertNumpyAll(aa._get_value_(), np.array([1, 2]) * 2) def test_src_idx(self): aa = FakeAbstractSourcedVariable('foo', src_idx=[1, 2]) self.assertNumpyAll(aa._src_idx, np.array([1, 2])) - class FakeAbstractValueVariable(AbstractValueVariable): - def _get_value_(self): return np.array(self._value) @@ -106,7 +102,8 @@ def _get_units_(v): raise except ValueError: # units are not convertible - if _get_units_(k.units) == _get_units_('mm/day') and _get_units_(k.conform_units_to) == _get_units_('kelvin'): + if _get_units_(k.units) == _get_units_('mm/day') and _get_units_(k.conform_units_to) == _get_units_( + 'kelvin'): continue else: raise @@ -207,7 +204,6 @@ def test_cfunits_conform_masked_array(self): class TestDerivedVariable(TestBase): - def test_init(self): self.assertEqual(DerivedVariable.__bases__, (Variable,)) @@ -235,7 +231,6 @@ def test_iter_melted(self): class TestVariable(TestBase): - def test_init(self): self.assertEqual(Variable.__bases__, (AbstractSourcedVariable, AbstractValueVariable)) @@ -249,16 +244,16 @@ def test_init(self): self.assertEqual(var.alias, 'foo') def test_init_with_value_with_dtype_fill_value(self): - var = Variable(data='foo',dtype=np.float,fill_value=9,value=np.array([1,2,3,4])) - 
self.assertEqual(var.dtype,np.float) - self.assertEqual(var.fill_value,9) + var = Variable(data='foo', dtype=np.float, fill_value=9, value=np.array([1, 2, 3, 4])) + self.assertEqual(var.dtype, np.float) + self.assertEqual(var.fill_value, 9) def test_init_with_value_without_dtype_fill_value(self): - value = np.array([1,2,3,4]) + value = np.array([1, 2, 3, 4]) value = np.ma.array(value) - var = Variable(data='foo',value=value) - self.assertEqual(var.dtype,value.dtype) - self.assertEqual(var.fill_value,value.fill_value) + var = Variable(data='foo', value=value) + self.assertEqual(var.dtype, value.dtype) + self.assertEqual(var.fill_value, value.fill_value) def test_init_without_value_dtype_fill_value(self): var = Variable(data='foo') @@ -268,9 +263,9 @@ def test_init_without_value_dtype_fill_value(self): var.fill_value def test_init_without_value_with_dtype_fill_value(self): - var = Variable(data='foo',dtype=np.float,fill_value=9) - self.assertEqual(var.dtype,np.float) - self.assertEqual(var.fill_value,9) + var = Variable(data='foo', dtype=np.float, fill_value=9) + self.assertEqual(var.dtype, np.float) + self.assertEqual(var.fill_value, 9) def test_str(self): var = Variable(name='toon') @@ -299,6 +294,12 @@ def test_get_empty_like(self): new_var.meta['hi'] = 'there' self.assertDictEqual(var.meta, {'foo': 5}) + def test_getitem(self): + var = Variable(value=np.random.rand(1, 1, 1, 1, 51), name='foo') + slc = [slice(None, None, None), slice(None, None, None), slice(None, None, None), np.array([0]), np.array([14])] + ret = var.__getitem__(slc) + self.assertEqual(ret.shape, tuple([1] * 5)) + def test_iter_melted(self): def _assert_key_(attr, key, row, actual_none=None): diff --git a/src/ocgis/test/test_ocgis/test_util/test_environment.py b/src/ocgis/test/test_ocgis/test_util/test_environment.py index 84c57e5c1..4e01eca98 100644 --- a/src/ocgis/test/test_ocgis/test_util/test_environment.py +++ b/src/ocgis/test/test_ocgis/test_util/test_environment.py @@ -1,10 +1,10 @@ 
-import unittest -from ocgis import env, OcgOperations import os import tempfile +from importlib import import_module + +from ocgis import env, OcgOperations from ocgis.test.base import TestBase from ocgis.util.environment import EnvParmImport -from importlib import import_module class TestEnvImportParm(TestBase): @@ -38,7 +38,10 @@ def get_is_available(self,module_name): av = True except ImportError: av = False - return(av) + return av + + def test_init(self): + self.assertIsNone(env.MELTED) def test_conf_path(self): env.CONF_PATH @@ -116,8 +119,3 @@ def test_env_overload(self): def test_str(self): ret = str(env) self.assertTrue(len(ret) > 300) - - -if __name__ == "__main__": - #import sys;sys.argv = ['', 'Test.testName'] - unittest.main() diff --git a/src/ocgis/test/test_real_data/test_random_datasets.py b/src/ocgis/test/test_real_data/test_random_datasets.py index f76e92ec2..d35e32aa0 100644 --- a/src/ocgis/test/test_real_data/test_random_datasets.py +++ b/src/ocgis/test/test_real_data/test_random_datasets.py @@ -97,7 +97,7 @@ def test_cccma_rotated_pole(self): rd = self.test_data.get_rd('rotated_pole_cccma') geom = (5.87161922454834, 47.26985931396479, 15.03811264038086, 55.05652618408209) ops = ocgis.OcgOperations(dataset=rd, output_format='shp', geom=geom, - select_ugid=[1], snippet=True) + select_ugid=[1], snippet=True, melted=True) ret = ops.execute() with fiona.open(ret) as source: @@ -448,12 +448,12 @@ def test_maurer_2010(self): ret = ops.execute() def test_clip_aggregate(self): - # # this geometry was hanging + # this geometry was hanging rd = self.test_data.get_rd('cancm4_tas', kwds={'time_region': {'year': [2003]}}) ops = OcgOperations(dataset=rd, geom='state_boundaries', select_ugid=[14, 16], aggregate=False, spatial_operation='clip', output_format=constants.OUTPUT_FORMAT_CSV_SHAPEFILE) - ret = ops.execute() + ops.execute() @attr('slow') def test_narccap_point_subset_small(self): diff --git a/src/ocgis/test/test_simple/test_simple.py 
b/src/ocgis/test/test_simple/test_simple.py index bd67293f7..66bf63320 100644 --- a/src/ocgis/test/test_simple/test_simple.py +++ b/src/ocgis/test/test_simple/test_simple.py @@ -737,7 +737,7 @@ def test_geojson_projection(self): def test_limiting_headers(self): headers = ['value'] - ops = OcgOperations(dataset=self.get_dataset(), headers=headers, output_format='csv') + ops = OcgOperations(dataset=self.get_dataset(), headers=headers, output_format='csv', melted=True) ret = ops.execute() with open(ret) as f: reader = DictReader(f) @@ -754,29 +754,49 @@ def test_shp_conversion(self): ocgis.env.OVERWRITE = True calc = [None, [{'func': 'mean', 'name': 'my_mean'}]] group = ['month', 'year'] - for c in calc: + + keywords = dict(calc=calc, melted=[False, True]) + + for k in self.iter_product_keywords(keywords): ops = OcgOperations(dataset=self.get_dataset(), output_format='shp', calc_grouping=group, - calc=c) + calc=k.calc, + melted=k.melted) ret = self.get_ret(ops) - if c is None: + if k.calc is None: with fiona.open(ret) as f: - schema_properties = OrderedDict( - [(u'DID', 'int:10'), (u'VID', 'int:10'), (u'UGID', 'int:10'), (u'TID', 'int:10'), - (u'LID', 'int:10'), (u'GID', 'int:10'), (u'VARIABLE', 'str:80'), (u'ALIAS', 'str:80'), - (u'TIME', 'str:80'), (u'YEAR', 'int:10'), (u'MONTH', 'int:10'), (u'DAY', 'int:10'), - (u'LEVEL', 'int:10'), (u'VALUE', 'float:24.15')]) - self.assertDictEqual(f.meta['schema']['properties'], schema_properties) + target = f.meta['schema']['properties'] + if k.melted: + schema_properties = OrderedDict( + [(u'DID', 'int:10'), (u'VID', 'int:10'), (u'UGID', 'int:10'), (u'TID', 'int:10'), + (u'LID', 'int:10'), (u'GID', 'int:10'), (u'VARIABLE', 'str:80'), (u'ALIAS', 'str:80'), + (u'TIME', 'str:80'), (u'YEAR', 'int:10'), (u'MONTH', 'int:10'), (u'DAY', 'int:10'), + (u'LEVEL', 'int:10'), (u'VALUE', 'float:24.15')]) + else: + schema_properties = OrderedDict( + [(u'TID', 'int:10'), (u'TIME', 'str:80'), (u'LB_TIME', 'str:80'), (u'UB_TIME', 'str:80'), + 
(u'YEAR', 'int:10'), (u'MONTH', 'int:10'), (u'DAY', 'int:10'), (u'LID', 'int:10'), + (u'LEVEL', 'int:10'), (u'LB_LEVEL', 'int:10'), (u'UB_LEVEL', 'int:10'), + (u'FOO', 'float:24.15')]) + self.assertAsSetEqual(target.keys(), schema_properties.keys()) + self.assertDictEqual(target, schema_properties) self.assertDictEqual(f.meta, {'crs': {'init': u'epsg:4326'}, 'driver': u'ESRI Shapefile', 'schema': {'geometry': 'Polygon', 'properties': schema_properties}}) self.assertEqual(len(f), 1952) - record_properties = OrderedDict( - [(u'DID', 1), (u'VID', 1), (u'UGID', 1), (u'TID', 11), (u'LID', 2), (u'GID', 5.0), - (u'VARIABLE', u'foo'), (u'ALIAS', u'foo'), (u'TIME', '2000-03-11 12:00:00'), (u'YEAR', 2000), - (u'MONTH', 3), (u'DAY', 11), (u'LEVEL', 150), (u'VALUE', 1.0)]) + if k.melted: + record_properties = OrderedDict( + [(u'DID', 1), (u'VID', 1), (u'UGID', 1), (u'TID', 11), (u'LID', 2), (u'GID', 5.0), + (u'VARIABLE', u'foo'), (u'ALIAS', u'foo'), (u'TIME', '2000-03-11 12:00:00'), + (u'YEAR', 2000), + (u'MONTH', 3), (u'DAY', 11), (u'LEVEL', 150), (u'VALUE', 1.0)]) + else: + record_properties = OrderedDict( + [(u'TID', 11), (u'TIME', u'2000-03-11 12:00:00'), (u'LB_TIME', u'2000-03-11 00:00:00'), + (u'UB_TIME', u'2000-03-12 00:00:00'), (u'YEAR', 2000), (u'MONTH', 3), (u'DAY', 11), + (u'LID', 2), (u'LEVEL', 150), (u'LB_LEVEL', 100), (u'UB_LEVEL', 200), (u'FOO', 1.0)]) record = list(f)[340] self.assertDictEqual(record['properties'], record_properties) record_coordinates = [ @@ -788,15 +808,22 @@ def test_shp_conversion(self): 'properties': record_properties}) else: with fiona.open(ret) as f: + if k.melted: + actual = OrderedDict( + [(u'DID', 'int:10'), (u'VID', 'int:10'), (u'CID', 'int:10'), (u'UGID', 'int:10'), + (u'TID', 'int:10'), (u'LID', 'int:10'), (u'GID', 'int:10'), (u'VARIABLE', 'str:80'), + (u'ALIAS', 'str:80'), (u'CALC_KEY', 'str:80'), (u'CALC_ALIAS', 'str:80'), + (u'TIME', 'str:80'), + (u'YEAR', 'int:10'), (u'MONTH', 'int:10'), (u'DAY', 'int:10'), (u'LEVEL', 
'int:10'), + (u'VALUE', 'float:24.15')]) + else: + actual = OrderedDict( + [(u'TID', 'int:10'), (u'TIME', 'str:80'), (u'LB_TIME', 'str:80'), (u'UB_TIME', 'str:80'), + (u'YEAR', 'int:10'), (u'MONTH', 'int:10'), (u'DAY', 'int:10'), (u'LID', 'int:10'), + (u'LEVEL', 'int:10'), (u'LB_LEVEL', 'int:10'), (u'UB_LEVEL', 'int:10'), + (u'MY_MEAN', 'float:24.15')]) self.assertDictEqual(f.meta, {'crs': {'init': u'epsg:4326'}, 'driver': u'ESRI Shapefile', - 'schema': {'geometry': 'Polygon', 'properties': OrderedDict( - [(u'DID', 'int:10'), (u'VID', 'int:10'), (u'CID', 'int:10'), - (u'UGID', 'int:10'), (u'TID', 'int:10'), (u'LID', 'int:10'), - (u'GID', 'int:10'), (u'VARIABLE', 'str:80'), - (u'ALIAS', 'str:80'), (u'CALC_KEY', 'str:80'), - (u'CALC_ALIAS', 'str:80'), (u'TIME', 'str:80'), - (u'YEAR', 'int:10'), (u'MONTH', 'int:10'), (u'DAY', 'int:10'), - (u'LEVEL', 'int:10'), (u'VALUE', 'float:24.15')])}}) + 'schema': {'geometry': 'Polygon', 'properties': actual}}) self.assertEqual(len(f), 64) def test_shp_conversion_with_external_geometries(self): @@ -823,16 +850,17 @@ def _make_record_(wkt_str, ugid, state_name): ocgis.env.DIR_SHPCABINET = self.current_dir_output ops = OcgOperations(dataset=self.get_dataset(), geom='states', - output_format='shp') + output_format='shp', + melted=True) ret = ops.execute() output_folder = os.path.join(self.current_dir_output, ops.prefix) contents = os.listdir(output_folder) self.assertEqual(set(contents), - set(['ocgis_output_metadata.txt', 'ocgis_output_source_metadata.txt', 'ocgis_output_ugid.shp', - 'ocgis_output_ugid.dbf', 'ocgis_output_ugid.cpg', 'ocgis_output.dbf', 'ocgis_output.log', - 'ocgis_output.shx', 'ocgis_output.shp', 'ocgis_output_ugid.shx', 'ocgis_output.cpg', - 'ocgis_output.prj', 'ocgis_output_ugid.prj', 'ocgis_output_did.csv'])) + {'ocgis_output_metadata.txt', 'ocgis_output_source_metadata.txt', 'ocgis_output_ugid.shp', + 'ocgis_output_ugid.dbf', 'ocgis_output_ugid.cpg', 'ocgis_output.dbf', 'ocgis_output.log', + 'ocgis_output.shx', 
'ocgis_output.shp', 'ocgis_output_ugid.shx', 'ocgis_output.cpg', + 'ocgis_output.prj', 'ocgis_output_ugid.prj', 'ocgis_output_did.csv'}) with fiona.open(ret) as f: rows = list(f) @@ -983,7 +1011,8 @@ def _make_record_(wkt_str, ugid, state_name): output_format='shp', aggregate=True, prefix='aggregation_clip', - spatial_operation='clip') + spatial_operation='clip', + melted=True) ret = ops.execute() with fiona.open(ret) as f: @@ -993,47 +1022,56 @@ def _make_record_(wkt_str, ugid, state_name): self.assertEqual(row['properties']['UGID'], row['properties']['GID']) self.assertEqual(set([row['properties']['GID'] for row in rows]), set([1, 2])) self.assertEqual(len(rows), 244) - self.assertEqual(set(os.listdir(os.path.join(self.current_dir_output, ops.prefix))), set( - ['aggregation_clip_ugid.shp', 'aggregation_clip.cpg', 'aggregation_clip_metadata.txt', - 'aggregation_clip_did.csv', 'aggregation_clip.log', 'aggregation_clip.dbf', 'aggregation_clip.shx', - 'aggregation_clip_ugid.prj', 'aggregation_clip_ugid.cpg', 'aggregation_clip_ugid.shx', - 'aggregation_clip.shp', 'aggregation_clip_ugid.dbf', 'aggregation_clip.prj', - 'aggregation_clip_source_metadata.txt'])) + self.assertEqual(set(os.listdir(os.path.join(self.current_dir_output, ops.prefix))), + {'aggregation_clip_ugid.shp', 'aggregation_clip.cpg', 'aggregation_clip_metadata.txt', + 'aggregation_clip_did.csv', 'aggregation_clip.log', 'aggregation_clip.dbf', + 'aggregation_clip.shx', 'aggregation_clip_ugid.prj', 'aggregation_clip_ugid.cpg', + 'aggregation_clip_ugid.shx', 'aggregation_clip.shp', 'aggregation_clip_ugid.dbf', + 'aggregation_clip.prj', 'aggregation_clip_source_metadata.txt'}) def test_csv_conversion(self): ocgis.env.OVERWRITE = True - ops = OcgOperations(dataset=self.get_dataset(), output_format='csv') - ret = self.get_ret(ops) + # ops = OcgOperations(dataset=self.get_dataset(), output_format='csv') + # self.get_ret(ops) - # # test with a geometry to check writing of user-geometry overview shapefile + # 
test with a geometry to check writing of user-geometry overview shapefile geom = make_poly((38, 39), (-104, -103)) - ops = OcgOperations(dataset=self.get_dataset(), output_format='csv', geom=geom) - ret = ops.execute() - output_dir = os.path.join(self.current_dir_output, ops.prefix) - contents = set(os.listdir(output_dir)) - self.assertEqual(contents, set( - ['ocgis_output_source_metadata.txt', 'ocgis_output_metadata.txt', 'ocgis_output.log', - 'ocgis_output_did.csv', 'ocgis_output.csv'])) - with open(ret, 'r') as f: - reader = csv.DictReader(f) - row = reader.next() - self.assertDictEqual(row, {'LID': '1', 'UGID': '1', 'VID': '1', 'ALIAS': 'foo', 'DID': '1', 'YEAR': '2000', - 'VALUE': '1.0', 'MONTH': '3', 'VARIABLE': 'foo', 'GID': '6', - 'TIME': '2000-03-01 12:00:00', 'TID': '1', 'LEVEL': '50', 'DAY': '1'}) - - did_file = os.path.join(output_dir, ops.prefix + '_did.csv') - uri = os.path.join(self.current_dir_output, self.fn) - with open(did_file, 'r') as f: - reader = csv.DictReader(f) - row = reader.next() - self.assertDictEqual(row, {'ALIAS': 'foo', 'DID': '1', 'URI': uri, 'UNITS': 'K', - 'STANDARD_NAME': 'Maximum Temperature Foo', 'VARIABLE': 'foo', - 'LONG_NAME': 'foo_foo'}) + for melted in [True, False]: + ops = OcgOperations(dataset=self.get_dataset(), output_format='csv', geom=geom, melted=melted) + ret = ops.execute() - with open(ret, 'r') as f: - reader = csv.DictReader(f) - rows = list(reader) + output_dir = os.path.join(self.current_dir_output, ops.prefix) + contents = set(os.listdir(output_dir)) + self.assertEqual(contents, + {'ocgis_output_source_metadata.txt', 'ocgis_output_metadata.txt', 'ocgis_output.log', + 'ocgis_output_did.csv', 'ocgis_output.csv'}) + with open(ret, 'r') as f: + reader = csv.DictReader(f) + row = reader.next() + if melted: + actual = {'LID': '1', 'UGID': '1', 'VID': '1', 'ALIAS': 'foo', 'DID': '1', 'YEAR': '2000', + 'VALUE': '1.0', + 'MONTH': '3', 'VARIABLE': 'foo', 'GID': '6', 'TIME': '2000-03-01 12:00:00', 'TID': '1', + 
'LEVEL': '50', 'DAY': '1'} + else: + actual = {'LID': '1', 'LB_LEVEL': '0', 'LEVEL': '50', 'TIME': '2000-03-01 12:00:00', 'MONTH': '3', + 'UB_LEVEL': '100', 'LB_TIME': '2000-03-01 00:00:00', 'YEAR': '2000', 'TID': '1', + 'FOO': '1.0', 'UB_TIME': '2000-03-02 00:00:00', 'DAY': '1'} + self.assertDictEqual(row, actual) + + did_file = os.path.join(output_dir, ops.prefix + '_did.csv') + uri = os.path.join(self.current_dir_output, self.fn) + with open(did_file, 'r') as f: + reader = csv.DictReader(f) + row = reader.next() + self.assertDictEqual(row, {'ALIAS': 'foo', 'DID': '1', 'URI': uri, 'UNITS': 'K', + 'STANDARD_NAME': 'Maximum Temperature Foo', 'VARIABLE': 'foo', + 'LONG_NAME': 'foo_foo'}) + + with open(ret, 'r') as f: + reader = csv.DictReader(f) + rows = list(reader) ops = OcgOperations(dataset=self.get_dataset(), output_format='numpy', geom=geom) npy = ops.execute() @@ -1042,16 +1080,25 @@ def test_csv_conversion(self): def test_csv_calc_conversion(self): calc = [{'func': 'mean', 'name': 'my_mean'}] calc_grouping = ['month', 'year'] - ops = OcgOperations(dataset=self.get_dataset(), output_format='csv', calc=calc, calc_grouping=calc_grouping) - ret = ops.execute() - with open(ret, 'r') as f: - reader = csv.DictReader(f) - row = reader.next() - self.assertDictEqual(row, {'LID': '1', 'UGID': '1', 'VID': '1', 'CID': '1', 'DID': '1', 'YEAR': '2000', - 'TIME': '2000-03-16 00:00:00', 'CALC_ALIAS': 'my_mean', 'VALUE': '1.0', - 'MONTH': '3', 'VARIABLE': 'foo', 'ALIAS': 'foo', 'GID': '1', 'CALC_KEY': 'mean', - 'TID': '1', 'LEVEL': '50', 'DAY': '16'}) + for melted in [True, False]: + ops = OcgOperations(dataset=self.get_dataset(), output_format='csv', calc=calc, calc_grouping=calc_grouping, + melted=melted, prefix=str(melted)) + ret = ops.execute() + + with open(ret, 'r') as f: + reader = csv.DictReader(f) + row = reader.next() + if melted: + actual = {'LID': '1', 'UGID': '1', 'VID': '1', 'CID': '1', 'DID': '1', 'YEAR': '2000', + 'TIME': '2000-03-16 00:00:00', 'CALC_ALIAS': 
'my_mean', 'VALUE': '1.0', + 'MONTH': '3', 'VARIABLE': 'foo', 'ALIAS': 'foo', 'GID': '1', 'CALC_KEY': 'mean', + 'TID': '1', 'LEVEL': '50', 'DAY': '16'} + else: + actual = {'LID': '1', 'LB_LEVEL': '0', 'LEVEL': '50', 'TIME': '2000-03-16 00:00:00', 'MONTH': '3', + 'MY_MEAN': '1.0', 'UB_LEVEL': '100', 'LB_TIME': '2000-03-01 00:00:00', 'YEAR': '2000', + 'TID': '1', 'UB_TIME': '2000-04-01 00:00:00', 'DAY': '16'} + self.assertDictEqual(row, actual) def test_csv_calc_conversion_two_calculations(self): calc = [{'func': 'mean', 'name': 'my_mean'}, {'func': 'min', 'name': 'my_min'}] @@ -1453,7 +1500,6 @@ def test_differing_projection_no_output_crs(self): if o != constants.OUTPUT_FORMAT_NUMPY: pass - def test_differing_projection_with_output_crs(self): nc_normal = SimpleNc() nc_normal.write() @@ -1469,8 +1515,8 @@ def test_differing_projection_with_output_crs(self): for o in output_format: try: - ops = OcgOperations(dataset=dataset, output_format=o, output_crs=CFWGS84(), - prefix=o) + ops = OcgOperations(dataset=dataset, output_format=o, output_crs=CFWGS84(), prefix=o, melted=True) + self.assertTrue(ops.melted) ret = ops.execute() if o == constants.OUTPUT_FORMAT_NUMPY: diff --git a/src/ocgis/test/test_work.py b/src/ocgis/test/test_work.py new file mode 100644 index 000000000..1bae4c151 --- /dev/null +++ b/src/ocgis/test/test_work.py @@ -0,0 +1,34 @@ +from ocgis import ShpCabinet, RequestDataset, OcgOperations +from ocgis.test.base import TestBase + +""" +These tests written to guide bug fixing or issue development. Theses tests are typically high-level and block-specific +testing occurs in tandem. It is expected that any issues identified by these tests have a corresponding test in the +package hierarchy. Hence, these tests in theory may be removed... 
+""" + + +class Test20150119(TestBase): + def test_shapefile_through_operations_subset(self): + path = ShpCabinet().get_shp_path('state_boundaries') + rd = RequestDataset(path) + field = rd.get() + self.assertIsNone(field.spatial.properties) + ops = OcgOperations(dataset=rd, output_format='shp', geom='state_boundaries', select_ugid=[15]) + ret = ops.execute() + rd2 = RequestDataset(ret) + field2 = rd2.get() + self.assertAsSetEqual(field.variables.keys(), field2.variables.keys()) + self.assertEqual(tuple([1] * 5), field2.shape) + + def test_shapefile_through_operations(self): + path = ShpCabinet().get_shp_path('state_boundaries') + rd = RequestDataset(path) + field = rd.get() + self.assertIsNone(field.spatial.properties) + ops = OcgOperations(dataset=rd, output_format='shp') + ret = ops.execute() + rd2 = RequestDataset(ret) + field2 = rd2.get() + self.assertAsSetEqual(field.variables.keys(), field2.variables.keys()) + self.assertEqual(field.shape, field2.shape) \ No newline at end of file diff --git a/src/ocgis/util/environment.py b/src/ocgis/util/environment.py index df7b42623..fdcf5eb26 100644 --- a/src/ocgis/util/environment.py +++ b/src/ocgis/util/environment.py @@ -3,86 +3,92 @@ class Environment(object): - def __init__(self): - self.OVERWRITE = EnvParm('OVERWRITE',False,formatter=self._format_bool_) - self.DIR_OUTPUT = EnvParm('DIR_OUTPUT',os.getcwd()) - self.DIR_SHPCABINET = EnvParm('DIR_SHPCABINET',None) - self.DIR_DATA = EnvParm('DIR_DATA',None) - self.DIR_TEST_DATA = EnvParm('DIR_TEST_DATA',None) - self.MODE = EnvParm('MODE','raw') - self.PREFIX = EnvParm('PREFIX','ocgis_output') - self.FILL_VALUE = EnvParm('FILL_VALUE',1e20,formatter=float) - self.VERBOSE = EnvParm('VERBOSE',False,formatter=self._format_bool_) - self.OPTIMIZE_FOR_CALC = EnvParm('OPTIMIZE_FOR_CALC',False,formatter=self._format_bool_) - self.ENABLE_FILE_LOGGING = EnvParm('ENABLE_FILE_LOGGING',True,formatter=self._format_bool_) - self.DEBUG = 
EnvParm('DEBUG',False,formatter=self._format_bool_) - self.DIR_BIN = EnvParm('DIR_BIN',None) - self.USE_SPATIAL_INDEX = EnvParmImport('USE_SPATIAL_INDEX',None,'rtree') - self.USE_CFUNITS = EnvParmImport('USE_CFUNITS',None,'cfunits') + self.OVERWRITE = EnvParm('OVERWRITE', False, formatter=self._format_bool_) + self.DIR_OUTPUT = EnvParm('DIR_OUTPUT', os.getcwd()) + self.DIR_SHPCABINET = EnvParm('DIR_SHPCABINET', None) + self.DIR_DATA = EnvParm('DIR_DATA', None) + self.DIR_TEST_DATA = EnvParm('DIR_TEST_DATA', None) + self.MELTED = EnvParm('MELTED', None, formatter=self._format_bool_) + self.MODE = EnvParm('MODE', 'raw') + self.PREFIX = EnvParm('PREFIX', 'ocgis_output') + self.FILL_VALUE = EnvParm('FILL_VALUE', 1e20, formatter=float) + self.VERBOSE = EnvParm('VERBOSE', False, formatter=self._format_bool_) + self.OPTIMIZE_FOR_CALC = EnvParm('OPTIMIZE_FOR_CALC', False, formatter=self._format_bool_) + self.ENABLE_FILE_LOGGING = EnvParm('ENABLE_FILE_LOGGING', True, formatter=self._format_bool_) + self.DEBUG = EnvParm('DEBUG', False, formatter=self._format_bool_) + self.DIR_BIN = EnvParm('DIR_BIN', None) + self.USE_SPATIAL_INDEX = EnvParmImport('USE_SPATIAL_INDEX', None, 'rtree') + self.USE_CFUNITS = EnvParmImport('USE_CFUNITS', None, 'cfunits') self.CONF_PATH = EnvParm('CONF_PATH', os.path.expanduser('~/.config/ocgis.conf')) from ocgis.interface.base.crs import CFWGS84 + self.DEFAULT_COORDSYS = EnvParm('DEFAULT_COORDSYS', CFWGS84()) - + self.ops = None self._optimize_store = {} - + def __str__(self): msg = [] for value in self.__dict__.itervalues(): - if isinstance(value,EnvParm): + if isinstance(value, EnvParm): msg.append(str(value)) msg.sort() - return('\n'.join(msg)) - - def __getattribute__(self,name): - attr = object.__getattribute__(self,name) + return '\n'.join(msg) + + def __getattribute__(self, name): + attr = object.__getattribute__(self, name) try: ret = attr.value except AttributeError: ret = attr - return(ret) - - def __setattr__(self,name,value): - if 
isinstance(value,EnvParm) or name in ['ops','_optimize_store']: - object.__setattr__(self,name,value) + return ret + + def __setattr__(self, name, value): + if isinstance(value, EnvParm) or name in ['ops', '_optimize_store']: + object.__setattr__(self, name, value) else: - attr = object.__getattribute__(self,name) + attr = object.__getattribute__(self, name) attr.value = value - + def reset(self): - '''Reset values to defaults (Values will be read from any overloaded - system environment variables.''' + """ + Reset values to defaults (Values will be read from any overloaded system environment variables. + """ + for value in self.__dict__.itervalues(): - if isinstance(value,EnvParm): + if isinstance(value, EnvParm): value._value = 'use_env' - getattr(value,'value') + getattr(value, 'value') env.ops = None self._optimize_store = {} - - @staticmethod + + @staticmethod def _format_bool_(value): - '''Format a string to boolean. - + """ + Format a string to boolean. + :param value: The value to convert. - :type value: int or str''' + :type value: int or str + """ + from ocgis.util.helpers import format_bool - return(format_bool(value)) + + return format_bool(value) class EnvParm(object): - - def __init__(self,name,default,formatter=None): + def __init__(self, name, default, formatter=None): self.name = name.upper() self.env_name = 'OCGIS_{0}'.format(self.name) self.formatter = formatter self.default = default self._value = 'use_env' - + def __str__(self): - return('{0}={1}'.format(self.name,self.value)) - + return '{0}={1}'.format(self.name, self.value) + @property def value(self): if self._value == 'use_env': @@ -90,7 +96,7 @@ def value(self): if ret is None: ret = self.default else: - ## attempt to use the parameter's format method. + # # attempt to use the parameter's format method. 
try: ret = self.format(ret) except NotImplementedError: @@ -98,27 +104,27 @@ def value(self): ret = self.formatter(ret) else: ret = self._value - return(ret) + return ret + @value.setter - def value(self,value): + def value(self, value): self._value = value - - def format(self,value): - raise(NotImplementedError) - - + + def format(self, value): + raise NotImplementedError + + class EnvParmImport(EnvParm): - - def __init__(self,name,default,module_name): + def __init__(self, name, default, module_name): self.module_name = module_name - super(EnvParmImport,self).__init__(name,default) - + super(EnvParmImport, self).__init__(name, default) + @property def value(self): if self._value == 'use_env': ret = os.getenv(self.env_name) if ret is None: - if self.default == None: + if self.default is None: ret = self._get_module_available_() else: ret = self.default @@ -126,18 +132,19 @@ def value(self): ret = Environment._format_bool_(ret) else: ret = self._value - return(ret) + return ret + @value.setter - def value(self,value): + def value(self, value): self._value = value - + def _get_module_available_(self): try: import_module(self.module_name) ret = True except ImportError: ret = False - return(ret) + return ret env = Environment() diff --git a/src/ocgis/util/logging_ocgis.py b/src/ocgis/util/logging_ocgis.py index 2dca3edbb..cdd0e9fb7 100644 --- a/src/ocgis/util/logging_ocgis.py +++ b/src/ocgis/util/logging_ocgis.py @@ -1,100 +1,97 @@ import logging import os -import fiona -## try to turn off fiona logging except for errors +# try to turn off fiona logging except for errors fiona_logger = logging.getLogger('Fiona') fiona_logger.setLevel(logging.ERROR) - - + + class ProgressOcgOperations(object): - ''' + """ :param function callback: A function taking two parameters: ``percent_complete`` and ``message``. :param int n_subsettables: The number of data objects to subset and/or manipulate. :param int n_geometries: The number of geometries to use for subsetting. 
:param int n_calculations: The number of calculations to apply. - ''' - - def __init__(self,callback=None,n_subsettables=1,n_geometries=1,n_calculations=0): - assert(n_subsettables > 0) - assert(n_geometries > 0) - + """ + + def __init__(self, callback=None, n_subsettables=1, n_geometries=1, n_calculations=0): + assert (n_subsettables > 0) + assert (n_geometries > 0) + self.callback = callback self.n_subsettables = n_subsettables self.n_geometries = n_geometries self.n_calculations = n_calculations self.n_completed_operations = 0 - - def __call__(self,message=None): + + def __call__(self, message=None): if self.callback is not None: - return(self.callback(self.percent_complete,message)) - + return self.callback(self.percent_complete, message) + @property def n_operations(self): if self.n_calculations == 0: nc = 1 else: nc = self.n_calculations - return(self.n_subsettables*self.n_geometries*nc) - + return self.n_subsettables * self.n_geometries * nc + @property def percent_complete(self): - return(100*(self.n_completed_operations/float(self.n_operations))) - + return 100 * (self.n_completed_operations / float(self.n_operations)) + def mark(self): self.n_completed_operations += 1 class OcgisLogging(object): - def __init__(self): self.level = None - self.null = True ## pass through if not configured + self.null = True # pass through if not configured self.parent = None self.duplicates = set() self.callback = None self.callback_level = None - - def __call__(self,msg=None,logger=None,level=logging.INFO,alias=None,ugid=None,exc=None, + + def __call__(self, msg=None, logger=None, level=logging.INFO, alias=None, ugid=None, exc=None, check_duplicate=False): - + if self.callback is not None and self.callback_level <= level: if msg is not None: self.callback(msg) elif exc is not None: - callback_msg = '{0}: {1}'.format(exc.__class__.__name__,exc) + callback_msg = '{0}: {1}'.format(exc.__class__.__name__, exc) self.callback(callback_msg) - + if self.null: if exc is None: pass 
else: - raise(exc) + raise exc else: if check_duplicate: if msg in self.duplicates: - return() + return () else: self.duplicates.add(msg) dest_level = level or self.level - ## get the logger by string name - if isinstance(logger,basestring): + # # get the logger by string name + if isinstance(logger, basestring): dest_logger = self.get_logger(logger) else: dest_logger = logger or self.parent if alias is not None: - msg = self.get_formatted_msg(msg,alias,ugid=ugid) + msg = self.get_formatted_msg(msg, alias, ugid=ugid) if exc is None: - dest_logger.log(dest_level,msg) + dest_logger.log(dest_level, msg) else: dest_logger.exception(msg) - raise(exc) - - def configure(self,to_file=None,to_stream=False,level=logging.INFO,callback=None, - callback_level=logging.INFO): - ## set the callback arguments + raise exc + + def configure(self, to_file=None, to_stream=False, level=logging.INFO, callback=None, callback_level=logging.INFO): + # # set the callback arguments self.callback = callback self.callback_level = callback_level ## no need to configure loggers @@ -115,7 +112,7 @@ def configure(self,to_file=None,to_stream=False,level=logging.INFO,callback=None self.parent.setLevel(level) self.parent.handlers = [] ## add the file handler - fh = logging.FileHandler(filename,'w') + fh = logging.FileHandler(filename, 'w') fh.setLevel(level) fh.setFormatter(logging.Formatter(fmt='%(name)12s: %(levelname)s: %(asctime)s: %(message)s', datefmt='%Y-%m-%d %H:%M')) @@ -126,21 +123,22 @@ def configure(self,to_file=None,to_stream=False,level=logging.INFO,callback=None console.setLevel(level) console.setFormatter(logging.Formatter('%(name)12s: %(levelname)s: %(message)s')) self.parent.addHandler(console) - - def get_formatted_msg(self,msg,alias,ugid=None): + + @staticmethod + def get_formatted_msg(msg, alias, ugid=None): if ugid is None: - ret = 'alias={0}: {1}'.format(alias,msg) + ret = 'alias={0}: {1}'.format(alias, msg) else: - ret = 'alias={0}, ugid={1}: {2}'.format(alias,ugid,msg) - 
return(ret) - - def get_logger(self,name): + ret = 'alias={0}, ugid={1}: {2}'.format(alias, ugid, msg) + return ret + + def get_logger(self, name): if self.null: ret = None else: ret = logging.getLogger('ocgis').getChild(name) - return(ret) - + return ret + def shutdown(self): self.__init__() try: From f2a5b1559ad5d939d321bdf1b1509f6a7241f2e8 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Thu, 29 Jan 2015 11:10:45 -0700 Subject: [PATCH 63/71] support seasonal aggregations through icclim #354 Seasonal aggregation now works with ICCLIM calculations. The newest HEAD of ICCLIM is required for this functionality to correctly format the history string. --- doc/api.rst | 12 +- doc/computation.rst | 9 + doc/install.rst | 3 +- .../install_dependencies_ubuntu.sh | 49 +-- src/ocgis/api/operations.py | 2 +- src/ocgis/calc/temporal_groups.py | 31 ++ src/ocgis/contrib/library_icclim.py | 264 +++++++------- .../test_misc/test_dependency_versions.py | 14 +- .../test_calc/test_temporal_groups.py | 19 ++ .../test_contrib/test_library_icclim.py | 323 ++++++++++-------- .../test_simple/test_optional_dependencies.py | 13 +- 11 files changed, 423 insertions(+), 316 deletions(-) create mode 100644 src/ocgis/calc/temporal_groups.py create mode 100644 src/ocgis/test/test_ocgis/test_calc/test_temporal_groups.py diff --git a/doc/api.rst b/doc/api.rst index 4a535f258..7fe9dd5f7 100644 --- a/doc/api.rst +++ b/doc/api.rst @@ -46,7 +46,7 @@ Operations API (:class:`ocgis.OcgOperations`) ============================================= .. autoclass:: ocgis.OcgOperations -:members: execute, get_base_request_size + :members: execute, get_base_request_size Detailed Argument Information ----------------------------- @@ -62,10 +62,10 @@ A ``dataset`` is the target file(s) or object(s) containing data to process. A ` 3. An OpenClimateGIS field object (use :class:`~Field` or :class:`~ocgis.RequestDatasetCollection`). 
If a :class:`~ocgis.Field` object is used, be aware operations may modify the object inplace. .. autoclass:: ocgis.RequestDataset -:members: inspect, inspect_as_dct + :members: inspect, inspect_as_dct .. autoclass:: ocgis.RequestDatasetCollection -:members: update + :members: update dir_output ~~~~~~~~~~ @@ -394,10 +394,10 @@ A dictionary with regridding options. Please see the documentation for :meth:`~o ========================= .. autoclass:: ocgis.ShpCabinet -:members: keys, iter_geoms + :members: keys, iter_geoms .. autoclass:: ocgis.ShpCabinetIterator -:members: __iter__ + :members: __iter__ Adding Additional Shapefile Data -------------------------------- @@ -412,7 +412,7 @@ The shapefile's "`geom key`_" is the name of the shapefile. It must have an alph ========================= .. autoclass:: ocgis.Inspect -:members: + :members: Data Collections ================ diff --git a/doc/computation.rst b/doc/computation.rst index 6a008f177..a7a52c90a 100644 --- a/doc/computation.rst +++ b/doc/computation.rst @@ -240,4 +240,13 @@ Thresholds :members: calculate :undoc-members: +Calculation using ``icclim`` for ECA Indices +============================================ + +The optional Python library ``icclim`` (http://icclim.readthedocs.org/en/latest) may be used to calculate the full suite of European Climate Assessment (ECA) indices. To select an ``icclim`` calculation, prefix the name of the indice with the prefix ``'icclim_'``. A list of indices computable with ``icclim`` is available here: http://icclim.readthedocs.org/en/latest/python_api.html#icclim-indice-compute-indice. + +For example, to calculate the *TG* indice (mean of daily mean temperature), select the calculation like: + +>>> calc = [{'func': 'icclim_TG', 'name': 'TG'}] + .. 
_NumPy masked array functions: http://docs.scipy.org/doc/numpy/reference/maskedarray.html diff --git a/doc/install.rst b/doc/install.rst index 6363c4e06..ac30bfeea 100644 --- a/doc/install.rst +++ b/doc/install.rst @@ -21,7 +21,7 @@ Python 2.7.6 http://www.python.org/download/releases/2.7.6/ Optional Dependencies --------------------- -There are two optional dependencies. OpenClimateGIS will still operate without these libraries installed but functionality and performance may change. +Optional dependencies are listed below. OpenClimateGIS will still operate without these libraries installed but functionality and performance may change. ============= ======== ====================================================== ================================================================================================================================= Package Name Version URL Usage @@ -29,6 +29,7 @@ Package Name Version URL Us ``rtree`` 0.8.0 https://pypi.python.org/pypi/Rtree/ Constructs spatial indexes at runtime. Useful for complicated GIS operations (i.e. large or complex polygons for subsetting) ``cfunits`` 0.9.6 https://code.google.com/p/cfunits-python/ Allows unit transformations for ``conform_units_to`` argument to :class:`~ocgis.RequestDataset` or :class:`~ocgis.OcgOperations`. ``ESMPy`` 6.3.0rp1 https://www.earthsystemcog.org/projects/esmpy/releases Supports regridding operations. +``icclim`` 3.0 http://icclim.readthedocs.org/en/latest/ Calculation of the full suite of European Climate Assessment (ECA) indices with optimized code implementation. 
============= ======== ====================================================== ================================================================================================================================= Ubuntu Linux diff --git a/doc/sphinx_examples/install_dependencies_ubuntu.sh b/doc/sphinx_examples/install_dependencies_ubuntu.sh index ca9cf130d..51b6b40a4 100644 --- a/doc/sphinx_examples/install_dependencies_ubuntu.sh +++ b/doc/sphinx_examples/install_dependencies_ubuntu.sh @@ -8,8 +8,6 @@ sudo pip install numpy netCDF4 shapely fiona rtree ## osgeo ## ########### -## If this fails, try the apt-get install below... - ## http://stackoverflow.com/questions/11336153/python-gdal-package-missing-header-file-when-installing-via-pip pip install --no-install GDAL cd /tmp/pip_build_ubuntu/GDAL @@ -25,22 +23,22 @@ sudo python setup.py install CFUNITS_SRCDIR=/tmp/build_cfunits CFUNITS_VER=0.9.6 -CFUNITS_SRC=$CFUNITS_SRCDIR/cfunits-python/v$CFUNITS_VER -CFUNITS_TARBALL=cfunits-$CFUNITS_VER.tar.gz -CFUNITS_URL=https://cfunits-python.googlecode.com/files/$CFUNITS_TARBALL - -mkdir -p $CFUNITS_SRC -cd $CFUNITS_SRC -wget $CFUNITS_URL -tar -xzvf $CFUNITS_TARBALL -cd cfunits-$CFUNITS_VER +CFUNITS_SRC=${CFUNITS_SRCDIR}/cfunits-python/v${CFUNITS_VER} +CFUNITS_TARBALL=cfunits-${CFUNITS_VER}.tar.gz +CFUNITS_URL=https://cfunits-python.googlecode.com/files/${CFUNITS_TARBALL} + +mkdir -p ${CFUNITS_SRC} +cd ${CFUNITS_SRC} +wget ${CFUNITS_URL} +tar -xzvf ${CFUNITS_TARBALL} +cd cfunits-${CFUNITS_VER} sudo python setup.py install ## installation does not copy UDUNITS database CFUNITS_SETUP_DIR=`pwd` ## assumes a standard location. 
the installation directory may be retrieved by running the command: ## python -c "import cfunits, os;print(os.path.split(cfunits.__file__)[0])" CFUNITS_INSTALL_DIR=/usr/local/lib/python2.7/dist-packages/cfunits -sudo cp -r $CFUNITS_SETUP_DIR/cfunits/etc $CFUNITS_INSTALL_DIR +sudo cp -r ${CFUNITS_SETUP_DIR}/cfunits/etc ${CFUNITS_INSTALL_DIR} ######### # ESMPy # @@ -63,19 +61,32 @@ ESMF_INSTALL_PREFIX= ## ESMF framework install ## sudo apt-get install gfortran g++ -mkdir -p $ESMF_SRCDIR -cd $ESMF_SRCDIR +mkdir -p ${ESMF_SRCDIR} +cd ${ESMF_SRCDIR} -tar -xzvf $ESMF_TAR +tar -xzvf ${ESMF_TAR} cd esmf export ESMF_DIR=`pwd` make -export ESMF_INSTALL_PREFIX=$ESMF_INSTALL_PREFIX -export ESMF_INSTALL_LIBDIR=$ESMF_INSTALL_PREFIX/lib +export ESMF_INSTALL_PREFIX=${ESMF_INSTALL_PREFIX} +export ESMF_INSTALL_LIBDIR=${ESMF_INSTALL_PREFIX}/lib sudo -E make install ## ESMPy install ## -cd $ESMF_SRCDIR/esmf/src/addon/ESMPy -python setup.py build --ESMFMKFILE=$ESMF_INSTALL_PREFIX/lib/esmf.mk +cd ${ESMF_SRCDIR}/esmf/src/addon/ESMPy +python setup.py build --ESMFMKFILE=${ESMF_INSTALL_PREFIX}/lib/esmf.mk +sudo python setup.py install + +########## +# ICCLIM # +########## + +ICCLIM_SRCDIR=~/src/icclim/git + +git clone https://github.com/tatarinova/icclim.git +pushd ${ICCLIM_SRCDIR}/icclim +gcc -fPIC -g -c -Wall ./icclim/libC.c -o ./icclim/libC.o +gcc -shared -o ./icclim/libC.so ./icclim/libC.o sudo python setup.py install +popd \ No newline at end of file diff --git a/src/ocgis/api/operations.py b/src/ocgis/api/operations.py index 2524bd05f..c96d44580 100644 --- a/src/ocgis/api/operations.py +++ b/src/ocgis/api/operations.py @@ -431,4 +431,4 @@ def _raise_(msg, obj=OutputFormat): _raise_(msg, obj=CalcGrouping) else: for c in self.calc: - c['ref'].validate(self) + c['ref'].validate(self) \ No newline at end of file diff --git a/src/ocgis/calc/temporal_groups.py b/src/ocgis/calc/temporal_groups.py new file mode 100644 index 000000000..86ebd9fe5 --- /dev/null +++ 
b/src/ocgis/calc/temporal_groups.py @@ -0,0 +1,31 @@ +from abc import ABCMeta + + +class AbstractTemporalGroup(object): + __metaclass__ = ABCMeta + + +class SeasonalTemporalGroup(list, AbstractTemporalGroup): + _integer_name_map = {1: 'January', 2: 'February', 3: 'March', 4: 'April', 5: 'May', 6: 'June', 7: 'July', + 8: 'August', 9: 'September', 10: 'October', 11: 'November', 12: 'December'} + _season_type_flags = ('unique', 'year') + + def __init__(self, iterable): + list.__init__(self, iterable) + + @property + def icclim_mode(self): + ret = [] + flag = None + for element in self: + sub = [] + if element not in self._season_type_flags: + for sub_element in element: + sub.append(self._integer_name_map[sub_element][0]) + ret.append(''.join(sub)) + else: + flag = element + ret = '-'.join(ret) + if flag is not None: + ret = '{0} ({1})'.format(ret, flag) + return ret diff --git a/src/ocgis/contrib/library_icclim.py b/src/ocgis/contrib/library_icclim.py index d5731ee3a..d151cc368 100644 --- a/src/ocgis/contrib/library_icclim.py +++ b/src/ocgis/contrib/library_icclim.py @@ -11,65 +11,63 @@ from ocgis.calc.base import AbstractUnivariateSetFunction, AbstractMultivariateFunction, AbstractParameterizedFunction from ocgis import constants -from ocgis.exc import DefinitionValidationError -from ocgis.api.parms.definition import CalcGrouping +from ocgis.calc.temporal_groups import SeasonalTemporalGroup _icclim_function_map = { - 'icclim_TG':{'func':calc_indice.TG_calculation,'meta':slu.TG_setvarattr}, - 'icclim_TN':{'func':calc_indice.TN_calculation,'meta':slu.TN_setvarattr}, - 'icclim_TX':{'func':calc_indice.TX_calculation,'meta':slu.TX_setvarattr}, - 'icclim_SU':{'func':calc_indice.SU_calculation,'meta':slu.SU_setvarattr}, - 'icclim_DTR':{'func':calc_indice.DTR_calculation,'meta':slu.DTR_setvarattr}, - 'icclim_ETR':{'func':calc_indice.ETR_calculation,'meta':slu.ETR_setvarattr}, - 'icclim_TXx':{'func':calc_indice.TXx_calculation,'meta':slu.TXx_setvarattr}, - 
'icclim_TXn':{'func':calc_indice.TXn_calculation,'meta':slu.TXn_setvarattr}, - 'icclim_TNx':{'func':calc_indice.TNx_calculation,'meta':slu.TNx_setvarattr}, - 'icclim_TNn':{'func':calc_indice.TNn_calculation,'meta':slu.TNn_setvarattr}, - 'icclim_CSU':{'func':calc_indice.CSU_calculation,'meta':slu.CSU_setvarattr}, - 'icclim_TR':{'func':calc_indice.TR_calculation,'meta':slu.TR_setvarattr}, - 'icclim_FD':{'func':calc_indice.FD_calculation,'meta':slu.FD_setvarattr}, - 'icclim_CFD':{'func':calc_indice.CFD_calculation,'meta':slu.CFD_setvarattr}, - 'icclim_ID':{'func':calc_indice.ID_calculation,'meta':slu.ID_setvarattr}, - 'icclim_HD17':{'func':calc_indice.HD17_calculation,'meta':slu.HD17_setvarattr}, - 'icclim_GD4':{'func':calc_indice.GD4_calculation,'meta':slu.GD4_setvarattr}, - 'icclim_vDTR':{'func':calc_indice.vDTR_calculation,'meta':slu.vDTR_setvarattr}, - 'icclim_RR':{'func':calc_indice.RR_calculation,'meta':slu.RR_setvarattr}, - 'icclim_RR1':{'func':calc_indice.RR1_calculation,'meta':slu.RR1_setvarattr}, - 'icclim_CWD':{'func':calc_indice.CWD_calculation,'meta':slu.CWD_setvarattr}, - 'icclim_SDII':{'func':calc_indice.SDII_calculation,'meta':slu.SDII_setvarattr}, - 'icclim_R10mm':{'func':calc_indice.R10mm_calculation,'meta':slu.R10mm_setvarattr}, - 'icclim_R20mm':{'func':calc_indice.R20mm_calculation,'meta':slu.R20mm_setvarattr}, - 'icclim_RX1day':{'func':calc_indice.RX1day_calculation,'meta':slu.RX1day_setvarattr}, - 'icclim_RX5day':{'func':calc_indice.RX5day_calculation,'meta':slu.RX5day_setvarattr}, - 'icclim_SD':{'func':calc_indice.SD_calculation,'meta':slu.SD_setvarattr}, - 'icclim_SD1':{'func':calc_indice.SD1_calculation,'meta':slu.SD1_setvarattr}, - 'icclim_SD5cm':{'func':calc_indice.SD5cm_calculation,'meta':slu.SD5cm_setvarattr}, - 'icclim_SD50cm':{'func':calc_indice.SD50cm_calculation,'meta':slu.SD50cm_setvarattr}, - 'icclim_CDD':{'func':calc_indice.CDD_calculation,'meta':slu.CDD_setvarattr}, - 
'icclim_TG10p':{'func':calc_indice_perc.TG10p_calculation,'meta':slu.TG10p_setvarattr}, - 'icclim_TX10p':{'func':calc_indice_perc.TX10p_calculation,'meta':slu.TX10p_setvarattr}, - 'icclim_TN10p':{'func':calc_indice_perc.TN10p_calculation,'meta':slu.TN10p_setvarattr}, - 'icclim_TG90p':{'func':calc_indice_perc.TG90p_calculation,'meta':slu.TG90p_setvarattr}, - 'icclim_TX90p':{'func':calc_indice_perc.TX90p_calculation,'meta':slu.TX90p_setvarattr}, - 'icclim_TN90p':{'func':calc_indice_perc.TN90p_calculation,'meta':slu.TN90p_setvarattr}, - 'icclim_WSDI':{'func':calc_indice_perc.WSDI_calculation,'meta':slu.WSDI_setvarattr}, - 'icclim_CSDI':{'func':calc_indice_perc.CSDI_calculation,'meta':slu.CSDI_setvarattr}, - 'icclim_R75p':{'func':calc_indice_perc.R75p_calculation,'meta':slu.R75p_setvarattr}, - 'icclim_R75TOT':{'func':calc_indice_perc.R75TOT_calculation,'meta':slu.R75TOT_setvarattr}, - 'icclim_R95p':{'func':calc_indice_perc.R95p_calculation,'meta':slu.R95p_setvarattr}, - 'icclim_R95TOT':{'func':calc_indice_perc.R95TOT_calculation,'meta':slu.R95TOT_setvarattr}, - 'icclim_R99p':{'func':calc_indice_perc.R99p_calculation,'meta':slu.R99p_setvarattr}, - 'icclim_R99TOT':{'func':calc_indice_perc.R99TOT_calculation,'meta':slu.R99TOT_setvarattr}, - 'icclim_CD': {'func': calc_indice_perc.CD_calculation, 'meta': slu.CD_setvarattr}, - 'icclim_CW': {'func': calc_indice_perc.CW_calculation, 'meta': slu.CW_setvarattr}, - 'icclim_WD': {'func': calc_indice_perc.WD_calculation, 'meta': slu.WD_setvarattr}, - 'icclim_WW': {'func': calc_indice_perc.WW_calculation, 'meta': slu.WW_setvarattr}, - } + 'icclim_TG': {'func': calc_indice.TG_calculation, 'meta': slu.TG_setvarattr}, + 'icclim_TN': {'func': calc_indice.TN_calculation, 'meta': slu.TN_setvarattr}, + 'icclim_TX': {'func': calc_indice.TX_calculation, 'meta': slu.TX_setvarattr}, + 'icclim_SU': {'func': calc_indice.SU_calculation, 'meta': slu.SU_setvarattr}, + 'icclim_DTR': {'func': calc_indice.DTR_calculation, 'meta': slu.DTR_setvarattr}, 
+ 'icclim_ETR': {'func': calc_indice.ETR_calculation, 'meta': slu.ETR_setvarattr}, + 'icclim_TXx': {'func': calc_indice.TXx_calculation, 'meta': slu.TXx_setvarattr}, + 'icclim_TXn': {'func': calc_indice.TXn_calculation, 'meta': slu.TXn_setvarattr}, + 'icclim_TNx': {'func': calc_indice.TNx_calculation, 'meta': slu.TNx_setvarattr}, + 'icclim_TNn': {'func': calc_indice.TNn_calculation, 'meta': slu.TNn_setvarattr}, + 'icclim_CSU': {'func': calc_indice.CSU_calculation, 'meta': slu.CSU_setvarattr}, + 'icclim_TR': {'func': calc_indice.TR_calculation, 'meta': slu.TR_setvarattr}, + 'icclim_FD': {'func': calc_indice.FD_calculation, 'meta': slu.FD_setvarattr}, + 'icclim_CFD': {'func': calc_indice.CFD_calculation, 'meta': slu.CFD_setvarattr}, + 'icclim_ID': {'func': calc_indice.ID_calculation, 'meta': slu.ID_setvarattr}, + 'icclim_HD17': {'func': calc_indice.HD17_calculation, 'meta': slu.HD17_setvarattr}, + 'icclim_GD4': {'func': calc_indice.GD4_calculation, 'meta': slu.GD4_setvarattr}, + 'icclim_vDTR': {'func': calc_indice.vDTR_calculation, 'meta': slu.vDTR_setvarattr}, + 'icclim_RR': {'func': calc_indice.RR_calculation, 'meta': slu.RR_setvarattr}, + 'icclim_RR1': {'func': calc_indice.RR1_calculation, 'meta': slu.RR1_setvarattr}, + 'icclim_CWD': {'func': calc_indice.CWD_calculation, 'meta': slu.CWD_setvarattr}, + 'icclim_SDII': {'func': calc_indice.SDII_calculation, 'meta': slu.SDII_setvarattr}, + 'icclim_R10mm': {'func': calc_indice.R10mm_calculation, 'meta': slu.R10mm_setvarattr}, + 'icclim_R20mm': {'func': calc_indice.R20mm_calculation, 'meta': slu.R20mm_setvarattr}, + 'icclim_RX1day': {'func': calc_indice.RX1day_calculation, 'meta': slu.RX1day_setvarattr}, + 'icclim_RX5day': {'func': calc_indice.RX5day_calculation, 'meta': slu.RX5day_setvarattr}, + 'icclim_SD': {'func': calc_indice.SD_calculation, 'meta': slu.SD_setvarattr}, + 'icclim_SD1': {'func': calc_indice.SD1_calculation, 'meta': slu.SD1_setvarattr}, + 'icclim_SD5cm': {'func': calc_indice.SD5cm_calculation, 'meta': 
slu.SD5cm_setvarattr}, + 'icclim_SD50cm': {'func': calc_indice.SD50cm_calculation, 'meta': slu.SD50cm_setvarattr}, + 'icclim_CDD': {'func': calc_indice.CDD_calculation, 'meta': slu.CDD_setvarattr}, + 'icclim_TG10p': {'func': calc_indice_perc.TG10p_calculation, 'meta': slu.TG10p_setvarattr}, + 'icclim_TX10p': {'func': calc_indice_perc.TX10p_calculation, 'meta': slu.TX10p_setvarattr}, + 'icclim_TN10p': {'func': calc_indice_perc.TN10p_calculation, 'meta': slu.TN10p_setvarattr}, + 'icclim_TG90p': {'func': calc_indice_perc.TG90p_calculation, 'meta': slu.TG90p_setvarattr}, + 'icclim_TX90p': {'func': calc_indice_perc.TX90p_calculation, 'meta': slu.TX90p_setvarattr}, + 'icclim_TN90p': {'func': calc_indice_perc.TN90p_calculation, 'meta': slu.TN90p_setvarattr}, + 'icclim_WSDI': {'func': calc_indice_perc.WSDI_calculation, 'meta': slu.WSDI_setvarattr}, + 'icclim_CSDI': {'func': calc_indice_perc.CSDI_calculation, 'meta': slu.CSDI_setvarattr}, + 'icclim_R75p': {'func': calc_indice_perc.R75p_calculation, 'meta': slu.R75p_setvarattr}, + 'icclim_R75TOT': {'func': calc_indice_perc.R75TOT_calculation, 'meta': slu.R75TOT_setvarattr}, + 'icclim_R95p': {'func': calc_indice_perc.R95p_calculation, 'meta': slu.R95p_setvarattr}, + 'icclim_R95TOT': {'func': calc_indice_perc.R95TOT_calculation, 'meta': slu.R95TOT_setvarattr}, + 'icclim_R99p': {'func': calc_indice_perc.R99p_calculation, 'meta': slu.R99p_setvarattr}, + 'icclim_R99TOT': {'func': calc_indice_perc.R99TOT_calculation, 'meta': slu.R99TOT_setvarattr}, + 'icclim_CD': {'func': calc_indice_perc.CD_calculation, 'meta': slu.CD_setvarattr}, + 'icclim_CW': {'func': calc_indice_perc.CW_calculation, 'meta': slu.CW_setvarattr}, + 'icclim_WD': {'func': calc_indice_perc.WD_calculation, 'meta': slu.WD_setvarattr}, + 'icclim_WW': {'func': calc_indice_perc.WW_calculation, 'meta': slu.WW_setvarattr}, +} class NcAttributesSimulator(object): - def __init__(self, attrs): self.attrs = attrs @@ -78,8 +76,8 @@ def __getattr__(self, name): def 
setncattr(self, key, value): self.attrs[key] = value - - + + class AbstractIcclimFunction(object): __metaclass__ = abc.ABCMeta description = None @@ -87,25 +85,24 @@ class AbstractIcclimFunction(object): long_name = '' _global_attributes_maintain = ['history'] _global_attribute_source_name = 'source_data_global_attributes' - _allowed_temporal_groupings = [('month',),('month','year'),('year',)] - + def set_field_metadata(self): # we are going to strip the metadata elements and store in a dictionary JSON representation - - def _get_value_(key,target): + + def _get_value_(key, target): try: ret = target[key] ret_key = key - return(ret,ret_key) + return ret, ret_key except KeyError: - for method in ['lower','upper','title']: + for method in ['lower', 'upper', 'title']: try: - ret_key = getattr(str,method)(key) + ret_key = getattr(str, method)(key) ret = target[ret_key] - return(ret,ret_key) + return ret, ret_key except KeyError: pass - return('',key) + return '', key # reorganize the output metadata pushing source global attributes to a new attribute. 
the old attributes are # serialized to a JSON string @@ -115,79 +112,78 @@ def _get_value_(key,target): sim.attrs[self._global_attribute_source_name] = original # copy attributes from the original dataset for key in self._global_attributes_maintain: - value,value_key = _get_value_(key,sim.attrs[self._global_attribute_source_name]) + value, value_key = _get_value_(key, sim.attrs[self._global_attribute_source_name]) sim.attrs[value_key] = value ref = sim.attrs[self._global_attribute_source_name] sim.attrs[self._global_attribute_source_name] = self._get_json_string_(ref) - + # update global attributes using ICCLIM functions indice_name = self.key.split('_')[1] - set_globattr.history(sim,self.tgd.grouping,indice_name,[self.field.temporal.value_datetime.min(),self.field.temporal.value_datetime.max()]) - set_globattr.title(sim,indice_name) + time_range = [self.field.temporal.value_datetime.min(), self.field.temporal.value_datetime.max()] + args = [sim, self.tgd.grouping, indice_name, time_range] + try: + set_globattr.history(*args) + except TypeError: + # temporal grouping is likely a season. 
convert to a season object and try again + args[1] = SeasonalTemporalGroup(self.tgd.grouping) + set_globattr.history(*args) + set_globattr.title(sim, indice_name) set_globattr.references(sim) - set_globattr.institution(sim,'Climate impact portal (http://climate4impact.eu)') - set_globattr.comment(sim,indice_name) + set_globattr.institution(sim, 'Climate impact portal (http://climate4impact.eu)') + set_globattr.comment(sim, indice_name) def set_variable_metadata(self, variable): sim = NcAttributesSimulator(variable.attrs) _icclim_function_map[self.key]['meta'](sim) # update the variable's units from the metadata as this is modified inside ICCLIM variable.units = variable.attrs['units'] - + + @classmethod + def validate_icclim(cls, ops): + """ + :type ops: :class:`ocgis.OcgOperations` + """ + pass + @staticmethod def _get_json_string_(dct): - ''' - Prepare a dictionary for conversion to JSON. The serializer does not - understand NumPy types so those must be converted to native Python types - first. - ''' + """ + Prepare a dictionary for conversion to JSON. The serializer does not understand NumPy types so those must be + converted to native Python types first. + """ + dct = deepcopy(dct) - for k,v in dct.iteritems(): + for k, v in dct.iteritems(): try: v = v.tolist() except AttributeError: pass dct[k] = v - return(json.dumps(dct)) - - @staticmethod - def validate_icclim(klass,ops): - should_raise = False - allowed = [set(_) for _ in klass._allowed_temporal_groupings] - try: - if set(ops.calc_grouping) not in allowed: - should_raise = True - except TypeError: - ## this is a seasonal grouping - should_raise = True - if should_raise: - msg = 'The following temporal groupings are supported for ICCLIM: {0}. 
'.format(klass._allowed_temporal_groupings) - msg += 'The requested temporal group is: {0}.'.format(ops.calc_grouping) - raise(DefinitionValidationError(CalcGrouping,msg)) - - -class AbstractIcclimUnivariateSetFunction(AbstractIcclimFunction,AbstractUnivariateSetFunction): + return json.dumps(dct) + + +class AbstractIcclimUnivariateSetFunction(AbstractIcclimFunction, AbstractUnivariateSetFunction): __metaclass__ = abc.ABCMeta - - def calculate(self,values): + + def calculate(self, values): return self._get_icclim_func_()(values, values.fill_value) - + @classmethod - def validate(cls,ops): - cls.validate_icclim(cls, ops) + def validate(cls, ops): + cls.validate_icclim(ops) super(AbstractIcclimUnivariateSetFunction, cls).validate(ops) def _get_icclim_func_(self): return _icclim_function_map[self.key]['func'] - - -class AbstractIcclimMultivariateFunction(AbstractIcclimFunction,AbstractMultivariateFunction): + + +class AbstractIcclimMultivariateFunction(AbstractIcclimFunction, AbstractMultivariateFunction): __metaclass__ = abc.ABCMeta - + @classmethod - def validate(cls,ops): - cls.validate_icclim(cls,ops) - super(AbstractIcclimMultivariateFunction,cls).validate(ops) + def validate(cls, ops): + cls.validate_icclim(ops) + super(AbstractIcclimMultivariateFunction, cls).validate(ops) class AbstractIcclimPercentileIndice(AbstractIcclimUnivariateSetFunction, AbstractParameterizedFunction): @@ -201,8 +197,8 @@ def __init__(self, *args, **kwargs): AbstractIcclimUnivariateSetFunction.__init__(self, *args, **kwargs) if self.field is not None: - assert(self.field.shape[0] == 1) - assert(self.field.shape[2] == 1) + assert (self.field.shape[0] == 1) + assert (self.field.shape[2] == 1) @abc.abstractproperty def percentile(self): @@ -211,7 +207,6 @@ def percentile(self): :type: int """ - pass def calculate(self, values, percentile_dict=None): @@ -222,7 +217,7 @@ def calculate(self, values, percentile_dict=None): except KeyError: variable = 
self.field.variables[self._curr_variable.alias] value = variable.value[0, :, 0, :, :] - assert(value.ndim == 3) + assert (value.ndim == 3) percentile_dict = get_percentile_dict(value, self.field.temporal.value_datetime, self.percentile, self.window_width, only_leap_years=self.only_leap_years) self._storage_percentile_dict[self._curr_variable.alias] = percentile_dict @@ -249,12 +244,12 @@ class IcclimTN(IcclimTG): class IcclimTX(IcclimTG): key = 'icclim_TX' - - + + class IcclimTXx(IcclimTG): key = 'icclim_TXx' - - + + class IcclimTXn(IcclimTG): key = 'icclim_TXn' @@ -291,39 +286,39 @@ class IcclimID(IcclimCSU): class IcclimHD17(IcclimTG): dtype = constants.NP_FLOAT key = 'icclim_HD17' - required_units = ['K','kelvin'] + required_units = ['K', 'kelvin'] class IcclimGD4(IcclimTG): dtype = constants.NP_FLOAT key = 'icclim_GD4' - required_units = ['K','kelvin'] + required_units = ['K', 'kelvin'] class IcclimSU(IcclimCSU): dtype = constants.NP_INT key = 'icclim_SU' - required_units = ['K','kelvin'] - - + required_units = ['K', 'kelvin'] + + class IcclimDTR(AbstractIcclimMultivariateFunction): key = 'icclim_DTR' dtype = constants.NP_FLOAT - required_variables = ['tasmin','tasmax'] + required_variables = ['tasmin', 'tasmax'] time_aggregation_external = False - - def calculate(self,tasmax=None,tasmin=None): - ret = _icclim_function_map[self.key]['func'](tasmax,tasmin,tasmax.fill_value,tasmin.fill_value) - ## convert output to a masked array + + def calculate(self, tasmax=None, tasmin=None): + ret = _icclim_function_map[self.key]['func'](tasmax, tasmin, tasmax.fill_value, tasmin.fill_value) + # convert output to a masked array ret_mask = ret == tasmax.fill_value - ret = np.ma.array(ret,mask=ret_mask,fill_value=tasmax.fill_value) - return(ret) - + ret = np.ma.array(ret, mask=ret_mask, fill_value=tasmax.fill_value) + return ret + class IcclimETR(IcclimDTR): key = 'icclim_ETR' - - + + class IcclimvDTR(IcclimDTR): key = 'icclim_vDTR' @@ -504,8 +499,8 @@ def _calculate_(self, 
tas=None, pr=None, tas_percentile_dict=None, pr_percentile Allows subclasses to overload parameter definitions for `calculate`. """ - assert(tas.ndim == 3) - assert(pr.ndim == 3) + assert (tas.ndim == 3) + assert (pr.ndim == 3) try: dt_arr = self.field.temporal.value_datetime[self._curr_group] @@ -525,7 +520,8 @@ def _calculate_(self, tas=None, pr=None, tas_percentile_dict=None, pr_percentile alias_pr = self.parms['pr'] t_arr_perc = self.field.variables[alias_tas].value.squeeze() p_arr_perc = self.field.variables[alias_pr].value.squeeze() - tas_percentile_dict = get_percentile_dict(t_arr_perc, dt_arr_perc, self.percentile_tas, self.window_width) + tas_percentile_dict = get_percentile_dict(t_arr_perc, dt_arr_perc, self.percentile_tas, + self.window_width) pr_percentile_dict = get_percentile_dict(p_arr_perc, dt_arr_perc, self.percentile_pr, self.window_width) self._storage_percentile_dict['tas'] = tas_percentile_dict self._storage_percentile_dict['pr'] = pr_percentile_dict diff --git a/src/ocgis/test/test_misc/test_dependency_versions.py b/src/ocgis/test/test_misc/test_dependency_versions.py index 44267df31..d9c6e8b8e 100644 --- a/src/ocgis/test/test_misc/test_dependency_versions.py +++ b/src/ocgis/test/test_misc/test_dependency_versions.py @@ -1,8 +1,10 @@ -import fiona import numpy +import netCDF4 + +import fiona import osgeo import shapely -import netCDF4 + from ocgis.test.base import TestBase @@ -10,15 +12,22 @@ class TestVersions(TestBase): def test_cfunits(self): import cfunits + self.assertEqual(cfunits.__version__, '0.9.6') def test_esmf(self): import ESMF + self.assertEqual(ESMF.__release__, 'ESMF_6_3_0rp1') def test_fiona(self): self.assertEqual(fiona.__version__, '1.4.5') + def test_icclim(self): + import icclim + + self.assertEqual(icclim.__version__, '3.0') + def test_netCDF4(self): self.assertEqual(netCDF4.__version__, '1.1.1') @@ -30,6 +39,7 @@ def test_osgeo(self): def test_rtree(self): import rtree + self.assertEqual(rtree.__version__, '0.8.0') def 
test_shapely(self): diff --git a/src/ocgis/test/test_ocgis/test_calc/test_temporal_groups.py b/src/ocgis/test/test_ocgis/test_calc/test_temporal_groups.py new file mode 100644 index 000000000..ab593a95f --- /dev/null +++ b/src/ocgis/test/test_ocgis/test_calc/test_temporal_groups.py @@ -0,0 +1,19 @@ +from ocgis.calc.temporal_groups import SeasonalTemporalGroup, AbstractTemporalGroup +from ocgis.test.base import TestBase + + +class TestSeasonalTemporalGroup(TestBase): + def test_init(self): + actual = [[12, 1, 2], [3, 4, 5], 'unique'] + st = SeasonalTemporalGroup(actual) + self.assertIsInstances(st, [list, AbstractTemporalGroup]) + self.assertEqual(st, actual) + + def test_icclim_mode(self): + actual = [[12, 1, 2], [3, 4, 5], 'unique'] + st = SeasonalTemporalGroup(actual) + self.assertEqual(st.icclim_mode, 'DJF-MAM (unique)') + + actual = [[12, 1, 2], [3, 4, 5]] + st = SeasonalTemporalGroup(actual) + self.assertEqual(st.icclim_mode, 'DJF-MAM') diff --git a/src/ocgis/test/test_ocgis/test_contrib/test_library_icclim.py b/src/ocgis/test/test_ocgis/test_contrib/test_library_icclim.py index 372093152..d1422fd40 100644 --- a/src/ocgis/test/test_ocgis/test_contrib/test_library_icclim.py +++ b/src/ocgis/test/test_ocgis/test_contrib/test_library_icclim.py @@ -1,14 +1,13 @@ from netCDF4 import date2num -import unittest import json from collections import OrderedDict from copy import deepcopy -# noinspection PyUnresolvedReferences -from datetime import datetime - from numpy.ma import MaskedArray import numpy as np +from datetime import datetime +from ocgis.calc.temporal_groups import SeasonalTemporalGroup +from ocgis.interface.base.dimension.temporal import TemporalDimension from ocgis.calc.base import AbstractParameterizedFunction from ocgis.interface.base.variable import Variable, VariableCollection from ocgis.interface.base.field import Field @@ -31,46 +30,76 @@ from ocgis.util.large_array import compute +class FakeAbstractIcclimFunction(AbstractIcclimFunction): + key = 
'icclim_fillme' + + def __init__(self, field, tgd): + self.field = field + self.tgd = tgd + + +class TestAbstractIcclimFunction(TestBase): + def setUp(self): + FakeAbstractIcclimFunction.key = 'icclim_TG' + super(TestAbstractIcclimFunction, self).setUp() + + def tearDown(self): + FakeAbstractIcclimFunction.key = 'icclim_fillme' + super(TestAbstractIcclimFunction, self).tearDown() + + def get(self): + field = self.get_field() + temporal = TemporalDimension(value=self.get_time_series(datetime(2000, 1, 1), datetime(2001, 12, 31))) + grouping = [[12, 1, 2]] + tgd = temporal.get_grouping(grouping) + aa = FakeAbstractIcclimFunction(field, tgd) + return aa + + def test_init(self): + f = self.get() + self.assertIsInstance(f, AbstractIcclimFunction) + + def test_set_field_metadata(self): + aa = self.get() + aa.set_field_metadata() + self.assertIn(SeasonalTemporalGroup(aa.tgd.grouping).icclim_mode, aa.field.attrs['history']) + + class TestLibraryIcclim(TestBase): - - def test_standard_AbstractIcclimFunction(self): - shapes = ([('month',), 12],[('month', 'year'), 24],[('year',),2]) + @attr('slow') + def test_icclim_combinatorial(self): + shapes = ([('month',), 12], [('month', 'year'), 24], [('year',), 2]) ocgis.env.OVERWRITE = True keys = set(library_icclim._icclim_function_map.keys()) ignore = [AbstractIcclimPercentileIndice] - for klass in [ - AbstractIcclimUnivariateSetFunction, - AbstractIcclimMultivariateFunction]: + for klass in [AbstractIcclimUnivariateSetFunction, AbstractIcclimMultivariateFunction]: for subclass in itersubclasses(klass): if any([subclass == i for i in ignore]): continue keys.remove(subclass.key) - self.assertEqual([('month',),('month','year'),('year',)],subclass._allowed_temporal_groupings) + self.assertEqual([('month',), ('month', 'year'), ('year',)], subclass._allowed_temporal_groupings) for cg in CalcGrouping.iter_possible(): - calc = [{'func':subclass.key,'name':subclass.key.split('_')[1]}] + calc = [{'func': subclass.key, 'name': 
subclass.key.split('_')[1]}] if klass == AbstractIcclimUnivariateSetFunction: rd = self.test_data.get_rd('cancm4_tas') - rd.time_region = {'year':[2001,2002]} - calc = [{'func':subclass.key,'name':subclass.key.split('_')[1]}] + rd.time_region = {'year': [2001, 2002]} + calc = [{'func': subclass.key, 'name': subclass.key.split('_')[1]}] else: tasmin = self.test_data.get_rd('cancm4_tasmin_2001') tasmax = self.test_data.get_rd('cancm4_tasmax_2001') - rd = [tasmin,tasmax] + rd = [tasmin, tasmax] for r in rd: - r.time_region = {'year':[2001,2002]} + r.time_region = {'year': [2001, 2002]} if subclass == IcclimCD or issubclass(subclass, IcclimCD): kwds = {'tas': 'tasmax', 'pr': 'tasmin'} else: kwds = {'tasmin': 'tasmin', 'tasmax': 'tasmax'} calc[0].update({'kwds': kwds}) try: - ops = ocgis.OcgOperations(dataset=rd, - output_format='nc', - calc=calc, - calc_grouping=cg, - geom=[3.39,40.62,10.54,52.30]) + ops = ocgis.OcgOperations(dataset=rd, output_format='nc', calc=calc, calc_grouping=cg, + geom=[3.39, 40.62, 10.54, 52.30]) ret = ops.execute() to_test = None for shape in shapes: @@ -78,36 +107,37 @@ def test_standard_AbstractIcclimFunction(self): to_test = shape[1] with nc_scope(ret) as ds: var = ds.variables[calc[0]['name']] - self.assertEqual(var.dtype,subclass.dtype) - self.assertEqual(var.shape,(to_test,5,4)) + self.assertEqual(var.dtype, subclass.dtype) + if to_test is not None: + self.assertEqual(var.shape, (to_test, 5, 4)) except DefinitionValidationError as e: - if e.message.startswith('''OcgOperations validation raised an exception on the argument/operation "calc_grouping" with the message: The following temporal groupings are supported for ICCLIM: [('month',), ('month', 'year'), ('year',)]. The requested temporal group is:'''): + msg = '''OcgOperations validation raised an exception on the argument/operation "calc_grouping" with the message: The following temporal groupings are supported for ICCLIM: [('month',), ('month', 'year'), ('year',)]. 
The requested temporal group is:''' + if e.message.startswith(msg): pass else: - raise(e) - self.assertEqual(len(keys),0) - + raise e + self.assertEqual(len(keys), 0) + def test_register_icclim(self): fr = FunctionRegistry() - self.assertNotIn('icclim_TG',fr) + self.assertNotIn('icclim_TG', fr) register_icclim(fr) - self.assertIn('icclim_TG',fr) - self.assertIn('icclim_vDTR',fr) - + self.assertIn('icclim_TG', fr) + self.assertIn('icclim_vDTR', fr) + def test_calc_argument_to_operations(self): - value = [{'func':'icclim_TG','name':'TG'}] + value = [{'func': 'icclim_TG', 'name': 'TG'}] calc = Calc(value) - self.assertEqual(len(calc.value),1) - self.assertEqual(calc.value[0]['ref'],IcclimTG) - + self.assertEqual(len(calc.value), 1) + self.assertEqual(calc.value[0]['ref'], IcclimTG) + def test_bad_icclim_key_to_operations(self): - value = [{'func':'icclim_TG_bad','name':'TG'}] + value = [{'func': 'icclim_TG_bad', 'name': 'TG'}] with self.assertRaises(DefinitionValidationError): Calc(value) class TestCD(TestBase): - def get_field_tdim(self): np.random.seed(1) start = datetime(2000, 1, 1) @@ -191,7 +221,6 @@ def test_operations(self): class TestTG10p(TestBase): - def test_init(self): tg = IcclimTG10p() @@ -222,7 +251,8 @@ def test_large_array_compute_local(self): calc = [{'func': 'icclim_TG10p', 'name': 'itg'}] calc_grouping = ['month'] rd = self.test_data.get_rd('cancm4_tas') - ops = ocgis.OcgOperations(dataset=rd, calc=calc, calc_grouping=calc_grouping, output_format='nc', geom='state_boundaries', + ops = ocgis.OcgOperations(dataset=rd, calc=calc, calc_grouping=calc_grouping, output_format='nc', + geom='state_boundaries', select_ugid=[24]) ret = compute(ops, 5, verbose=False) @@ -230,7 +260,9 @@ def test_large_array_compute_local(self): try: self.assertAlmostEqual(ds.variables['itg'][:].mean(), np.float32(29.518518)) except Exception as e: - import ipdb;ipdb.set_trace() + import ipdb; + + ipdb.set_trace() pass @attr('remote') @@ -241,162 +273,159 @@ def 
test_large_array_compute_remote(self): calc_grouping = ['month'] uri = 'http://opendap.knmi.nl/knmi/thredds/dodsC/IS-ENES/TESTSETS/tasmax_day_EC-EARTH_rcp26_r8i1p1_20760101-21001231.nc' rd = ocgis.RequestDataset(uri=uri, variable='tasmax') - ops = ocgis.OcgOperations(dataset=rd, calc=calc, calc_grouping=calc_grouping, output_format='nc', geom='state_boundaries', + ops = ocgis.OcgOperations(dataset=rd, calc=calc, calc_grouping=calc_grouping, output_format='nc', + geom='state_boundaries', select_ugid=[24]) ret = compute(ops, 5, verbose=False) with nc_scope(ret) as ds: self.assertAlmostEqual(ds.variables['itg'][:].mean(), 78.113095238095241) - + class TestDTR(TestBase): - def test_calculate(self): tasmin = self.test_data.get_rd('cancm4_tasmin_2001') tasmax = self.test_data.get_rd('cancm4_tasmax_2001') field = tasmin.get() field.variables.add_variable(deepcopy(tasmax.get().variables['tasmax']), assign_new_uid=True) - field = field[:,0:600,:,25:50,25:50] + field = field[:, 0:600, :, 25:50, 25:50] tgd = field.temporal.get_grouping(['month']) - dtr = IcclimDTR(field=field,tgd=tgd) + dtr = IcclimDTR(field=field, tgd=tgd) ret = dtr.execute() - self.assertEqual(ret['icclim_DTR'].value.shape,(1, 12, 1, 25, 25)) - + self.assertEqual(ret['icclim_DTR'].value.shape, (1, 12, 1, 25, 25)) + def test_bad_keyword_mapping(self): tasmin = self.test_data.get_rd('cancm4_tasmin_2001') tas = self.test_data.get_rd('cancm4_tas') - rds = [tasmin,tas] - calc = [{'func':'icclim_DTR','name':'DTR','kwds':{'tas':'tasmin','tasmax':'tasmax'}}] + rds = [tasmin, tas] + calc = [{'func': 'icclim_DTR', 'name': 'DTR', 'kwds': {'tas': 'tasmin', 'tasmax': 'tasmax'}}] with self.assertRaises(DefinitionValidationError): - ocgis.OcgOperations(dataset=rds,calc=calc,calc_grouping=['month'], + ocgis.OcgOperations(dataset=rds, calc=calc, calc_grouping=['month'], output_format='nc') - - calc = [{'func':'icclim_DTR','name':'DTR'}] + + calc = [{'func': 'icclim_DTR', 'name': 'DTR'}] with 
self.assertRaises(DefinitionValidationError): - ocgis.OcgOperations(dataset=rds,calc=calc,calc_grouping=['month'], + ocgis.OcgOperations(dataset=rds, calc=calc, calc_grouping=['month'], output_format='nc') - + def test_calculation_operations(self): - ## note the kwds must contain a map of the required variables to their - ## associated aliases. - calc = [{'func':'icclim_DTR','name':'DTR','kwds':{'tasmin':'tasmin','tasmax':'tasmax'}}] + # # note the kwds must contain a map of the required variables to their + # # associated aliases. + calc = [{'func': 'icclim_DTR', 'name': 'DTR', 'kwds': {'tasmin': 'tasmin', 'tasmax': 'tasmax'}}] tasmin = self.test_data.get_rd('cancm4_tasmin_2001') - tasmin.time_region = {'year':[2002]} + tasmin.time_region = {'year': [2002]} tasmax = self.test_data.get_rd('cancm4_tasmax_2001') - tasmax.time_region = {'year':[2002]} - rds = [tasmin,tasmax] - ops = ocgis.OcgOperations(dataset=rds,calc=calc,calc_grouping=['month'], + tasmax.time_region = {'year': [2002]} + rds = [tasmin, tasmax] + ops = ocgis.OcgOperations(dataset=rds, calc=calc, calc_grouping=['month'], output_format='nc') ops.execute() class TestETR(TestBase): - def test_calculate(self): tasmin = self.test_data.get_rd('cancm4_tasmin_2001') tasmax = self.test_data.get_rd('cancm4_tasmax_2001') field = tasmin.get() field.variables.add_variable(tasmax.get().variables['tasmax'], assign_new_uid=True) - field = field[:,0:600,:,25:50,25:50] + field = field[:, 0:600, :, 25:50, 25:50] tgd = field.temporal.get_grouping(['month']) - dtr = IcclimETR(field=field,tgd=tgd) + dtr = IcclimETR(field=field, tgd=tgd) ret = dtr.execute() - self.assertEqual(ret['icclim_ETR'].value.shape,(1, 12, 1, 25, 25)) - + self.assertEqual(ret['icclim_ETR'].value.shape, (1, 12, 1, 25, 25)) + def test_calculate_rotated_pole(self): tasmin_fake = self.test_data.get_rd('rotated_pole_ichec') tasmin_fake.alias = 'tasmin' tasmax_fake = deepcopy(tasmin_fake) tasmax_fake.alias = 'tasmax' - rds = [tasmin_fake,tasmax_fake] + rds 
= [tasmin_fake, tasmax_fake] for rd in rds: - rd.time_region = {'year':[1973]} - calc_ETR = [{'func':'icclim_ETR','name':'ETR','kwds':{'tasmin':'tasmin','tasmax':'tasmax'}}] - ops = ocgis.OcgOperations(dataset=[tasmin_fake,tasmax_fake], - calc=calc_ETR, - calc_grouping=['year', 'month'], - prefix = 'ETR_ocgis_icclim', - output_format = 'nc', - add_auxiliary_files=False) + rd.time_region = {'year': [1973]} + calc_ETR = [{'func': 'icclim_ETR', 'name': 'ETR', 'kwds': {'tasmin': 'tasmin', 'tasmax': 'tasmax'}}] + ops = ocgis.OcgOperations(dataset=[tasmin_fake, tasmax_fake], + calc=calc_ETR, + calc_grouping=['year', 'month'], + prefix='ETR_ocgis_icclim', + output_format='nc', + add_auxiliary_files=False) with nc_scope(ops.execute()) as ds: - self.assertEqual(ds.variables['ETR'][:].shape,(12,103,106)) + self.assertEqual(ds.variables['ETR'][:].shape, (12, 103, 106)) class TestTx(TestBase): - def test_calculate_operations(self): rd = self.test_data.get_rd('cancm4_tas') - slc = [None,None,None,[0,10],[0,10]] - calc_icclim = [{'func':'icclim_TG','name':'TG'}] - calc_ocgis = [{'func':'mean','name':'mean'}] - _calc_grouping = [['month'],['month','year']] + slc = [None, None, None, [0, 10], [0, 10]] + calc_icclim = [{'func': 'icclim_TG', 'name': 'TG'}] + calc_ocgis = [{'func': 'mean', 'name': 'mean'}] + _calc_grouping = [['month'], ['month', 'year']] for cg in _calc_grouping: - ops_ocgis = OcgOperations(calc=calc_ocgis,calc_grouping=cg,slice=slc, - dataset=rd) + ops_ocgis = OcgOperations(calc=calc_ocgis, calc_grouping=cg, slice=slc, dataset=rd) ret_ocgis = ops_ocgis.execute() - ops_icclim = OcgOperations(calc=calc_icclim,calc_grouping=cg,slice=slc, - dataset=rd) + ops_icclim = OcgOperations(calc=calc_icclim, calc_grouping=cg, slice=slc, dataset=rd) ret_icclim = ops_icclim.execute() - self.assertNumpyAll(ret_ocgis[1]['tas'].variables['mean'].value, - ret_icclim[1]['tas'].variables['TG'].value) - + self.assertNumpyAll(ret_ocgis[1]['tas'].variables['mean'].value, 
ret_icclim[1]['tas'].variables['TG'].value) + def test_calculation_operations_to_nc(self): rd = self.test_data.get_rd('cancm4_tas') - slc = [None,None,None,[0,10],[0,10]] - ops_ocgis = OcgOperations(calc=[{'func':'icclim_TG','name':'TG'}], + slc = [None, None, None, [0, 10], [0, 10]] + ops_ocgis = OcgOperations(calc=[{'func': 'icclim_TG', 'name': 'TG'}], calc_grouping=['month'], slice=slc, dataset=rd, output_format='nc') ret = ops_ocgis.execute() with nc_scope(ret) as ds: - self.assertIn('Calculation of TG indice (monthly climatology)',ds.history) - self.assertEqual(ds.title,'ECA temperature indice TG') + self.assertIn('Calculation of TG indice (monthly climatology)', ds.history) + self.assertEqual(ds.title, 'ECA temperature indice TG') var = ds.variables['TG'] - ## check the JSON serialization - self.assertEqual(ds.__dict__[AbstractIcclimFunction._global_attribute_source_name], - u'{"institution": "CCCma (Canadian Centre for Climate Modelling and Analysis, Victoria, BC, Canada)", "institute_id": "CCCma", "experiment_id": "decadal2000", "source": "CanCM4 2010 atmosphere: CanAM4 (AGCM15i, T63L35) ocean: CanOM4 (OGCM4.0, 256x192L40) sea ice: CanSIM1 (Cavitating Fluid, T63 Gaussian Grid) land: CLASS2.7", "model_id": "CanCM4", "forcing": "GHG,Oz,SA,BC,OC,LU,Sl,Vl (GHG includes CO2,CH4,N2O,CFC11,effective CFC12)", "parent_experiment_id": "N/A", "parent_experiment_rip": "N/A", "branch_time": 0.0, "contact": "cccma_info@ec.gc.ca", "references": "http://www.cccma.ec.gc.ca/models", "initialization_method": 1, "physics_version": 1, "tracking_id": "fac7bd83-dd7a-425b-b4dc-b5ab2e915939", "branch_time_YMDH": "2001:01:01:00", "CCCma_runid": "DHFP1B_E002_I2001_M01", "CCCma_parent_runid": "DHFP1_E002", "CCCma_data_licence": "1) GRANT OF LICENCE - The Government of Canada (Environment Canada) is the \\nowner of all intellectual property rights (including copyright) that may exist in this Data \\nproduct. 
You (as \\"The Licensee\\") are hereby granted a non-exclusive, non-assignable, \\nnon-transferable unrestricted licence to use this data product for any purpose including \\nthe right to share these data with others and to make value-added and derivative \\nproducts from it. This licence is not a sale of any or all of the owner\'s rights.\\n2) NO WARRANTY - This Data product is provided \\"as-is\\"; it has not been designed or \\nprepared to meet the Licensee\'s particular requirements. Environment Canada makes no \\nwarranty, either express or implied, including but not limited to, warranties of \\nmerchantability and fitness for a particular purpose. In no event will Environment Canada \\nbe liable for any indirect, special, consequential or other damages attributed to the \\nLicensee\'s use of the Data product.", "product": "output", "experiment": "10- or 30-year run initialized in year 2000", "frequency": "day", "creation_date": "2011-05-08T01:01:51Z", "history": "2011-05-08T01:01:51Z CMOR rewrote data to comply with CF standards and CMIP5 requirements.", "Conventions": "CF-1.4", "project_id": "CMIP5", "table_id": "Table day (28 March 2011) f9d6cfec5981bb8be1801b35a81002f0", "title": "CanCM4 model output prepared for CMIP5 10- or 30-year run initialized in year 2000", "parent_experiment": "N/A", "modeling_realm": "atmos", "realization": 2, "cmor_version": "2.5.4"}') - ## load the original source attributes from the JSON string + # check the JSON serialization + actual = u'{"institution": "CCCma (Canadian Centre for Climate Modelling and Analysis, Victoria, BC, Canada)", "institute_id": "CCCma", "experiment_id": "decadal2000", "source": "CanCM4 2010 atmosphere: CanAM4 (AGCM15i, T63L35) ocean: CanOM4 (OGCM4.0, 256x192L40) sea ice: CanSIM1 (Cavitating Fluid, T63 Gaussian Grid) land: CLASS2.7", "model_id": "CanCM4", "forcing": "GHG,Oz,SA,BC,OC,LU,Sl,Vl (GHG includes CO2,CH4,N2O,CFC11,effective CFC12)", "parent_experiment_id": "N/A", "parent_experiment_rip": "N/A", 
"branch_time": 0.0, "contact": "cccma_info@ec.gc.ca", "references": "http://www.cccma.ec.gc.ca/models", "initialization_method": 1, "physics_version": 1, "tracking_id": "fac7bd83-dd7a-425b-b4dc-b5ab2e915939", "branch_time_YMDH": "2001:01:01:00", "CCCma_runid": "DHFP1B_E002_I2001_M01", "CCCma_parent_runid": "DHFP1_E002", "CCCma_data_licence": "1) GRANT OF LICENCE - The Government of Canada (Environment Canada) is the \\nowner of all intellectual property rights (including copyright) that may exist in this Data \\nproduct. You (as \\"The Licensee\\") are hereby granted a non-exclusive, non-assignable, \\nnon-transferable unrestricted licence to use this data product for any purpose including \\nthe right to share these data with others and to make value-added and derivative \\nproducts from it. This licence is not a sale of any or all of the owner\'s rights.\\n2) NO WARRANTY - This Data product is provided \\"as-is\\"; it has not been designed or \\nprepared to meet the Licensee\'s particular requirements. Environment Canada makes no \\nwarranty, either express or implied, including but not limited to, warranties of \\nmerchantability and fitness for a particular purpose. 
In no event will Environment Canada \\nbe liable for any indirect, special, consequential or other damages attributed to the \\nLicensee\'s use of the Data product.", "product": "output", "experiment": "10- or 30-year run initialized in year 2000", "frequency": "day", "creation_date": "2011-05-08T01:01:51Z", "history": "2011-05-08T01:01:51Z CMOR rewrote data to comply with CF standards and CMIP5 requirements.", "Conventions": "CF-1.4", "project_id": "CMIP5", "table_id": "Table day (28 March 2011) f9d6cfec5981bb8be1801b35a81002f0", "title": "CanCM4 model output prepared for CMIP5 10- or 30-year run initialized in year 2000", "parent_experiment": "N/A", "modeling_realm": "atmos", "realization": 2, "cmor_version": "2.5.4"}' + self.assertEqual(ds.__dict__[AbstractIcclimFunction._global_attribute_source_name], actual) + # load the original source attributes from the JSON string json.loads(ds.__dict__[AbstractIcclimFunction._global_attribute_source_name]) - self.assertEqual(dict(var.__dict__),{'_FillValue':np.float32(1e20),u'units': u'K', 'grid_mapping': 'latitude_longitude', u'standard_name': AbstractIcclimFunction.standard_name, u'long_name': u'Mean of daily mean temperature'}) + actual = {'_FillValue': np.float32(1e20), u'units': u'K', 'grid_mapping': 'latitude_longitude', + u'standard_name': AbstractIcclimFunction.standard_name, + u'long_name': u'Mean of daily mean temperature'} + self.assertEqual(dict(var.__dict__), + actual) def test_calculate(self): rd = self.test_data.get_rd('cancm4_tas') field = rd.get() - field = field[:,:,:,0:10,0:10] - klasses = [IcclimTG,IcclimTN,IcclimTX] + field = field[:, :, :, 0:10, 0:10] + klasses = [IcclimTG, IcclimTN, IcclimTX] for klass in klasses: - for calc_grouping in [['month'],['month','year']]: + for calc_grouping in [['month'], ['month', 'year']]: tgd = field.temporal.get_grouping(calc_grouping) - itg = klass(field=field,tgd=tgd) + itg = klass(field=field, tgd=tgd) ret_icclim = itg.execute() - mean = Mean(field=field,tgd=tgd) 
+ mean = Mean(field=field, tgd=tgd) ret_ocgis = mean.execute() - self.assertNumpyAll(ret_icclim[klass.key].value, - ret_ocgis['mean'].value) + self.assertNumpyAll(ret_icclim[klass.key].value, ret_ocgis['mean'].value) class TestSU(TestBase): - def test_calculate(self): rd = self.test_data.get_rd('cancm4_tasmax_2011') field = rd.get() - field = field[:,:,:,0:10,0:10] - for calc_grouping in [['month'],['month','year']]: + field = field[:, :, :, 0:10, 0:10] + for calc_grouping in [['month'], ['month', 'year']]: tgd = field.temporal.get_grouping(calc_grouping) - itg = IcclimSU(field=field,tgd=tgd) + itg = IcclimSU(field=field, tgd=tgd) ret_icclim = itg.execute() - threshold = Threshold(field=field,tgd=tgd,parms={'threshold':298.15,'operation':'gt'}) + threshold = Threshold(field=field, tgd=tgd, parms={'threshold': 298.15, 'operation': 'gt'}) ret_ocgis = threshold.execute() - self.assertNumpyAll(ret_icclim['icclim_SU'].value,ret_ocgis['threshold'].value) - + self.assertNumpyAll(ret_icclim['icclim_SU'].value, ret_ocgis['threshold'].value) + def test_calculation_operations_bad_units(self): rd = self.test_data.get_rd('daymet_tmax') - calc_icclim = [{'func':'icclim_SU','name':'SU'}] - ops_icclim = OcgOperations(calc=calc_icclim,calc_grouping=['month','year'],dataset=rd) + calc_icclim = [{'func': 'icclim_SU', 'name': 'SU'}] + ops_icclim = OcgOperations(calc=calc_icclim, calc_grouping=['month', 'year'], dataset=rd) with self.assertRaises(UnitsValidationError): ops_icclim.execute() - + def test_calculation_operations_to_nc(self): rd = self.test_data.get_rd('cancm4_tasmax_2011') slc = [None, None, None, [0, 10], [0, 10]] @@ -408,61 +437,55 @@ def test_calculation_operations_to_nc(self): history = to_test.pop('history') self.assertEqual(history[111:187], ' Calculation of SU indice (monthly climatology) from 2011-1-1 to 2020-12-31.') - self.assertDictEqual(to_test, OrderedDict([(u'source_data_global_attributes', - u'{"institution": "CCCma (Canadian Centre for Climate Modelling and 
Analysis, Victoria, BC, Canada)", "institute_id": "CCCma", "experiment_id": "decadal2010", "source": "CanCM4 2010 atmosphere: CanAM4 (AGCM15i, T63L35) ocean: CanOM4 (OGCM4.0, 256x192L40) sea ice: CanSIM1 (Cavitating Fluid, T63 Gaussian Grid) land: CLASS2.7", "model_id": "CanCM4", "forcing": "GHG,Oz,SA,BC,OC,LU,Sl,Vl (GHG includes CO2,CH4,N2O,CFC11,effective CFC12)", "parent_experiment_id": "N/A", "parent_experiment_rip": "N/A", "branch_time": 0.0, "contact": "cccma_info@ec.gc.ca", "references": "http://www.cccma.ec.gc.ca/models", "initialization_method": 1, "physics_version": 1, "tracking_id": "64384802-3f0f-4ab4-b569-697bd5430854", "branch_time_YMDH": "2011:01:01:00", "CCCma_runid": "DHFP1B_E002_I2011_M01", "CCCma_parent_runid": "DHFP1_E002", "CCCma_data_licence": "1) GRANT OF LICENCE - The Government of Canada (Environment Canada) is the \\nowner of all intellectual property rights (including copyright) that may exist in this Data \\nproduct. You (as \\"The Licensee\\") are hereby granted a non-exclusive, non-assignable, \\nnon-transferable unrestricted licence to use this data product for any purpose including \\nthe right to share these data with others and to make value-added and derivative \\nproducts from it. This licence is not a sale of any or all of the owner\'s rights.\\n2) NO WARRANTY - This Data product is provided \\"as-is\\"; it has not been designed or \\nprepared to meet the Licensee\'s particular requirements. Environment Canada makes no \\nwarranty, either express or implied, including but not limited to, warranties of \\nmerchantability and fitness for a particular purpose. 
In no event will Environment Canada \\nbe liable for any indirect, special, consequential or other damages attributed to the \\nLicensee\'s use of the Data product.", "product": "output", "experiment": "10- or 30-year run initialized in year 2010", "frequency": "day", "creation_date": "2012-03-28T15:32:08Z", "history": "2012-03-28T15:32:08Z CMOR rewrote data to comply with CF standards and CMIP5 requirements.", "Conventions": "CF-1.4", "project_id": "CMIP5", "table_id": "Table day (28 March 2011) f9d6cfec5981bb8be1801b35a81002f0", "title": "CanCM4 model output prepared for CMIP5 10- or 30-year run initialized in year 2010", "parent_experiment": "N/A", "modeling_realm": "atmos", "realization": 2, "cmor_version": "2.8.0"}'), - (u'title', u'ECA heat indice SU'), ( - u'references', u'ATBD of the ECA indices calculation (http://eca.knmi.nl/documents/atbd.pdf)'), ( - u'institution', - u'Climate impact portal (http://climate4impact.eu)'), - (u'comment', u' ')])) + actual = OrderedDict([(u'source_data_global_attributes', + u'{"institution": "CCCma (Canadian Centre for Climate Modelling and Analysis, Victoria, BC, Canada)", "institute_id": "CCCma", "experiment_id": "decadal2010", "source": "CanCM4 2010 atmosphere: CanAM4 (AGCM15i, T63L35) ocean: CanOM4 (OGCM4.0, 256x192L40) sea ice: CanSIM1 (Cavitating Fluid, T63 Gaussian Grid) land: CLASS2.7", "model_id": "CanCM4", "forcing": "GHG,Oz,SA,BC,OC,LU,Sl,Vl (GHG includes CO2,CH4,N2O,CFC11,effective CFC12)", "parent_experiment_id": "N/A", "parent_experiment_rip": "N/A", "branch_time": 0.0, "contact": "cccma_info@ec.gc.ca", "references": "http://www.cccma.ec.gc.ca/models", "initialization_method": 1, "physics_version": 1, "tracking_id": "64384802-3f0f-4ab4-b569-697bd5430854", "branch_time_YMDH": "2011:01:01:00", "CCCma_runid": "DHFP1B_E002_I2011_M01", "CCCma_parent_runid": "DHFP1_E002", "CCCma_data_licence": "1) GRANT OF LICENCE - The Government of Canada (Environment Canada) is the \\nowner of all intellectual property rights 
(including copyright) that may exist in this Data \\nproduct. You (as \\"The Licensee\\") are hereby granted a non-exclusive, non-assignable, \\nnon-transferable unrestricted licence to use this data product for any purpose including \\nthe right to share these data with others and to make value-added and derivative \\nproducts from it. This licence is not a sale of any or all of the owner\'s rights.\\n2) NO WARRANTY - This Data product is provided \\"as-is\\"; it has not been designed or \\nprepared to meet the Licensee\'s particular requirements. Environment Canada makes no \\nwarranty, either express or implied, including but not limited to, warranties of \\nmerchantability and fitness for a particular purpose. In no event will Environment Canada \\nbe liable for any indirect, special, consequential or other damages attributed to the \\nLicensee\'s use of the Data product.", "product": "output", "experiment": "10- or 30-year run initialized in year 2010", "frequency": "day", "creation_date": "2012-03-28T15:32:08Z", "history": "2012-03-28T15:32:08Z CMOR rewrote data to comply with CF standards and CMIP5 requirements.", "Conventions": "CF-1.4", "project_id": "CMIP5", "table_id": "Table day (28 March 2011) f9d6cfec5981bb8be1801b35a81002f0", "title": "CanCM4 model output prepared for CMIP5 10- or 30-year run initialized in year 2010", "parent_experiment": "N/A", "modeling_realm": "atmos", "realization": 2, "cmor_version": "2.8.0"}'), + (u'title', u'ECA heat indice SU'), ( + u'references', u'ATBD of the ECA indices calculation (http://eca.knmi.nl/documents/atbd.pdf)'), + (u'institution', u'Climate impact portal (http://climate4impact.eu)'), + (u'comment', u' ')]) + self.assertDictEqual(to_test, actual) var = ds.variables['SU'] to_test = dict(var.__dict__) self.assertEqual(to_test, {'_FillValue': 999999, u'units': u'days', u'standard_name': AbstractIcclimFunction.standard_name, u'long_name': 'Summer days (number of days where daily maximum temperature > 25 degrees)', 
'grid_mapping': 'latitude_longitude'}) - + @attr('remote') def test_calculate_opendap(self): - ## test against an opendap target ensuring icclim and ocgis operations - ## are equivalent in the netcdf output + # test against an opendap target ensuring icclim and ocgis operations are equivalent in the netcdf output url = 'http://opendap.nmdc.eu/knmi/thredds/dodsC/IS-ENES/TESTSETS/tasmax_day_EC-EARTH_rcp26_r8i1p1_20760101-21001231.nc' calc_grouping = ['month'] - rd = ocgis.RequestDataset(uri=url,variable='tasmax') - - calc_icclim = [{'func':'icclim_SU','name':'SU'}] - ops = ocgis.OcgOperations(dataset=rd,calc=calc_icclim,calc_grouping=calc_grouping, - output_format='nc',geom='state_boundaries',select_ugid=[10], + rd = ocgis.RequestDataset(uri=url, variable='tasmax') + + calc_icclim = [{'func': 'icclim_SU', 'name': 'SU'}] + ops = ocgis.OcgOperations(dataset=rd, calc=calc_icclim, calc_grouping=calc_grouping, + output_format='nc', geom='state_boundaries', select_ugid=[10], prefix='icclim') ret_icclim = ops.execute() - - calc_ocgis = [{'func':'threshold','name':'SU','kwds':{'threshold':298.15,'operation':'gt'}}] - ops = ocgis.OcgOperations(dataset=rd,calc=calc_ocgis,calc_grouping=calc_grouping, - output_format='nc',geom='state_boundaries',select_ugid=[10], + + calc_ocgis = [{'func': 'threshold', 'name': 'SU', 'kwds': {'threshold': 298.15, 'operation': 'gt'}}] + ops = ocgis.OcgOperations(dataset=rd, calc=calc_ocgis, calc_grouping=calc_grouping, + output_format='nc', geom='state_boundaries', select_ugid=[10], prefix='ocgis') ret_ocgis = ops.execute() - - ## variable and datasets will have different attributes, so adjust those - ## before testing if the netCDFs are equal... - with nc_scope(ret_icclim,'r') as ds_icclim: - with nc_scope(ret_ocgis,'a') as ds_ocgis: - ## strip the current attributes + + # variable and datasets will have different attributes, so adjust those before testing if the netCDFs are + # equal... 
+ with nc_scope(ret_icclim, 'r') as ds_icclim: + with nc_scope(ret_ocgis, 'a') as ds_ocgis: + # strip the current attributes for key in ds_ocgis.ncattrs(): ds_ocgis.delncattr(key) for key in ds_ocgis.variables['SU'].ncattrs(): if not key.startswith('_'): ds_ocgis.variables['SU'].delncattr(key) - ## make equivalent attributes + # make equivalent attributes for key in ds_icclim.ncattrs(): - setattr(ds_ocgis,key,getattr(ds_icclim,key)) - ## update the target variable attributes - for key,value in ds_icclim.variables['SU'].__dict__.iteritems(): + setattr(ds_ocgis, key, getattr(ds_icclim, key)) + # update the target variable attributes + for key, value in ds_icclim.variables['SU'].__dict__.iteritems(): if not key.startswith('_'): - setattr(ds_ocgis.variables['SU'],key,value) - - self.assertNcEqual(ret_icclim,ret_ocgis) - + setattr(ds_ocgis.variables['SU'], key, value) -if __name__ == "__main__": - #import sys;sys.argv = ['', 'Test.testName'] - unittest.main() \ No newline at end of file + self.assertNcEqual(ret_icclim, ret_ocgis) diff --git a/src/ocgis/test/test_simple/test_optional_dependencies.py b/src/ocgis/test/test_simple/test_optional_dependencies.py index 4a4ec16cc..d9c320dd4 100644 --- a/src/ocgis/test/test_simple/test_optional_dependencies.py +++ b/src/ocgis/test/test_simple/test_optional_dependencies.py @@ -1,5 +1,7 @@ from copy import deepcopy + from shapely.geometry import Point + from ocgis import RequestDataset, OcgOperations from ocgis.test.test_simple.make_test_data import SimpleNcNoLevel from ocgis.test.test_simple.test_simple import TestSimpleBase @@ -20,12 +22,17 @@ def test_esmf(self): rd2 = deepcopy(rd1) ops = OcgOperations(dataset=rd1, regrid_destination=rd2, output_format='nc') ret = ops.execute() - ignore_attributes = {'time_bnds': ['units', 'calendar'], - 'global': ['history'], - 'foo': ['grid_mapping']} + ignore_attributes = {'time_bnds': ['units', 'calendar'], 'global': ['history'], 'foo': ['grid_mapping']} ignore_variables = 
['latitude_longitude'] self.assertNcEqual(ret, rd1.uri, ignore_attributes=ignore_attributes, ignore_variables=ignore_variables) + def test_icclim(self): + rd = RequestDataset(**self.get_dataset()) + calc = [{'func': 'icclim_TG', 'name': 'TG'}] + calc_grouping = ['month', 'year'] + ret = OcgOperations(dataset=rd, calc=calc, calc_grouping=calc_grouping).execute() + self.assertEqual(ret[1]['foo'].variables['TG'].value.mean(), 2.5) + def test_rtree(self): from ocgis.util.spatial.index import SpatialIndex From 19ea39c4370e15adc01542fb7e9eda44384e453f Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Thu, 29 Jan 2015 11:34:48 -0700 Subject: [PATCH 64/71] support seasonal aggregations through icclim #354 Fill value parametrization changed in ICCLIM for multivariate calculations. The way these functions are called was adjusted to provide fill values separately for both variables. --- src/ocgis/contrib/library_icclim.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/ocgis/contrib/library_icclim.py b/src/ocgis/contrib/library_icclim.py index d151cc368..57b37ecc1 100644 --- a/src/ocgis/contrib/library_icclim.py +++ b/src/ocgis/contrib/library_icclim.py @@ -527,7 +527,7 @@ def _calculate_(self, tas=None, pr=None, tas_percentile_dict=None, pr_percentile self._storage_percentile_dict['pr'] = pr_percentile_dict ret = _icclim_function_map[self.key]['func'](tas, tas_percentile_dict, pr, pr_percentile_dict, dt_arr, - fill_val=tas.fill_value) + fill_val1=tas.fill_value, fill_val2=pr.fill_value) # convert output to a masked array ret_mask = ret == tas.fill_value ret = np.ma.array(ret, mask=ret_mask, fill_value=tas.fill_value) From 0d4e3b4a28061f53c35bfd5c7c8d7c50b689a3de Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Mon, 2 Feb 2015 13:23:38 -0700 Subject: [PATCH 65/71] added fixes for long running tests --- doc/install.rst | 1 + src/ocgis/api/subset.py | 7 +- src/ocgis/interface/base/dimension/spatial.py | 18 +- .../test_request/test_driver/test_nc.py | 405 
+++++++++++------- .../test/test_ocgis/test_api/test_subset.py | 33 +- .../test_contrib/test_library_icclim.py | 54 ++- .../test/test_real_data/test_combinatorial.py | 3 + .../test/test_unfiled/test_remote_data.py | 2 +- 8 files changed, 349 insertions(+), 174 deletions(-) diff --git a/doc/install.rst b/doc/install.rst index ac30bfeea..cf9fb0c07 100644 --- a/doc/install.rst +++ b/doc/install.rst @@ -12,6 +12,7 @@ Package Name Version URL ============== ======= ======================================================================= Python 2.7.6 http://www.python.org/download/releases/2.7.6/ ``osgeo`` 1.11.1 https://pypi.python.org/pypi/GDAL/ +``setuptools`` 12.0.5 https://pypi.python.org/pypi/setuptools ``shapely`` 1.4.3 https://pypi.python.org/pypi/Shapely ``fiona`` 1.4.5 https://pypi.python.org/pypi/Fiona ``numpy`` 1.8.2 http://sourceforge.net/projects/numpy/files/NumPy/1.8.2/ diff --git a/src/ocgis/api/subset.py b/src/ocgis/api/subset.py index 5285e9e31..e2c32e2aa 100644 --- a/src/ocgis/api/subset.py +++ b/src/ocgis/api/subset.py @@ -590,9 +590,7 @@ def _get_regridded_field_with_subset_(self, sfield, subset_sdim_for_regridding=N def _process_geometries_(self, itr, field, headers, value_keys, alias): """ - :param sequence itr: Contains geometry dictionaries to process. If there - are no geometries to process, this will be a sequence of one element with - an empty dictionary. + :param sequence itr: An iterator yielding :class:`~ocgis.SpatialDimension` objects. :param :class:`ocgis.interface.Field` field: The field object to use for operations. 
:param sequence headers: Sequence of strings to use as headers for the @@ -667,7 +665,7 @@ def _process_geometries_(self, itr, field, headers, value_keys, alias): # if empty returns are allowed, there be an empty field if sfield is not None: - # # aggregate if requested + # aggregate if requested if self.ops.aggregate: ocgis_lh('executing spatial average', self._subset_log, alias=alias, ugid=subset_ugid) sfield = sfield.get_spatially_aggregated(new_spatial_uid=subset_ugid) @@ -699,6 +697,7 @@ def _process_geometries_(self, itr, field, headers, value_keys, alias): if subset_sdim is not None and subset_sdim.crs != self.ops.output_crs: subset_sdim.update_crs(self.ops.output_crs) # update the subset field CRS + sfield.spatial = deepcopy(sfield.spatial) sfield.spatial.update_crs(self.ops.output_crs) # use the field's alias if it is provided. otherwise, let it be automatically assigned diff --git a/src/ocgis/interface/base/dimension/spatial.py b/src/ocgis/interface/base/dimension/spatial.py index 609316329..4d9ddf8b1 100644 --- a/src/ocgis/interface/base/dimension/spatial.py +++ b/src/ocgis/interface/base/dimension/spatial.py @@ -591,18 +591,18 @@ def wrap(self): def write_fiona(self, path, target='polygon', driver='ESRI Shapefile'): attr = getattr(self.geom, target) attr.write_fiona(path, self.crs.value, driver=driver) - return (path) + return path def _format_uid_(self, value): - return (np.atleast_2d(value)) + return np.atleast_2d(value) def _get_sliced_properties_(self, slc): if self.properties is not None: # # determine major axis major = self.shape.index(max(self.shape)) - return (self.properties[slc[major]]) + return self.properties[slc[major]] else: - return (None) + return None def _get_uid_(self): if self._geom is not None: @@ -690,7 +690,7 @@ def __getitem__(self, slc): ret.row = row ret.col = col - return (ret) + return ret @property def corners(self): @@ -793,7 +793,7 @@ def resolution(self): rows = np.mean(np.diff(r_value[0, :, :], axis=0)) cols = 
np.mean(np.diff(r_value[1, :, :], axis=1)) ret = np.mean([rows, cols]) - return (ret) + return ret @property def shape(self): @@ -875,7 +875,7 @@ def get_subset_bbox(self, min_col, min_row, max_col, max_row, return_indices=Fal if return_indices: ret = (ret, (row_slc, col_slc)) - return (ret) + return ret def set_extrapolated_corners(self): """ @@ -1036,7 +1036,7 @@ def shape(self): ret = self.polygon.shape else: ret = self.point.shape - return (ret) + return ret def get_highest_order_abstraction(self): """ @@ -1209,7 +1209,7 @@ def write_fiona(self, path, crs, driver='ESRI Shapefile'): feature = {'properties': {'UGID': uid}, 'geometry': mapping(geom)} f.write(feature) - return (path) + return path @staticmethod def _write_fiona_prep_geom_(geom): diff --git a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py index b99561053..7b406b639 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py +++ b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py @@ -5,13 +5,13 @@ import netCDF4 as nc from collections import OrderedDict import numpy as np +from datetime import datetime as dt +import datetime from cfunits import Units import fiona from shapely.geometry.geo import shape -from datetime import datetime as dt -import datetime from ocgis.interface.nc.spatial import NcSpatialGridDimension from ocgis.interface.base.dimension.base import VectorDimension from ocgis import constants @@ -29,37 +29,36 @@ class TestDriverNetcdf(TestBase): - def get_2d_state_boundaries(self): geoms = [] build = True sc = ShpCabinet() path = sc.get_shp_path('state_boundaries') - with fiona.open(path,'r') as source: - for ii,row in enumerate(source): + with fiona.open(path, 'r') as source: + for ii, row in enumerate(source): if build: nrows = len(source) dtype = [] - for k,v in source.schema['properties'].iteritems(): + for k, v in 
source.schema['properties'].iteritems(): if v.startswith('str'): v = str('|S{0}'.format(v.split(':')[1])) else: - v = getattr(np,v.split(':')[0]) - dtype.append((str(k),v)) - fill = np.empty(nrows,dtype=dtype) + v = getattr(np, v.split(':')[0]) + dtype.append((str(k), v)) + fill = np.empty(nrows, dtype=dtype) ref_names = fill.dtype.names build = False fill[ii] = tuple([row['properties'][n] for n in ref_names]) geoms.append(shape(row['geometry'])) geoms = np.atleast_2d(geoms) - return(geoms,fill) + return geoms, fill def get_2d_state_boundaries_sdim(self): - geoms,attrs = self.get_2d_state_boundaries() + geoms, attrs = self.get_2d_state_boundaries() poly = SpatialGeometryPolygonDimension(value=geoms) geom = SpatialGeometryDimension(polygon=poly) - sdim = SpatialDimension(geom=geom,properties=attrs,crs=WGS84()) - return(sdim) + sdim = SpatialDimension(geom=geom, properties=attrs, crs=WGS84()) + return sdim def test_get_dimensioned_variables_one_variable_in_target_dataset(self): uri = self.test_data.get_uri('cancm4_tas') @@ -85,7 +84,7 @@ def test_get_dimensioned_variables_two_variables_in_target_dataset(self): def test_get_field(self): ref_test = self.test_data['cancm4_tas'] uri = self.test_data.get_uri('cancm4_tas') - rd = RequestDataset(variable=ref_test['variable'],uri=uri) + rd = RequestDataset(variable=ref_test['variable'], uri=uri) field = rd.get() self.assertIsInstance(field.spatial.grid, NcSpatialGridDimension) @@ -110,25 +109,25 @@ def test_get_field(self): self.assertEqual(len(field.spatial.grid.row.attrs), 5) self.assertEqual(len(field.spatial.grid.col.attrs), 5) - ds = nc.Dataset(uri,'r') + ds = nc.Dataset(uri, 'r') - self.assertEqual(field.level,None) - self.assertEqual(field.spatial.crs,WGS84()) + self.assertEqual(field.level, None) + self.assertEqual(field.spatial.crs, WGS84()) tv = field.temporal.value test_tv = ds.variables['time'][:] - self.assertNumpyAll(tv,test_tv) - self.assertNumpyAll(field.temporal.bounds,ds.variables['time_bnds'][:]) + 
self.assertNumpyAll(tv, test_tv) + self.assertNumpyAll(field.temporal.bounds, ds.variables['time_bnds'][:]) tdt = field.temporal.value_datetime - self.assertEqual(tdt[4],dt(2001,1,5,12)) - self.assertNumpyAll(field.temporal.bounds_datetime[1001],np.array([dt(2003,9,29),dt(2003,9,30)])) + self.assertEqual(tdt[4], dt(2001, 1, 5, 12)) + self.assertNumpyAll(field.temporal.bounds_datetime[1001], np.array([dt(2003, 9, 29), dt(2003, 9, 30)])) rv = field.temporal.value_datetime[100] rb = field.temporal.bounds_datetime[100] - self.assertTrue(all([rv > rb[0],rv < rb[1]])) + self.assertTrue(all([rv > rb[0], rv < rb[1]])) - self.assertEqual(field.temporal.extent_datetime,(datetime.datetime(2001,1,1),datetime.datetime(2011,1,1))) + self.assertEqual(field.temporal.extent_datetime, (datetime.datetime(2001, 1, 1), datetime.datetime(2011, 1, 1))) ds.close() @@ -183,31 +182,30 @@ def test_get_field_dtype_on_dimensions(self): with nc_scope(rd.uri) as ds: test_dtype_temporal = ds.variables['time'].dtype test_dtype_value = ds.variables['tas'].dtype - self.assertEqual(field.temporal.dtype,test_dtype_temporal) - self.assertEqual(field.variables['tas'].dtype,test_dtype_value) - self.assertEqual(field.temporal.dtype,np.float64) + self.assertEqual(field.temporal.dtype, test_dtype_temporal) + self.assertEqual(field.variables['tas'].dtype, test_dtype_value) + self.assertEqual(field.temporal.dtype, np.float64) def test_get_field_dtype_fill_value(self): rd = self.test_data.get_rd('cancm4_tas') field = rd.get() - ## dtype and fill_value should be read from metadata. when accessed they - ## should not load the value. - self.assertEqual(field.variables['tas'].dtype,np.float32) - self.assertEqual(field.variables['tas'].fill_value,np.float32(1e20)) - self.assertEqual(field.variables['tas']._value,None) + # dtype and fill_value should be read from metadata. when accessed they should not load the value. 
+ self.assertEqual(field.variables['tas'].dtype, np.float32) + self.assertEqual(field.variables['tas'].fill_value, np.float32(1e20)) + self.assertEqual(field.variables['tas']._value, None) def test_get_field_datetime_slicing(self): ref_test = self.test_data['cancm4_tas'] uri = self.test_data.get_uri('cancm4_tas') - rd = RequestDataset(variable=ref_test['variable'],uri=uri) + rd = RequestDataset(variable=ref_test['variable'], uri=uri) field = rd.get() field.temporal.value_datetime field.temporal.bounds_datetime - slced = field[:,239,:,:,:] - self.assertEqual(slced.temporal.value_datetime,np.array([dt(2001,8,28,12)])) - self.assertNumpyAll(slced.temporal.bounds_datetime,np.array([dt(2001,8,28),dt(2001,8,29)]).reshape(1, 2)) + slced = field[:, 239, :, :, :] + self.assertEqual(slced.temporal.value_datetime, np.array([dt(2001, 8, 28, 12)])) + self.assertNumpyAll(slced.temporal.bounds_datetime, np.array([dt(2001, 8, 28), dt(2001, 8, 29)]).reshape(1, 2)) def test_get_field_units_read_from_file(self): rd = self.test_data.get_rd('cancm4_tas') @@ -217,92 +215,92 @@ def test_get_field_units_read_from_file(self): def test_get_field_value_datetime_after_slicing(self): ref_test = self.test_data['cancm4_tas'] uri = self.test_data.get_uri('cancm4_tas') - rd = RequestDataset(variable=ref_test['variable'],uri=uri) + rd = RequestDataset(variable=ref_test['variable'], uri=uri) field = rd.get() - slced = field[:,10:130,:,4:7,100:37] - self.assertEqual(slced.temporal.value_datetime.shape,(120,)) + slced = field[:, 10:130, :, 4:7, 100:37] + self.assertEqual(slced.temporal.value_datetime.shape, (120,)) def test_get_field_bounds_datetime_after_slicing(self): ref_test = self.test_data['cancm4_tas'] uri = self.test_data.get_uri('cancm4_tas') - rd = RequestDataset(variable=ref_test['variable'],uri=uri) + rd = RequestDataset(variable=ref_test['variable'], uri=uri) field = rd.get() - slced = field[:,10:130,:,4:7,100:37] - self.assertEqual(slced.temporal.bounds_datetime.shape,(120,2)) + slced = 
field[:, 10:130, :, 4:7, 100:37] + self.assertEqual(slced.temporal.bounds_datetime.shape, (120, 2)) def test_get_field_slice(self): ref_test = self.test_data['cancm4_tas'] uri = self.test_data.get_uri('cancm4_tas') - rd = RequestDataset(variable=ref_test['variable'],uri=uri) + rd = RequestDataset(variable=ref_test['variable'], uri=uri) field = rd.get() - ds = nc.Dataset(uri,'r') - - slced = field[:,56:345,:,:,:] - self.assertNumpyAll(slced.temporal.value,ds.variables['time'][56:345]) - self.assertNumpyAll(slced.temporal.bounds,ds.variables['time_bnds'][56:345,:]) - to_test = ds.variables['tas'][56:345,:,:] - to_test = np.ma.array(to_test.reshape(1,289,1,64,128),mask=False) - self.assertNumpyAll(slced.variables['tas'].value,to_test) - - slced = field[:,2898,:,5,101] - to_test = ds.variables['tas'][2898,5,101] - to_test = np.ma.array(to_test.reshape(1,1,1,1,1),mask=False) + ds = nc.Dataset(uri, 'r') + + slced = field[:, 56:345, :, :, :] + self.assertNumpyAll(slced.temporal.value, ds.variables['time'][56:345]) + self.assertNumpyAll(slced.temporal.bounds, ds.variables['time_bnds'][56:345, :]) + to_test = ds.variables['tas'][56:345, :, :] + to_test = np.ma.array(to_test.reshape(1, 289, 1, 64, 128), mask=False) + self.assertNumpyAll(slced.variables['tas'].value, to_test) + + slced = field[:, 2898, :, 5, 101] + to_test = ds.variables['tas'][2898, 5, 101] + to_test = np.ma.array(to_test.reshape(1, 1, 1, 1, 1), mask=False) with self.assertRaises(AttributeError): slced.variables['tas']._field._value - self.assertNumpyAll(slced.variables['tas'].value,to_test) + self.assertNumpyAll(slced.variables['tas'].value, to_test) ds.close() def test_get_field_time_range(self): ref_test = self.test_data['cancm4_tas'] uri = self.test_data.get_uri('cancm4_tas') - rd = RequestDataset(variable=ref_test['variable'],uri=uri,time_range=[dt(2005,2,15),dt(2007,4,18)]) + rd = RequestDataset(variable=ref_test['variable'], uri=uri, time_range=[dt(2005, 2, 15), dt(2007, 4, 18)]) field = rd.get() - 
self.assertEqual(field.temporal.value_datetime[0],dt(2005, 2, 15, 12, 0)) - self.assertEqual(field.temporal.value_datetime[-1],dt(2007, 4, 18, 12, 0)) - self.assertEqual(field.shape,(1,793,1,64,128)) + self.assertEqual(field.temporal.value_datetime[0], dt(2005, 2, 15, 12, 0)) + self.assertEqual(field.temporal.value_datetime[-1], dt(2007, 4, 18, 12, 0)) + self.assertEqual(field.shape, (1, 793, 1, 64, 128)) def test_get_field_time_region(self): ref_test = self.test_data['cancm4_tas'] uri = self.test_data.get_uri('cancm4_tas') - ds = nc.Dataset(uri,'r') - rd = RequestDataset(variable=ref_test['variable'],uri=uri,time_region={'month':[8]}) + ds = nc.Dataset(uri, 'r') + rd = RequestDataset(variable=ref_test['variable'], uri=uri, time_region={'month': [8]}) field = rd.get() - self.assertEqual(field.shape,(1,310,1,64,128)) + self.assertEqual(field.shape, (1, 310, 1, 64, 128)) var = ds.variables['time'] - real_temporal = nc.num2date(var[:],var.units,var.calendar) + real_temporal = nc.num2date(var[:], var.units, var.calendar) select = [True if x.month == 8 else False for x in real_temporal] - indices = np.arange(0,var.shape[0],dtype=constants.NP_INT)[np.array(select)] - self.assertNumpyAll(indices,field.temporal._src_idx) - self.assertNumpyAll(field.temporal.value_datetime,real_temporal[indices]) - self.assertNumpyAll(field.variables['tas'].value.data.squeeze(),ds.variables['tas'][indices,:,:]) + indices = np.arange(0, var.shape[0], dtype=constants.NP_INT)[np.array(select)] + self.assertNumpyAll(indices, field.temporal._src_idx) + self.assertNumpyAll(field.temporal.value_datetime, real_temporal[indices]) + self.assertNumpyAll(field.variables['tas'].value.data.squeeze(), ds.variables['tas'][indices, :, :]) - bounds_temporal = nc.num2date(ds.variables['time_bnds'][indices,:],var.units,var.calendar) - self.assertNumpyAll(bounds_temporal,field.temporal.bounds_datetime) + bounds_temporal = nc.num2date(ds.variables['time_bnds'][indices, :], var.units, var.calendar) + 
self.assertNumpyAll(bounds_temporal, field.temporal.bounds_datetime) ds.close() def test_get_field_time_region_with_years(self): ref_test = self.test_data['cancm4_tas'] uri = self.test_data.get_uri('cancm4_tas') - ds = nc.Dataset(uri,'r') - rd = RequestDataset(variable=ref_test['variable'],uri=uri,time_region={'month':[8],'year':[2008,2010]}) + ds = nc.Dataset(uri, 'r') + rd = RequestDataset(variable=ref_test['variable'], uri=uri, time_region={'month': [8], 'year': [2008, 2010]}) field = rd.get() - self.assertEqual(field.shape,(1,62,1,64,128)) + self.assertEqual(field.shape, (1, 62, 1, 64, 128)) var = ds.variables['time'] - real_temporal = nc.num2date(var[:],var.units,var.calendar) - select = [True if x.month == 8 and x.year in [2008,2010] else False for x in real_temporal] - indices = np.arange(0,var.shape[0],dtype=constants.NP_INT)[np.array(select)] - self.assertNumpyAll(indices,field.temporal._src_idx) - self.assertNumpyAll(field.temporal.value_datetime,real_temporal[indices]) - self.assertNumpyAll(field.variables['tas'].value.data.squeeze(),ds.variables['tas'][indices,:,:]) + real_temporal = nc.num2date(var[:], var.units, var.calendar) + select = [True if x.month == 8 and x.year in [2008, 2010] else False for x in real_temporal] + indices = np.arange(0, var.shape[0], dtype=constants.NP_INT)[np.array(select)] + self.assertNumpyAll(indices, field.temporal._src_idx) + self.assertNumpyAll(field.temporal.value_datetime, real_temporal[indices]) + self.assertNumpyAll(field.variables['tas'].value.data.squeeze(), ds.variables['tas'][indices, :, :]) - bounds_temporal = nc.num2date(ds.variables['time_bnds'][indices,:],var.units,var.calendar) - self.assertNumpyAll(bounds_temporal,field.temporal.bounds_datetime) + bounds_temporal = nc.num2date(ds.variables['time_bnds'][indices, :], var.units, var.calendar) + self.assertNumpyAll(bounds_temporal, field.temporal.bounds_datetime) ds.close() @@ -311,134 +309,137 @@ def test_get_field_geometry_subset(self): uri = 
self.test_data.get_uri('cancm4_tas') states = self.get_2d_state_boundaries_sdim() - ca = states[:,states.properties['STATE_NAME'] == 'California'] + ca = states[:, states.properties['STATE_NAME'] == 'California'] self.assertTrue(ca.properties['STATE_NAME'] == 'California') ca.crs.unwrap(ca) - ca = ca.geom.polygon.value[0,0] + ca = ca.geom.polygon.value[0, 0] - for u in [True,False]: - rd = RequestDataset(variable=ref_test['variable'],uri=uri,alias='foo') + for u in [True, False]: + rd = RequestDataset(variable=ref_test['variable'], uri=uri, alias='foo') field = rd.get() - ca_sub = field.get_intersects(ca,use_spatial_index=u) - self.assertEqual(ca_sub.shape,(1, 3650, 1, 5, 4)) + ca_sub = field.get_intersects(ca, use_spatial_index=u) + self.assertEqual(ca_sub.shape, (1, 3650, 1, 5, 4)) self.assertTrue(ca_sub.variables['foo'].value.mask.any()) self.assertFalse(field.spatial.uid.mask.any()) self.assertFalse(field.spatial.get_mask().any()) - ca_sub = field.get_intersects(ca.envelope,use_spatial_index=u) - self.assertEqual(ca_sub.shape,(1, 3650, 1, 5, 4)) + ca_sub = field.get_intersects(ca.envelope, use_spatial_index=u) + self.assertEqual(ca_sub.shape, (1, 3650, 1, 5, 4)) self.assertFalse(ca_sub.variables['foo'].value.mask.any()) - rd = RequestDataset(variable=ref_test['variable'],uri=uri,alias='foo',time_region={'year':[2007]}) + rd = RequestDataset(variable=ref_test['variable'], uri=uri, alias='foo', time_region={'year': [2007]}) field = rd.get() - ca_sub = field.get_intersects(ca,use_spatial_index=u) - self.assertEqual(ca_sub.shape,(1, 365, 1, 5, 4)) - self.assertEqual(set([2007]),set([d.year for d in ca_sub.temporal.value_datetime])) + ca_sub = field.get_intersects(ca, use_spatial_index=u) + self.assertEqual(ca_sub.shape, (1, 365, 1, 5, 4)) + self.assertEqual(set([2007]), set([d.year for d in ca_sub.temporal.value_datetime])) def test_get_field_time_region_slicing(self): ref_test = self.test_data['cancm4_tas'] uri = self.test_data.get_uri('cancm4_tas') - rd = 
RequestDataset(variable=ref_test['variable'],uri=uri,alias='foo', - time_region={'month':[1,10],'year':[2011,2013]}) + rd = RequestDataset(variable=ref_test['variable'], uri=uri, alias='foo', + time_region={'month': [1, 10], 'year': [2011, 2013]}) with self.assertRaises(EmptySubsetError): rd.get() - rd = RequestDataset(variable=ref_test['variable'],uri=uri,alias='foo', - time_region={'month':[1,10],'year':[2005,2007]}) + rd = RequestDataset(variable=ref_test['variable'], uri=uri, alias='foo', + time_region={'month': [1, 10], 'year': [2005, 2007]}) field = rd.get() - sub = field[:,:,:,50,75] - self.assertEqual(sub.shape,(1,124,1,1,1)) - self.assertEqual(sub.variables['foo'].value.shape,(1,124,1,1,1)) + sub = field[:, :, :, 50, 75] + self.assertEqual(sub.shape, (1, 124, 1, 1, 1)) + self.assertEqual(sub.variables['foo'].value.shape, (1, 124, 1, 1, 1)) field = rd.get() - sub = field[:,:,:,50,75:77] - sub2 = field[:,:,:,0,1] - self.assertEqual(sub2.shape,(1, 124, 1, 1, 1)) + sub = field[:, :, :, 50, 75:77] + sub2 = field[:, :, :, 0, 1] + self.assertEqual(sub2.shape, (1, 124, 1, 1, 1)) @attr('remote') def test_get_field_remote(self): uri = 'http://cida.usgs.gov/thredds/dodsC/maurer/maurer_brekke_w_meta.ncml' variable = 'sresa1b_bccr-bcm2-0_1_Tavg' - rd = RequestDataset(uri,variable,time_region={'month':[1,10],'year':[2011,2013]}) + rd = RequestDataset(uri, variable, time_region={'month': [1, 10], 'year': [2011, 2013]}) field = rd.get() field.variables['sresa1b_bccr-bcm2-0_1_Tavg'].value - values = field[:,:,:,50,75] + values = field[:, :, :, 50, 75] to_test = values.variables['sresa1b_bccr-bcm2-0_1_Tavg'].value.compressed() - ds = nc.Dataset('http://cida.usgs.gov/thredds/dodsC/maurer/maurer_brekke_w_meta.ncml','r') + ds = nc.Dataset('http://cida.usgs.gov/thredds/dodsC/maurer/maurer_brekke_w_meta.ncml', 'r') try: - values = ds.variables['sresa1b_bccr-bcm2-0_1_Tavg'][:,50,75] - times = 
nc.num2date(ds.variables['time'][:],ds.variables['time'].units,ds.variables['time'].calendar) + values = ds.variables['sresa1b_bccr-bcm2-0_1_Tavg'][:, 50, 75] + times = nc.num2date(ds.variables['time'][:], ds.variables['time'].units, ds.variables['time'].calendar) select = np.array([True if time in list(field.temporal.value_datetime) else False for time in times]) - sel_values = values[select,:,:] - self.assertNumpyAll(to_test,sel_values) + sel_values = values[select] + self.assertNumpyAll(to_test, sel_values) finally: ds.close() def test_get_field_with_projection(self): uri = self.test_data.get_uri('narccap_wrfg') - rd = RequestDataset(uri,'pr') + rd = RequestDataset(uri, 'pr') field = rd.get() - self.assertIsInstance(field.spatial.crs,CFLambertConformal) + self.assertIsInstance(field.spatial.crs, CFLambertConformal) field.spatial.update_crs(CFWGS84()) - self.assertIsInstance(field.spatial.crs,CFWGS84) - self.assertEqual(field.spatial.grid.row,None) - self.assertAlmostEqual(field.spatial.grid.value.mean(),-26.269666952512416) + self.assertIsInstance(field.spatial.crs, CFWGS84) + self.assertEqual(field.spatial.grid.row, None) + self.assertAlmostEqual(field.spatial.grid.value.mean(), -26.269666952512416) field.spatial.crs.unwrap(field.spatial) - self.assertAlmostEqual(field.spatial.grid.value.mean(),153.73033304748759) + self.assertAlmostEqual(field.spatial.grid.value.mean(), 153.73033304748759) self.assertIsNone(field.spatial.geom.polygon) - self.assertAlmostEqual(field.spatial.geom.point.value[0,100].x,278.52630062012787) - self.assertAlmostEqual(field.spatial.geom.point.value[0,100].y,21.4615681252577) + self.assertAlmostEqual(field.spatial.geom.point.value[0, 100].x, 278.52630062012787) + self.assertAlmostEqual(field.spatial.geom.point.value[0, 100].y, 21.4615681252577) def test_get_field_projection_axes(self): uri = self.test_data.get_uri('cmip3_extraction') variable = 'Tavg' - rd = RequestDataset(uri,variable) + rd = RequestDataset(uri, variable) with 
self.assertRaises(DimensionNotFound): rd.get() - rd = RequestDataset(uri,variable,dimension_map={'R':'projection','T':'time','X':'longitude','Y':'latitude'}) + rd = RequestDataset(uri, variable, + dimension_map={'R': 'projection', 'T': 'time', 'X': 'longitude', 'Y': 'latitude'}) field = rd.get() - self.assertEqual(field.shape,(36, 1800, 1, 7, 12)) - self.assertEqual(field.temporal.value_datetime[0],datetime.datetime(1950, 1, 16, 0, 0)) - self.assertEqual(field.temporal.value_datetime[-1],datetime.datetime(2099, 12, 15, 0, 0)) - self.assertEqual(field.level,None) - self.assertNumpyAll(field.realization.value,np.arange(1,37,dtype=constants.NP_INT)) + self.assertEqual(field.shape, (36, 1800, 1, 7, 12)) + self.assertEqual(field.temporal.value_datetime[0], datetime.datetime(1950, 1, 16, 0, 0)) + self.assertEqual(field.temporal.value_datetime[-1], datetime.datetime(2099, 12, 15, 0, 0)) + self.assertEqual(field.level, None) + self.assertNumpyAll(field.realization.value, np.arange(1, 37, dtype=constants.NP_INT)) - ds = nc.Dataset(uri,'r') + ds = nc.Dataset(uri, 'r') to_test = ds.variables['Tavg'] - self.assertNumpyAll(to_test[:],field.variables['Tavg'].value.squeeze()) + self.assertNumpyAll(to_test[:], field.variables['Tavg'].value.squeeze()) ds.close() def test_get_field_projection_axes_slicing(self): uri = self.test_data.get_uri('cmip3_extraction') variable = 'Tavg' - rd = RequestDataset(uri,variable,dimension_map={'R':'projection','T':'time','X':'longitude','Y':'latitude'}) + rd = RequestDataset(uri, variable, + dimension_map={'R': 'projection', 'T': 'time', 'X': 'longitude', 'Y': 'latitude'}) field = rd.get() - sub = field[15,:,:,:,:] - self.assertEqual(sub.shape,(1,1800,1,7,12)) + sub = field[15, :, :, :, :] + self.assertEqual(sub.shape, (1, 1800, 1, 7, 12)) - ds = nc.Dataset(uri,'r') + ds = nc.Dataset(uri, 'r') to_test = ds.variables['Tavg'] - self.assertNumpyAll(to_test[15,:,:,:],sub.variables[variable].value.squeeze()) + self.assertNumpyAll(to_test[15, :, :, :], 
sub.variables[variable].value.squeeze()) ds.close() def test_get_field_multifile_load(self): uri = self.test_data.get_uri('narccap_pr_wrfg_ncep') - rd = RequestDataset(uri,'pr') + rd = RequestDataset(uri, 'pr') field = rd.get() - self.assertEqual(field.temporal.extent_datetime,(datetime.datetime(1981, 1, 1, 0, 0), datetime.datetime(1991, 1, 1, 0, 0))) - self.assertAlmostEqual(field.temporal.resolution,0.125) + self.assertEqual(field.temporal.extent_datetime, + (datetime.datetime(1981, 1, 1, 0, 0), datetime.datetime(1991, 1, 1, 0, 0))) + self.assertAlmostEqual(field.temporal.resolution, 0.125) def test_get_field_climatology_bounds(self): rd = self.test_data.get_rd('cancm4_tas') - ops = ocgis.OcgOperations(dataset=rd,output_format='nc',geom='state_boundaries', - select_ugid=[27],calc=[{'func':'mean','name':'mean'}], + ops = ocgis.OcgOperations(dataset=rd, output_format='nc', geom='state_boundaries', + select_ugid=[27], calc=[{'func': 'mean', 'name': 'mean'}], calc_grouping=['month']) ret = ops.execute() - rd = RequestDataset(uri=ret,variable='mean') + rd = RequestDataset(uri=ret, variable='mean') field = rd.get() - self.assertNotEqual(field.temporal.bounds,None) + self.assertNotEqual(field.temporal.bounds, None) def test_get_field_without_row_column_vectors(self): """Test loading a field objects without row and column vectors.""" @@ -516,7 +517,8 @@ def test_get_vector_dimension(self): rd = RequestDataset(path) driver = DriverNetcdf(rd) k = 'row' - v = {'name_uid': 'yc_id', 'axis': 'Y', 'adds': {'interpolate_bounds': False}, 'name': 'yc', 'cls': VectorDimension} + v = {'name_uid': 'yc_id', 'axis': 'Y', 'adds': {'interpolate_bounds': False}, 'name': 'yc', + 'cls': VectorDimension} source_metadata = rd.source_metadata res = driver._get_vector_dimension_(k, v, source_metadata) self.assertEqual(res['name'], 'yc') @@ -540,7 +542,6 @@ def test_inspect(self): class Test(TestBase): - def test_get_dimension_map_1(self): """Test dimension dictionary returned correctly.""" @@ 
-555,12 +556,130 @@ def test_get_dimension_map_1(self): def test_get_dimension_map_2(self): """Test special case where bounds were in the file but not found by the code.""" - ## this metadata was causing an issue with the bounds not being discovered (Maurer02new_OBS_tas_daily.1971-2000.nc) + # this metadata was causing an issue with the bounds not being discovered (Maurer02new_OBS_tas_daily.1971-2000.nc) # rd = RequestDataset(uri='/usr/local/climate_data/maurer/2010-concatenated/Maurer02new_OBS_tas_daily.1971-2000.nc', variable='tas') # metadata = rd.source_metadata - metadata = NcMetadata([('dataset', OrderedDict([(u'CDI', u'Climate Data Interface version 1.5.0 (http://code.zmaw.de/projects/cdi)'), (u'Conventions', u'GDT 1.2'), (u'history', u'Wed Jul 3 07:17:09 2013: ncrcat nldas_met_update.obs.daily.tas.1971.nc nldas_met_update.obs.daily.tas.1972.nc nldas_met_update.obs.daily.tas.1973.nc nldas_met_update.obs.daily.tas.1974.nc nldas_met_update.obs.daily.tas.1975.nc nldas_met_update.obs.daily.tas.1976.nc nldas_met_update.obs.daily.tas.1977.nc nldas_met_update.obs.daily.tas.1978.nc nldas_met_update.obs.daily.tas.1979.nc nldas_met_update.obs.daily.tas.1980.nc nldas_met_update.obs.daily.tas.1981.nc nldas_met_update.obs.daily.tas.1982.nc nldas_met_update.obs.daily.tas.1983.nc nldas_met_update.obs.daily.tas.1984.nc nldas_met_update.obs.daily.tas.1985.nc nldas_met_update.obs.daily.tas.1986.nc nldas_met_update.obs.daily.tas.1987.nc nldas_met_update.obs.daily.tas.1988.nc nldas_met_update.obs.daily.tas.1989.nc nldas_met_update.obs.daily.tas.1990.nc nldas_met_update.obs.daily.tas.1991.nc nldas_met_update.obs.daily.tas.1992.nc nldas_met_update.obs.daily.tas.1993.nc nldas_met_update.obs.daily.tas.1994.nc nldas_met_update.obs.daily.tas.1995.nc nldas_met_update.obs.daily.tas.1996.nc nldas_met_update.obs.daily.tas.1997.nc nldas_met_update.obs.daily.tas.1998.nc nldas_met_update.obs.daily.tas.1999.nc nldas_met_update.obs.daily.tas.2000.nc Maurer02new_OBS_tas_daily.1971-2000.nc\nFri 
Oct 28 08:44:48 2011: cdo ifthen conus_mask.nc /archive/public/gridded_obs/daily/ncfiles_2010/nldas_met_update.obs.daily.tas.1971.nc /data3/emaurer/ldas_met_2010/process/met/nc/daily/nldas_met_update.obs.daily.tas.1971.nc'), (u'institution', u'Princeton U.'), (u'file_name', u'nldas_met_update.obs.daily.tas.1971.nc'), (u'History', u'Interpolated from 1-degree data'), (u'authors', u'Sheffield, J., G. Goteti, and E. F. Wood, 2006: Development of a 50-yr high-resolution global dataset of meteorological forcings for land surface modeling, J. Climate, 19 (13), 3088-3111'), (u'description', u'Gridded Observed global data'), (u'creation_date', u'2006'), (u'SurfSgnConvention', u'Traditional'), (u'CDO', u'Climate Data Operators version 1.5.0 (http://code.zmaw.de/projects/cdo)'), (u'nco_openmp_thread_number', 1)])), ('file_format', 'NETCDF3_CLASSIC'), ('variables', OrderedDict([(u'longitude', {'dtype': 'float32', 'fill_value': 1e+20, 'dimensions': (u'longitude',), 'name': u'longitude', 'attrs': OrderedDict([(u'long_name', u'Longitude'), (u'units', u'degrees_east'), (u'standard_name', u'longitude'), (u'axis', u'X'), (u'bounds', u'longitude_bnds')])}), (u'longitude_bnds', {'dtype': 'float32', 'fill_value': 1e+20, 'dimensions': (u'longitude', u'nb2'), 'name': u'longitude_bnds', 'attrs': OrderedDict()}), (u'latitude', {'dtype': 'float32', 'fill_value': 1e+20, 'dimensions': (u'latitude',), 'name': u'latitude', 'attrs': OrderedDict([(u'long_name', u'Latitude'), (u'units', u'degrees_north'), (u'standard_name', u'latitude'), (u'axis', u'Y'), (u'bounds', u'latitude_bnds')])}), (u'latitude_bnds', {'dtype': 'float32', 'fill_value': 1e+20, 'dimensions': (u'latitude', u'nb2'), 'name': u'latitude_bnds', 'attrs': OrderedDict()}), (u'time', {'dtype': 'float64', 'fill_value': 1e+20, 'dimensions': (u'time',), 'name': u'time', 'attrs': OrderedDict([(u'units', u'days since 1940-01-01 00:00:00'), (u'calendar', u'standard')])}), (u'tas', {'dtype': 'float32', 'fill_value': 1e+20, 'dimensions': 
(u'time', u'latitude', u'longitude'), 'name': u'tas', 'attrs': OrderedDict([(u'units', u'C')])})])), ('dimensions', OrderedDict([(u'longitude', {'isunlimited': False, 'len': 462}), (u'nb2', {'isunlimited': False, 'len': 2}), (u'latitude', {'isunlimited': False, 'len': 222}), (u'time', {'isunlimited': True, 'len': 10958})]))]) + metadata = NcMetadata([('dataset', OrderedDict( + [(u'CDI', u'Climate Data Interface version 1.5.0 (http://code.zmaw.de/projects/cdi)'), + (u'Conventions', u'GDT 1.2'), (u'history', + u'Wed Jul 3 07:17:09 2013: ncrcat nldas_met_update.obs.daily.tas.1971.nc nldas_met_update.obs.daily.tas.1972.nc nldas_met_update.obs.daily.tas.1973.nc nldas_met_update.obs.daily.tas.1974.nc nldas_met_update.obs.daily.tas.1975.nc nldas_met_update.obs.daily.tas.1976.nc nldas_met_update.obs.daily.tas.1977.nc nldas_met_update.obs.daily.tas.1978.nc nldas_met_update.obs.daily.tas.1979.nc nldas_met_update.obs.daily.tas.1980.nc nldas_met_update.obs.daily.tas.1981.nc nldas_met_update.obs.daily.tas.1982.nc nldas_met_update.obs.daily.tas.1983.nc nldas_met_update.obs.daily.tas.1984.nc nldas_met_update.obs.daily.tas.1985.nc nldas_met_update.obs.daily.tas.1986.nc nldas_met_update.obs.daily.tas.1987.nc nldas_met_update.obs.daily.tas.1988.nc nldas_met_update.obs.daily.tas.1989.nc nldas_met_update.obs.daily.tas.1990.nc nldas_met_update.obs.daily.tas.1991.nc nldas_met_update.obs.daily.tas.1992.nc nldas_met_update.obs.daily.tas.1993.nc nldas_met_update.obs.daily.tas.1994.nc nldas_met_update.obs.daily.tas.1995.nc nldas_met_update.obs.daily.tas.1996.nc nldas_met_update.obs.daily.tas.1997.nc nldas_met_update.obs.daily.tas.1998.nc nldas_met_update.obs.daily.tas.1999.nc nldas_met_update.obs.daily.tas.2000.nc Maurer02new_OBS_tas_daily.1971-2000.nc\nFri Oct 28 08:44:48 2011: cdo ifthen conus_mask.nc /archive/public/gridded_obs/daily/ncfiles_2010/nldas_met_update.obs.daily.tas.1971.nc /data3/emaurer/ldas_met_2010/process/met/nc/daily/nldas_met_update.obs.daily.tas.1971.nc'), + 
(u'institution', u'Princeton U.'), (u'file_name', u'nldas_met_update.obs.daily.tas.1971.nc'), + (u'History', u'Interpolated from 1-degree data'), (u'authors', + u'Sheffield, J., G. Goteti, and E. F. Wood, 2006: Development of a 50-yr high-resolution global dataset of meteorological forcings for land surface modeling, J. Climate, 19 (13), 3088-3111'), + (u'description', u'Gridded Observed global data'), (u'creation_date', u'2006'), + (u'SurfSgnConvention', u'Traditional'), + (u'CDO', u'Climate Data Operators version 1.5.0 (http://code.zmaw.de/projects/cdo)'), + (u'nco_openmp_thread_number', 1)])), ('file_format', 'NETCDF3_CLASSIC'), ('variables', OrderedDict([( + u'longitude', + { + 'dtype': 'float32', + 'fill_value': 1e+20, + 'dimensions': ( + u'longitude',), + 'name': u'longitude', + 'attrs': OrderedDict( + [ + ( + u'long_name', + u'Longitude'), + ( + u'units', + u'degrees_east'), + ( + u'standard_name', + u'longitude'), + ( + u'axis', + u'X'), + ( + u'bounds', + u'longitude_bnds')])}), + ( + u'longitude_bnds', + { + 'dtype': 'float32', + 'fill_value': 1e+20, + 'dimensions': ( + u'longitude', + u'nb2'), + 'name': u'longitude_bnds', + 'attrs': OrderedDict()}), + ( + u'latitude', + { + 'dtype': 'float32', + 'fill_value': 1e+20, + 'dimensions': ( + u'latitude',), + 'name': u'latitude', + 'attrs': OrderedDict( + [ + ( + u'long_name', + u'Latitude'), + ( + u'units', + u'degrees_north'), + ( + u'standard_name', + u'latitude'), + ( + u'axis', + u'Y'), + ( + u'bounds', + u'latitude_bnds')])}), + ( + u'latitude_bnds', + { + 'dtype': 'float32', + 'fill_value': 1e+20, + 'dimensions': ( + u'latitude', + u'nb2'), + 'name': u'latitude_bnds', + 'attrs': OrderedDict()}), + ( + u'time', + { + 'dtype': 'float64', + 'fill_value': 1e+20, + 'dimensions': ( + u'time',), + 'name': u'time', + 'attrs': OrderedDict( + [ + ( + u'units', + u'days since 1940-01-01 00:00:00'), + ( + u'calendar', + u'standard')])}), + ( + u'tas', + { + 'dtype': 'float32', + 'fill_value': 1e+20, + 'dimensions': 
( + u'time', + u'latitude', + u'longitude'), + 'name': u'tas', + 'attrs': OrderedDict( + [ + ( + u'units', + u'C')])})])), + ('dimensions', OrderedDict([(u'longitude', {'isunlimited': False, 'len': 462}), + (u'nb2', {'isunlimited': False, 'len': 2}), + (u'latitude', {'isunlimited': False, 'len': 222}), + (u'time', {'isunlimited': True, 'len': 10958})]))]) dim_map = get_dimension_map('tas', metadata) - self.assertDictEqual(dim_map, {'Y': {'variable': u'latitude', 'bounds': u'latitude_bnds', 'dimension': u'latitude', 'pos': 1}, 'X': {'variable': u'longitude', 'bounds': u'longitude_bnds', 'dimension': u'longitude', 'pos': 2}, 'Z': None, 'T': {'variable': u'time', 'bounds': None, 'dimension': u'time', 'pos': 0}}) + self.assertDictEqual(dim_map, { + 'Y': {'variable': u'latitude', 'bounds': u'latitude_bnds', 'dimension': u'latitude', 'pos': 1}, + 'X': {'variable': u'longitude', 'bounds': u'longitude_bnds', 'dimension': u'longitude', 'pos': 2}, + 'Z': None, + 'T': {'variable': u'time', 'bounds': None, 'dimension': u'time', 'pos': 0}}) def test_get_dimension_map_3(self): """Test when bounds are found but the bounds variable is actually missing.""" diff --git a/src/ocgis/test/test_ocgis/test_api/test_subset.py b/src/ocgis/test/test_ocgis/test_api/test_subset.py index 1505cc83b..61148be6a 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_subset.py +++ b/src/ocgis/test/test_ocgis/test_api/test_subset.py @@ -6,14 +6,15 @@ import numpy as np import ESMF +from shapely import wkt from ocgis.calc.library.index.duration import FrequencyDuration from ocgis.api.parms.definition import OutputFormat from ocgis.interface.base.field import Field from ocgis.api.operations import OcgOperations from ocgis.conv.numpy_ import NumpyConverter -from ocgis.interface.base.crs import Spherical, CFWGS84, CFPolarStereographic, WGS84 -from ocgis.interface.base.dimension.spatial import SpatialDimension +from ocgis.interface.base.crs import Spherical, CFWGS84, CFPolarStereographic, WGS84, 
CoordinateReferenceSystem +from ocgis.interface.base.dimension.spatial import SpatialDimension, SpatialGridDimension from ocgis.test.base import TestBase import ocgis from ocgis.api.subset import SubsetOperation @@ -214,6 +215,34 @@ def test_geometry_dictionary(self): actual = pickle.loads(actual) self.assertEqual(coll.properties, actual) + def test_process_geometries(self): + # test multiple geometries with coordinate system update works as expected + + a = 'POLYGON((-105.21347987288135073 40.21514830508475313,-104.39928495762711691 40.21514830508475313,-104.3192002118643984 39.5677966101694949,-102.37047139830508513 39.61451271186440692,-102.12354343220337682 37.51896186440677639,-105.16009004237288593 37.51896186440677639,-105.21347987288135073 40.21514830508475313))' + b = 'POLYGON((-104.15235699152542281 39.02722457627118757,-103.71189088983049942 39.44099576271186436,-102.71750529661017026 39.28082627118644155,-102.35712394067796538 37.63908898305084705,-104.13900953389830306 37.63241525423728717,-104.15235699152542281 39.02722457627118757))' + geom = [{'geom': wkt.loads(xx), 'properties': {'UGID': ugid}} for ugid, xx in enumerate([a, b])] + + grid_value = [ + [[37.0, 37.0, 37.0, 37.0], [38.0, 38.0, 38.0, 38.0], [39.0, 39.0, 39.0, 39.0], [40.0, 40.0, 40.0, 40.0]], + [[-105.0, -104.0, -103.0, -102.0], [-105.0, -104.0, -103.0, -102.0], [-105.0, -104.0, -103.0, -102.0], + [-105.0, -104.0, -103.0, -102.0]]] + grid_value = np.ma.array(grid_value, mask=False) + output_crs = CoordinateReferenceSystem( + value={'a': 6370997, 'lon_0': -100, 'y_0': 0, 'no_defs': True, 'proj': 'laea', 'x_0': 0, 'units': 'm', + 'b': 6370997, 'lat_0': 45}) + grid = SpatialGridDimension(value=grid_value) + sdim = SpatialDimension(grid=grid, crs=WGS84()) + field = Field(spatial=sdim) + + ops = OcgOperations(dataset=field, geom=geom, output_crs=output_crs) + ret = ops.execute() + + expected = {0: -502052.79407259845, + 1: -510391.37909706926} + for ugid, field_dict in ret.iteritems(): + for 
field in field_dict.itervalues(): + self.assertAlmostEqual(field.spatial.grid.value.data.mean(), expected[ugid]) + def test_regridding_bounding_box_wrapped(self): """Test subsetting with a wrapped bounding box with the target as a 0-360 global grid.""" diff --git a/src/ocgis/test/test_ocgis/test_contrib/test_library_icclim.py b/src/ocgis/test/test_ocgis/test_contrib/test_library_icclim.py index d1422fd40..917ff0978 100644 --- a/src/ocgis/test/test_ocgis/test_contrib/test_library_icclim.py +++ b/src/ocgis/test/test_ocgis/test_contrib/test_library_icclim.py @@ -4,8 +4,9 @@ from copy import deepcopy from numpy.ma import MaskedArray import numpy as np - from datetime import datetime +from unittest import SkipTest + from ocgis.calc.temporal_groups import SeasonalTemporalGroup from ocgis.interface.base.dimension.temporal import TemporalDimension from ocgis.calc.base import AbstractParameterizedFunction @@ -47,10 +48,10 @@ def tearDown(self): FakeAbstractIcclimFunction.key = 'icclim_fillme' super(TestAbstractIcclimFunction, self).tearDown() - def get(self): + def get(self, grouping=None): field = self.get_field() temporal = TemporalDimension(value=self.get_time_series(datetime(2000, 1, 1), datetime(2001, 12, 31))) - grouping = [[12, 1, 2]] + grouping = grouping or [[12, 1, 2]] tgd = temporal.get_grouping(grouping) aa = FakeAbstractIcclimFunction(field, tgd) return aa @@ -60,12 +61,29 @@ def test_init(self): self.assertIsInstance(f, AbstractIcclimFunction) def test_set_field_metadata(self): + # test with a seasonal grouping aa = self.get() aa.set_field_metadata() self.assertIn(SeasonalTemporalGroup(aa.tgd.grouping).icclim_mode, aa.field.attrs['history']) + # test with a day calculation grouping + aa = self.get(grouping=['day']) + aa.set_field_metadata() + self.assertIn(str(['day']), aa.field.attrs['history']) + class TestLibraryIcclim(TestBase): + def test_bad_icclim_key_to_operations(self): + value = [{'func': 'icclim_TG_bad', 'name': 'TG'}] + with 
self.assertRaises(DefinitionValidationError): + Calc(value) + + def test_calc_argument_to_operations(self): + value = [{'func': 'icclim_TG', 'name': 'TG'}] + calc = Calc(value) + self.assertEqual(len(calc.value), 1) + self.assertEqual(calc.value[0]['ref'], IcclimTG) + @attr('slow') def test_icclim_combinatorial(self): shapes = ([('month',), 12], [('month', 'year'), 24], [('year',), 2]) @@ -79,7 +97,7 @@ def test_icclim_combinatorial(self): continue keys.remove(subclass.key) - self.assertEqual([('month',), ('month', 'year'), ('year',)], subclass._allowed_temporal_groupings) + for cg in CalcGrouping.iter_possible(): calc = [{'func': subclass.key, 'name': subclass.key.split('_')[1]}] if klass == AbstractIcclimUnivariateSetFunction: @@ -99,7 +117,7 @@ def test_icclim_combinatorial(self): calc[0].update({'kwds': kwds}) try: ops = ocgis.OcgOperations(dataset=rd, output_format='nc', calc=calc, calc_grouping=cg, - geom=[3.39, 40.62, 10.54, 52.30]) + geom=[35.39, 45.62, 42.54, 52.30]) ret = ops.execute() to_test = None for shape in shapes: @@ -109,7 +127,7 @@ def test_icclim_combinatorial(self): var = ds.variables[calc[0]['name']] self.assertEqual(var.dtype, subclass.dtype) if to_test is not None: - self.assertEqual(var.shape, (to_test, 5, 4)) + self.assertEqual(var.shape, (to_test, 3, 3)) except DefinitionValidationError as e: msg = '''OcgOperations validation raised an exception on the argument/operation "calc_grouping" with the message: The following temporal groupings are supported for ICCLIM: [('month',), ('month', 'year'), ('year',)]. 
The requested temporal group is:''' if e.message.startswith(msg): @@ -125,16 +143,19 @@ def test_register_icclim(self): self.assertIn('icclim_TG', fr) self.assertIn('icclim_vDTR', fr) - def test_calc_argument_to_operations(self): - value = [{'func': 'icclim_TG', 'name': 'TG'}] - calc = Calc(value) - self.assertEqual(len(calc.value), 1) - self.assertEqual(calc.value[0]['ref'], IcclimTG) + def test_seasonal_calc_grouping(self): + """Test seasonal calculation grouping with an ICCLIM function.""" - def test_bad_icclim_key_to_operations(self): - value = [{'func': 'icclim_TG_bad', 'name': 'TG'}] - with self.assertRaises(DefinitionValidationError): - Calc(value) + rd = self.test_data.get_rd('cancm4_tas') + slc = [None, [0, 600], None, [0, 10], [0, 10]] + calc_icclim = [{'func': 'icclim_TG', 'name': 'TG'}] + calc_ocgis = [{'func': 'mean', 'name': 'mean'}] + cg = [[12, 1, 2], 'unique'] + ops_ocgis = OcgOperations(calc=calc_ocgis, calc_grouping=cg, slice=slc, dataset=rd) + ret_ocgis = ops_ocgis.execute() + ops_icclim = OcgOperations(calc=calc_icclim, calc_grouping=cg, slice=slc, dataset=rd) + ret_icclim = ops_icclim.execute() + self.assertNumpyAll(ret_ocgis[1]['tas'].variables['mean'].value, ret_icclim[1]['tas'].variables['TG'].value) class TestCD(TestBase): @@ -453,6 +474,9 @@ def test_calculation_operations_to_nc(self): @attr('remote') def test_calculate_opendap(self): + msg = 'opendap url no longer works' + raise SkipTest(msg) + # test against an opendap target ensuring icclim and ocgis operations are equivalent in the netcdf output url = 'http://opendap.nmdc.eu/knmi/thredds/dodsC/IS-ENES/TESTSETS/tasmax_day_EC-EARTH_rcp26_r8i1p1_20760101-21001231.nc' calc_grouping = ['month'] diff --git a/src/ocgis/test/test_real_data/test_combinatorial.py b/src/ocgis/test/test_real_data/test_combinatorial.py index c63851c27..2bee46049 100644 --- a/src/ocgis/test/test_real_data/test_combinatorial.py +++ b/src/ocgis/test/test_real_data/test_combinatorial.py @@ -292,6 +292,9 @@ def 
test_combinatorial_projection_with_geometries(self): continue else: raise + except ValueError: + if unbounded and ab == 'polygon': + continue if o == constants.OUTPUT_FORMAT_SHAPEFILE: ugid_path = os.path.join(self.current_dir_output, ops.prefix, ops.prefix + '_ugid.shp') diff --git a/src/ocgis/test/test_unfiled/test_remote_data.py b/src/ocgis/test/test_unfiled/test_remote_data.py index 73f720fdc..1a79289cb 100644 --- a/src/ocgis/test/test_unfiled/test_remote_data.py +++ b/src/ocgis/test/test_unfiled/test_remote_data.py @@ -13,4 +13,4 @@ def test_geodataportal_prism(self): snippet=True, output_format='numpy', aggregate=False, prefix=variable) ret = ops.execute() - self.assertEqual(ret[25].variables[variable].value.shape, (1, 1, 227, 246)) + self.assertEqual(ret[25][variable].variables[variable].value.shape, (1, 1, 1, 227, 246)) From adb6ec849d334bb7b4a7225b387476a943b1c9c2 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Wed, 4 Feb 2015 13:34:55 -0700 Subject: [PATCH 66/71] allow dimension map argument to request dataset to contain full specification #355 The dimension map argument to request datasets may now contain the full specification with bounds and dimension name. 
--- src/ocgis/api/request/driver/base.py | 6 ++++++ src/ocgis/api/request/driver/nc.py | 15 ++++++------- src/ocgis/interface/base/dimension/spatial.py | 8 +++---- src/ocgis/interface/base/field.py | 4 ++-- .../test_request/test_driver/test_nc.py | 10 +++++++++ .../test_base/test_dimension/test_spatial.py | 15 +++++++++++++ .../test_interface/test_base/test_field.py | 21 +++++++++++++++++-- 7 files changed, 64 insertions(+), 15 deletions(-) diff --git a/src/ocgis/api/request/driver/base.py b/src/ocgis/api/request/driver/base.py index 20e63c0b2..91b35b7d0 100644 --- a/src/ocgis/api/request/driver/base.py +++ b/src/ocgis/api/request/driver/base.py @@ -1,8 +1,14 @@ import abc + from ocgis.exc import DefinitionValidationError class AbstractDriver(object): + """ + :param rd: The input request dataset object. + :type rd: :class:`~ocgis.RequestDataset` + """ + __metaclass__ = abc.ABCMeta def __init__(self, rd): diff --git a/src/ocgis/api/request/driver/nc.py b/src/ocgis/api/request/driver/nc.py index f9771f435..7e8f253a9 100644 --- a/src/ocgis/api/request/driver/nc.py +++ b/src/ocgis/api/request/driver/nc.py @@ -114,13 +114,14 @@ def get_source_metadata(self): metadata['dim_map'] = get_dimension_map(var['name'], metadata) else: for k, v in self.rd.dimension_map.iteritems(): - try: - variable_name = metadata['variables'][v]['name'] - except KeyError: - variable_name = None - self.rd.dimension_map[k] = {'variable': variable_name, - 'dimension': v, - 'pos': var['dimensions'].index(v)} + if not isinstance(v, dict): + try: + variable_name = metadata['variables'][v]['name'] + except KeyError: + variable_name = None + self.rd.dimension_map[k] = {'variable': variable_name, + 'dimension': v, + 'pos': var['dimensions'].index(v)} metadata['dim_map'] = self.rd.dimension_map return metadata diff --git a/src/ocgis/interface/base/dimension/spatial.py b/src/ocgis/interface/base/dimension/spatial.py index 4d9ddf8b1..3f4188bd5 100644 --- a/src/ocgis/interface/base/dimension/spatial.py +++ 
b/src/ocgis/interface/base/dimension/spatial.py @@ -652,8 +652,8 @@ def __init__(self, *args, **kwargs): kwargs['name'] = kwargs.get('name') or 'grid' - self.name_row = kwargs.pop('name_row', None) - self.name_col = kwargs.pop('name_col', None) + self.name_row = kwargs.pop('name_row', constants.DEFAULT_NAME_ROW_COORDINATES) + self.name_col = kwargs.pop('name_col', constants.DEFAULT_NAME_COL_COORDINATES) super(SpatialGridDimension, self).__init__(*args, **kwargs) @@ -911,8 +911,8 @@ def write_to_netcdf_dataset(self, dataset, **kwargs): self.col.write_to_netcdf_dataset(dataset, **kwargs) except AttributeError: # likely no row and column. write the grid value. - name_yc = constants.DEFAULT_NAME_ROW_COORDINATES - name_xc = constants.DEFAULT_NAME_COL_COORDINATES + name_yc = self.name_row + name_xc = self.name_col dataset.createDimension(name_yc, size=self.shape[0]) dataset.createDimension(name_xc, size=self.shape[1]) value = self.value diff --git a/src/ocgis/interface/base/field.py b/src/ocgis/interface/base/field.py index 8cc298959..7768348c0 100644 --- a/src/ocgis/interface/base/field.py +++ b/src/ocgis/interface/base/field.py @@ -535,8 +535,8 @@ def name_scope(target, name, axis): # write the grid.value directly if self.spatial.grid.row is None or self.spatial.grid.col is None: self.spatial.grid.write_to_netcdf_dataset(dataset, **kwargs) - value_dimensions.append('yc') - value_dimensions.append('xc') + value_dimensions.append(self.spatial.grid.name_row) + value_dimensions.append(self.spatial.grid.name_col) else: raise diff --git a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py index 7b406b639..587033a58 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py +++ b/src/ocgis/test/test_ocgis/test_api/test_request/test_driver/test_nc.py @@ -511,6 +511,16 @@ def test_get_name_bounds_dimension(self): res = 
DriverNetcdf._get_name_bounds_dimension_(source_metadata) self.assertIsNone(res) + def test_get_source_metadata(self): + dimension_map = {'X': {'variable': 'lon', 'dimension': 'x', 'pos': 2, 'bounds': 'lon_bnds'}, + 'Y': {'variable': 'lat', 'dimension': 'y', 'pos': 1, 'bounds': 'lat_bounds'}, + 'T': {'variable': 'time', 'dimension': 'time', 'pos': 0, 'bounds': 'time_bounds'}} + uri = self.test_data.get_uri('cancm4_tas') + rd = RequestDataset(uri=uri, dimension_map=dimension_map) + driver = DriverNetcdf(rd) + meta = driver.get_source_metadata() + self.assertEqual(meta['dim_map'], dimension_map) + def test_get_vector_dimension(self): # test exception raised with no row and column path = self.get_netcdf_path_no_row_column() diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py index 4a10f8b45..c46e35ead 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_dimension/test_spatial.py @@ -1392,6 +1392,8 @@ def test_init(self): self.assertEqual(grid.name, 'grid') self.assertEqual(grid.row.name, 'yc') self.assertEqual(grid.col.name, 'xc') + self.assertEqual(grid.name_row, constants.DEFAULT_NAME_ROW_COORDINATES) + self.assertEqual(grid.name_col, constants.DEFAULT_NAME_COL_COORDINATES) grid = SpatialGridDimension(row=row, col=col, name_row='foo', name_col='whatever') self.assertEqual(grid.name_row, 'foo') @@ -1575,3 +1577,16 @@ def test_write_to_netcdf_dataset(self): self.assertNumpyAll(var[:], grid.corners[idx].data) self.assertEqual(ds.variables[constants.DEFAULT_NAME_ROW_COORDINATES].corners, name_yc_corners) self.assertEqual(ds.variables[constants.DEFAULT_NAME_COL_COORDINATES].corners, name_xc_corners) + + # test with names for the rows and columns and no row/col objects + row = VectorDimension(value=[4., 5.]) + col = VectorDimension(value=[6., 
7.]) + grid = SpatialGridDimension(row=row, col=col, name_row='imrow', name_col='im_col') + grid.value + grid.row = None + grid.col = None + path = self.get_temporary_file_path('foo.nc') + with self.nc_scope(path, 'w') as ds: + grid.write_to_netcdf_dataset(ds) + with self.nc_scope(path) as ds: + self.assertEqual(ds.variables.keys(), ['imrow', 'im_col']) \ No newline at end of file diff --git a/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py b/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py index 418a9025a..414ffae05 100644 --- a/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py +++ b/src/ocgis/test/test_ocgis/test_interface/test_base/test_field.py @@ -3,6 +3,8 @@ from copy import deepcopy from collections import OrderedDict import numpy as np +from datetime import datetime as dt +import datetime from shapely import wkb import fiona @@ -10,8 +12,6 @@ from shapely.geometry import shape from shapely.ops import cascaded_union -from datetime import datetime as dt -import datetime from ocgis import constants, SpatialCollection, ShpCabinet from ocgis import RequestDataset from ocgis.constants import NAME_UID_FIELD, NAME_UID_DIMENSION_LEVEL @@ -838,6 +838,23 @@ def test_write_to_netcdf_dataset_without_row_column_on_grid(self): ['time', 'bounds', 'level', constants.DEFAULT_NAME_ROW_COORDINATES, constants.DEFAULT_NAME_COL_COORDINATES, constants.DEFAULT_NAME_CORNERS_DIMENSION]) + # test with name on the grid + field = self.get_field(with_value=True, with_realization=False) + field.spatial.grid.value + field.spatial.grid.corners + field.spatial.grid.name_row = 'nr' + field.spatial.grid.name_col = 'nc' + field.spatial.grid.row = None + field.spatial.grid.col = None + path = os.path.join(self.current_dir_output, 'foo.nc') + with nc_scope(path, 'w') as ds: + field.write_to_netcdf_dataset(ds) + self.assertAsSetEqual(ds.variables.keys(), + ['time', 'time_bounds', 'level', 'level_bounds', 'nr', 'nc', 'nr_corners', + 'nc_corners', 
'tmax']) + self.assertAsSetEqual(ds.dimensions.keys(), + ['time', 'bounds', 'level', 'nr', 'nc', constants.DEFAULT_NAME_CORNERS_DIMENSION]) + def test_write_to_netcdf_dataset_without_temporal(self): """Test without a temporal dimensions.""" From ef22671eb5c0858d9bff958c2c01406b945cb13f Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Fri, 6 Feb 2015 09:28:17 -0700 Subject: [PATCH 67/71] minor - Reordered argument names in the API documentation. - Added a test for the percentile dictionary calculation in ICCLIM. - Changed version number in setup.py. --- doc/api.rst | 376 ++++++++++-------- setup.py | 5 +- .../test_contrib/test_library_icclim.py | 16 + 3 files changed, 220 insertions(+), 177 deletions(-) diff --git a/doc/api.rst b/doc/api.rst index 7fe9dd5f7..c2c9585b2 100644 --- a/doc/api.rst +++ b/doc/api.rst @@ -53,98 +53,22 @@ Detailed Argument Information Additional information on arguments are found in their respective sections. -dataset -~~~~~~~ - -A ``dataset`` is the target file(s) or object(s) containing data to process. A ``dataset`` may be: - 1. A file on the local machine or network location accessible by the software (use :class:`~ocgis.RequestDataset` or :class:`~ocgis.RequestDatasetCollection`). - 2. A URL to an unsecured OpenDAP dataset (use :class:`~ocgis.RequestDataset` or :class:`~ocgis.RequestDatasetCollection`). - 3. An OpenClimateGIS field object (use :class:`~Field` or :class:`~ocgis.RequestDatasetCollection`). If a :class:`~ocgis.Field` object is used, be aware operations may modify the object inplace. - -.. autoclass:: ocgis.RequestDataset - :members: inspect, inspect_as_dct - -.. autoclass:: ocgis.RequestDatasetCollection - :members: update - -dir_output -~~~~~~~~~~ - -This sets the output folder for any disk formats. If this is ``None`` and ``env.DIR_OUTPUT`` is ``None``, then output will be written to the current working directory. 
- -spatial_operation -~~~~~~~~~~~~~~~~~ - -======================== ============================================================================================================================================= -Value Description -======================== ============================================================================================================================================= -``intersects`` (default) Source geometries touching or overlapping selection geometries are returned (see :ref:`appendix-intersects`). -``clip`` A full geometric intersection is performed between source and selection geometries. New geometries may be created. (see :ref:`appendix-clip`) -======================== ============================================================================================================================================= - -.. _geom: - -geom -~~~~ - -.. warning:: Subsetting with multiple geometries to netCDF will result in :ref:`agg_selection` being set to ``True``. Indexing multiple geometries using netCDF-CF convention is currently not possible. - -If a geometry(s) is provided, it is used to subset `every` :class:`ocgis.RequestDataset` object. Supplying a value of ``None`` (the default) results in the return of the entire spatial domain. Any shapefiles used for subsetting must include a unique integer attribute matching the value of :attr:`ocgis.constants.ocgis_unique_geometry_identifier` and have a WGS84 latitude/longitude geographic coordinate system. If an ESRI Shapefile is being accessed and the file does not contain the unique identifier, the function :func:`~ocgis.util.helpers.add_shapefile_unique_identifier` may be used to add the appropriate unique identifier attribute. - -There are a number of ways to parameterize the ``geom`` keyword argument: - -1. Bounding Box - -This is a list of floats corresponding to: `[min x, min y, max x, max y]`. The coordinates should be WGS84 geographic. - ->>> geom = [-120.4, 30.0, -110.3, 41.4] - -2. 
Point - -This is a list of floats corresponding to: `[longitude,latitude]`. The coordinates should be WGS84 geographic. For point geometries, the geometry is actually buffered by `search_radius_mult` * (data resolution). Hence, output geometries are in fact polygons. - ->>> geom = [-120.4,36.5] - -3. Using :class:`ocgis.ShpCabinetIterator` - ->>> from ocgis import ShpCabinetIterator ->>> geom = ShpCabinetIterator('state_boundaries',select_ugid=[16]) - -.. _geom key: - -4. Using a :class:`ocgis.ShpCabinet` Key - ->>> geom = 'state_boundaries' - -5. Custom Sequence of Shapely Geometry Dictionaries - -The `crs` key is optional. If it is not included, WGS84 is assumed. The `properties` key is also optional. If not 'UGID' property is provided, defaults will be inserted. - ->>> geom = [{'geom':Point(x,y),'properties':{'UGID':23,'NAME':'geometry23'},'crs':CoordinateReferenceSystem(epsg=4326)},...] - -6. Path to a Shapefile - ->>> geom = '/path/to/shapefile.shp' - -.. _search_radius_mult key: - -search_radius_mult -~~~~~~~~~~~~~~~~~~ - -This is a scalar float value multiplied by the target data's resolution to determine the buffer radius for the point. The default is ``2.0``. - -select_nearest -~~~~~~~~~~~~~~ +abstraction +~~~~~~~~~~~ -If ``True``, the nearest geometry to the centroid of the current selection geometry is returned. This is useful when subsetting by a point, and it is preferred to not return all geometries within the selection radius. +.. note:: OpenClimateGIS uses the `bounds` attribute of NetCDF file to construct polygon representations of datasets. If no `bounds` attribute is found, the software defaults to the `point` geometry abstraction. -output_crs -~~~~~~~~~~ +====================== ============================================================= +Value Description +====================== ============================================================= +`polygon` (default) Represent cells as :class:`shapely.geometry.Polygon` objects. 
+`point` Represent cells as :class:`shapely.geometry.Point` objects. +====================== ============================================================= -By default, the coordinate reference system (CRS) is the CRS of the input :class:`ocgis.RequestDataset` object. If multiple :class:`ocgis.RequestDataset` objects are part of an :class:`ocgis.OcgOperations` call, then ``output_crs`` must be provided if the input CRS values of the :class:`ocgis.RequestDataset` objects differ. The value for ``output_crs`` is an instance of :class:`ocgis.crs.CoordinateReferenceSystem`. +add_auxiliary_files +~~~~~~~~~~~~~~~~~~~ ->>> import ocgis ->>> output_crs = ocgis.crs.CFWGS84() +If ``True``, create a new directory and add metadata and other informational files in addition to the converted file. If ``False``, write the target file only to :attr:`dir_output` and do not create a new directory. aggregate ~~~~~~~~~ @@ -156,6 +80,30 @@ Value Description ``False`` (default) Selected geometries are not combined. =================== ======================================================================================== +.. _agg_selection: + +agg_selection +~~~~~~~~~~~~~ + +=================== =============================================== +Value Description +=================== =============================================== +``True`` Aggregate (union) `geom`_ to a single geometry. +``False`` (default) Leave `geom`_ as is. +=================== =============================================== + +The purpose of this data manipulation is to ease the method required to aggregate (union) geometries into arbitrary regions. A simple example would be unioning the U.S. state boundaries of Utah, Nevada, Arizona, and New Mexico into a single polygon representing a "Southwestern Region". 
+ +allow_empty +~~~~~~~~~~~ + +================= ==================================================================================================== +Value Description +================= ==================================================================================================== +`True` Allow the empty set for geometries not geographically coincident with a source geometry. +`False` (default) Raise :class:`~ocgis.exc.EmptyDataNotAllowed` if the empty set is encountered. +================= ==================================================================================================== + .. _calc_headline: calc @@ -223,33 +171,129 @@ Value Description `False` (default) Use aggregated values during computation. ====================== ===================================================================================================== -abstraction -~~~~~~~~~~~ +callback +~~~~~~~~ -.. note:: OpenClimateGIS uses the `bounds` attribute of NetCDF file to construct polygon representations of datasets. If no `bounds` attribute is found, the software defaults to the `point` geometry abstraction. +A callback function that may be used for custom messaging. This function integrates with the log handler and will receive messages at or above the :attr:`logging.INFO` level. -====================== ============================================================= -Value Description -====================== ============================================================= -`polygon` (default) Represent cells as :class:`shapely.geometry.Polygon` objects. -`point` Represent cells as :class:`shapely.geometry.Point` objects. -====================== ============================================================= +>>> def callback(percent,message): +>>> print(percent,message) -.. _snippet_headline: +conform_units_to +~~~~~~~~~~~~~~~~ -snippet +Destination units for conversion. If this parameter is set, then the :mod:`cfunits` module must be installed. 
Setting this parameter will override conformed units set on ``dataset`` objects. + +dataset ~~~~~~~ -.. note:: The entire spatial domain is returned unless `geom` is specified. +A ``dataset`` is the target file(s) or object(s) containing data to process. A ``dataset`` may be: + 1. A file on the local machine or network location accessible by the software (use :class:`~ocgis.RequestDataset` or :class:`~ocgis.RequestDatasetCollection`). + 2. A URL to an unsecured OpenDAP dataset (use :class:`~ocgis.RequestDataset` or :class:`~ocgis.RequestDatasetCollection`). + 3. An OpenClimateGIS field object (use :class:`~Field` or :class:`~ocgis.RequestDatasetCollection`). If a :class:`~ocgis.Field` object is used, be aware operations may modify the object inplace. -.. note:: Only applies for pure subsetting for limiting computations use `time_range` and/or `time_region`. +.. autoclass:: ocgis.RequestDataset + :members: inspect, inspect_as_dct -====================== =========================================================================== -Value Description -====================== =========================================================================== -``True`` Return only the first time point and the first level slice (if applicable). -``False`` (default) Return all data. -====================== =========================================================================== +.. autoclass:: ocgis.RequestDatasetCollection + :members: update + +dir_output +~~~~~~~~~~ + +This sets the output folder for any disk formats. If this is ``None`` and ``env.DIR_OUTPUT`` is ``None``, then output will be written to the current working directory. + +.. _geom: + +geom +~~~~ + +.. warning:: Subsetting with multiple geometries to netCDF will result in :ref:`agg_selection` being set to ``True``. Indexing multiple geometries using netCDF-CF convention is currently not possible. + +If a geometry(s) is provided, it is used to subset `every` :class:`ocgis.RequestDataset` object. 
Supplying a value of ``None`` (the default) results in the return of the entire spatial domain. Any shapefiles used for subsetting must include a unique integer attribute matching the value of :attr:`ocgis.constants.ocgis_unique_geometry_identifier` and have a WGS84 latitude/longitude geographic coordinate system. If an ESRI Shapefile is being accessed and the file does not contain the unique identifier, the function :func:`~ocgis.util.helpers.add_shapefile_unique_identifier` may be used to add the appropriate unique identifier attribute. + +There are a number of ways to parameterize the ``geom`` keyword argument: + +1. Bounding Box + +This is a list of floats corresponding to: `[min x, min y, max x, max y]`. The coordinates should be WGS84 geographic. + +>>> geom = [-120.4, 30.0, -110.3, 41.4] + +2. Point + +This is a list of floats corresponding to: `[longitude,latitude]`. The coordinates should be WGS84 geographic. For point geometries, the geometry is actually buffered by `search_radius_mult` * (data resolution). Hence, output geometries are in fact polygons. + +>>> geom = [-120.4,36.5] + +3. Using :class:`ocgis.ShpCabinetIterator` + +>>> from ocgis import ShpCabinetIterator +>>> geom = ShpCabinetIterator('state_boundaries',select_ugid=[16]) + +.. _geom key: + +4. Using a :class:`ocgis.ShpCabinet` Key + +>>> geom = 'state_boundaries' + +5. Custom Sequence of Shapely Geometry Dictionaries + +The `crs` key is optional. If it is not included, WGS84 is assumed. The `properties` key is also optional. If not 'UGID' property is provided, defaults will be inserted. + +>>> geom = [{'geom': Point(x,y), 'properties': {'UGID': 23, 'NAME': 'geometry23'}, 'crs': CoordinateReferenceSystem(epsg=4326)} ,...] + +6. Path to a Shapefile + +>>> geom = '/path/to/shapefile.shp' + +headers +~~~~~~~ + +Useful to limit the number of attributes included in an output file. 
+ +>>> headers = ['did','time','value'] + +interpolate_spatial_bounds +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +If ``True``, attempt to interpolate bounds coordinates if they are absent. This will also extrapolate exterior bounds to avoid losing spatial coverage. + +melted +~~~~~~ + +If ``False``, variable names will be individual column headers (non-melted). If ``True``, variable names will be placed into a single column. + +A non-melted format: + +==== ==== ====== +TIME TAS TASMAX +==== ==== ====== +1 30.3 40.3 +2 32.2 41.7 +3 31.7 40.9 +==== ==== ====== + +A melted format: + +==== ====== ===== +TIME NAME VALUE +==== ====== ===== +1 TAS 30.3 +2 TAS 32.2 +3 TAS 31.7 +1 TASMAX 40.3 +2 TASMAX 41.7 +3 TASMAX 40.9 +==== ====== ===== + +output_crs +~~~~~~~~~~ + +By default, the coordinate reference system (CRS) is the CRS of the input :class:`ocgis.RequestDataset` object. If multiple :class:`ocgis.RequestDataset` objects are part of an :class:`ocgis.OcgOperations` call, then ``output_crs`` must be provided if the input CRS values of the :class:`ocgis.RequestDataset` objects differ. The value for ``output_crs`` is an instance of :class:`ocgis.crs.CoordinateReferenceSystem`. + +>>> import ocgis +>>> output_crs = ocgis.crs.CFWGS84() .. _output_format_headline: @@ -268,19 +312,33 @@ Value Description ``'nc-ugrid-2d-flexible-mesh'`` A flexible mesh representation. See :ref:`2d-flexible-mesh-label` for more details and :ref:`2d-flexible-mesh-example-label` for an example. =============================== ============================================================================================================================================ -.. _agg_selection: +regrid_destination +~~~~~~~~~~~~~~~~~~ -agg_selection -~~~~~~~~~~~~~ +Please see :ref:`esmpy-regridding` for an overview and limitations. 
-=================== =============================================== -Value Description -=================== =============================================== -``True`` Aggregate (union) `geom`_ to a single geometry. -``False`` (default) Leave `geom`_ as is. -=================== =============================================== +If provided, all :class:`~ocgis.RequestDataset` objects in ``dataset`` will be regridded to match the grid provided in the argument’s object. This argument may be one of three types: :class:`~ocgis.RequestDataset`, :class:`~ocgis.interface.base.dimension.spatial.SpatialDimension`, or :class:`~ocgis.interface.base.field.Field`. -The purpose of this data manipulation is to ease the method required to aggregate (union) geometries into arbitrary regions. A simple example would be unioning the U.S. state boundaries of Utah, Nevada, Arizona, and New Mexico into a single polygon representing a "Southwestern Region". +>>> regrid_destination = ocgis.RequestDataset(uri='/path/to/destination.nc') + +regrid_options +~~~~~~~~~~~~~~ + +A dictionary with regridding options. Please see the documentation for :meth:`~ocgis.regrid.base.iter_regridded_fields`. Dictionary elements of ``regrid_options`` correspond to the keyword arguments of this function. + +>>> regrid_options = {'with_value': True} + +.. _search_radius_mult key: + +search_radius_mult +~~~~~~~~~~~~~~~~~~ + +This is a scalar float value multiplied by the target data's resolution to determine the buffer radius for the point. The default is ``2.0``. + +select_nearest +~~~~~~~~~~~~~~ + +If ``True``, the nearest geometry to the centroid of the current selection geometry is returned. This is useful when subsetting by a point, and it is preferred to not return all geometries within the selection radius. .. 
_select_ugid: @@ -309,57 +367,31 @@ If the goal is to subset the data by the boundary of "Basin A" and write the res >>> rd = ocgis.RequestDataset(uri='/path/to/data.nc',variable='tas') >>> path = ocgis.OcgOperations(dataset=rd,geom='basins',select_ugid=[1],output_format='nc').execute() -vector_wrap -~~~~~~~~~~~ - -.. note:: Only applicable for WGS84 spatial references. - -================= ==================================================================================================== -Value Description -================= ==================================================================================================== -`True` (default) For vector geometry outputs (e.g. `shp`,`keyed`) , ensure output longitudinal domain is -180 to 180. -`False` Maintain the :class:`~ocgis.RequestDataset`'s longitudinal domain. -================= ==================================================================================================== - -add_auxiliary_files -~~~~~~~~~~~~~~~~~~~ - -If ``True``, create a new directory and add metadata and other informational files in addition to the converted file. If ``False``, write the target file only to :attr:`dir_output` and do not create a new directory. - -allow_empty -~~~~~~~~~~~ - -================= ==================================================================================================== -Value Description -================= ==================================================================================================== -`True` Allow the empty set for geometries not geographically coincident with a source geometry. -`False` (default) Raise :class:`~ocgis.exc.EmptyDataNotAllowed` if the empty set is encountered. -================= ==================================================================================================== +.. _snippet_headline: -headers +snippet ~~~~~~~ -Useful to limit the number of attributes included in an output file. 
- ->>> headers = ['did','time','value'] - -interpolate_spatial_bounds -~~~~~~~~~~~~~~~~~~~~~~~~~~ - -If ``True``, attempt to interpolate bounds coordinates if they are absent. This will also extrapolate exterior bounds to avoid losing spatial coverage. - -callback -~~~~~~~~ +.. note:: The entire spatial domain is returned unless `geom` is specified. -A callback function that may be used for custom messaging. This function integrates with the log handler and will receive messages at or above the :attr:`logging.INFO` level. +.. note:: Only applies for pure subsetting for limiting computations use `time_range` and/or `time_region`. ->>> def callback(percent,message): ->>> print(percent,message) +====================== =========================================================================== +Value Description +====================== =========================================================================== +``True`` Return only the first time point and the first level slice (if applicable). +``False`` (default) Return all data. +====================== =========================================================================== -conform_units_to -~~~~~~~~~~~~~~~~ +spatial_operation +~~~~~~~~~~~~~~~~~ -Destination units for conversion. If this parameter is set, then the :mod:`cfunits` module must be installed. Setting this parameter will override conformed units set on ``dataset`` objects. +======================== ============================================================================================================================================= +Value Description +======================== ============================================================================================================================================= +``intersects`` (default) Source geometries touching or overlapping selection geometries are returned (see :ref:`appendix-intersects`). +``clip`` A full geometric intersection is performed between source and selection geometries. 
New geometries may be created. (see :ref:`appendix-clip`) +======================== ============================================================================================================================================= time_range ~~~~~~~~~~ @@ -374,21 +406,17 @@ A dictionary with keys of 'month' and/or 'year' and values as sequences correspo >>> time_region = {'month':[6,7],'year':[2010,2011]} >>> time_region = {'year':[2010]} -regrid_destination -~~~~~~~~~~~~~~~~~~ - -Please see :ref:`esmpy-regridding` for an overview and limitations. - -If provided, all :class:`~ocgis.RequestDataset` objects in ``dataset`` will be regridded to match the grid provided in the argument’s object. This argument may be one of three types: :class:`~ocgis.RequestDataset`, :class:`~ocgis.interface.base.dimension.spatial.SpatialDimension`, or :class:`~ocgis.interface.base.field.Field`. - ->>> regrid_destination = ocgis.RequestDataset(uri='/path/to/destination.nc') - -regrid_options -~~~~~~~~~~~~~~ +vector_wrap +~~~~~~~~~~~ -A dictionary with regridding options. Please see the documentation for :meth:`~ocgis.regrid.base.iter_regridded_fields`. Dictionary elements of ``regrid_options`` correspond to the keyword arguments of this function. +.. note:: Only applicable for WGS84 spatial references. ->>> regrid_options = {'with_value': True} +================= ==================================================================================================== +Value Description +================= ==================================================================================================== +`True` (default) For vector geometry outputs (e.g. `shp`,`keyed`) , ensure output longitudinal domain is -180 to 180. +`False` Maintain the :class:`~ocgis.RequestDataset`'s longitudinal domain. 
+================= ==================================================================================================== :class:`ocgis.ShpCabinet` ========================= diff --git a/setup.py b/setup.py index ea8cbe95f..32e66909e 100644 --- a/setup.py +++ b/setup.py @@ -5,7 +5,7 @@ import tempfile -VERSION = '1.0.1-next' +VERSION = '1.1.0' ######################################################################################################################## @@ -134,8 +134,7 @@ def nc4p_make(): python_version = float(sys.version_info[0]) + float(sys.version_info[1]) / 10 if python_version != 2.7: - raise (ImportError( - 'This software requires Python version 2.7.x. You have {0}.x'.format(python_version))) + raise (ImportError('This software requires Python version 2.7.x. You have {0}.x'.format(python_version))) ######################################################################################################################## diff --git a/src/ocgis/test/test_ocgis/test_contrib/test_library_icclim.py b/src/ocgis/test/test_ocgis/test_contrib/test_library_icclim.py index 917ff0978..737bbc97c 100644 --- a/src/ocgis/test/test_ocgis/test_contrib/test_library_icclim.py +++ b/src/ocgis/test/test_ocgis/test_contrib/test_library_icclim.py @@ -7,6 +7,8 @@ from datetime import datetime from unittest import SkipTest +from icclim.percentile_dict import get_percentile_dict + from ocgis.calc.temporal_groups import SeasonalTemporalGroup from ocgis.interface.base.dimension.temporal import TemporalDimension from ocgis.calc.base import AbstractParameterizedFunction @@ -84,6 +86,20 @@ def test_calc_argument_to_operations(self): self.assertEqual(len(calc.value), 1) self.assertEqual(calc.value[0]['ref'], IcclimTG) + def test_get_percentile_dict(self): + start = 25567.5 + dts = np.arange(start, start + 1000, step=1.0) + value = np.random.rand(dts.shape[0], 1, 5) + value = np.ma.array(value, mask=False) + calendar = 'gregorian' + units = 'days since 2006-1-1' + dts = 
TemporalDimension(value=dts, calendar=calendar, units=units).value_datetime + percentile = 10 + window_width = 5 + only_leap_years = False + prd = get_percentile_dict(value, dts, percentile, window_width, only_leap_years=only_leap_years) + self.assertEqual(len(prd), 366) + @attr('slow') def test_icclim_combinatorial(self): shapes = ([('month',), 12], [('month', 'year'), 24], [('year',), 2]) From d51df58385a36ce50f26e29b3cbd397bcf507dc5 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Fri, 6 Feb 2015 09:31:41 -0700 Subject: [PATCH 68/71] add specific push for documentation branch --- sh/build_documentation.sh | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/sh/build_documentation.sh b/sh/build_documentation.sh index 394dfe524..915dd806e 100644 --- a/sh/build_documentation.sh +++ b/sh/build_documentation.sh @@ -1,3 +1,5 @@ +#!/bin/sh + # 1. commit any changes # 2. git checkout desired doc branch # 3. cd to doc directory @@ -16,5 +18,6 @@ cp -r $TDIR/* . git commit -a -m 'doc changes' git checkout - -git push +git push origin gh-pages + From 3511e98696f8ec17779e8a0d184a59dd78ccb795 Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Fri, 6 Feb 2015 09:36:53 -0700 Subject: [PATCH 69/71] minor - Changed version number in ocgis/__init__.py - Removed extra spaces in UGRID example. --- doc/sphinx_examples/to_ugrid_2d_flexible_mesh.py | 3 +-- src/ocgis/__init__.py | 5 ++--- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/doc/sphinx_examples/to_ugrid_2d_flexible_mesh.py b/doc/sphinx_examples/to_ugrid_2d_flexible_mesh.py index fe7de70a6..11bf1de36 100644 --- a/doc/sphinx_examples/to_ugrid_2d_flexible_mesh.py +++ b/doc/sphinx_examples/to_ugrid_2d_flexible_mesh.py @@ -4,7 +4,6 @@ from ocgis.constants import OUTPUT_FORMAT_NETCDF_UGRID_2D_FLEXIBLE_MESH - # This is the input shapefile with no gaps between the polygons. Multipolygons not allowed! SHP = '/path/to/no_gaps/shapefile.shp' # Write the data to a temporary directory. 
@@ -13,4 +12,4 @@ rd = ocgis.RequestDataset(uri=SHP) ops = ocgis.OcgOperations(dataset=rd, output_format=OUTPUT_FORMAT_NETCDF_UGRID_2D_FLEXIBLE_MESH) -ret = ops.execute() \ No newline at end of file +ret = ops.execute() diff --git a/src/ocgis/__init__.py b/src/ocgis/__init__.py index 9bf96d6ff..ff9244e7b 100644 --- a/src/ocgis/__init__.py +++ b/src/ocgis/__init__.py @@ -1,7 +1,6 @@ from osgeo import ogr, osr from ocgis.util.environment import env - from ocgis.api.collection import SpatialCollection from ocgis.api.operations import OcgOperations from ocgis.api.request.base import RequestDataset, RequestDatasetCollection @@ -19,8 +18,8 @@ from ocgis.interface.base.dimension.base import VectorDimension -__version__ = '1.0.1' -__release__ = '1.0.1-next' +__version__ = '1.1.0' +__release__ = '1.1.0' # tell ogr/osr to raise exceptions From 3c7c957df7a8daacf144bf2a6b03355b46bfe0dc Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Fri, 6 Feb 2015 11:26:48 -0700 Subject: [PATCH 70/71] updated license --- LICENSE.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/LICENSE.txt b/LICENSE.txt index fe60dde9e..85d39bdc1 100644 --- a/LICENSE.txt +++ b/LICENSE.txt @@ -1,6 +1,6 @@ OpenClimateGIS (OCGIS) -Copyright 2012-2014: University of Michigan-Ann Arbor, Michigan Technological +Copyright 2012-2015: University of Michigan-Ann Arbor, Michigan Technological University, University of Colorado-Boulder Licensed under the University of Illinois-NCSA License. From feafc339448e6ba2e75fe59770a4d5e52a57c52a Mon Sep 17 00:00:00 2001 From: Ben Koziol Date: Mon, 9 Feb 2015 10:46:27 -0700 Subject: [PATCH 71/71] support unmelted tabular output format #353 Added UGID to collection iterator. 
--- src/ocgis/api/collection.py | 1 - src/ocgis/conv/meta.py | 64 ++++++++++--------- .../test_ocgis/test_api/test_collection.py | 7 +- .../test/test_ocgis/test_conv/test_csv_.py | 35 +++++++++- src/ocgis/test/test_simple/test_simple.py | 6 +- 5 files changed, 76 insertions(+), 37 deletions(-) diff --git a/src/ocgis/api/collection.py b/src/ocgis/api/collection.py index 46d4be568..9a44fbb75 100644 --- a/src/ocgis/api/collection.py +++ b/src/ocgis/api/collection.py @@ -156,7 +156,6 @@ def get_iter_dict(self, use_upper_keys=False, conversion_map=None, melted=False) else: use_conversion = True for ugid, field_dict in self.iteritems(): - ugid = ugid if melted else None for field in field_dict.itervalues(): for yld_geom, row in field.get_iter(value_keys=self.value_keys, melted=melted, use_upper_keys=use_upper_keys, headers=self.headers, ugid=ugid): diff --git a/src/ocgis/conv/meta.py b/src/ocgis/conv/meta.py index 96eb61d06..e6e75ac21 100644 --- a/src/ocgis/conv/meta.py +++ b/src/ocgis/conv/meta.py @@ -1,67 +1,69 @@ import datetime + import ocgis from ocgis.api.parms.base import OcgParameter from ocgis.util.justify import justify_row HEADERS = { -'ugid':'User geometry identifier pulled from a provided set of selection geometries. Reduces to "1" for the case of no provided geometry.', -'gid':'Geometry identifier assigned by OpenClimateGIS to a dataset geometry. 
In the case of "aggregate=True" this is equivalent to "UGID".', -'tid':'Unique time identifier.', -'tgid':'Unique grouped time identifier.', -'vid':'Unique variable identifier.', -'lid':'Level identifier unique within a variable.', -'vlid':'Globally unique level identifier.', -'variable':'Name of request variable.', -'calc_name':'User-supplied name for a calculation.', -'level':'Level name.', -'time':'Time string.', -'year':'Year extracted from time string.', -'month':'Month extracted from time string.', -'day':'Day extracted from time string.', -'cid':'Unique identifier for a calculation name.', -'value':'Value associated with a variable or calculation.', -'did':'Dataset identifier see *_did.csv file for additional information on dataset requests.', -'uri':'Path to input data at execution time.', -'alias':'Dataset alias - may be equivalent to VARIABLE.' + 'ugid': 'User geometry identifier pulled from a provided set of selection geometries. Reduces to "1" for the case of no provided geometry.', + 'gid': 'Geometry identifier assigned by OpenClimateGIS to a dataset geometry. In the case of "aggregate=True" this is equivalent to "UGID".', + 'tid': 'Unique time identifier.', + 'vid': 'Unique variable identifier.', + 'lid': 'Level identifier unique within a variable.', + 'name': 'Name of the requested variable.', + 'calc_alias': 'User-supplied name for a calculation.', + 'calc_key': 'The unique key name assigned to a calculation.', + 'level': 'Level name.', + 'time': 'Time string.', + 'year': 'Year extracted from time string.', + 'month': 'Month extracted from time string.', + 'day': 'Day extracted from time string.', + 'cid': 'Unique identifier for a calculation name.', + 'value': 'Value associated with a variable or calculation.', + 'did': 'Dataset identifier see *_did.csv file for additional information on dataset requests.', + 'uri': 'Path to input data at execution time.', + 'alias': 'If not assigned, this will be the same as the variable name.' 
} class MetaConverter(object): _meta_filename = 'metadata.txt' - - def __init__(self,ops): + + def __init__(self, ops): self.ops = ops - + def get_rows(self): lines = ['OpenClimateGIS v{0} Metadata File'.format(ocgis.__release__)] lines.append(' Generated (UTC): {0}'.format(datetime.datetime.utcnow())) lines.append('') if self.ops.output_format != 'meta': - lines.append('This is OpenClimateGIS-related metadata. Data-level metadata may be found in the file named: {0}'.format(self.ops.prefix+'_source_metadata.txt')) + lines.append( + 'This is OpenClimateGIS-related metadata. Data-level metadata may be found in the file named: {0}'.format( + self.ops.prefix + '_source_metadata.txt')) lines.append('') lines.append('== Potential Header Names with Definitions ==') lines.append('') sh = sorted(HEADERS) for key in sh: - msg = ' {0} :: {1}'.format(key.upper(),'\n'.join(justify_row(HEADERS[key]))).replace(':: ',':: ') + msg = ' {0} :: {1}'.format(key.upper(), '\n'.join(justify_row(HEADERS[key]))).replace(':: ', ':: ') lines.append(msg) lines.append('') lines.append('== Argument Definitions and Content Descriptions ==') lines.append('') for v in sorted(self.ops.__dict__.itervalues()): - if isinstance(v,OcgParameter): + if isinstance(v, OcgParameter): lines.append(v.get_meta()) - - ## collapse lists + + # collapse lists ret = [] for line in lines: - if not isinstance(line,basestring): + if not isinstance(line, basestring): for item in line: ret.append(item) else: ret.append(line) - return(ret) + return ret @classmethod def validate_ops(cls, ops): @@ -72,6 +74,6 @@ def validate_ops(cls, ops): :type ops: :class:`ocgis.OcgOperations` :raises: DefinitionValidationError """ - + def write(self): - return('\n'.join(self.get_rows())) + return '\n'.join(self.get_rows()) diff --git a/src/ocgis/test/test_ocgis/test_api/test_collection.py b/src/ocgis/test/test_ocgis/test_api/test_collection.py index af21c116c..192796e92 100644 --- a/src/ocgis/test/test_ocgis/test_api/test_collection.py 
+++ b/src/ocgis/test/test_ocgis/test_api/test_collection.py @@ -2,13 +2,13 @@ import os from copy import copy, deepcopy import numpy as np +import datetime import fiona from shapely.geometry import Point, shape, MultiPoint from shapely.geometry.base import BaseGeometry from shapely.geometry.multipolygon import MultiPolygon -import datetime from ocgis.api.collection import SpatialCollection, AbstractCollection from ocgis.interface.base.crs import CoordinateReferenceSystem, Spherical from ocgis.test.base import TestBase @@ -253,6 +253,11 @@ def test_get_iter_dict(self): self.assertIsInstance(row[1], OrderedDict) self.assertEqual(row[1].keys(), headers) + # test ugid always in dictionary + coll = field.as_spatial_collection() + row = coll.get_iter_dict(melted=False).next()[1] + self.assertEqual(row[constants.HEADERS.ID_SELECTION_GEOMETRY], 1) + def test_get_iter_melted(self): sp = self.get_collection() for row in sp.get_iter_melted(): diff --git a/src/ocgis/test/test_ocgis/test_conv/test_csv_.py b/src/ocgis/test/test_ocgis/test_conv/test_csv_.py index 6b9ed8649..b18b9cf48 100644 --- a/src/ocgis/test/test_ocgis/test_conv/test_csv_.py +++ b/src/ocgis/test/test_ocgis/test_conv/test_csv_.py @@ -1,6 +1,9 @@ +from csv import DictReader import os import tempfile +from ocgis import constants + from ocgis.conv.csv_ import CsvShapefileConverter, CsvConverter from ocgis import OcgOperations, RequestDataset from ocgis.api.subset import SubsetOperation @@ -9,7 +12,37 @@ class TestCsvConverter(AbstractTestConverter): - pass + def get(self, kwargs_conv=None, kwargs_ops=None): + rd = self.test_data.get_rd('cancm4_tas') + + kwds_ops = Dict(dataset=rd, geom='state_boundaries', select_ugid=[15, 18], snippet=True) + if kwargs_ops is not None: + kwds_ops.update(kwargs_ops) + + ops = OcgOperations(**kwds_ops) + so = SubsetOperation(ops) + + kwds_conv = Dict() + kwds_conv.outdir = self.current_dir_output + kwds_conv.prefix = 'foo' + kwds_conv.ops = ops + if kwargs_conv is not None: + 
kwds_conv.update(kwargs_conv) + + conv = CsvConverter(so, **kwds_conv) + + return conv + + def test_write(self): + conv = self.get() + self.assertFalse(conv.melted) + ret = conv.write() + ugids = [] + with open(ret) as f: + reader = DictReader(f) + for row in reader: + ugids.append(row[constants.OCGIS_UNIQUE_GEOMETRY_IDENTIFIER]) + self.assertAsSetEqual(['15', '18'], ugids) class TestCsvShpConverter(AbstractTestConverter): diff --git a/src/ocgis/test/test_simple/test_simple.py b/src/ocgis/test/test_simple/test_simple.py index 66bf63320..379e517bd 100644 --- a/src/ocgis/test/test_simple/test_simple.py +++ b/src/ocgis/test/test_simple/test_simple.py @@ -9,6 +9,7 @@ from csv import DictReader import tempfile import numpy as np +import datetime from fiona.crs import from_string from osgeo.osr import SpatialReference @@ -18,7 +19,6 @@ from shapely.geometry.geo import mapping from shapely import wkt -import datetime from ocgis.api.operations import OcgOperations from ocgis.api.interpreter import OcgInterpreter from ocgis.api.parms.definition import SpatialOperation @@ -1057,7 +1057,7 @@ def test_csv_conversion(self): else: actual = {'LID': '1', 'LB_LEVEL': '0', 'LEVEL': '50', 'TIME': '2000-03-01 12:00:00', 'MONTH': '3', 'UB_LEVEL': '100', 'LB_TIME': '2000-03-01 00:00:00', 'YEAR': '2000', 'TID': '1', - 'FOO': '1.0', 'UB_TIME': '2000-03-02 00:00:00', 'DAY': '1'} + 'FOO': '1.0', 'UB_TIME': '2000-03-02 00:00:00', 'DAY': '1', 'UGID': '1'} self.assertDictEqual(row, actual) did_file = os.path.join(output_dir, ops.prefix + '_did.csv') @@ -1097,7 +1097,7 @@ def test_csv_calc_conversion(self): else: actual = {'LID': '1', 'LB_LEVEL': '0', 'LEVEL': '50', 'TIME': '2000-03-16 00:00:00', 'MONTH': '3', 'MY_MEAN': '1.0', 'UB_LEVEL': '100', 'LB_TIME': '2000-03-01 00:00:00', 'YEAR': '2000', - 'TID': '1', 'UB_TIME': '2000-04-01 00:00:00', 'DAY': '16'} + 'TID': '1', 'UB_TIME': '2000-04-01 00:00:00', 'DAY': '16', 'UGID': '1'} self.assertDictEqual(row, actual) def 
test_csv_calc_conversion_two_calculations(self):