From 369618bec14bfc231f8bfcc2511c3277be09fe8a Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Tue, 8 Nov 2016 15:52:50 +0000 Subject: [PATCH 1/4] Refactor to slightly tidy test code. --- .../integration/fast_load/test_fast_load.py | 100 ++++++++++-------- 1 file changed, 53 insertions(+), 47 deletions(-) diff --git a/lib/iris/tests/integration/fast_load/test_fast_load.py b/lib/iris/tests/integration/fast_load/test_fast_load.py index efac74014f..cd48688e32 100644 --- a/lib/iris/tests/integration/fast_load/test_fast_load.py +++ b/lib/iris/tests/integration/fast_load/test_fast_load.py @@ -50,8 +50,7 @@ class Mixin_FieldTest(object): def setUp(self): # Create a private temporary directory. self.temp_dir_path = tempfile.mkdtemp() - # Note: these are used to keep the files in a definite order, - # otherwise random filenames --> random load results !! + # Initialise temporary filename generation. self.tempfile_count = 0 self.tempfile_path_fmt = \ '{dir_path}/tempfile_{prefix}_{file_number:06d}{suffix}' @@ -61,6 +60,7 @@ def tearDown(self): shutil.rmtree(self.temp_dir_path) def temp_filepath(self, user_name='', suffix='.pp'): + # Return the filepath for a new temporary file. self.tempfile_count += 1 file_path = self.tempfile_path_fmt.format( dir_path=self.temp_dir_path, @@ -69,7 +69,15 @@ def temp_filepath(self, user_name='', suffix='.pp'): suffix=suffix) return file_path + def save_fieldcubes(self, cubes, basename=''): + # Save cubes to a temporary file, and return its filepath. + file_path = self.temp_filepath(user_name=basename, suffix='.pp') + iris.save(cubes, file_path) + return file_path + def load_function(self, *args, **kwargs): + # Return data from "iris.load", using either 'normal' or 'fast' method + # as selected by the test class. 
if self.load_type == 'iris': return iris_load(*args, **kwargs) elif self.load_type == 'fast': @@ -85,11 +93,6 @@ def load_function(self, *args, **kwargs): # NOTE: in order to write/readback as identical, these test phenomena # settings also provide the canonical unit and a matching STASH attribute. # These could in principle be looked up, but it's a bit awkward. - phenomena = [('air_temperature', 'K'), - ('air_density', 'kg m-3'), - ('air_pressure', 'm s-1'), - ('wind_speed', 'm s-1'), - ] phenomena = [ ('air_temperature', 'K', 'm01s01i004'), ('x_wind', 'm s-1', 'm01s00i002'), @@ -98,23 +101,27 @@ def load_function(self, *args, **kwargs): ] def fields(self, c_t=None, cft=None, ctp=None, - c_h=None, c_p=None, mmm=None, phn=0): + c_h=None, c_p=None, phn=0, mmm=None): # Return a list of 2d cubes representing raw PPFields, from args # specifying sequences of (scalar) coordinate values. # TODO? : add bounds somehow ? # # Arguments 'c' are either a single int value, making a scalar - # coord, or a string of characters 0-9 (value) or '-' (missing). - # - # Argument 'mmm' denotes existence (or not) of a cell method of type - # 'average' or 'min' or 'max' (values '012' respectively), applying to - # the time values -- ultimately, this controls LBTIM. + # coord, or a string of characters : '0'-'9' (index) or '-' (missing). + # The indexes select point values from fixed list of possibles. # # Argument 'c_h' and 'c_p' represent height or pressure values, so # ought to be mutually exclusive -- these control LBVC. # # Argument 'phn' indexes phenomenon types. + # + # Argument 'mmm' denotes existence (or not) of a cell method of type + # 'average' or 'min' or 'max' (values '012' respectively), applying to + # the time values -- ultimately, this controls LBTIM. + + # Get the number of result cubes, defined by the 'longest' arg. def arglen(arg): + # Get the 'length' of a control argument. 
if arg is None: result = 0 elif isinstance(arg, six.string_types): @@ -126,8 +133,19 @@ def arglen(arg): n_flds = max(arglen(x) for x in (c_t, cft, ctp, c_h, c_p, mmm)) - def arg_inds(arg): - # Return an argument decoded as an array of n_flds integers. + # Make basic anonymous test cubes. + ny, nx = 3, 5 + data = np.arange(n_flds * ny * nx, dtype=np.float32) + data = data.reshape((n_flds, ny, nx)) + cubes = [Cube(data[i]) for i in range(n_flds)] + + # Apply phenomena definitions. + def arg_vals(arg, vals): + # Decode an argument to a list of 'n_flds' coordinate point values. + # (or 'None' where missing) + + # First get a list of value indices from the argument. + # Can be: a single index value; a list of indices; or a string. if (isinstance(arg, Iterable) and not isinstance(arg, six.string_types)): # Can also just pass a simple iterable of values. @@ -142,32 +160,13 @@ def arg_inds(arg): assert isinstance(arg, six.string_types) inds = [None if char == '-' else int(char) for char in arg] - return inds - - def arg_vals(arg, vals): - return [None if ind is None else vals[int(ind)] - for ind in arg_inds(arg)] - - def arg_coords(arg, name, unit, vals=None): - if vals is None: - vals = np.arange(n_flds + 2) # Note allowance - vals = arg_vals(arg, vals) - coords = [None if val is None else DimCoord([val], units=unit) - for val in vals] - # Apply names separately, as 'pressure' is not a standard name. - for coord in coords: - if coord: - coord.rename(name) - return coords - ny, nx = 3, 5 - data = np.arange(n_flds * ny * nx, dtype=np.float32) - data = data.reshape((n_flds, ny, nx)) + # Convert indices to selected point values. + values = [None if ind is None else vals[int(ind)] + for ind in inds] - # Make basic anonymous test cubes. - cubes = [Cube(data[i]) for i in range(n_flds)] + return values - # Apply phenomena definitions. 
phenomena = arg_vals(phn, self.phenomena) for cube, (name, units, stash) in zip(cubes, phenomena): cube.rename(name) @@ -192,27 +191,34 @@ def arg_coords(arg, name, unit, vals=None): cube.add_dim_coord(co_y, 0) cube.add_dim_coord(co_x, 1) - # Add multiple scalar coordinates as requested. + # Add multiple scalar coordinates as defined by the arguments. + def arg_coords(arg, name, unit, vals=None): + # Decode an argument to a list of scalar coordinates. + if vals is None: + vals = np.arange(n_flds + 2) # Note allowance + vals = arg_vals(arg, vals) + coords = [None if val is None else DimCoord([val], units=unit) + for val in vals] + # Apply names separately, as 'pressure' is not a standard name. + for coord in coords: + if coord: + coord.rename(name) + return coords + def add_arg_coords(arg, name, unit, vals=None): + # Add scalar coordinates to each cube, for one argument. coords = arg_coords(arg, name, unit, vals) for cube, coord in zip(cubes, coords): if coord: cube.add_aux_coord(coord) -# ? DON'T have a model_level_number coord ? -# add_arg_coords(np.arange(1, n_flds+1), 'model_level_number', '1') - add_arg_coords(c_t, 'time', self.time_unit, self.time_values) add_arg_coords(cft, 'forecast_reference_time', self.time_unit) add_arg_coords(ctp, 'forecast_period', 'hours', self.time_values) add_arg_coords(c_h, 'height', 'm', self.height_values) add_arg_coords(c_p, 'pressure', 'hPa', self.pressure_values) - return cubes - def save_fieldcubes(self, cubes, basename=''): - file_path = self.temp_filepath(user_name=basename, suffix='.pp') - iris.save(cubes, file_path) - return file_path + return cubes class MixinBasic(Mixin_FieldTest): From c7659951adad5d71e3c9904f902d075dab582dfd Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Tue, 8 Nov 2016 18:22:35 +0000 Subject: [PATCH 2/4] Add tests for callbacks and constraints. 
--- .../integration/fast_load/test_fast_load.py | 81 ++++++++++++++++++- 1 file changed, 77 insertions(+), 4 deletions(-) diff --git a/lib/iris/tests/integration/fast_load/test_fast_load.py b/lib/iris/tests/integration/fast_load/test_fast_load.py index cd48688e32..389681f6e8 100644 --- a/lib/iris/tests/integration/fast_load/test_fast_load.py +++ b/lib/iris/tests/integration/fast_load/test_fast_load.py @@ -38,6 +38,7 @@ from iris.coords import DimCoord, AuxCoord from iris.coord_systems import GeogCS from iris.cube import Cube, CubeList +from iris.exceptions import IgnoreCubeException from iris.fileformats.pp import EARTH_RADIUS, STASH from iris import load as iris_load @@ -203,6 +204,9 @@ def arg_coords(arg, name, unit, vals=None): for coord in coords: if coord: coord.rename(name) + # Also fix heights to match what comes from a PP file. + if name == 'height': + coord.attributes['positive'] = 'up' return coords def add_arg_coords(arg, name, unit, vals=None): @@ -288,6 +292,7 @@ def test_FAIL_phenomena_nostash(self): self.assertEqual(results, expected) def test_cross_file_concatenate(self): + # Combine vector dimensions (i.e. concatenate) across multiple files. per_file_cubes = [self.fields(c_t=times) for times in ('12', '34')] files = [self.save_fieldcubes(flds) @@ -299,6 +304,8 @@ def test_cross_file_concatenate(self): self.assertEqual(results, expected) def test_FAIL_scalar_vector_concatenate(self): + # Structured load can produce a scalar coordinate from one file, and a + # matching vector one from another file, but these won't "combine". # We'd really like to fix this one... single_timepoint_fld, = self.fields(c_t='1') multi_timepoint_flds = self.fields(c_t='23') @@ -313,15 +320,81 @@ def test_FAIL_scalar_vector_concatenate(self): expected = CubeList(multi_timepoint_flds + [single_timepoint_fld]).merge() else: + # This is what we ACTUALLY get at present. + # It can't combine the scalar and vector time coords. 
+ expected = CubeList([CubeList(multi_timepoint_flds).merge_cube(), + single_timepoint_fld]) # NOTE: in this case, we need to sort the results to ensure a # repeatable ordering, because ??somehow?? the random temporary # directory name affects the ordering of the cubes in the result ! results = CubeList(sorted(results, key=lambda cube: cube.shape)) - # This is what we ACTUALLY get at present. - # It can't combine the scalar and vector time coords. - expected = CubeList([CubeList(multi_timepoint_flds).merge_cube(), - single_timepoint_fld]) + + self.assertEqual(results, expected) + + def test_stash_constraint(self): + # Check that an attribute constraint functions correctly. + # Note: this is a special case in "fileformats.pp". + flds = self.fields(c_t='1122', phn='0101') + file = self.save_fieldcubes(flds) + airtemp_flds = [fld for fld in flds + if fld.name() == 'air_temperature'] + stash_attribute = airtemp_flds[0].attributes['STASH'] + results = self.load_function( + file, + iris.AttributeConstraint(STASH=stash_attribute)) + expected = CubeList(airtemp_flds).merge() + self.assertEqual(results, expected) + + def test_ordinary_constraint(self): + # Check that a 'normal' constraint functions correctly. + # Note: *should* be independent of structured loading. + flds = self.fields(c_h='0123') + file = self.save_fieldcubes(flds) + height_constraint = iris.Constraint( + height=lambda h: 150.0 < h < 350.0) + results = self.load_function(file, height_constraint) + expected = CubeList(flds[1:3]).merge() + self.assertEqual(results, expected) + + def test_callback(self): + # Use 2 timesteps each of (air-temp on height) and (rh on pressure). + flds = self.fields(c_t='0011', + phn='0303', + c_h='0-1-', + c_p='-2-3') + file = self.save_fieldcubes(flds) + + if self.load_type == 'iris': + def callback(cube, field, filename): + self.assertEqual(filename, file) + lbvc = field.lbvc + if lbvc == 1: + # reject the height level data (accept only pressure). 
+ raise IgnoreCubeException() + else: + # Record the LBVC value. + cube.attributes['LBVC'] = lbvc + else: + def callback(cube, collation, filename): + self.assertEqual(filename, file) + lbvcs = [fld.lbvc + for fld in collation.fields] + if lbvcs[0] == 1: + # reject the height level data (accept only pressure). + raise IgnoreCubeException() + else: + # Record the LBVC values. + cube.attributes['A_LBVC'] = lbvcs + + results = self.load_function(file, callback=callback) + + # Make an 'expected' from selected fields, with the expected attribute. + expected = CubeList([flds[1], flds[3]]).merge() + if self.load_type == 'iris': + expected[0].attributes['LBVC'] = 8 + else: + expected[0].attributes['A_LBVC'] = [8, 8] self.assertEqual(results, expected) From be2f527bf8f4fca371cfec7d1226bef26e8df780 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Tue, 8 Nov 2016 18:44:53 +0000 Subject: [PATCH 3/4] Add cell-method (i.e. LBPROC) test. --- .../integration/fast_load/test_fast_load.py | 28 +++++++++++++++++-- 1 file changed, 26 insertions(+), 2 deletions(-) diff --git a/lib/iris/tests/integration/fast_load/test_fast_load.py b/lib/iris/tests/integration/fast_load/test_fast_load.py index 389681f6e8..67a914941c 100644 --- a/lib/iris/tests/integration/fast_load/test_fast_load.py +++ b/lib/iris/tests/integration/fast_load/test_fast_load.py @@ -35,7 +35,7 @@ import six import iris.coords -from iris.coords import DimCoord, AuxCoord +from iris.coords import DimCoord, AuxCoord, CellMethod from iris.coord_systems import GeogCS from iris.cube import Cube, CubeList from iris.exceptions import IgnoreCubeException @@ -88,7 +88,7 @@ def load_function(self, *args, **kwargs): # Reference values for making coordinate contents. 
time_unit = 'hours since 1970-01-01' period_unit = 'hours' - time_values = 24.0 * np.arange(5) + time_values = 24.0 * np.arange(10) height_values = [100.0, 200.0, 300.0, 400.0] pressure_values = [300.0, 500.0, 850.0, 1000.0] # NOTE: in order to write/readback as identical, these test phenomena @@ -100,6 +100,14 @@ def load_function(self, *args, **kwargs): ('y_wind', 'm s-1', 'm01s00i003'), ('specific_humidity', 'kg kg-1', 'm01s00i010'), ] + cell_method_values = [ + CellMethod('mean', 'time'), + # NOTE: if you add an *interval* to any of these, it is not saved + # in the PP file (or loaded back again). + # Could be a PP save/load bug, but could be because no bounds ?? + CellMethod('maximum', 'time'), + CellMethod('minimum', 'time'), + ] def fields(self, c_t=None, cft=None, ctp=None, c_h=None, c_p=None, phn=0, mmm=None): @@ -222,6 +230,12 @@ def add_arg_coords(arg, name, unit, vals=None): add_arg_coords(c_h, 'height', 'm', self.height_values) add_arg_coords(c_p, 'pressure', 'hPa', self.pressure_values) + # Add cell methods as required. + methods = arg_vals(mmm, self.cell_method_values) + for cube, method in zip(cubes, methods): + if method: + cube.add_cell_method(method) + return cubes @@ -398,6 +412,16 @@ def callback(cube, collation, filename): self.assertEqual(results, expected) + def test_cell_methods(self): + # Check that cell methods (i.e. LBPROC values) produce distinct + # phenomena. + flds = self.fields(mmm='-01-01-01', c_t=range(9)) + file = self.save_fieldcubes(flds) + results = self.load_function(file) + expected = CubeList(CubeList(flds[i_start::3]).merge_cube() + for i_start in range(3)) + self.assertEqual(results, expected) + class TestBasicIris(MixinBasic, tests.IrisTest): load_type = 'iris' From b9634a5b5298e58b7e4fb34b24bd8a924d91130a Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 9 Nov 2016 07:32:52 +0000 Subject: [PATCH 4/4] Tests in 3 classes; test full load api; test dimensions. 
--- .../integration/fast_load/test_fast_load.py | 258 +++++++++++++++--- 1 file changed, 220 insertions(+), 38 deletions(-) diff --git a/lib/iris/tests/integration/fast_load/test_fast_load.py b/lib/iris/tests/integration/fast_load/test_fast_load.py index 67a914941c..6b1f3c63ec 100644 --- a/lib/iris/tests/integration/fast_load/test_fast_load.py +++ b/lib/iris/tests/integration/fast_load/test_fast_load.py @@ -41,7 +41,7 @@ from iris.exceptions import IgnoreCubeException from iris.fileformats.pp import EARTH_RADIUS, STASH -from iris import load as iris_load +import iris from iris.fileformats.um import structured_um_loading @@ -80,19 +80,45 @@ def load_function(self, *args, **kwargs): # Return data from "iris.load", using either 'normal' or 'fast' method # as selected by the test class. if self.load_type == 'iris': - return iris_load(*args, **kwargs) + return iris.load(*args, **kwargs) elif self.load_type == 'fast': with structured_um_loading(): - return iris_load(*args, **kwargs) + return iris.load(*args, **kwargs) + + def load_cube_function(self, *args, **kwargs): + # Return data from "iris.load_cube". + if self.load_type == 'iris': + return iris.load_cube(*args, **kwargs) + elif self.load_type == 'fast': + with structured_um_loading(): + return iris.load_cube(*args, **kwargs) + + def load_cubes_function(self, *args, **kwargs): + # Return data from "iris.load_cubes". + if self.load_type == 'iris': + return iris.load_cubes(*args, **kwargs) + elif self.load_type == 'fast': + with structured_um_loading(): + return iris.load_cubes(*args, **kwargs) + + def load_raw_function(self, *args, **kwargs): + # Return data from "iris.load_raw". + if self.load_type == 'iris': + return iris.load_raw(*args, **kwargs) + elif self.load_type == 'fast': + with structured_um_loading(): + return iris.load_raw(*args, **kwargs) # Reference values for making coordinate contents. 
time_unit = 'hours since 1970-01-01' period_unit = 'hours' time_values = 24.0 * np.arange(10) - height_values = [100.0, 200.0, 300.0, 400.0] - pressure_values = [300.0, 500.0, 850.0, 1000.0] - # NOTE: in order to write/readback as identical, these test phenomena - # settings also provide the canonical unit and a matching STASH attribute. + height_values = 100.0 * np.arange(1, 11) + pressure_values = [100.0, 150.0, 200.0, 250.0, 300.0, 500.0, 850.0, 1000.0] + + # Phenomenon test values. + # NOTE: in order to write/readback as identical, these include the + # canonical unit and a matching STASH attribute. # These could in principle be looked up, but it's a bit awkward. phenomena = [ ('air_temperature', 'K', 'm01s01i004'), @@ -100,11 +126,13 @@ def load_function(self, *args, **kwargs): ('y_wind', 'm s-1', 'm01s00i003'), ('specific_humidity', 'kg kg-1', 'm01s00i010'), ] + + # Cell method test values. + # NOTE: if you add an *interval* to any of these cell-methods, it is not + # saved into the PP file (?? or maybe not loaded back again ??). + # This could be a PP save/load bug, or maybe just because no bounds ?? cell_method_values = [ CellMethod('mean', 'time'), - # NOTE: if you add an *interval* to any of these, it is not saved - # in the PP file (or loaded back again). - # Could be a PP save/load bug, but could be because no bounds ?? CellMethod('maximum', 'time'), CellMethod('minimum', 'time'), ] @@ -240,20 +268,13 @@ def add_arg_coords(arg, name, unit, vals=None): class MixinBasic(Mixin_FieldTest): - # A set of tests that can be applied to *either* standard iris load - # functions, for confirmation of test results, or to fast-load. - # "Real" tests for each interface inherit this. 
- - def _debug(self, expected, results): - def pcubes(name, cubes): - print('\n\n{}:\n'.format(name), cubes) - for i, cube in enumerate(cubes): - print('@{}'.format(i)) - print(cube) - pcubes('expected', expected) - pcubes('results', results) + # A mixin of tests that can be applied to *either* standard or fast load. + # "Real" test classes inherit this + define 'self.load_type'. + # + # Basic functional tests. def test_basic(self): + # Show that basic load merging works. flds = self.fields(c_t='123', cft='000', ctp='123', c_p=0) file = self.save_fieldcubes(flds) results = self.load_function(file) @@ -261,6 +282,7 @@ def test_basic(self): self.assertEqual(results, expected) def test_phenomena(self): + # Show that different phenomena are merged into distinct cubes. flds = self.fields(c_t='1122', phn='0101') file = self.save_fieldcubes(flds) results = self.load_function(file) @@ -307,14 +329,12 @@ def test_FAIL_phenomena_nostash(self): def test_cross_file_concatenate(self): # Combine vector dimensions (i.e. concatenate) across multiple files. - per_file_cubes = [self.fields(c_t=times) - for times in ('12', '34')] - files = [self.save_fieldcubes(flds) - for flds in per_file_cubes] - results = self.load_function(files) - expected = CubeList(fld_cube - for cubes in per_file_cubes - for fld_cube in cubes).merge() + fldset_1 = self.fields(c_t='12') + fldset_2 = self.fields(c_t='34') + file_1 = self.save_fieldcubes(fldset_1) + file_2 = self.save_fieldcubes(fldset_2) + results = self.load_function((file_1, file_2)) + expected = CubeList(fldset_1 + fldset_2).merge() self.assertEqual(results, expected) def test_FAIL_scalar_vector_concatenate(self): @@ -346,6 +366,24 @@ def test_FAIL_scalar_vector_concatenate(self): self.assertEqual(results, expected) + def test_cell_method(self): + # Check that cell methods (i.e. LBPROC values) produce distinct + # phenomena. 
+ flds = self.fields(c_t='000111222', + mmm='-01-01-01') + file = self.save_fieldcubes(flds) + results = self.load_function(file) + expected = CubeList(CubeList(flds[i_start::3]).merge_cube() + for i_start in range(3)) + self.assertEqual(results, expected) + + +class MixinCallDetails(Mixin_FieldTest): + # A mixin of tests that can be applied to *either* standard or fast load. + # "Real" test classes inherit this + define 'self.load_type'. + # + # Tests for different load calls and load-call arguments. + def test_stash_constraint(self): # Check that an attribute constraint functions correctly. # Note: this is a special case in "fileformats.pp". @@ -412,22 +450,166 @@ def callback(cube, collation, filename): self.assertEqual(results, expected) - def test_cell_methods(self): - # Check that cell methods (i.e. LBPROC values) produce distinct - # phenomena. - flds = self.fields(mmm='-01-01-01', c_t=range(9)) + def test_load_cube(self): + flds = self.fields(c_t='123', cft='000', ctp='123', c_p=0) + file = self.save_fieldcubes(flds) + results = self.load_cube_function(file) + expected = CubeList(flds).merge_cube() + self.assertEqual(results, expected) + + def test_load_cubes(self): + flds = self.fields(c_h='0123') + file = self.save_fieldcubes(flds) + height_constraints = [ + iris.Constraint(height=300.0), + iris.Constraint(height=lambda h: 150.0 < h < 350.0), + iris.Constraint('air_temperature')] + results = self.load_cubes_function(file, height_constraints) + expected = CubeList([flds[2], + CubeList(flds[1:3]).merge_cube(), + CubeList(flds).merge_cube()]) + self.assertEqual(results, expected) + + def test_load_raw(self): + fldset_1 = self.fields(c_t='015', phn='001') + fldset_2 = self.fields(c_t='234') + file_1 = self.save_fieldcubes(fldset_1) + file_2 = self.save_fieldcubes(fldset_2) + results = self.load_raw_function((file_1, file_2)) + if self.load_type == 'iris': + # Each 'raw' cube is just one field. 
+ expected = CubeList(fldset_1 + fldset_2) + else: + # 'Raw' cubes have combined (vector) times within each file. + # The 'other' phenomenon appears separately. + expected = CubeList([ + CubeList(fldset_1[:2]).merge_cube(), + CubeList(fldset_2).merge_cube(), + fldset_1[2], + ]) + + # Again here, the order of these results is not stable : + # It varies with random characters in the temporary filepath. + # + # ***************************************************************** + # *** Here, this is clearly ALSO the case for "standard" loads. *** + # ***************************************************************** + # + # E.G. run "test_fast_load.py -v TestCallDetails__Iris.test_load_raw" : + # If you remove the sort operations, this fails "sometimes". + # + # To fix this, sort both expected and results by (first) timepoint + # - for which purpose we made all the time values different. + + def timeorder(cube): + return cube.coord('time').points[0] + + expected = sorted(expected, key=timeorder) + results = sorted(results, key=timeorder) + + self.assertEqual(results, expected) + + +class MixinDimsAndOrdering(Mixin_FieldTest): + # A mixin of tests that can be applied to *either* standard or fast load. + # "Real" test classes inherit this + define 'self.load_type'. + # + # Tests for multidimensional results and dimension orderings. + + def test_multidim(self): + # Check that a full 2-phenom * 2d structure all works properly. + flds = self.fields(c_t='00001111', + c_h='00110011', + phn='01010101') file = self.save_fieldcubes(flds) results = self.load_function(file) - expected = CubeList(CubeList(flds[i_start::3]).merge_cube() - for i_start in range(3)) + expected = CubeList(flds).merge() self.assertEqual(results, expected) + def test_odd_order(self): + # Show that an erratic interleaving of phenomena fields still works. + # N.B. field sequences *within* each phenomenon are properly ordered. 
+ flds = self.fields(c_t='00010111', + c_h='00101101', + phn='01001011') + file = self.save_fieldcubes(flds) + results = self.load_function(file) + expected = CubeList(flds).merge() + self.assertEqual(results, expected) + + def test_v_t_order(self): + # With height varying faster than time, first dimension is time, + # which matches the 'normal' load behaviour. + flds = self.fields(c_t='000111', + c_h='012012') + file = self.save_fieldcubes(flds) + results = self.load_function(file) + expected = CubeList(flds).merge() + # Order is (t, h, y, x), which is "standard". + self.assertEqual(expected[0].coord_dims('time'), (0,)) + self.assertEqual(expected[0].coord_dims('height'), (1,)) + self.assertEqual(results, expected) + + def test_t_v_order(self): + # With time varying faster than height, first dimension is height, + # which does not match the 'normal' load. + flds = self.fields(c_t='010101', + c_h='001122') + file = self.save_fieldcubes(flds) + results = self.load_function(file) + expected = CubeList(flds).merge() + if self.load_type == 'iris': + # Order is (t, h, y, x), which is "standard". + self.assertEqual(results[0].coord_dims('time'), (0,)) + self.assertEqual(results[0].coord_dims('height'), (1,)) + else: + # Order is (h, t, y, x), which is *not* "standard". + self.assertEqual(results[0].coord_dims('time'), (1,)) + self.assertEqual(results[0].coord_dims('height'), (0,)) + expected[0].transpose((1, 0, 2, 3)) + self.assertEqual(results, expected) + + def test_missing_combination(self): + # A case where one field is 'missing' to make a 2d result. + flds = self.fields(c_t='00011', + c_h='01202') + file = self.save_fieldcubes(flds) + results = self.load_function(file) + expected = CubeList(flds).merge() + self.assertEqual(expected[0].coord_dims('time'), (0,)) + self.assertEqual(expected[0].coord_dims('height'), (0,)) + if self.load_type == 'fast': + # Something a bit weird happens to the 'height' coordinate in this + # case (and not for standard load). 
+ for cube in expected: + cube.coord('height').points = np.array( + cube.coord('height').points, + dtype=np.float32) + cube.coord('height').attributes = {} + self.assertEqual(results, expected) + + +class TestBasic__Iris(MixinBasic, tests.IrisTest): + load_type = 'iris' + + +class TestBasic__Fast(MixinBasic, tests.IrisTest): + load_type = 'fast' + + +class TestCallDetails__Iris(MixinCallDetails, tests.IrisTest): + load_type = 'iris' + + +class TestCallDetails__Fast(MixinCallDetails, tests.IrisTest): + load_type = 'fast' + -class TestBasicIris(MixinBasic, tests.IrisTest): +class TestDimsAndOrdering__Iris(MixinDimsAndOrdering, tests.IrisTest): load_type = 'iris' -class TestBasicFast(MixinBasic, tests.IrisTest): +class TestDimsAndOrdering__Fast(MixinDimsAndOrdering, tests.IrisTest): load_type = 'fast'