diff --git a/docs/src/userguide/real_and_lazy_data.rst b/docs/src/userguide/real_and_lazy_data.rst
index 38f06aab99..ef4de0c429 100644
--- a/docs/src/userguide/real_and_lazy_data.rst
+++ b/docs/src/userguide/real_and_lazy_data.rst
@@ -6,6 +6,7 @@
     import dask.array as da
     import iris
+    from iris.cube import CubeList
     import numpy as np
@@ -227,10 +228,47 @@ coordinates' lazy points and bounds:
 Dask Processing Options
 -----------------------
 
-Iris uses dask to provide lazy data arrays for both Iris cubes and coordinates,
-and for computing deferred operations on lazy arrays.
+Iris uses `Dask <https://www.dask.org/>`_ to provide lazy data arrays for
+both Iris cubes and coordinates, and for computing deferred operations on lazy arrays.
 
 Dask provides processing options to control how deferred operations on lazy arrays
 are computed. This is provided via the ``dask.set_options`` interface. See the
 `dask documentation <https://docs.dask.org/>`_ for more information on setting dask
 processing options.
+
+
+.. _delayed_netcdf_save:
+
+Delayed NetCDF Saving
+---------------------
+
+When saving data to NetCDF files, it is possible to *delay* writing lazy content to the
+output file, to be performed by `Dask <https://www.dask.org/>`_ later,
+thus enabling parallel save operations.
+
+This works in the following way:
+    1. An :func:`iris.save` call is made, with a NetCDF file output and the additional
+       keyword ``compute=False``.
+       This is currently *only* available when saving to NetCDF, so it is documented in
+       the Iris NetCDF file format API. See: :func:`iris.fileformats.netcdf.save`.
+
+    2. The call creates the output file, but does not fill in variables' data, where
+       the data is a lazy array in the Iris object. Instead, these variables are
+       initially created "empty".
+
+    3. The :func:`iris.save` call returns a ``result`` which is a
+       :class:`~dask.delayed.Delayed` object.
+
+    4. The save can be completed later by calling ``result.compute()``, or by passing it
+       to the :func:`dask.compute` call.
+
+The benefit of this is that costly data transfer operations can be performed in
+parallel with writes to other data files. Also, where array contents are calculated
+from shared lazy input data, these can be computed in parallel efficiently by Dask
+(i.e. without re-fetching), similar to what :meth:`iris.cube.CubeList.realise_data`
+can do.
+
+.. note::
+    This feature does **not** enable parallel writes to the *same* NetCDF output file.
+    That can only be done on certain operating systems, with a specially configured
+    build of the NetCDF C library, and is not supported by Iris at present.
diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst
index 8e3903b802..2cb22a7b53 100644
--- a/docs/src/whatsnew/latest.rst
+++ b/docs/src/whatsnew/latest.rst
@@ -30,7 +30,33 @@ This document explains the changes made to Iris for this release
 ✨ Features
 ===========
 
-#. N/A
+#. `@bsherratt`_ added support for plugins - see the corresponding
+   :ref:`documentation page` for further information.
+   (:pull:`5144`)
+
+#. `@rcomer`_ enabled lazy evaluation of :obj:`~iris.analysis.RMS` calculations
+   with weights. (:pull:`5017`)
+
+#. `@schlunma`_ allowed the usage of cubes, coordinates, cell measures, or
+   ancillary variables as weights for cube aggregations
+   (:meth:`iris.cube.Cube.collapsed`, :meth:`iris.cube.Cube.aggregated_by`, and
+   :meth:`iris.cube.Cube.rolling_window`). This automatically adapts cube units
+   if necessary. (:pull:`5084`)
+
+#. `@lbdreyer`_ and `@trexfeathers`_ (reviewer) added :func:`iris.plot.hist`
+   and :func:`iris.quickplot.hist`.
+   (:pull:`5189`)
+
+#. `@tinyendian`_ edited :func:`~iris.analysis.cartography.rotate_winds` to
+   enable lazy computation of rotated wind vector components.
+   (:issue:`4934`, :pull:`4972`)
+
+#. `@ESadek-MO`_ updated to the latest CF Standard Names Table v80
+   (07 February 2023). (:pull:`5244`)
+
+#. `@pp-mo`_ and `@lbdreyer`_ supported delayed saving of lazy data, when writing to
+   the netCDF file format. See :ref:`delayed netCDF saves <delayed_netcdf_save>`.
+   Also with significant input from `@fnattino`_.
+   (:pull:`5191`)
 
 
 🐛 Bugs Fixed
@@ -97,7 +123,8 @@ This document explains the changes made to Iris for this release
 Whatsnew author names (@github name) in alphabetical order. Note that,
 core dev names are automatically included by the common_links.inc:
-
+.. _@fnattino: https://github.com/fnattino
+.. _@tinyendian: https://github.com/tinyendian
 
 .. comment
diff --git a/lib/iris/fileformats/_nc_load_rules/helpers.py b/lib/iris/fileformats/_nc_load_rules/helpers.py
index 8e6161bac5..bbf9c660c5 100644
--- a/lib/iris/fileformats/_nc_load_rules/helpers.py
+++ b/lib/iris/fileformats/_nc_load_rules/helpers.py
@@ -13,6 +13,8 @@ build routines, and which it does not use.
 
 """
+import re
+from typing import List
 import warnings
 
 import cf_units
@@ -28,10 +30,6 @@ import iris.exceptions
 import iris.fileformats.cf as cf
 import iris.fileformats.netcdf
-from iris.fileformats.netcdf import (
-    UnknownCellMethodWarning,
-    parse_cell_methods,
-)
 from iris.fileformats.netcdf.loader import _get_cf_var_data
 import iris.std_names
 import iris.util
@@ -184,6 +182,210 @@
 CF_VALUE_STD_NAME_PROJ_Y = "projection_y_coordinate"
 
 
+################################################################################
+# Handling of cell-methods.
+
+_CM_COMMENT = "comment"
+_CM_EXTRA = "extra"
+_CM_INTERVAL = "interval"
+_CM_METHOD = "method"
+_CM_NAME = "name"
+_CM_PARSE_NAME = re.compile(r"([\w_]+\s*?:\s+)+")
+_CM_PARSE = re.compile(
+    r"""
+        (?P<name>([\w_]+\s*?:\s+)+)
+        (?P<method>[\w_\s]+(?![\w_]*\s*?:))\s*
+        (?:
+            \(\s*
+            (?P<extra>.+)
+            \)\s*
+        )?
+    """,
+    re.VERBOSE,
+)
+
+# Cell methods.
+_CM_KNOWN_METHODS = [
+    "point",
+    "sum",
+    "mean",
+    "maximum",
+    "minimum",
+    "mid_range",
+    "standard_deviation",
+    "variance",
+    "mode",
+    "median",
+]
+
+
+def _split_cell_methods(nc_cell_methods: str) -> List[re.Match]:
+    """
+    Split a CF cell_methods attribute string into a list of zero or more cell
+    methods, each of which is then parsed with a regex to return a list of match
+    objects.
+
+    Args:
+
+    * nc_cell_methods: The value of the cell methods attribute to be split.
+
+    Returns:
+
+    * nc_cell_methods_matches: A list of the re.Match objects associated with
+      each parsed cell method.
+
+    Splitting is done based on words followed by colons outside of any brackets.
+    Validation of anything other than being laid out in the expected format is
+    left to the calling function.
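+
+    For example, ``"time: maximum (interval: 1 hr) time: mean"`` splits into
+    two methods, ``"time: maximum (interval: 1 hr)"`` and ``"time: mean"``;
+    the ``interval:`` keyword does not start a new method, since it falls
+    inside brackets.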
+ """ + + # Find name candidates + name_start_inds = [] + for m in _CM_PARSE_NAME.finditer(nc_cell_methods): + name_start_inds.append(m.start()) + + # Remove those that fall inside brackets + bracket_depth = 0 + for ind, cha in enumerate(nc_cell_methods): + if cha == "(": + bracket_depth += 1 + elif cha == ")": + bracket_depth -= 1 + if bracket_depth < 0: + msg = ( + "Cell methods may be incorrectly parsed due to mismatched " + "brackets" + ) + warnings.warn(msg, UserWarning, stacklevel=2) + if bracket_depth > 0 and ind in name_start_inds: + name_start_inds.remove(ind) + + # List tuples of indices of starts and ends of the cell methods in the string + method_indices = [] + for ii in range(len(name_start_inds) - 1): + method_indices.append((name_start_inds[ii], name_start_inds[ii + 1])) + method_indices.append((name_start_inds[-1], len(nc_cell_methods))) + + # Index the string and match against each substring + nc_cell_methods_matches = [] + for start_ind, end_ind in method_indices: + nc_cell_method_str = nc_cell_methods[start_ind:end_ind] + nc_cell_method_match = _CM_PARSE.match(nc_cell_method_str.strip()) + if not nc_cell_method_match: + msg = ( + f"Failed to fully parse cell method string: {nc_cell_methods}" + ) + warnings.warn(msg, UserWarning, stacklevel=2) + continue + nc_cell_methods_matches.append(nc_cell_method_match) + + return nc_cell_methods_matches + + +class UnknownCellMethodWarning(Warning): + pass + + +def parse_cell_methods(nc_cell_methods): + """ + Parse a CF cell_methods attribute string into a tuple of zero or + more CellMethod instances. + + Args: + + * nc_cell_methods (str): + The value of the cell methods attribute to be parsed. + + Returns: + + * cell_methods + An iterable of :class:`iris.coords.CellMethod`. + + Multiple coordinates, intervals and comments are supported. + If a method has a non-standard name a warning will be issued, but the + results are not affected. + + """ + + cell_methods = [] + if nc_cell_methods is not None: + for m in _split_cell_methods(nc_cell_methods): + d = m.groupdict() + method = d[_CM_METHOD] + method = method.strip() + # Check validity of method, allowing for multi-part methods + # e.g. mean over years. 
+            method_words = method.split()
+            if method_words[0].lower() not in _CM_KNOWN_METHODS:
+                msg = "NetCDF variable contains unknown cell method {!r}"
+                warnings.warn(
+                    msg.format("{}".format(method_words[0])),
+                    UnknownCellMethodWarning,
+                )
+            d[_CM_METHOD] = method
+            name = d[_CM_NAME]
+            name = name.replace(" ", "")
+            name = name.rstrip(":")
+            d[_CM_NAME] = tuple([n for n in name.split(":")])
+            interval = []
+            comment = []
+            if d[_CM_EXTRA] is not None:
+                #
+                # tokenise the key words and field colon marker
+                #
+                d[_CM_EXTRA] = d[_CM_EXTRA].replace(
+                    "comment:", "<<comment>><<:>>"
+                )
+                d[_CM_EXTRA] = d[_CM_EXTRA].replace(
+                    "interval:", "<<interval>><<:>>"
+                )
+                d[_CM_EXTRA] = d[_CM_EXTRA].split("<<:>>")
+                if len(d[_CM_EXTRA]) == 1:
+                    comment.extend(d[_CM_EXTRA])
+                else:
+                    next_field_type = comment
+                    for field in d[_CM_EXTRA]:
+                        field_type = next_field_type
+                        index = field.rfind("<<interval>>")
+                        if index == 0:
+                            next_field_type = interval
+                            continue
+                        elif index > 0:
+                            next_field_type = interval
+                        else:
+                            index = field.rfind("<<comment>>")
+                            if index == 0:
+                                next_field_type = comment
+                                continue
+                            elif index > 0:
+                                next_field_type = comment
+                        if index != -1:
+                            field = field[:index]
+                        field_type.append(field.strip())
+            #
+            # cater for a shared interval over multiple axes
+            #
+            if len(interval):
+                if len(d[_CM_NAME]) != len(interval) and len(interval) == 1:
+                    interval = interval * len(d[_CM_NAME])
+            #
+            # cater for a shared comment over multiple axes
+            #
+            if len(comment):
+                if len(d[_CM_NAME]) != len(comment) and len(comment) == 1:
+                    comment = comment * len(d[_CM_NAME])
+            d[_CM_INTERVAL] = tuple(interval)
+            d[_CM_COMMENT] = tuple(comment)
+            cell_method = iris.coords.CellMethod(
+                d[_CM_METHOD],
+                coords=d[_CM_NAME],
+                intervals=d[_CM_INTERVAL],
+                comments=d[_CM_COMMENT],
+            )
+            cell_methods.append(cell_method)
+    return tuple(cell_methods)
+
+
 ################################################################################
 def build_cube_metadata(engine):
     """Add the standard meta data to the cube."""
diff --git a/lib/iris/fileformats/netcdf/__init__.py b/lib/iris/fileformats/netcdf/__init__.py
index 505e173b0b..b696b200ff 100644
--- a/lib/iris/fileformats/netcdf/__init__.py
+++ b/lib/iris/fileformats/netcdf/__init__.py
@@ -18,6 +18,11 @@
 # Note: *must* be done before importing from submodules, as they also use this !
 logger = iris.config.get_logger(__name__)
 
+# Note: these probably shouldn't be public, but for now they are.
+from .._nc_load_rules.helpers import (
+    UnknownCellMethodWarning,
+    parse_cell_methods,
+)
 from .loader import DEBUG, NetCDFDataProxy, load_cubes
 from .saver import (
     CF_CONVENTIONS_VERSION,
@@ -25,8 +30,6 @@
     SPATIO_TEMPORAL_AXES,
     CFNameCoordMap,
     Saver,
-    UnknownCellMethodWarning,
-    parse_cell_methods,
     save,
 )
diff --git a/lib/iris/fileformats/netcdf/_dask_locks.py b/lib/iris/fileformats/netcdf/_dask_locks.py
new file mode 100644
index 0000000000..15ac117a8b
--- /dev/null
+++ b/lib/iris/fileformats/netcdf/_dask_locks.py
@@ -0,0 +1,140 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.
+"""
+Module containing code to create locks enabling dask workers to co-operate.
+
+This matter is complicated by needing different solutions for different dask scheduler
+types, i.e. local 'threads' scheduler, local 'processes' or distributed.
+
+In any case, an "iris.fileformats.netcdf.saver.Saver" object contains a netCDF4.Dataset
+targeting an output file, and creates a Saver.file_write_lock object to serialise
+write-accesses to the file from dask tasks: all dask-task file writes go via an
+"iris.fileformats.netcdf.saver.NetCDFWriteProxy" object, which also contains a link
+to the Saver.file_write_lock, and uses it to prevent workers from fouling each other.
+
+For each chunk written, the NetCDFWriteProxy acquires the common per-file lock;
+opens a Dataset on the file; performs a write to the relevant variable; closes the
+Dataset and then releases the lock. This process is obviously very similar to what the
+NetCDFDataProxy does for reading lazy chunks.
+
+For a threaded scheduler, the Saver.file_write_lock is a simple threading.Lock(). The
+workers (threads) execute tasks which contain a NetCDFWriteProxy, as above. All of
+those contain the common lock, and this is simply **the same object** for all workers,
+since they share an address space.
+
+For a distributed scheduler, the Saver.file_write_lock is a `distributed.Lock()` which
+is identified with the output filepath. This is distributed to the workers by
+serialising the task function arguments, which will include the NetCDFWriteProxy.
+A worker behaves like a process, though it may execute on a remote machine. When a
+distributed.Lock is deserialised to reconstruct the worker task, this creates an object
+that communicates with the scheduler. These objects behave as a single common lock,
+as they all have the same string 'identity', and the scheduler provides the
+inter-process communication by which they can mutually exclude each other.
+
+It is also *conceivable* that multiple processes could write to the same file in
+parallel, if the operating system supports it. However, this also requires that the
+libnetcdf C library is built with the parallel-access option, which is not common.
+With the "ordinary" libnetcdf build, a process which attempts to open for writing a file
+which is _already_ open for writing simply raises an access error.
+In any case, the Iris netcdf saver does not support this mode of operation, at present.
+
+We don't currently support a local "processes" type scheduler. If we did, the
+behaviour should be very similar to a distributed scheduler. It would need to use some
+other serialisable shared-lock solution in place of 'distributed.Lock', which requires
+a distributed scheduler to function.
+
+"""
+import threading
+
+import dask.array
+import dask.base
+import dask.local
+import dask.multiprocessing
+import dask.threaded
+
+
+# A dedicated error class, allowing filtering and testing of errors raised here.
+class DaskSchedulerTypeError(ValueError):
+    pass
+
+
+def dask_scheduler_is_distributed():
+    """Return whether a distributed.Client is active."""
+    # NOTE: this replicates logic in `dask.base.get_scheduler`: if a distributed client
+    # has been created + is still active, then the default scheduler will always be
+    # "distributed".
+    is_distributed = False
+    # NOTE: must still work when 'distributed' is not available.
+    try:
+        import distributed
+
+        client = distributed.get_client()
+        is_distributed = client is not None
+    except (ImportError, ValueError):
+        pass
+    return is_distributed
+
+
+def get_dask_array_scheduler_type():
+    """
+    Work out what type of scheduler an array.compute*() will use.
+
+    Returns one of 'distributed', 'threads', 'processes' or 'single-threaded'.
+    The return value is a valid argument for dask.config.set(scheduler=<type>).
+    This cannot distinguish between distributed local and remote clusters -- both of
+    those simply return 'distributed'.
+
+    NOTE: this takes account of how dask is *currently* configured. It will be wrong
+    if the config changes before the compute actually occurs.
+
+    """
+    if dask_scheduler_is_distributed():
+        result = "distributed"
+    else:
+        # Call 'get_scheduler', which respects the config settings, but pass an array
+        # so we default to the default scheduler for that type of object.
+        trial_dask_array = dask.array.zeros(1)
+        get_function = dask.base.get_scheduler(collections=[trial_dask_array])
+        # Detect the ones which we recognise.
+        if get_function == dask.threaded.get:
+            result = "threads"
+        elif get_function == dask.local.get_sync:
+            result = "single-threaded"
+        elif get_function == dask.multiprocessing.get:
+            result = "processes"
+        else:
+            msg = f"Dask default scheduler for arrays is unrecognised: {get_function}"
+            raise DaskSchedulerTypeError(msg)
+
+    return result
+
+
+def get_worker_lock(identity: str):
+    """
+    Return a mutex Lock which can be shared by multiple Dask workers.
+
+    The type of Lock generated depends on the dask scheduler type, which must therefore
+    be set up before this is called.
+
+    """
+    scheduler_type = get_dask_array_scheduler_type()
+    if scheduler_type in ("threads", "single-threaded"):
+        # N.B. the "identity" string is never used in this case, as the same actual
+        # lock object is used by all workers.
+        lock = threading.Lock()
+    elif scheduler_type == "distributed":
+        from dask.distributed import Lock as DistributedLock
+
+        lock = DistributedLock(identity)
+    else:
+        msg = (
+            "The configured dask array scheduler type is "
+            f'"{scheduler_type}", '
+            "which is not supported by the Iris netcdf saver."
+        )
+        raise DaskSchedulerTypeError(msg)
+
+    # NOTE: not supporting 'processes' scheduler, for now.
+    return lock
diff --git a/lib/iris/fileformats/netcdf/_thread_safe_nc.py b/lib/iris/fileformats/netcdf/_thread_safe_nc.py
index decca1535f..709696087b 100644
--- a/lib/iris/fileformats/netcdf/_thread_safe_nc.py
+++ b/lib/iris/fileformats/netcdf/_thread_safe_nc.py
@@ -340,3 +340,44 @@ def __getstate__(self):
 
     def __setstate__(self, state):
         for key, value in state.items():
            setattr(self, key, value)
+
+
+class NetCDFWriteProxy:
+    """
+    The "opposite" of a NetCDFDataProxy: an object mimicking the data access of a
+    netCDF4.Variable, but where the data is to be **written to**.
+
+    It encapsulates the netcdf file and variable which are actually to be written to.
+    This opens the file each time, to enable writing the data chunk, then closes it.
+    TODO: could be improved with a caching scheme, but this just about works.
+    """
+
+    def __init__(self, filepath, cf_var, file_write_lock):
+        self.path = filepath
+        self.varname = cf_var.name
+        self.lock = file_write_lock
+
+    def __setitem__(self, keys, array_data):
+        # Write to the variable.
+        # First acquire a file-specific lock for all workers writing to this file.
+        self.lock.acquire()
+        # Open the file for writing + write to the specific file variable.
+        # Exactly as above, in NetCDFDataProxy: a DatasetWrapper causes problems with
+        # invalid IDs and the netCDF4 library, for so-far unknown reasons.
+        # Instead, use _GLOBAL_NETCDF4_LOCK, and netCDF4 _directly_.
+        with _GLOBAL_NETCDF4_LOCK:
+            dataset = None
+            try:
+                dataset = netCDF4.Dataset(self.path, "r+")
+                var = dataset.variables[self.varname]
+                var[keys] = array_data
+            finally:
+                try:
+                    if dataset:
+                        dataset.close()
+                finally:
+                    # *ALWAYS* let go!
+                    self.lock.release()
+
+    def __repr__(self):
+        return f"<{self.__class__.__name__} path={self.path!r} var={self.varname!r}>"
diff --git a/lib/iris/fileformats/netcdf/saver.py b/lib/iris/fileformats/netcdf/saver.py
index e5e696d3c3..5c11d804db 100644
--- a/lib/iris/fileformats/netcdf/saver.py
+++ b/lib/iris/fileformats/netcdf/saver.py
@@ -23,9 +23,10 @@
 import warnings
 
 import cf_units
+import dask
 import dask.array as da
+from dask.delayed import Delayed
 import numpy as np
-import numpy.ma as ma
 
 from iris._lazy_data import _co_realise_lazy_arrays, is_lazy_data
 from iris.aux_factory import (
@@ -44,7 +45,7 @@
 from iris.coords import AncillaryVariable, AuxCoord, CellMeasure, DimCoord
 import iris.exceptions
 import iris.fileformats.cf
-from iris.fileformats.netcdf import _thread_safe_nc
+from iris.fileformats.netcdf import _dask_locks, _thread_safe_nc
 import iris.io
 import iris.util
 
@@ -156,207 +157,6 @@
 }
 
 
-# Cell methods.
-_CM_KNOWN_METHODS = [
-    "point",
-    "sum",
-    "mean",
-    "maximum",
-    "minimum",
-    "mid_range",
-    "standard_deviation",
-    "variance",
-    "mode",
-    "median",
-]
-
-_CM_COMMENT = "comment"
-_CM_EXTRA = "extra"
-_CM_INTERVAL = "interval"
-_CM_METHOD = "method"
-_CM_NAME = "name"
-_CM_PARSE_NAME = re.compile(r"([\w_]+\s*?:\s+)+")
-_CM_PARSE = re.compile(
-    r"""
-        (?P<name>([\w_]+\s*?:\s+)+)
-        (?P<method>[\w_\s]+(?![\w_]*\s*?:))\s*
-        (?:
-            \(\s*
-            (?P<extra>.+)
-            \)\s*
-        )?
-    """,
-    re.VERBOSE,
-)
-
-
-class UnknownCellMethodWarning(Warning):
-    pass
-
-
-def _split_cell_methods(nc_cell_methods: str) -> List[re.Match]:
-    """
-    Split a CF cell_methods attribute string into a list of zero or more cell
-    methods, each of which is then parsed with a regex to return a list of match
-    objects.
-
-    Args:
-
-    * nc_cell_methods: The value of the cell methods attribute to be split.
-
-    Returns:
-
-    * nc_cell_methods_matches: A list of the re.Match objects associated with
-      each parsed cell method.
-
-    Splitting is done based on words followed by colons outside of any brackets.
-    Validation of anything other than being laid out in the expected format is
-    left to the calling function.
- """ - - # Find name candidates - name_start_inds = [] - for m in _CM_PARSE_NAME.finditer(nc_cell_methods): - name_start_inds.append(m.start()) - - # Remove those that fall inside brackets - bracket_depth = 0 - for ind, cha in enumerate(nc_cell_methods): - if cha == "(": - bracket_depth += 1 - elif cha == ")": - bracket_depth -= 1 - if bracket_depth < 0: - msg = ( - "Cell methods may be incorrectly parsed due to mismatched " - "brackets" - ) - warnings.warn(msg, UserWarning, stacklevel=2) - if bracket_depth > 0 and ind in name_start_inds: - name_start_inds.remove(ind) - - # List tuples of indices of starts and ends of the cell methods in the string - method_indices = [] - for ii in range(len(name_start_inds) - 1): - method_indices.append((name_start_inds[ii], name_start_inds[ii + 1])) - method_indices.append((name_start_inds[-1], len(nc_cell_methods))) - - # Index the string and match against each substring - nc_cell_methods_matches = [] - for start_ind, end_ind in method_indices: - nc_cell_method_str = nc_cell_methods[start_ind:end_ind] - nc_cell_method_match = _CM_PARSE.match(nc_cell_method_str.strip()) - if not nc_cell_method_match: - msg = ( - f"Failed to fully parse cell method string: {nc_cell_methods}" - ) - warnings.warn(msg, UserWarning, stacklevel=2) - continue - nc_cell_methods_matches.append(nc_cell_method_match) - - return nc_cell_methods_matches - - -def parse_cell_methods(nc_cell_methods): - """ - Parse a CF cell_methods attribute string into a tuple of zero or - more CellMethod instances. - - Args: - - * nc_cell_methods (str): - The value of the cell methods attribute to be parsed. - - Returns: - - * cell_methods - An iterable of :class:`iris.coords.CellMethod`. - - Multiple coordinates, intervals and comments are supported. - If a method has a non-standard name a warning will be issued, but the - results are not affected. - - """ - - cell_methods = [] - if nc_cell_methods is not None: - for m in _split_cell_methods(nc_cell_methods): - d = m.groupdict() - method = d[_CM_METHOD] - method = method.strip() - # Check validity of method, allowing for multi-part methods - # e.g. mean over years. 
-            method_words = method.split()
-            if method_words[0].lower() not in _CM_KNOWN_METHODS:
-                msg = "NetCDF variable contains unknown cell method {!r}"
-                warnings.warn(
-                    msg.format("{}".format(method_words[0])),
-                    UnknownCellMethodWarning,
-                )
-            d[_CM_METHOD] = method
-            name = d[_CM_NAME]
-            name = name.replace(" ", "")
-            name = name.rstrip(":")
-            d[_CM_NAME] = tuple([n for n in name.split(":")])
-            interval = []
-            comment = []
-            if d[_CM_EXTRA] is not None:
-                #
-                # tokenise the key words and field colon marker
-                #
-                d[_CM_EXTRA] = d[_CM_EXTRA].replace(
-                    "comment:", "<<comment>><<:>>"
-                )
-                d[_CM_EXTRA] = d[_CM_EXTRA].replace(
-                    "interval:", "<<interval>><<:>>"
-                )
-                d[_CM_EXTRA] = d[_CM_EXTRA].split("<<:>>")
-                if len(d[_CM_EXTRA]) == 1:
-                    comment.extend(d[_CM_EXTRA])
-                else:
-                    next_field_type = comment
-                    for field in d[_CM_EXTRA]:
-                        field_type = next_field_type
-                        index = field.rfind("<<interval>>")
-                        if index == 0:
-                            next_field_type = interval
-                            continue
-                        elif index > 0:
-                            next_field_type = interval
-                        else:
-                            index = field.rfind("<<comment>>")
-                            if index == 0:
-                                next_field_type = comment
-                                continue
-                            elif index > 0:
-                                next_field_type = comment
-                        if index != -1:
-                            field = field[:index]
-                        field_type.append(field.strip())
-            #
-            # cater for a shared interval over multiple axes
-            #
-            if len(interval):
-                if len(d[_CM_NAME]) != len(interval) and len(interval) == 1:
-                    interval = interval * len(d[_CM_NAME])
-            #
-            # cater for a shared comment over multiple axes
-            #
-            if len(comment):
-                if len(d[_CM_NAME]) != len(comment) and len(comment) == 1:
-                    comment = comment * len(d[_CM_NAME])
-            d[_CM_INTERVAL] = tuple(interval)
-            d[_CM_COMMENT] = tuple(comment)
-            cell_method = iris.coords.CellMethod(
-                d[_CM_METHOD],
-                coords=d[_CM_NAME],
-                intervals=d[_CM_INTERVAL],
-                comments=d[_CM_COMMENT],
-            )
-            cell_methods.append(cell_method)
-    return tuple(cell_methods)
-
-
 class CFNameCoordMap:
     """Provide a simple CF name to CF coordinate mapping."""
@@ -467,61 +267,139 @@ def _setncattr(variable, name, attribute):
     return variable.setncattr(name, attribute)
 
 
-class _FillValueMaskCheckAndStoreTarget:
-    """
-    To be used with da.store. Remembers whether any element was equal to a
-    given value and whether it was masked, before passing the chunk to the
-    given target.
+# NOTE: this matches :class:`iris.experimental.ugrid.mesh.Mesh.ELEMENTS`,
+# but in the preferred order for coord/connectivity variables in the file.
+MESH_ELEMENTS = ("node", "edge", "face")
+
+
+_FillvalueCheckInfo = collections.namedtuple(
+    "_FillvalueCheckInfo", ["user_value", "check_value", "dtype", "varname"]
+)
+
 
-    NOTE: target needs to be a _thread_safe_nc._ThreadSafeWrapper subclass.
+def _data_fillvalue_check(arraylib, data, check_value):
+    """
+    Check whether an array is masked, and whether it contains a fill-value.
+
+    Parameters
+    ----------
+    arraylib : module
+        Either numpy or dask.array: when dask, results are lazy computations.
+    data : array-like
+        Array to check (numpy or dask).
+    check_value : number or None
+        If not None, fill-value to check for existence in the array.
+        If None, do not do the value-in-array check.
+
+    Returns
+    -------
+    is_masked : bool
+        True if array has any masked points.
+    contains_value : bool
+        True if array contains check_value.
+        Always False if check_value is None.
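+
+    Notes
+    -----
+    With ``arraylib=np`` both results are concrete booleans, whereas with
+    ``arraylib=da`` they are lazy scalars, computed later alongside the
+    delayed writes (see :meth:`Saver.delayed_completion`).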
""" + is_masked = arraylib.any(arraylib.ma.getmaskarray(data)) + if check_value is None: + contains_value = False + else: + contains_value = arraylib.any(data == check_value) + return is_masked, contains_value - def __init__(self, target, fill_value=None): - assert hasattr(target, "THREAD_SAFE_FLAG") - self.target = target - self.fill_value = fill_value - self.contains_value = False - self.is_masked = False - def __setitem__(self, keys, arr): - if self.fill_value is not None: - self.contains_value = self.contains_value or self.fill_value in arr - self.is_masked = self.is_masked or ma.is_masked(arr) - self.target[keys] = arr +class SaverFillValueWarning(UserWarning): + pass -# NOTE : this matches :class:`iris.experimental.ugrid.mesh.Mesh.ELEMENTS`, -# but in the preferred order for coord/connectivity variables in the file. -MESH_ELEMENTS = ("node", "edge", "face") +def _fillvalue_report(fill_info, is_masked, contains_fill_value, warn=False): + """ + From the given information, work out whether there was a possible or actual + fill-value collision, and if so construct a warning. + + Parameters + ---------- + fill_info : _FillvalueCheckInfo + A named-tuple containing the context of the fill-value check + is_masked : bool + whether the data array was masked + contains_fill_value : bool + whether the data array contained the fill-value + warn : bool + if True, also issue any resulting warning immediately. + + Returns + ------- + None or :class:`Warning` + If not None, indicates a known or possible problem with filling + + """ + varname = fill_info.varname + user_value = fill_info.user_value + check_value = fill_info.check_value + is_byte_data = fill_info.dtype.itemsize == 1 + result = None + if is_byte_data and is_masked and user_value is None: + result = SaverFillValueWarning( + f"CF var '{varname}' contains byte data with masked points, but " + "no fill_value keyword was given. As saved, these " + "points will read back as valid values. To save as " + "masked byte data, `_FillValue` needs to be explicitly " + "set. For Cube data this can be done via the 'fill_value' " + "keyword during saving, otherwise use ncedit/equivalent." + ) + elif contains_fill_value: + result = SaverFillValueWarning( + f"CF var '{varname}' contains unmasked data points equal to the " + f"fill-value, {check_value}. As saved, these points will read back " + "as missing data. To save these as normal values, " + "`_FillValue` needs to be set to not equal any valid data " + "points. For Cube data this can be done via the 'fill_value' " + "keyword during saving, otherwise use ncedit/equivalent." + ) + + if warn and result is not None: + warnings.warn(result) + return result class Saver: """A manager for saving netcdf files.""" - def __init__(self, filename, netcdf_format): + def __init__(self, filename, netcdf_format, compute=True): """ A manager for saving netcdf files. - Args: - - * filename (string): + Parameters + ---------- + filename : string Name of the netCDF file to save the cube. - * netcdf_format (string): + netcdf_format : string Underlying netCDF file format, one of 'NETCDF4', 'NETCDF4_CLASSIC', 'NETCDF3_CLASSIC' or 'NETCDF3_64BIT'. Default is 'NETCDF4' format. - Returns: - None. - - For example:: - - # Initialise Manager for saving - with Saver(filename, netcdf_format) as sman: - # Iterate through the cubelist. 
- for cube in cubes: - sman.write(cube) + compute : bool, default=True + If True, delayed variable saves will be completed on exit from the Saver + context (after first closing the target file), equivalent to + :meth:`complete()`. + If False, the file is created and closed without writing the data of + variables for which the source data was lazy. These writes can be + completed later, see :meth:`delayed_completion`. + + Returns + ------- + None + + Example + ------- + >>> import iris + >>> from iris.fileformats.netcdf.saver import Saver + >>> cubes = iris.load(iris.sample_data_path('atlantic_profiles.nc')) + >>> with Saver("tmp.nc", "NETCDF4") as sman: + ... # Iterate through the cubelist. + ... for cube in cubes: + ... sman.write(cube) """ if netcdf_format not in [ @@ -548,18 +426,30 @@ def __init__(self, filename, netcdf_format): self._mesh_dims = {} #: A dictionary, mapping formula terms to owner cf variable name self._formula_terms_cache = {} + #: Target filepath + self.filepath = os.path.abspath(filename) + #: A list of delayed writes for lazy saving + self._delayed_writes = ( + [] + ) # a list of triples (source, target, fill-info) + #: Whether to complete delayed saves on exit (and raise associated warnings). + self.compute = compute + # N.B. the file-write-lock *type* actually depends on the dask scheduler type. + #: A per-file write lock to prevent dask attempting overlapping writes. + self.file_write_lock = _dask_locks.get_worker_lock(self.filepath) #: NetCDF dataset + self._dataset = None try: self._dataset = _thread_safe_nc.DatasetWrapper( - filename, mode="w", format=netcdf_format + self.filepath, mode="w", format=netcdf_format ) except RuntimeError: - dir_name = os.path.dirname(filename) + dir_name = os.path.dirname(self.filepath) if not os.path.isdir(dir_name): msg = "No such file or directory: {}".format(dir_name) raise IOError(msg) if not os.access(dir_name, os.R_OK | os.W_OK): - msg = "Permission denied: {}".format(filename) + msg = "Permission denied: {}".format(self.filepath) raise IOError(msg) else: raise @@ -572,6 +462,8 @@ def __exit__(self, type, value, traceback): self._dataset.sync() self._dataset.close() + if self.compute: + self.complete() def write( self, @@ -2444,8 +2336,7 @@ def _increment_name(self, varname): return "{}_{}".format(varname, num) - @staticmethod - def _lazy_stream_data(data, fill_value, fill_warn, cf_var): + def _lazy_stream_data(self, data, fill_value, fill_warn, cf_var): if hasattr(data, "shape") and data.shape == (1,) + cf_var.shape: # (Don't do this check for string data). # Reduce dimensionality where the data array has an extra dimension @@ -2454,25 +2345,8 @@ def _lazy_stream_data(data, fill_value, fill_warn, cf_var): # contains just 1 row, so the cf_var is 1D. data = data.squeeze(axis=0) - if is_lazy_data(data): - - def store(data, cf_var, fill_value): - # Store lazy data and check whether it is masked and contains - # the fill value - target = _FillValueMaskCheckAndStoreTarget(cf_var, fill_value) - da.store([data], [target]) - return target.is_masked, target.contains_value - - else: - - def store(data, cf_var, fill_value): - cf_var[:] = data - is_masked = np.ma.is_masked(data) - contains_value = fill_value is not None and fill_value in data - return is_masked, contains_value - + # Decide whether we are checking for fill-value collisions. dtype = cf_var.dtype - # fill_warn allows us to skip warning if packing attributes have been # specified. 
It would require much more complex operations to work out # what the values and fill_value _would_ be in such a case. @@ -2480,38 +2354,166 @@ def store(data, cf_var, fill_value): if fill_value is not None: fill_value_to_check = fill_value else: + # Retain 'fill_value == None', to show that no specific value was given. + # But set 'fill_value_to_check' to a calculated value fill_value_to_check = _thread_safe_nc.default_fillvals[ dtype.str[1:] ] + # Cast the check-value to the correct dtype. + # NOTE: In the case of 'S1' dtype (at least), the default (Python) value + # does not have a compatible type. This causes a deprecation warning at + # numpy 1.24, *and* was preventing correct fill-value checking of character + # data, since they are actually bytes (dtype 'S1'). + fill_value_to_check = np.array(fill_value_to_check, dtype=dtype) else: + # A None means we will NOT check for collisions. fill_value_to_check = None - # Store the data and check if it is masked and contains the fill value. - is_masked, contains_fill_value = store( - data, cf_var, fill_value_to_check + fill_info = _FillvalueCheckInfo( + user_value=fill_value, + check_value=fill_value_to_check, + dtype=dtype, + varname=cf_var.name, ) - if dtype.itemsize == 1 and fill_value is None: - if is_masked: - msg = ( - "CF var '{}' contains byte data with masked points, but " - "no fill_value keyword was given. As saved, these " - "points will read back as valid values. To save as " - "masked byte data, `_FillValue` needs to be explicitly " - "set. For Cube data this can be done via the 'fill_value' " - "keyword during saving, otherwise use ncedit/equivalent." + doing_delayed_save = is_lazy_data(data) + if doing_delayed_save: + # save lazy data with a delayed operation. For now, we just record the + # necessary information -- a single, complete delayed action is constructed + # later by a call to delayed_completion(). + def store(data, cf_var, fill_info): + # Create a data-writeable object that we can stream into, which + # encapsulates the file to be opened + variable to be written. + write_wrapper = _thread_safe_nc.NetCDFWriteProxy( + self.filepath, cf_var, self.file_write_lock ) - warnings.warn(msg.format(cf_var.name)) - elif contains_fill_value: + # Add to the list of delayed writes, used in delayed_completion(). + self._delayed_writes.append((data, write_wrapper, fill_info)) + # In this case, fill-value checking is done later. But return 2 dummy + # values, to be consistent with the non-streamed "store" signature. + is_masked, contains_value = False, False + return is_masked, contains_value + + else: + # Real data is always written directly, i.e. not via lazy save. + # We also check it immediately for any fill-value problems. + def store(data, cf_var, fill_info): + cf_var[:] = data + return _data_fillvalue_check(np, data, fill_info.check_value) + + # Store the data and check if it is masked and contains the fill value. + is_masked, contains_fill_value = store(data, cf_var, fill_info) + + if not doing_delayed_save: + # Issue a fill-value warning immediately, if appropriate. + _fillvalue_report( + fill_info, is_masked, contains_fill_value, warn=True + ) + + def delayed_completion(self) -> Delayed: + """ + Create and return a :class:`dask.delayed.Delayed` to perform file completion + for delayed saves. + + This contains all the delayed writes, which complete the file by filling out + the data of variables initially created empty, and also the checks for + potential fill-value collisions. 
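+
+        A sketch of typical use (assuming ``cube`` holds lazy data)::
+
+            with Saver(filepath, "NETCDF4", compute=False) as sman:
+                sman.write(cube)
+            completion = sman.delayed_completion()
+            fill_warnings = completion.compute()
+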
+ When computed, it returns a list of any warnings which were generated in the + save operation. + + Returns + ------- + completion : :class:`dask.delayed.Delayed` + + Notes + ----- + The dataset *must* be closed (saver has exited its context) before the + result can be computed, otherwise computation will hang (never return). + """ + if self._delayed_writes: + # Create a single delayed da.store operation to complete the file. + sources, targets, fill_infos = zip(*self._delayed_writes) + store_op = da.store(sources, targets, compute=False, lock=False) + + # Construct a delayed fill-check operation for each (lazy) source array. + delayed_fillvalue_checks = [ + # NB with arraylib=dask.array, this routine does lazy array computation + _data_fillvalue_check(da, source, fillinfo.check_value) + for source, fillinfo in zip(sources, fill_infos) + ] + + # Return a single delayed object which completes the delayed saves and + # returns a list of any fill-value warnings. + @dask.delayed + def compute_and_return_warnings(store_op, fv_infos, fv_checks): + # Note: we don't actually *do* anything with the 'store_op' argument, + # but including it here ensures that dask will compute it (thus + # performing all the delayed saves), before calling this function. + results = [] + # Pair each fill_check result (is_masked, contains_value) with its + # fillinfo and construct a suitable Warning if needed. + for fillinfo, (is_masked, contains_value) in zip( + fv_infos, fv_checks + ): + fv_warning = _fillvalue_report( + fill_info=fillinfo, + is_masked=is_masked, + contains_fill_value=contains_value, + ) + if fv_warning is not None: + # Collect the warnings and return them. + results.append(fv_warning) + return results + + result = compute_and_return_warnings( + store_op, + fv_infos=fill_infos, + fv_checks=delayed_fillvalue_checks, + ) + + else: + # Return a delayed, which returns an empty list, for usage consistency. + @dask.delayed + def no_op(): + return [] + + result = no_op() + + return result + + def complete(self, issue_warnings=True) -> List[Warning]: + """ + Complete file by computing any delayed variable saves. + + This requires that the Saver has closed the dataset (exited its context). + + Parameters + ---------- + issue_warnings : bool, default = True + If true, issue all the resulting warnings with :func:`warnings.warn`. + + Returns + ------- + warnings : list of Warning + Any warnings that were raised while writing delayed data. + + """ + if self._dataset.isopen(): msg = ( - "CF var '{}' contains unmasked data points equal to the " - "fill-value, {}. As saved, these points will read back " - "as missing data. To save these as normal values, " - "`_FillValue` needs to be set to not equal any valid data " - "points. For Cube data this can be done via the 'fill_value' " - "keyword during saving, otherwise use ncedit/equivalent." + "Cannot call Saver.complete() until its dataset is closed, " + "i.e. the saver's context has exited." ) - warnings.warn(msg.format(cf_var.name, fill_value)) + raise ValueError(msg) + + delayed_write = self.delayed_completion() + # Complete the saves now, and handle any delayed warnings that occurred + result_warnings = delayed_write.compute() + if issue_warnings: + # Issue any delayed warnings from the compute. 
+            for delayed_warning in result_warnings:
+                warnings.warn(delayed_warning)
+
+        return result_warnings
 
 
 def save(
@@ -2530,6 +2532,7 @@ def save(
     least_significant_digit=None,
     packing=None,
     fill_value=None,
+    compute=True,
 ):
     """
     Save cube(s) to a netCDF file, given the cube and the filename.
@@ -2652,8 +2655,24 @@
         `:class:`iris.cube.CubeList`, or a single element, and each element of
         this argument will be applied to each cube separately.
 
+    * compute (bool):
+        When False, create the output file but don't write any lazy array content to
+        its variables, such as lazy cube data or aux-coord points and bounds.
+
+        Instead return a :class:`dask.delayed.Delayed` which, when computed, will
+        stream all the lazy content via :func:`dask.array.store`, to complete the file.
+        Several such data saves can be performed in parallel, by passing a list of them
+        into a :func:`dask.compute` call.
+
+        Default is ``True``, meaning complete the file immediately, and return ``None``.
+
+        .. note::
+            When computed, the returned :class:`dask.delayed.Delayed` object returns
+            a list of :class:`Warning`: these are any warnings which *would* have
+            been issued in the save call, if ``compute`` had been ``True``.
+
     Returns:
-        None.
+        ``None``, or a :class:`dask.delayed.Delayed` if ``compute=False``.
 
     .. note::
 
@@ -2752,7 +2771,9 @@ def is_valid_packspec(p):
             raise ValueError(msg)
 
     # Initialise Manager for saving
-    with Saver(filename, netcdf_format) as sman:
+    # N.B. pass the 'compute' keyword through to the Saver: when False, lazy writes
+    # are left pending, and a delayed-completion object is returned below.
+    with Saver(filename, netcdf_format, compute=compute) as sman:
         # Iterate through the cubelist.
         for cube, packspec, fill_value in zip(cubes, packspecs, fill_values):
             sman.write(
@@ -2797,3 +2818,12 @@ def is_valid_packspec(p):
 
     # Add conventions attribute.
     sman.update_global_attributes(Conventions=conventions)
+
+    if compute:
+        # No more to do, since we used Saver(compute=True).
+        result = None
+    else:
+        # Return a delayed completion object.
+        result = sman.delayed_completion()
+
+    return result
diff --git a/lib/iris/io/__init__.py b/lib/iris/io/__init__.py
index a4f700cb51..7680d9bac6 100644
--- a/lib/iris/io/__init__.py
+++ b/lib/iris/io/__init__.py
@@ -454,7 +454,7 @@ def save(source, target, saver=None, **kwargs):
 
     # Single cube?
     if isinstance(source, Cube):
-        saver(source, target, **kwargs)
+        result = saver(source, target, **kwargs)
 
     # CubeList or sequence of cubes?
     elif isinstance(source, CubeList) or (
@@ -477,9 +477,13 @@
             if i != 0:
                 kwargs["append"] = True
             saver(cube, target, **kwargs)
+
+        result = None
 
         # Netcdf saver.
         else:
-            saver(source, target, **kwargs)
+            result = saver(source, target, **kwargs)
 
     else:
         raise ValueError("Cannot save; non Cube found in source")
+
+    return result
diff --git a/lib/iris/tests/integration/netcdf/test__dask_locks.py b/lib/iris/tests/integration/netcdf/test__dask_locks.py
new file mode 100644
index 0000000000..c41af1b356
--- /dev/null
+++ b/lib/iris/tests/integration/netcdf/test__dask_locks.py
@@ -0,0 +1,115 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.
+"""
+Unit tests for the :mod:`iris.fileformats.netcdf._dask_locks` module.
+
+Note: these integration tests replace any unit testing of this module, due to its total
+dependence on Dask, and even on Dask's implementation details rather than supported
+and documented API and behaviour.
+So (a) it is essential to check the module's behaviour against actual Dask operation, +and (b) mock-ist testing of the implementation code in isolation would not add anything +of much value. +""" +import dask +import dask.config +import distributed +import pytest + +from iris.fileformats.netcdf._dask_locks import ( + DaskSchedulerTypeError, + dask_scheduler_is_distributed, + get_dask_array_scheduler_type, + get_worker_lock, +) + + +@pytest.fixture( + params=[ + "UnspecifiedScheduler", + "ThreadedScheduler", + "SingleThreadScheduler", + "ProcessScheduler", + "DistributedScheduler", + ] +) +def dask_scheduler(request): + # Control Dask to enable a specific scheduler type. + sched_typename = request.param + if sched_typename == "UnspecifiedScheduler": + config_name = None + elif sched_typename == "SingleThreadScheduler": + config_name = "single-threaded" + elif sched_typename == "ThreadedScheduler": + config_name = "threads" + elif sched_typename == "ProcessScheduler": + config_name = "processes" + else: + assert sched_typename == "DistributedScheduler" + config_name = "distributed" + + if config_name == "distributed": + _distributed_client = distributed.Client() + + if config_name is None: + context = None + else: + context = dask.config.set(scheduler=config_name) + context.__enter__() + + yield sched_typename + + if context: + context.__exit__(None, None, None) + + if config_name == "distributed": + _distributed_client.close() + + +def test_dask_scheduler_is_distributed(dask_scheduler): + result = dask_scheduler_is_distributed() + # Should return 'True' only with a distributed scheduler. + expected = dask_scheduler == "DistributedScheduler" + assert result == expected + + +def test_get_dask_array_scheduler_type(dask_scheduler): + result = get_dask_array_scheduler_type() + expected = { + "UnspecifiedScheduler": "threads", + "ThreadedScheduler": "threads", + "ProcessScheduler": "processes", + "SingleThreadScheduler": "single-threaded", + "DistributedScheduler": "distributed", + }[dask_scheduler] + assert result == expected + + +def test_get_worker_lock(dask_scheduler): + test_identity = "" + error = None + try: + result = get_worker_lock(test_identity) + except DaskSchedulerTypeError as err: + error = err + result = None + + if dask_scheduler == "ProcessScheduler": + assert result is None + assert isinstance(error, DaskSchedulerTypeError) + msg = 'scheduler type is "processes", which is not supported' + assert msg in error.args[0] + else: + assert error is None + assert result is not None + if dask_scheduler == "DistributedScheduler": + assert isinstance(result, distributed.Lock) + assert result.name == test_identity + else: + # low-level object doesn't have a readily available class for isinstance + assert all( + hasattr(result, att) + for att in ("acquire", "release", "locked") + ) diff --git a/lib/iris/tests/integration/netcdf/test_coord_systems.py b/lib/iris/tests/integration/netcdf/test_coord_systems.py index 8576f5ffe8..3175664b4c 100644 --- a/lib/iris/tests/integration/netcdf/test_coord_systems.py +++ b/lib/iris/tests/integration/netcdf/test_coord_systems.py @@ -18,7 +18,7 @@ from iris.cube import Cube from iris.tests import stock as stock from iris.tests.stock.netcdf import ncgen_from_cdl -from iris.tests.unit.fileformats.netcdf import test_load_cubes as tlc +from iris.tests.unit.fileformats.netcdf.loader import test_load_cubes as tlc @tests.skip_data diff --git a/lib/iris/tests/integration/netcdf/test_delayed_save.py b/lib/iris/tests/integration/netcdf/test_delayed_save.py new file 
mode 100644 index 0000000000..616feb3b0e --- /dev/null +++ b/lib/iris/tests/integration/netcdf/test_delayed_save.py @@ -0,0 +1,339 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Integration tests for delayed saving. +""" +import warnings + +from cf_units import Unit +import dask.array as da +import dask.config +from dask.delayed import Delayed +import distributed +import numpy as np +import pytest + +import iris +from iris.fileformats.netcdf._thread_safe_nc import default_fillvals +from iris.fileformats.netcdf.saver import SaverFillValueWarning +import iris.tests +from iris.tests.stock import realistic_4d + + +class Test__lazy_stream_data: + @pytest.fixture(autouse=True) + def output_path(self, tmp_path): + # A temporary output netcdf-file path, **unique to each test call**. + self.temp_output_filepath = tmp_path / "tmp.nc" + yield self.temp_output_filepath + + @pytest.fixture(autouse=True, scope="module") + def all_vars_lazy(self): + # For the operation of these tests, we want to force all netcdf variables + # to load as lazy data, i.e. **don't** use real data for 'small' ones. + old_value = iris.fileformats.netcdf.loader._LAZYVAR_MIN_BYTES + iris.fileformats.netcdf.loader._LAZYVAR_MIN_BYTES = 0 + yield + iris.fileformats.netcdf.loader._LAZYVAR_MIN_BYTES = old_value + + @staticmethod + @pytest.fixture(params=[False, True], ids=["SaveImmediate", "SaveDelayed"]) + def save_is_delayed(request): + return request.param + + @staticmethod + def make_testcube( + include_lazy_content=True, + ensure_fillvalue_collision=False, + data_is_maskedbytes=False, + include_extra_coordlikes=False, + ): + cube = realistic_4d() + + def fix_array(array): + """ + Make a new, custom array to replace the provided cube/coord data. + Optionally provide default-fill-value collisions, and/or replace with lazy + content. + """ + if array is not None: + if data_is_maskedbytes: + dmin, dmax = 0, 255 + else: + dmin, dmax = array.min(), array.max() + array = np.random.uniform(dmin, dmax, size=array.shape) + + if data_is_maskedbytes: + array = array.astype("u1") + array = np.ma.masked_array(array) + # To trigger, it must also have at least one *masked point*. + array[tuple([0] * array.ndim)] = np.ma.masked + + if ensure_fillvalue_collision: + # Set point at midpoint index = default-fill-value + fill_value = default_fillvals[array.dtype.str[1:]] + inds = tuple(dim // 2 for dim in array.shape) + array[inds] = fill_value + + if include_lazy_content: + # Make the array lazy. + # Ensure we always have multiple chunks (relatively small ones). + chunks = list(array.shape) + chunks[0] = 1 + array = da.from_array(array, chunks=chunks) + + return array + + # Replace the cube data, and one aux-coord, according to the control settings. + cube.data = fix_array(cube.data) + auxcoord = cube.coord("surface_altitude") + auxcoord.points = fix_array(auxcoord.points) + + if include_extra_coordlikes: + # Also concoct + attach an ancillary variable and a cell-measure, so we can + # check that they behave the same as coordinates. 
+ ancil_dims = [0, 2] + cm_dims = [0, 3] + ancil_shape = [cube.shape[idim] for idim in ancil_dims] + cm_shape = [cube.shape[idim] for idim in cm_dims] + from iris.coords import AncillaryVariable, CellMeasure + + ancil = AncillaryVariable( + fix_array(np.zeros(ancil_shape)), long_name="sample_ancil" + ) + cube.add_ancillary_variable(ancil, ancil_dims) + cm = CellMeasure( + fix_array(np.zeros(cm_shape)), long_name="sample_cm" + ) + cube.add_cell_measure(cm, cm_dims) + return cube + + def test_realfile_loadsave_equivalence(self, save_is_delayed, output_path): + input_filepath = iris.tests.get_data_path( + ["NetCDF", "global", "xyz_t", "GEMS_CO2_Apr2006.nc"] + ) + original_cubes = iris.load(input_filepath) + + # Preempt some standard changes that an iris save will impose. + for cube in original_cubes: + if cube.units == Unit("-"): + # replace 'unknown unit' with 'no unit'. + cube.units = Unit("?") + # Fix conventions attribute to what iris.save outputs. + cube.attributes["Conventions"] = "CF-1.7" + + original_cubes = sorted(original_cubes, key=lambda cube: cube.name()) + result = iris.save( + original_cubes, output_path, compute=not save_is_delayed + ) + if save_is_delayed: + # In this case, must also "complete" the save. + result.compute() + reloaded_cubes = iris.load(output_path) + reloaded_cubes = sorted(reloaded_cubes, key=lambda cube: cube.name()) + assert reloaded_cubes == original_cubes + # NOTE: it might be nicer to use assertCDL, but unfortunately importing + # unittest.TestCase seems to lose us the ability to use fixtures. + + @classmethod + @pytest.fixture( + params=[ + "ThreadedScheduler", + "DistributedScheduler", + "SingleThreadScheduler", + ] + ) + def scheduler_type(cls, request): + sched_typename = request.param + if sched_typename == "ThreadedScheduler": + config_name = "threads" + elif sched_typename == "SingleThreadScheduler": + config_name = "single-threaded" + else: + assert sched_typename == "DistributedScheduler" + config_name = "distributed" + + if config_name == "distributed": + _distributed_client = distributed.Client() + + with dask.config.set(scheduler=config_name): + yield sched_typename + + if config_name == "distributed": + _distributed_client.close() + + def test_scheduler_types( + self, output_path, scheduler_type, save_is_delayed + ): + # Check operation works and behaves the same with different schedulers, + # especially including distributed. + + # Just check that the dask scheduler is setup as 'expected'. + if scheduler_type == "ThreadedScheduler": + expected_dask_scheduler = "threads" + elif scheduler_type == "SingleThreadScheduler": + expected_dask_scheduler = "single-threaded" + else: + assert scheduler_type == "DistributedScheduler" + expected_dask_scheduler = "distributed" + + assert dask.config.get("scheduler") == expected_dask_scheduler + + # Use a testcase that produces delayed warnings (and check those too). 
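+        # Note: this testcase gives fill-value collisions on both the cube
+        # data and the lazy 'surface_altitude' aux-coord points, so exactly
+        # two warnings are expected.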
+ cube = self.make_testcube( + include_lazy_content=True, ensure_fillvalue_collision=True + ) + with warnings.catch_warnings(record=True) as logged_warnings: + result = iris.save(cube, output_path, compute=not save_is_delayed) + + if not save_is_delayed: + assert result is None + assert len(logged_warnings) == 2 + issued_warnings = [log.message for log in logged_warnings] + else: + assert result is not None + assert len(logged_warnings) == 0 + warnings.simplefilter("error") + issued_warnings = result.compute() + + assert len(issued_warnings) == 2 + expected_msg = "contains unmasked data points equal to the fill-value" + assert all( + expected_msg in warning.args[0] for warning in issued_warnings + ) + + def test_time_of_writing( + self, save_is_delayed, output_path, scheduler_type + ): + # Check when lazy data is *actually* written : + # - in 'immediate' mode, on initial file write + # - in 'delayed' mode, only when the delayed-write is computed. + original_cube = self.make_testcube(include_extra_coordlikes=True) + assert original_cube.has_lazy_data() + assert original_cube.coord("surface_altitude").has_lazy_points() + assert original_cube.cell_measure("sample_cm").has_lazy_data() + assert original_cube.ancillary_variable("sample_ancil").has_lazy_data() + + result = iris.save( + original_cube, + output_path, + compute=not save_is_delayed, + ) + assert save_is_delayed == (result is not None) + + # Read back : NOTE avoid loading the separate surface-altitude cube. + readback_cube = iris.load_cube( + output_path, "air_potential_temperature" + ) + # Check the components to be tested *are* lazy. See: self.all_vars_lazy(). + assert readback_cube.has_lazy_data() + assert readback_cube.coord("surface_altitude").has_lazy_points() + assert readback_cube.cell_measure("sample_cm").has_lazy_data() + assert readback_cube.ancillary_variable("sample_ancil").has_lazy_data() + + # If 'delayed', the lazy content should all be masked, otherwise none of it. + def getmask(cube_or_coord): + cube_or_coord = ( + cube_or_coord.copy() + ) # avoid realising the original + if hasattr(cube_or_coord, "points"): + data = cube_or_coord.points + else: + data = cube_or_coord.data + return np.ma.getmaskarray(data) + + test_components = [ + readback_cube, + readback_cube.coord("surface_altitude"), + readback_cube.ancillary_variable("sample_ancil"), + readback_cube.cell_measure("sample_cm"), + ] + + def fetch_masks(): + data_mask, coord_mask, ancil_mask, cm_mask = [ + getmask(data) for data in test_components + ] + return data_mask, coord_mask, ancil_mask, cm_mask + + data_mask, coord_mask, ancil_mask, cm_mask = fetch_masks() + if save_is_delayed: + assert np.all(data_mask) + assert np.all(coord_mask) + assert np.all(ancil_mask) + assert np.all(cm_mask) + else: + assert np.all(~data_mask) + assert np.all(~coord_mask) + assert np.all(~ancil_mask) + assert np.all(~cm_mask) + + if save_is_delayed: + # Complete the write. + result.compute() + + # Re-fetch the lazy arrays. The data should now **not be masked**. + data_mask, coord_mask, ancil_mask, cm_mask = fetch_masks() + # All written now ? + assert np.all(~data_mask) + assert np.all(~coord_mask) + assert np.all(~ancil_mask) + assert np.all(~cm_mask) + + @pytest.mark.parametrize( + "warning_type", ["WarnMaskedBytes", "WarnFillvalueCollision"] + ) + def test_fill_warnings(self, warning_type, output_path, save_is_delayed): + # Test collision warnings for data with fill-value collisions, or for masked + # byte data. 
+        if warning_type == "WarnFillvalueCollision":
+            make_fv_collide = True
+            make_maskedbytes = False
+            expected_msg = (
+                "contains unmasked data points equal to the fill-value"
+            )
+        else:
+            assert warning_type == "WarnMaskedBytes"
+            make_fv_collide = False
+            make_maskedbytes = True
+            expected_msg = "contains byte data with masked points"
+
+        cube = self.make_testcube(
+            include_lazy_content=True,
+            ensure_fillvalue_collision=make_fv_collide,
+            data_is_maskedbytes=make_maskedbytes,
+        )
+        with warnings.catch_warnings(record=True) as logged_warnings:
+            result = iris.save(cube, output_path, compute=not save_is_delayed)
+
+        result_warnings = [
+            log.message
+            for log in logged_warnings
+            if isinstance(log.message, SaverFillValueWarning)
+        ]
+
+        if save_is_delayed:
+            # Should have had *no* fill-warnings in the initial save.
+            assert len(result_warnings) == 0
+            # Complete the operation now.
+            with warnings.catch_warnings():
+                # NOTE: warnings should *not* be issued here, instead they are returned.
+                warnings.simplefilter("error", category=SaverFillValueWarning)
+                result_warnings = result.compute()
+
+        # Either way, we should now have 2 similar warnings.
+        assert len(result_warnings) == 2
+        assert all(
+            expected_msg in warning.args[0] for warning in result_warnings
+        )
+
+    def test_no_delayed_writes(self, output_path):
+        # Just check that a delayed save returns a usable 'delayed' object, even when
+        # there is no lazy content, i.e. no delayed writes to perform.
+        cube = self.make_testcube(include_lazy_content=False)
+        warnings.simplefilter("error")
+        result = iris.save(cube, output_path, compute=False)
+        assert isinstance(result, Delayed)
+        assert result.compute() == []
diff --git a/lib/iris/tests/integration/netcdf/test_thread_safety.py b/lib/iris/tests/integration/netcdf/test_thread_safety.py
index 280e0f8418..5ed32d0671 100644
--- a/lib/iris/tests/integration/netcdf/test_thread_safety.py
+++ b/lib/iris/tests/integration/netcdf/test_thread_safety.py
@@ -98,6 +98,21 @@ def test_stream_multisource(get_cubes_from_netcdf, save_common):
     save_common(final_cube)  # Any problems are expected here.
 
 
+def test_stream_multisource__manychunks(
+    tiny_chunks, get_cubes_from_netcdf, save_common
+):
+    """
+    As above, but with many more, smaller chunks.
+
+    This case previously showed additional, sporadic problems, which only
+    emerge (statistically) with larger numbers of chunks.
+
+    """
+    cubes = get_cubes_from_netcdf
+    final_cube = sum(cubes)
+    save_common(final_cube)  # Any problems are expected here.
+
+
 def test_comparison(get_cubes_from_netcdf):
     """
     Comparing multiple loaded files forces co-realisation.
diff --git a/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/endian.cdl b/lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/endian.cdl similarity index 100% rename from lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/endian.cdl rename to lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/endian.cdl diff --git a/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/mercator.cdl b/lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/mercator.cdl similarity index 100% rename from lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/mercator.cdl rename to lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/mercator.cdl diff --git a/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/mercator_no_ellipsoid.cdl b/lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/mercator_no_ellipsoid.cdl similarity index 100% rename from lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/mercator_no_ellipsoid.cdl rename to lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/mercator_no_ellipsoid.cdl diff --git a/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/stereographic.cdl b/lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/stereographic.cdl similarity index 100% rename from lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/stereographic.cdl rename to lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/stereographic.cdl diff --git a/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/stereographic_no_ellipsoid.cdl b/lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/stereographic_no_ellipsoid.cdl similarity index 100% rename from lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/stereographic_no_ellipsoid.cdl rename to lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/stereographic_no_ellipsoid.cdl diff --git a/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/stereographic_scale_factor.cdl b/lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/stereographic_scale_factor.cdl similarity index 100% rename from lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/stereographic_scale_factor.cdl rename to lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/stereographic_scale_factor.cdl diff --git a/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/transverse_mercator.cdl b/lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/transverse_mercator.cdl similarity index 100% rename from lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/transverse_mercator.cdl rename to lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/transverse_mercator.cdl diff --git a/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/transverse_mercator_no_ellipsoid.cdl b/lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/transverse_mercator_no_ellipsoid.cdl similarity index 100% rename from lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/transverse_mercator_no_ellipsoid.cdl rename to lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/transverse_mercator_no_ellipsoid.cdl diff --git a/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/with_climatology.cdl b/lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/with_climatology.cdl similarity index 100% rename from lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/with_climatology.cdl rename to 
lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/with_climatology.cdl diff --git a/lib/iris/tests/results/unit/fileformats/netcdf/Saver__ugrid/TestSaveUgrid__cube/basic_mesh.cdl b/lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver__ugrid/TestSaveUgrid__cube/basic_mesh.cdl similarity index 100% rename from lib/iris/tests/results/unit/fileformats/netcdf/Saver__ugrid/TestSaveUgrid__cube/basic_mesh.cdl rename to lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver__ugrid/TestSaveUgrid__cube/basic_mesh.cdl diff --git a/lib/iris/tests/unit/fileformats/netcdf/test_parse_cell_methods.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_parse_cell_methods.py similarity index 99% rename from lib/iris/tests/unit/fileformats/netcdf/test_parse_cell_methods.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_parse_cell_methods.py index bbde2d0a2d..729a2d8b14 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/test_parse_cell_methods.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_parse_cell_methods.py @@ -15,7 +15,7 @@ from unittest import mock from iris.coords import CellMethod -from iris.fileformats.netcdf import parse_cell_methods +from iris.fileformats._nc_load_rules.helpers import parse_cell_methods class Test(tests.IrisTest): diff --git a/lib/iris/tests/unit/fileformats/netcdf/test_load_cubes.py b/lib/iris/tests/unit/fileformats/netcdf/loader/test_load_cubes.py similarity index 99% rename from lib/iris/tests/unit/fileformats/netcdf/test_load_cubes.py rename to lib/iris/tests/unit/fileformats/netcdf/loader/test_load_cubes.py index 39992d03a0..1a2ef1d29d 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/test_load_cubes.py +++ b/lib/iris/tests/unit/fileformats/netcdf/loader/test_load_cubes.py @@ -25,7 +25,8 @@ from iris.coords import AncillaryVariable, CellMeasure from iris.experimental.ugrid.load import PARSE_UGRID_ON_LOAD from iris.experimental.ugrid.mesh import MeshCoord -from iris.fileformats.netcdf import load_cubes, logger +from iris.fileformats.netcdf import logger +from iris.fileformats.netcdf.loader import load_cubes from iris.tests.stock.netcdf import ncgen_from_cdl diff --git a/lib/iris/tests/unit/fileformats/netcdf/test_Saver.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py similarity index 97% rename from lib/iris/tests/unit/fileformats/netcdf/test_Saver.py rename to lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py index 93a1537ea4..12af318c01 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/test_Saver.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py @@ -3,7 +3,7 @@ # This file is part of Iris and is released under the LGPL license. # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. -"""Unit tests for the `iris.fileformats.netcdf.Saver` class.""" +"""Unit tests for the :class:`iris.fileformats.netcdf.Saver` class.""" # Import iris.tests first so that some things can be initialised before # importing anything else. @@ -205,7 +205,13 @@ def test_zlib(self): api = self.patch("iris.fileformats.netcdf.saver._thread_safe_nc") # Define mocked default fill values to prevent deprecation warning (#4374). api.default_fillvals = collections.defaultdict(lambda: -99.0) - with Saver("/dummy/path", "NETCDF4") as saver: + # Mock the apparent dtype of mocked variables, to avoid an error. 
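+        # (The saver inspects each variable's dtype when streaming its data and
+        # building the fill-value report, so the mock must expose a real one.)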
+        ref = api.DatasetWrapper.return_value
+        ref = ref.createVariable.return_value
+        ref.dtype = np.dtype(np.float32)
+        # NOTE: use compute=False, as otherwise it gets in a pickle trying to
+        # construct a fill-value report on a non-compliant variable in a
+        # non-file (!)
+        with Saver("/dummy/path", "NETCDF4", compute=False) as saver:
             saver.write(cube, zlib=True)
         dataset = api.DatasetWrapper.return_value
         create_var_call = mock.call(
@@ -646,8 +652,16 @@ def setUp(self):
         self.container = mock.Mock(name="container", attributes={})
         self.data_dtype = np.dtype("int32")
 
+        # We need to create mock datasets which look like they are closed.
+        dataset_class = mock.Mock(
+            return_value=mock.Mock(
+                # Mock dataset : the isopen() call should return 0.
+                isopen=mock.Mock(return_value=0)
+            )
+        )
         patch = mock.patch(
-            "iris.fileformats.netcdf._thread_safe_nc.DatasetWrapper"
+            "iris.fileformats.netcdf._thread_safe_nc.DatasetWrapper",
+            dataset_class,
         )
         _ = patch.start()
         self.addCleanup(patch.stop)
@@ -662,7 +676,7 @@ def assertAttribute(self, value):
 
     def check_attribute_compliance_call(self, value):
         self.set_attribute(value)
-        with Saver(mock.Mock(), "NETCDF4") as saver:
+        with Saver("nonexistent test file", "NETCDF4") as saver:
             saver.check_attribute_compliance(self.container, self.data_dtype)
 
@@ -771,7 +785,7 @@ def test_valid_range_and_valid_min_valid_max_provided(self):
         self.container.attributes["valid_range"] = [1, 2]
         self.container.attributes["valid_min"] = [1]
         msg = 'Both "valid_range" and "valid_min"'
-        with Saver(mock.Mock(), "NETCDF4") as saver:
+        with Saver("nonexistent test file", "NETCDF4") as saver:
             with self.assertRaisesRegex(ValueError, msg):
                 saver.check_attribute_compliance(
                     self.container, self.data_dtype
diff --git a/lib/iris/tests/unit/fileformats/netcdf/test_Saver__lazy.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy.py
similarity index 98%
rename from lib/iris/tests/unit/fileformats/netcdf/test_Saver__lazy.py
rename to lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy.py
index 53e1f9a652..e1211dc276 100644
--- a/lib/iris/tests/unit/fileformats/netcdf/test_Saver__lazy.py
+++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy.py
@@ -14,7 +14,7 @@
 from iris.coords import AuxCoord
 from iris.fileformats.netcdf import Saver
 from iris.tests import stock
-from iris.tests.unit.fileformats.netcdf import test_Saver
+from iris.tests.unit.fileformats.netcdf.saver import test_Saver
 
 class LazyMixin(tests.IrisTest):
diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py
new file mode 100644
index 0000000000..6fa40a14fe
--- /dev/null
+++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py
@@ -0,0 +1,132 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.
+"""
+Unit tests for :meth:`iris.fileformats.netcdf.saver.Saver._lazy_stream_data`.
+
+The behaviour of this method is complex, and this only tests certain aspects.
+The testing of the dask delayed operations and file writing is instead covered
+by integration tests.
+ +""" +from unittest import mock +import warnings + +import dask.array as da +import numpy as np +import pytest + +import iris.fileformats.netcdf._thread_safe_nc as nc_threadsafe +from iris.fileformats.netcdf.saver import Saver, _FillvalueCheckInfo + + +class Test__lazy_stream_data: + @staticmethod + @pytest.fixture(autouse=True) + def saver_patch(): + # Install patches, so we can create a Saver without opening a real output file. + # Mock just enough of Dataset behaviour to allow a 'Saver.complete()' call. + mock_dataset = mock.MagicMock() + mock_dataset_class = mock.Mock(return_value=mock_dataset) + # Mock the wrapper within the netcdf saver + target1 = ( + "iris.fileformats.netcdf.saver._thread_safe_nc.DatasetWrapper" + ) + # Mock the real netCDF4.Dataset within the threadsafe-nc module, as this is + # used by NetCDFDataProxy and NetCDFWriteProxy. + target2 = "iris.fileformats.netcdf._thread_safe_nc.netCDF4.Dataset" + with mock.patch(target1, mock_dataset_class): + with mock.patch(target2, mock_dataset_class): + yield + + # A fixture to parametrise tests over delayed and non-delayed Saver type. + # NOTE: this only affects the saver context-exit, which we do not test here, so + # should make ***no difference to any of these tests***. + @staticmethod + @pytest.fixture(params=[False, True], ids=["nocompute", "compute"]) + def compute(request) -> bool: + yield request.param + + # A fixture to parametrise tests over real and lazy-type data. + @staticmethod + @pytest.fixture(params=[False, True], ids=["realdata", "lazydata"]) + def data_is_lazy(request) -> bool: + yield request.param + + @staticmethod + def saver(compute) -> Saver: + # Create a test Saver object + return Saver( + filename="", netcdf_format="NETCDF4", compute=compute + ) + + @staticmethod + def mock_var(shape): + # Create a test cf_var object + return mock.MagicMock(shape=tuple(shape), dtype=np.dtype(np.float32)) + + def test_data_save(self, compute, data_is_lazy): + """Real data is transferred immediately, lazy data creates a delayed write.""" + saver = self.saver(compute=compute) + data = np.arange(5.0) + if data_is_lazy: + data = da.from_array(data) + fill_value = -1.0 # not occurring in data + cf_var = self.mock_var(data.shape) + saver._lazy_stream_data( + data=data, fill_value=fill_value, fill_warn=True, cf_var=cf_var + ) + assert cf_var.__setitem__.call_count == (0 if data_is_lazy else 1) + assert len(saver._delayed_writes) == (1 if data_is_lazy else 0) + if data_is_lazy: + result_data, result_writer, fill_info = saver._delayed_writes[0] + assert result_data is data + assert isinstance(result_writer, nc_threadsafe.NetCDFWriteProxy) + assert isinstance(fill_info, _FillvalueCheckInfo) + else: + cf_var.__setitem__.assert_called_once_with(slice(None), data) + + def test_warnings(self, compute, data_is_lazy): + """ + For real data, fill-value warnings are issued immediately. For lazy data, + warnings are returned from computing a delayed completion. + + N.B. The 'compute' keyword has **no effect** on this : It only causes delayed + writes to be automatically actioned on exiting a Saver context. + Streaming *always* creates delayed writes for lazy data, since this is required + to make dask distributed operation work. + """ + saver = self.saver(compute=compute) + data = np.arange(5.0) + if data_is_lazy: + data = da.from_array(data) + fill_value = 2.0 # IS occurring in data + cf_var = self.mock_var(data.shape) + + # Do initial save. 
When compute=True, this issues warnings + with warnings.catch_warnings(record=True) as logged_warnings: + saver._lazy_stream_data( + data=data, fill_value=fill_value, fill_warn=True, cf_var=cf_var + ) + + issued_warnings = [log.message for log in logged_warnings] + + n_expected_warnings = 0 if data_is_lazy else 1 + assert len(issued_warnings) == n_expected_warnings + + # Complete the write : any delayed warnings should be *returned*. + # NOTE: + # (1) this still works when there are no delayed writes. + # (2) the Saver 'compute' keyword makes no difference to this usage, as it + # *only* affects what happens when the saver context exits. + result2 = saver.delayed_completion().compute() + issued_warnings += list(result2) + + # Either way, a suitable warning should have been produced. + assert len(issued_warnings) == 1 + warning = issued_warnings[0] + msg = "contains unmasked data points equal to the fill-value, 2.0" + assert isinstance(warning, UserWarning) + assert msg in warning.args[0] diff --git a/lib/iris/tests/unit/fileformats/netcdf/test_Saver__ugrid.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py similarity index 100% rename from lib/iris/tests/unit/fileformats/netcdf/test_Saver__ugrid.py rename to lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test__FillValueMaskCheckAndStoreTarget.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test__data_fillvalue_check.py similarity index 69% rename from lib/iris/tests/unit/fileformats/netcdf/saver/test__FillValueMaskCheckAndStoreTarget.py rename to lib/iris/tests/unit/fileformats/netcdf/saver/test__data_fillvalue_check.py index 77209efafc..95a518e4e5 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test__FillValueMaskCheckAndStoreTarget.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test__data_fillvalue_check.py @@ -4,39 +4,48 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """ -Unit tests for the `iris.fileformats.netcdf._FillValueMaskCheckAndStoreTarget` -class. +Unit tests for :func:`iris.fileformats.netcdf.saver._data_fillvalue_check`. + +Note: now runs all testcases on both real + lazy data. """ # Import iris.tests first so that some things can be initialised before # importing anything else. import iris.tests as tests # isort:skip +import collections -from unittest import mock - +import dask.array as da import numpy as np -from iris.fileformats.netcdf.saver import _FillValueMaskCheckAndStoreTarget +from iris.fileformats.netcdf.saver import _data_fillvalue_check -class Test__FillValueMaskCheckAndStoreTarget(tests.IrisTest): +class Check__fillvalueandmasking: def _call_target(self, fill_value, keys, vals): - inner_target = mock.MagicMock() - target = _FillValueMaskCheckAndStoreTarget( - inner_target, fill_value=fill_value - ) + data = np.zeros(20, dtype=np.float32) + if any(np.ma.isMaskedArray(val) for val in vals): + # N.B. array is masked if "vals" is, but has no masked points initially. 
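+            # (The all-False mask means that only subsequent assignments of
+            # masked values will introduce masked points into the test data.)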
+ data = np.ma.masked_array(data, mask=np.zeros_like(data)) for key, val in zip(keys, vals): - target[key] = val + data[key] = val - calls = [mock.call(key, val) for key, val in zip(keys, vals)] - inner_target.__setitem__.assert_has_calls(calls) + if hasattr(self.arraylib, "compute"): + data = da.from_array(data, chunks=-1) + + results = _data_fillvalue_check( + arraylib=self.arraylib, data=data, check_value=fill_value + ) - return target + if hasattr(results, "compute"): + results = results.compute() - def test___setitem__(self): - self._call_target(None, [1], [2]) + # Return a named tuple, for named-property access to the 2 result values. + result = collections.namedtuple("_", ["is_masked", "contains_value"])( + *results + ) + return result def test_no_fill_value_not_masked(self): # Test when the fill value is not present and the data is not masked @@ -90,3 +99,11 @@ def test_contains_masked_fill_value(self): target = self._call_target(fill_value, keys, vals) self.assertFalse(target.contains_value) self.assertTrue(target.is_masked) + + +class Test__real(Check__fillvalueandmasking, tests.IrisTest): + arraylib = np + + +class Test__lazy(Check__fillvalueandmasking, tests.IrisTest): + arraylib = da diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test__fillvalue_report.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test__fillvalue_report.py new file mode 100644 index 0000000000..b2e4b63e3a --- /dev/null +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test__fillvalue_report.py @@ -0,0 +1,119 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for :func:`iris.fileformats.netcdf.saver._fillvalue_report`. +""" +import warnings + +import numpy as np +import pytest + +from iris.fileformats.netcdf._thread_safe_nc import default_fillvals +from iris.fileformats.netcdf.saver import ( + SaverFillValueWarning, + _fillvalue_report, + _FillvalueCheckInfo, +) + + +class Test__fillvaluereport: + @pytest.mark.parametrize( + "is_bytes", [True, False], ids=["ByteData", "NonbyteData"] + ) + @pytest.mark.parametrize( + "is_masked", [True, False], ids=["MaskedData", "NonmaskedData"] + ) + @pytest.mark.parametrize( + "contains_fv", [True, False], ids=["FillInData", "NofillInData"] + ) + @pytest.mark.parametrize( + "given_user_fv", [True, False], ids=["WithUserfill", "NoUserfill"] + ) + def test_fillvalue_checking( + self, is_bytes, is_masked, contains_fv, given_user_fv + ): + dtype_code = "u1" if is_bytes else "f4" + dtype = np.dtype(dtype_code) + if given_user_fv: + user_fill = 123 if is_bytes else 1.234 + check_value = user_fill + else: + user_fill = None + check_value = default_fillvals[dtype_code] + + fill_info = _FillvalueCheckInfo( + user_value=user_fill, + check_value=check_value, + dtype=dtype, + varname="", + ) + + # Work out expected action, according to intended logic. 
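+        # That is:
+        #  - masked byte data with no user-supplied fill-value warns of masked bytes;
+        #  - otherwise, data containing the check-value warns of a fill-value collision;
+        #  - in all other cases, no warning is expected.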
+ if is_bytes and is_masked and not given_user_fv: + msg_fragment = "'' contains byte data with masked points" + elif contains_fv: + msg_fragment = "'' contains unmasked data points equal to the fill-value" + else: + msg_fragment = None + + # Trial the action + result = _fillvalue_report( + fill_info, + is_masked=is_masked, + contains_fill_value=contains_fv, + warn=False, + ) + + # Check the result + if msg_fragment is None: + assert result is None + else: + assert isinstance(result, Warning) + assert msg_fragment in result.args[0] + + @pytest.mark.parametrize( + "has_collision", + [True, False], + ids=["WithFvCollision", "NoFvCollision"], + ) + def test_warn(self, has_collision): + fill_info = _FillvalueCheckInfo( + user_value=1.23, + check_value=1.23, + dtype=np.float32, + varname="", + ) + + # Check results + if has_collision: + # Check that we get the expected warning + expected_msg = "'' contains unmasked data points equal to the fill-value" + # Enter a warnings context that checks for the error. + warning_context = pytest.warns( + SaverFillValueWarning, match=expected_msg + ) + warning_context.__enter__() + else: + # Check that we get NO warning of the expected type. + warnings.filterwarnings("error", category=SaverFillValueWarning) + + # Do call: it should raise AND return a warning, ONLY IF there was a collision. + result = _fillvalue_report( + fill_info, + is_masked=True, + contains_fill_value=has_collision, + warn=True, + ) + + # Check result + if has_collision: + # Fail if no warning was raised .. + warning_context.__exit__(None, None, None) + # .. or result does not have the expected message content + assert expected_msg in result.args[0] + else: + # Fail if any warning result was produced. + assert result is None diff --git a/lib/iris/tests/unit/fileformats/netcdf/test_save.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_save.py similarity index 78% rename from lib/iris/tests/unit/fileformats/netcdf/test_save.py rename to lib/iris/tests/unit/fileformats/netcdf/saver/test_save.py index b274a8be0d..68049b57fc 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/test_save.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_save.py @@ -3,8 +3,7 @@ # This file is part of Iris and is released under the LGPL license. # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. -"""Unit tests for the `iris.fileformats.netcdf.save` function.""" - +"""Unit tests for the :func:`iris.fileformats.netcdf.save` function.""" # Import iris.tests first so that some things can be initialised before # importing anything else. import iris.tests as tests # isort:skip @@ -15,6 +14,7 @@ from unittest import mock import numpy as np +import pytest import iris from iris.coords import AuxCoord, DimCoord @@ -22,6 +22,7 @@ from iris.experimental.ugrid import PARSE_UGRID_ON_LOAD from iris.fileformats.netcdf import ( CF_CONVENTIONS_VERSION, + Saver, _thread_safe_nc, save, ) @@ -359,5 +360,104 @@ def test_connectivity_dim_varname_collision(self): self._check_save_and_reload([cube_1, cube_2]) +class Test_compute_usage: + """ + Test the operation of the save function 'compute' keyword. + + In actual use, this keyword controls 'delayed saving'. That is tested elsewhere, + in testing the 'Saver' class itself. + """ + + # A fixture to mock out Saver object creation in a 'save' call. + @staticmethod + @pytest.fixture + def mock_saver_creation(): + # A mock for a Saver object. 
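+        # (Using spec=Saver means the mock passes isinstance checks against
+        # Saver, and only genuine Saver attributes can be accessed on it.)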
+ mock_saver = mock.MagicMock(spec=Saver) + # make an __enter__ call return the object itself (as the real Saver does). + mock_saver.__enter__ = mock.Mock(return_value=mock_saver) + # A mock for the Saver() constructor call. + mock_new_saver_call = mock.Mock(return_value=mock_saver) + + # Replace the whole Saver class with a simple function, which thereby emulates + # the constructor call. This avoids complications due to the fact that Mock + # patching does not work in the usual way for __init__ and __new__ methods. + def mock_saver_class_create(*args, **kwargs): + return mock_new_saver_call(*args, **kwargs) + + # Patch the Saver() creation to return our mock Saver object. + with mock.patch( + "iris.fileformats.netcdf.saver.Saver", mock_saver_class_create + ): + # Return mocks for both constructor call, and Saver object. + yield mock_new_saver_call, mock_saver + + # A fixture to provide some mock args for 'Saver' creation. + @staticmethod + @pytest.fixture + def mock_saver_args(): + from collections import namedtuple + + # A special object for the cube, since cube.attributes must be indexable + mock_cube = mock.MagicMock() + args = namedtuple( + "saver_args", ["cube", "filename", "format", "compute"] + )( + cube=mock_cube, + filename=mock.sentinel.filepath, + format=mock.sentinel.netcdf4, + compute=mock.sentinel.compute, + ) + return args + + def test_saver_creation(self, mock_saver_creation, mock_saver_args): + # Check that 'save' creates a Saver, passing the 'compute' keyword. + mock_saver_new, mock_saver = mock_saver_creation + args = mock_saver_args + save( + cube=args.cube, + filename=args.filename, + netcdf_format=args.format, + compute=args.compute, + ) + # Check the Saver create call it made, in particular that the compute arg is + # passed in. + mock_saver_new.assert_called_once_with( + args.filename, args.format, compute=args.compute + ) + + def test_compute_true(self, mock_saver_creation, mock_saver_args): + # Check operation when compute=True. + mock_saver_new, mock_saver = mock_saver_creation + args = mock_saver_args + result = save( + cube=args.cube, + filename=args.filename, + netcdf_format=args.format, + compute=True, + ) + # It should NOT have called 'delayed_completion' + assert mock_saver.delayed_completion.call_count == 0 + # Result should be None + assert result is None + + def test_compute_false_result_delayed( + self, mock_saver_creation, mock_saver_args + ): + # Check operation when compute=False. + mock_saver_new, mock_saver = mock_saver_creation + args = mock_saver_args + result = save( + cube=args.cube, + filename=args.filename, + netcdf_format=args.format, + compute=False, + ) + # It should have called 'delayed_completion' .. + assert mock_saver.delayed_completion.call_count == 1 + # .. and should return the result of that. + assert result is mock_saver.delayed_completion.return_value + + if __name__ == "__main__": tests.main() diff --git a/requirements/locks/py310-linux-64.lock b/requirements/locks/py310-linux-64.lock index 654e69779d..8473c0672b 100644 --- a/requirements/locks/py310-linux-64.lock +++ b/requirements/locks/py310-linux-64.lock @@ -1,6 +1,6 @@ # Generated by conda-lock. 
# platform: linux-64 -# input_hash: 753a1a5b14d89bc3c35783578a75466a83a09089a694b38ab8e01b433c65c10e +# input_hash: 9568fc25789d9bd43892afef1d5f3cb02ed16f4037938e3c6f06e43db332ac64 @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2022.12.7-ha878542_0.conda#ff9f73d45c4a07d6f424495288a26080 @@ -154,6 +154,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-253-h8c4010b_1.conda https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.2.4-h1daa5a0_1.conda#77003f63d1763c1e6569a02c1742c9f4 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.2-py310h1fa729e_0.conda#a1f0db6709778b77b5903541eeac4032 +https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.5-py310hdf3cbec_0.conda#5311a49aaea44b73935c84a6d9a68e5f https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 https://conda.anaconda.org/conda-forge/linux-64/numpy-1.24.2-py310h8deb116_0.conda#b7085457309e206174b8e234d90a7605 https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-hfec8fc6_2.conda#5ce6a42505c6e9e6151c54c3ec8d68ea @@ -170,9 +171,10 @@ https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2023.3-pyhd8ed1ab_0. https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.2.0-py310h1fa729e_0.conda#8d155ac95b1dfe585bcb6bec6a91c73b https://conda.anaconda.org/conda-forge/noarch/pytz-2023.3-pyhd8ed1ab_0.conda#d3076b483092a435832603243567bc31 https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0-py310h5764c6d_5.tar.bz2#9e68d2ff6d98737c855b65f48dd3c597 -https://conda.anaconda.org/conda-forge/noarch/setuptools-67.6.1-pyhd8ed1ab_0.conda#6c443cccff3daa3d83b2b807b0a298ce +https://conda.anaconda.org/conda-forge/noarch/setuptools-67.7.1-pyhd8ed1ab_0.conda#82bd3ef4e96ced7384f34ab01ece65b6 https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e +https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.3.2.post1-pyhd8ed1ab_0.tar.bz2#146f4541d643d48fc8a75cacf69f03ae https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.4-pyhd8ed1ab_0.conda#5a31a7d564f551d0e6dff52fd8cb5b16 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.2-py_0.tar.bz2#68e01cac9d38d0e717cd5c87bc3d2cc9 @@ -180,6 +182,7 @@ https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.1-pyhd8 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-py_0.tar.bz2#67cd9d9c0382d37479b4d306c369a2d4 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.3-py_0.tar.bz2#d01180388e6d1838c3e1ad029590aa7a https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.5-pyhd8ed1ab_2.tar.bz2#9ff55a0901cf952f05c654394de76bf7 +https://conda.anaconda.org/conda-forge/noarch/tblib-1.7.0-pyhd8ed1ab_0.tar.bz2#3d4afc31302aa7be471feb6be048ed76 https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 
https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96 https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 @@ -190,6 +193,7 @@ https://conda.anaconda.org/conda-forge/noarch/wheel-0.40.0-pyhd8ed1ab_0.conda#49 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h166bdaf_0.tar.bz2#c9b568bd804cb2903c6be6f5f68182e4 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda#82b6df12252e6f32402b96dacc656fec https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.10-h7f98852_1003.tar.bz2#f59c1242cc1dd93e72c2ee2b360979eb +https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30c2c15b82aacb07f9c09e28ff2275 https://conda.anaconda.org/conda-forge/noarch/zipp-3.15.0-pyhd8ed1ab_0.conda#13018819ca8f5b7cc675a8faf1f5fedf https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.4-pyhd8ed1ab_0.conda#46a2e6e3dfa718ce3492018d5a110dd6 https://conda.anaconda.org/conda-forge/noarch/babel-2.12.1-pyhd8ed1ab_1.conda#ac432e732804a81ddcf29c92ead57cde @@ -200,6 +204,7 @@ https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py310hde88566_1.tar https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.0.7-py310hdf3cbec_0.conda#7bf9d8c765b6b04882c719509652c6d6 https://conda.anaconda.org/conda-forge/linux-64/coverage-7.2.3-py310h1fa729e_0.conda#3eb11d1ed20480b4515094af8ae24c64 https://conda.anaconda.org/conda-forge/linux-64/curl-8.0.1-h588be90_0.conda#69691e828381dd12df671c26b680f1b0 +https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.0-py310h5764c6d_1.tar.bz2#fd18cd597d23b2b5ddde23bd5b7aec32 https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.39.3-py310h1fa729e_0.conda#4f39f656d6ff2761d698e69af952be82 https://conda.anaconda.org/conda-forge/linux-64/glib-2.76.1-h3eb15da_0.conda#a7db5e3525875444b5a5868f553ab39a https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.12.2-nompi_h4df4325_101.conda#162a25904af6586b234b2dd52ee99c61 @@ -254,6 +259,7 @@ https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.54.4-h7abd40a_0.tar.bz https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.2.2-pyha770c72_0.conda#c4aab94cab4ddeb340e36d4c670a5f24 https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-h5d23da1_6.conda#59c73debd9405771690ddbbad6c57b69 https://conda.anaconda.org/conda-forge/noarch/urllib3-1.26.15-pyhd8ed1ab_0.conda#27db656619a55d727eaf5a6ece3d2fd6 +https://conda.anaconda.org/conda-forge/noarch/distributed-2023.4.0-pyhd8ed1ab_0.conda#78e6f14161ba76ae48ac3e82e1f4bf13 https://conda.anaconda.org/conda-forge/linux-64/esmpy-8.4.0-nompi_py310h4c636dd_2.conda#00383e95a1a8d1d5b21af8535cd2ac43 https://conda.anaconda.org/conda-forge/linux-64/graphviz-7.1.0-h2e5815a_0.conda#e7ecda996c443142a0e9c379f3b28e48 https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.7-py310hab646b1_3.conda#d049da3204bf5ecb54a852b622f2d7d2 @@ -268,3 +274,4 @@ https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.4.1-pyhd8ed1ab_0.c https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.13.0-pyhd8ed1ab_0.conda#26c51b97ce59bbcce6a35ff45bc5c900 https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.21.1-py310h7eb24ba_1.conda#e727db22a14344608c2caeccaa9e9d2b https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a + diff --git a/requirements/locks/py311-linux-64.lock b/requirements/locks/py311-linux-64.lock index 2c21d14aeb..31bf1c0295 
100644 --- a/requirements/locks/py311-linux-64.lock +++ b/requirements/locks/py311-linux-64.lock @@ -1,6 +1,6 @@ # Generated by conda-lock. # platform: linux-64 -# input_hash: ccc18d7a90c531923e2b547b37cd25694cfbf4e9ec916ee9e8800513d1be3672 +# input_hash: 8641fc2c6ee10124d2b6eb1e655660170ee9faee3c595b844e213bc027dab9cf @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2022.12.7-ha878542_0.conda#ff9f73d45c4a07d6f424495288a26080 @@ -154,6 +154,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-253-h8c4010b_1.conda https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.2.4-h1daa5a0_1.conda#77003f63d1763c1e6569a02c1742c9f4 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.2-py311h2582759_0.conda#adb20bd57069614552adac60a020c36d +https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.5-py311ha3edf6b_0.conda#7415f24f8c44e44152623d93c5015000 https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 https://conda.anaconda.org/conda-forge/linux-64/numpy-1.24.2-py311h8e6699e_0.conda#90db8cc0dfa20853329bfc6642f887aa https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-hfec8fc6_2.conda#5ce6a42505c6e9e6151c54c3ec8d68ea @@ -170,9 +171,10 @@ https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2023.3-pyhd8ed1ab_0. https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.2.0-py311h2582759_0.conda#dfcc3e6e30d6ec2b2bb416fcd8ff4dc1 https://conda.anaconda.org/conda-forge/noarch/pytz-2023.3-pyhd8ed1ab_0.conda#d3076b483092a435832603243567bc31 https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0-py311hd4cff14_5.tar.bz2#da8769492e423103c59f469f4f17f8d9 -https://conda.anaconda.org/conda-forge/noarch/setuptools-67.6.1-pyhd8ed1ab_0.conda#6c443cccff3daa3d83b2b807b0a298ce +https://conda.anaconda.org/conda-forge/noarch/setuptools-67.7.1-pyhd8ed1ab_0.conda#82bd3ef4e96ced7384f34ab01ece65b6 https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e +https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.3.2.post1-pyhd8ed1ab_0.tar.bz2#146f4541d643d48fc8a75cacf69f03ae https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.4-pyhd8ed1ab_0.conda#5a31a7d564f551d0e6dff52fd8cb5b16 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.2-py_0.tar.bz2#68e01cac9d38d0e717cd5c87bc3d2cc9 @@ -180,6 +182,7 @@ https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.1-pyhd8 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-py_0.tar.bz2#67cd9d9c0382d37479b4d306c369a2d4 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.3-py_0.tar.bz2#d01180388e6d1838c3e1ad029590aa7a https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.5-pyhd8ed1ab_2.tar.bz2#9ff55a0901cf952f05c654394de76bf7 +https://conda.anaconda.org/conda-forge/noarch/tblib-1.7.0-pyhd8ed1ab_0.tar.bz2#3d4afc31302aa7be471feb6be048ed76 
https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96 https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 @@ -189,6 +192,7 @@ https://conda.anaconda.org/conda-forge/noarch/wheel-0.40.0-pyhd8ed1ab_0.conda#49 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h166bdaf_0.tar.bz2#c9b568bd804cb2903c6be6f5f68182e4 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda#82b6df12252e6f32402b96dacc656fec https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.10-h7f98852_1003.tar.bz2#f59c1242cc1dd93e72c2ee2b360979eb +https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30c2c15b82aacb07f9c09e28ff2275 https://conda.anaconda.org/conda-forge/noarch/zipp-3.15.0-pyhd8ed1ab_0.conda#13018819ca8f5b7cc675a8faf1f5fedf https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.4-pyhd8ed1ab_0.conda#46a2e6e3dfa718ce3492018d5a110dd6 https://conda.anaconda.org/conda-forge/noarch/babel-2.12.1-pyhd8ed1ab_1.conda#ac432e732804a81ddcf29c92ead57cde @@ -199,6 +203,7 @@ https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py311h4c7f6c3_1.tar https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.0.7-py311ha3edf6b_0.conda#e7548e7f58965a2fe97a95950a5fedc6 https://conda.anaconda.org/conda-forge/linux-64/coverage-7.2.3-py311h2582759_0.conda#d34c18fc691a04471ff3460b2d15d19e https://conda.anaconda.org/conda-forge/linux-64/curl-8.0.1-h588be90_0.conda#69691e828381dd12df671c26b680f1b0 +https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.0-py311hd4cff14_1.tar.bz2#21523141b35484b1edafba962c6ea883 https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.39.3-py311h2582759_0.conda#55741f37ab19d949b8e7316cfe286824 https://conda.anaconda.org/conda-forge/linux-64/glib-2.76.1-h3eb15da_0.conda#a7db5e3525875444b5a5868f553ab39a https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.12.2-nompi_h4df4325_101.conda#162a25904af6586b234b2dd52ee99c61 @@ -253,6 +258,7 @@ https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.54.4-h7abd40a_0.tar.bz https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.2.2-pyha770c72_0.conda#c4aab94cab4ddeb340e36d4c670a5f24 https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-h5d23da1_6.conda#59c73debd9405771690ddbbad6c57b69 https://conda.anaconda.org/conda-forge/noarch/urllib3-1.26.15-pyhd8ed1ab_0.conda#27db656619a55d727eaf5a6ece3d2fd6 +https://conda.anaconda.org/conda-forge/noarch/distributed-2023.4.0-pyhd8ed1ab_0.conda#78e6f14161ba76ae48ac3e82e1f4bf13 https://conda.anaconda.org/conda-forge/linux-64/esmpy-8.4.0-nompi_py311h8e2db7d_2.conda#18fa0582166979a77413859eed97d667 https://conda.anaconda.org/conda-forge/linux-64/graphviz-7.1.0-h2e5815a_0.conda#e7ecda996c443142a0e9c379f3b28e48 https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.7-py311ha74522f_3.conda#ad6dd0bed0cdf5f2d4eb2b989d6253b3 @@ -267,3 +273,4 @@ https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.4.1-pyhd8ed1ab_0.c https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.13.0-pyhd8ed1ab_0.conda#26c51b97ce59bbcce6a35ff45bc5c900 https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.21.1-py311hd88b842_1.conda#f19feb9440890ccb806a367ea9ae0654 https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a + diff --git 
a/requirements/locks/py38-linux-64.lock b/requirements/locks/py38-linux-64.lock index 88c81d2fa6..1d82b482e7 100644 --- a/requirements/locks/py38-linux-64.lock +++ b/requirements/locks/py38-linux-64.lock @@ -1,6 +1,6 @@ # Generated by conda-lock. # platform: linux-64 -# input_hash: 372b8320e059eef6c6d3a1a76b3d328ba9d570548dc2b9a6f1739e39b0a421f9 +# input_hash: a4f998b35ebe4d2e981f778f4bb786e492e572afcbc4a8b28c8c4f13ed0a4a25 @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2022.12.7-ha878542_0.conda#ff9f73d45c4a07d6f424495288a26080 @@ -153,6 +153,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-253-h8c4010b_1.conda https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.2.4-h1daa5a0_1.conda#77003f63d1763c1e6569a02c1742c9f4 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.2-py38h1de0b5d_0.conda#6d97b5d6f06933ab653f1862ddf6e33e +https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.5-py38hfbd4bf9_0.conda#5401b83c1007f408d0c74e23fa9b5eff https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 https://conda.anaconda.org/conda-forge/linux-64/numpy-1.24.2-py38h10c12cc_0.conda#05592c85b9f6931dc2df1e80c0d56294 https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-hfec8fc6_2.conda#5ce6a42505c6e9e6151c54c3ec8d68ea @@ -169,9 +170,10 @@ https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2023.3-pyhd8ed1ab_0. https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.2.0-py38h1de0b5d_0.conda#7db73572d4f7e10a759bad609a228ad0 https://conda.anaconda.org/conda-forge/noarch/pytz-2023.3-pyhd8ed1ab_0.conda#d3076b483092a435832603243567bc31 https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0-py38h0a891b7_5.tar.bz2#0856c59f9ddb710c640dc0428d66b1b7 -https://conda.anaconda.org/conda-forge/noarch/setuptools-67.6.1-pyhd8ed1ab_0.conda#6c443cccff3daa3d83b2b807b0a298ce +https://conda.anaconda.org/conda-forge/noarch/setuptools-67.7.1-pyhd8ed1ab_0.conda#82bd3ef4e96ced7384f34ab01ece65b6 https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e +https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.3.2.post1-pyhd8ed1ab_0.tar.bz2#146f4541d643d48fc8a75cacf69f03ae https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.4-pyhd8ed1ab_0.conda#5a31a7d564f551d0e6dff52fd8cb5b16 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.2-py_0.tar.bz2#68e01cac9d38d0e717cd5c87bc3d2cc9 @@ -179,6 +181,7 @@ https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.1-pyhd8 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-py_0.tar.bz2#67cd9d9c0382d37479b4d306c369a2d4 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.3-py_0.tar.bz2#d01180388e6d1838c3e1ad029590aa7a https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.5-pyhd8ed1ab_2.tar.bz2#9ff55a0901cf952f05c654394de76bf7 
+https://conda.anaconda.org/conda-forge/noarch/tblib-1.7.0-pyhd8ed1ab_0.tar.bz2#3d4afc31302aa7be471feb6be048ed76 https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96 https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 @@ -189,6 +192,7 @@ https://conda.anaconda.org/conda-forge/noarch/wheel-0.40.0-pyhd8ed1ab_0.conda#49 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h166bdaf_0.tar.bz2#c9b568bd804cb2903c6be6f5f68182e4 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda#82b6df12252e6f32402b96dacc656fec https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.10-h7f98852_1003.tar.bz2#f59c1242cc1dd93e72c2ee2b360979eb +https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30c2c15b82aacb07f9c09e28ff2275 https://conda.anaconda.org/conda-forge/noarch/zipp-3.15.0-pyhd8ed1ab_0.conda#13018819ca8f5b7cc675a8faf1f5fedf https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.4-pyhd8ed1ab_0.conda#46a2e6e3dfa718ce3492018d5a110dd6 https://conda.anaconda.org/conda-forge/noarch/babel-2.12.1-pyhd8ed1ab_1.conda#ac432e732804a81ddcf29c92ead57cde @@ -198,6 +202,7 @@ https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py38h4a40e3a_3.conda https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py38h26c90d9_1.tar.bz2#dcc025a7bb54374979c500c2e161fac9 https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.0.7-py38hfbd4bf9_0.conda#638537863b298151635c05c762a997ab https://conda.anaconda.org/conda-forge/linux-64/curl-8.0.1-h588be90_0.conda#69691e828381dd12df671c26b680f1b0 +https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.0-py38h0a891b7_1.tar.bz2#183f6160ab3498b882e903b06be7d430 https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.39.3-py38h1de0b5d_0.conda#34449fe6e3949956fac2236c9a9a3d3b https://conda.anaconda.org/conda-forge/linux-64/glib-2.76.1-h3eb15da_0.conda#a7db5e3525875444b5a5868f553ab39a https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.12.2-nompi_h4df4325_101.conda#162a25904af6586b234b2dd52ee99c61 @@ -253,6 +258,7 @@ https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.ta https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.2.2-pyha770c72_0.conda#c4aab94cab4ddeb340e36d4c670a5f24 https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-h5d23da1_6.conda#59c73debd9405771690ddbbad6c57b69 https://conda.anaconda.org/conda-forge/noarch/urllib3-1.26.15-pyhd8ed1ab_0.conda#27db656619a55d727eaf5a6ece3d2fd6 +https://conda.anaconda.org/conda-forge/noarch/distributed-2023.4.0-pyhd8ed1ab_0.conda#78e6f14161ba76ae48ac3e82e1f4bf13 https://conda.anaconda.org/conda-forge/linux-64/esmpy-8.4.0-nompi_py38h2b78397_2.conda#03c291af8938218972bfba0b0618d3e9 https://conda.anaconda.org/conda-forge/linux-64/graphviz-7.1.0-h2e5815a_0.conda#e7ecda996c443142a0e9c379f3b28e48 https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.7-py38ha0d8c90_3.conda#e965dc172d67920d058ac2b3a0e27565 @@ -267,3 +273,4 @@ https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.4.1-pyhd8ed1ab_0.c https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.13.0-pyhd8ed1ab_0.conda#26c51b97ce59bbcce6a35ff45bc5c900 https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.21.1-py38h1abf878_1.conda#4d102cd1e6db10034a6c97df4444833f 
https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a + diff --git a/requirements/locks/py39-linux-64.lock b/requirements/locks/py39-linux-64.lock index ee579047f8..8dbbd385d8 100644 --- a/requirements/locks/py39-linux-64.lock +++ b/requirements/locks/py39-linux-64.lock @@ -1,6 +1,6 @@ # Generated by conda-lock. # platform: linux-64 -# input_hash: 9fc254eb7e503ee38c6849d25757756fae3559d7c669631940bd56e2e8d5e5f7 +# input_hash: 2d814707461cb5eb9ee68d230e169577fb13716dd06048a6bb7d7466d9916621 @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2022.12.7-ha878542_0.conda#ff9f73d45c4a07d6f424495288a26080 @@ -154,6 +154,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-253-h8c4010b_1.conda https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.2.4-h1daa5a0_1.conda#77003f63d1763c1e6569a02c1742c9f4 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.2-py39h72bdee0_0.conda#35514f5320206df9f4661c138c02e1c1 +https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.5-py39h4b4f3f3_0.conda#413374bab5022a5199c5dd89aef75df5 https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 https://conda.anaconda.org/conda-forge/linux-64/numpy-1.24.2-py39h7360e5f_0.conda#757070dc7cc33003254888808cd34f1e https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-hfec8fc6_2.conda#5ce6a42505c6e9e6151c54c3ec8d68ea @@ -170,9 +171,10 @@ https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2023.3-pyhd8ed1ab_0. 
https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.2.0-py39h72bdee0_0.conda#18927f971926b7271600368de71de557 https://conda.anaconda.org/conda-forge/noarch/pytz-2023.3-pyhd8ed1ab_0.conda#d3076b483092a435832603243567bc31 https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0-py39hb9d737c_5.tar.bz2#ef9db3c38ae7275f6b14491cfe61a248 -https://conda.anaconda.org/conda-forge/noarch/setuptools-67.6.1-pyhd8ed1ab_0.conda#6c443cccff3daa3d83b2b807b0a298ce +https://conda.anaconda.org/conda-forge/noarch/setuptools-67.7.1-pyhd8ed1ab_0.conda#82bd3ef4e96ced7384f34ab01ece65b6 https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e +https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.3.2.post1-pyhd8ed1ab_0.tar.bz2#146f4541d643d48fc8a75cacf69f03ae https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.4-pyhd8ed1ab_0.conda#5a31a7d564f551d0e6dff52fd8cb5b16 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.2-py_0.tar.bz2#68e01cac9d38d0e717cd5c87bc3d2cc9 @@ -180,6 +182,7 @@ https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.1-pyhd8 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-py_0.tar.bz2#67cd9d9c0382d37479b4d306c369a2d4 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.3-py_0.tar.bz2#d01180388e6d1838c3e1ad029590aa7a https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.5-pyhd8ed1ab_2.tar.bz2#9ff55a0901cf952f05c654394de76bf7 +https://conda.anaconda.org/conda-forge/noarch/tblib-1.7.0-pyhd8ed1ab_0.tar.bz2#3d4afc31302aa7be471feb6be048ed76 https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96 https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 @@ -190,6 +193,7 @@ https://conda.anaconda.org/conda-forge/noarch/wheel-0.40.0-pyhd8ed1ab_0.conda#49 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h166bdaf_0.tar.bz2#c9b568bd804cb2903c6be6f5f68182e4 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda#82b6df12252e6f32402b96dacc656fec https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.10-h7f98852_1003.tar.bz2#f59c1242cc1dd93e72c2ee2b360979eb +https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30c2c15b82aacb07f9c09e28ff2275 https://conda.anaconda.org/conda-forge/noarch/zipp-3.15.0-pyhd8ed1ab_0.conda#13018819ca8f5b7cc675a8faf1f5fedf https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.4-pyhd8ed1ab_0.conda#46a2e6e3dfa718ce3492018d5a110dd6 https://conda.anaconda.org/conda-forge/noarch/babel-2.12.1-pyhd8ed1ab_1.conda#ac432e732804a81ddcf29c92ead57cde @@ -199,6 +203,7 @@ https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py39he91dace_3.conda https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py39h2ae25f5_1.tar.bz2#c943fb9a2818ecc5be1e0ecc8b7738f1 https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.0.7-py39h4b4f3f3_0.conda#c5387f3fb1f5b8b71e1c865fc55f4951 
https://conda.anaconda.org/conda-forge/linux-64/curl-8.0.1-h588be90_0.conda#69691e828381dd12df671c26b680f1b0 +https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.0-py39hb9d737c_1.tar.bz2#eb31327ace8dac15c2df243d9505a132 https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.39.3-py39h72bdee0_0.conda#9232b3b2cc83a304c8210a092e8ba4a5 https://conda.anaconda.org/conda-forge/linux-64/glib-2.76.1-h3eb15da_0.conda#a7db5e3525875444b5a5868f553ab39a https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.12.2-nompi_h4df4325_101.conda#162a25904af6586b234b2dd52ee99c61 @@ -254,6 +259,7 @@ https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.ta https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.2.2-pyha770c72_0.conda#c4aab94cab4ddeb340e36d4c670a5f24 https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-h5d23da1_6.conda#59c73debd9405771690ddbbad6c57b69 https://conda.anaconda.org/conda-forge/noarch/urllib3-1.26.15-pyhd8ed1ab_0.conda#27db656619a55d727eaf5a6ece3d2fd6 +https://conda.anaconda.org/conda-forge/noarch/distributed-2023.4.0-pyhd8ed1ab_0.conda#78e6f14161ba76ae48ac3e82e1f4bf13 https://conda.anaconda.org/conda-forge/linux-64/esmpy-8.4.0-nompi_py39h95eafd8_2.conda#f04f8970f741b2f78af7e5b7112d17d6 https://conda.anaconda.org/conda-forge/linux-64/graphviz-7.1.0-h2e5815a_0.conda#e7ecda996c443142a0e9c379f3b28e48 https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.7-py39h5c7b992_3.conda#19e30314fe824605750da905febb8ee6 @@ -268,3 +274,4 @@ https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.4.1-pyhd8ed1ab_0.c https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.13.0-pyhd8ed1ab_0.conda#26c51b97ce59bbcce6a35ff45bc5c900 https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.21.1-py39h4bd5d67_1.conda#a60d65263a8ddbff5381ed91d4f6953e https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a + diff --git a/requirements/py310.yml b/requirements/py310.yml index c9cb826189..4af9745c40 100644 --- a/requirements/py310.yml +++ b/requirements/py310.yml @@ -35,6 +35,7 @@ dependencies: - python-stratify # Test dependencies. + - distributed - filelock - imagehash >=4.0 - pre-commit diff --git a/requirements/py311.yml b/requirements/py311.yml index 71130c87cb..5a57349593 100644 --- a/requirements/py311.yml +++ b/requirements/py311.yml @@ -35,6 +35,7 @@ dependencies: - python-stratify # Test dependencies. + - distributed - filelock - imagehash >=4.0 - pre-commit diff --git a/requirements/py38.yml b/requirements/py38.yml index fd87ca8f26..ea28be119e 100644 --- a/requirements/py38.yml +++ b/requirements/py38.yml @@ -35,6 +35,7 @@ dependencies: - python-stratify # Test dependencies. + - distributed - filelock - imagehash >=4.0 - pre-commit diff --git a/requirements/py39.yml b/requirements/py39.yml index 6d460eefeb..cf0ad24c4b 100644 --- a/requirements/py39.yml +++ b/requirements/py39.yml @@ -35,6 +35,7 @@ dependencies: - python-stratify # Test dependencies. + - distributed - filelock - imagehash >=4.0 - pre-commit
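Note: "distributed" is added above as a test dependency, evidently so that the
delayed-save tests can also run under a dask.distributed scheduler. As a
minimal, illustrative sketch (the cube and output filename are assumptions,
not taken from this change-set), a delayed save can be completed on such a
scheduler like this:

    import dask.array as da
    from dask.distributed import Client

    import iris
    from iris.cube import Cube

    if __name__ == "__main__":
        client = Client()  # start a local dask.distributed scheduler
        cube = Cube(da.zeros((4, 5), chunks=(2, 5)), var_name="example")
        result = iris.save(cube, "example_output.nc", compute=False)
        result.compute()  # the lazy data is only written to the file here
        client.close()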