diff --git a/.flake8 b/.flake8 new file mode 100644 index 0000000000..4f67c422f8 --- /dev/null +++ b/.flake8 @@ -0,0 +1,49 @@ +[flake8] +# References: +# https://flake8.readthedocs.io/en/latest/user/configuration.html +# https://flake8.readthedocs.io/en/latest/user/error-codes.html +# https://pycodestyle.readthedocs.io/en/latest/intro.html#error-codes + +max-line-length = 80 +max-complexity = 50 +select = C,E,F,W,B,B950 +ignore = + # E203: whitespace before ':' + E203, + # E226: missing whitespace around arithmetic operator + E226, + # E231: missing whitespace after ',', ';', or ':' + E231, + # E402: module level import not at top of file + E402, + # E501: line too long + E501, + # E731: do not assign a lambda expression, use a def + E731, + # W503: line break before binary operator + W503, + # W504: line break after binary operator + W504, +exclude = + # + # ignore the following directories + # + .eggs, + build, + docs/src/sphinxext/*, + tools/*, + benchmarks/*, + # + # ignore auto-generated files + # + _ff_cross_refrences.py, + std_names.py, + um_cf_map.py, + # + # ignore third-party files + # + gitwash_dumper.py, + # + # convenience imports + # + lib/iris/common/__init__.py diff --git a/.github/workflows/ci-tests.yml b/.github/workflows/ci-tests.yml index 8831329ee3..5c48966ce8 100644 --- a/.github/workflows/ci-tests.yml +++ b/.github/workflows/ci-tests.yml @@ -35,18 +35,18 @@ jobs: fail-fast: false matrix: os: ["ubuntu-latest"] - python-version: ["3.10"] + python-version: ["3.11"] session: ["doctest", "gallery", "linkcheck"] include: - os: "ubuntu-latest" - python-version: "3.10" + python-version: "3.11" session: "tests" coverage: "--coverage" - os: "ubuntu-latest" - python-version: "3.9" + python-version: "3.10" session: "tests" - os: "ubuntu-latest" - python-version: "3.8" + python-version: "3.9" session: "tests" env: diff --git a/.github/workflows/ci-wheels.yml b/.github/workflows/ci-wheels.yml index a1c18d9f77..942d528f6d 100644 --- a/.github/workflows/ci-wheels.yml +++ b/.github/workflows/ci-wheels.yml @@ -35,9 +35,7 @@ jobs: - name: "building" shell: bash run: | - # require build with explicit --sdist and --wheel in order to - # get correct version associated with sdist and bdist artifacts - pipx run build --sdist --wheel + pipx run build - uses: actions/upload-artifact@v3 with: @@ -54,7 +52,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.8", "3.9", "3.10"] + python-version: ["3.9", "3.10", "3.11"] session: ["wheel"] env: ENV_NAME: "ci-wheels" diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a6c6edfd90..fd05b03b68 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -47,7 +47,6 @@ repos: hooks: - id: flake8 types: [file, python] - args: [--config=./setup.cfg] - repo: https://github.com/pycqa/isort rev: 5.12.0 diff --git a/MANIFEST.in b/MANIFEST.in index 70b3d74294..94ee69d7c5 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -4,7 +4,9 @@ prune docs prune etc recursive-include lib *.cdl *.cml *.json *.md *.py *.template *.txt *.xml prune requirements +recursive-include requirements *.txt prune tools +exclude .flake8 exclude .git-blame-ignore-revs exclude .git_archival.txt exclude .gitattributes @@ -20,8 +22,8 @@ exclude Makefile exclude noxfile.py # files required to build iris.std_names module -include tools/generate_std_names.py include etc/cf-standard-name-table.xml +include tools/generate_std_names.py global-exclude *.py[cod] global-exclude __pycache__ \ No newline at end of file diff --git a/benchmarks/asv.conf.json
b/benchmarks/asv.conf.json index 1e726eaee5..fab5bcb44e 100644 --- a/benchmarks/asv.conf.json +++ b/benchmarks/asv.conf.json @@ -19,7 +19,7 @@ // * No build-time environment variables. // * Is run in the same environment as the ASV install itself. "delegated_env_commands": [ - "PY_VER=3.10 nox --envdir={conf_dir}/.asv/env/nox01 --session=tests --install-only --no-error-on-external-run --verbose" + "PY_VER=3.11 nox --envdir={conf_dir}/.asv/env/nox01 --session=tests --install-only --no-error-on-external-run --verbose" ], // The parent directory of the above environment. // The most recently modified environment in the directory will be used. diff --git a/benchmarks/bm_runner.py b/benchmarks/bm_runner.py index 472fbafd85..11c3af293b 100644 --- a/benchmarks/bm_runner.py +++ b/benchmarks/bm_runner.py @@ -60,7 +60,7 @@ def _prep_data_gen_env() -> None: """ root_dir = BENCHMARKS_DIR.parent - python_version = "3.10" + python_version = "3.11" data_gen_var = "DATA_GEN_PYTHON" if data_gen_var in environ: print("Using existing data generation environment.") diff --git a/docs/src/userguide/real_and_lazy_data.rst b/docs/src/userguide/real_and_lazy_data.rst index 38f06aab99..ef4de0c429 100644 --- a/docs/src/userguide/real_and_lazy_data.rst +++ b/docs/src/userguide/real_and_lazy_data.rst @@ -6,6 +6,7 @@ import dask.array as da import iris + from iris.cube import CubeList import numpy as np @@ -227,10 +228,47 @@ coordinates' lazy points and bounds: Dask Processing Options ----------------------- -Iris uses dask to provide lazy data arrays for both Iris cubes and coordinates, -and for computing deferred operations on lazy arrays. +Iris uses `Dask <https://www.dask.org/>`_ to provide lazy data arrays for +both Iris cubes and coordinates, and for computing deferred operations on lazy arrays. Dask provides processing options to control how deferred operations on lazy arrays are computed. This is provided via the ``dask.set_options`` interface. See the `dask documentation <https://docs.dask.org>`_ for more information on setting dask processing options. + + +.. _delayed_netcdf_save: + +Delayed NetCDF Saving +--------------------- + +When saving data to NetCDF files, it is possible to *delay* writing lazy content to the +output file, to be performed by `Dask <https://www.dask.org/>`_ later, +thus enabling parallel save operations. + +This works in the following way: + 1. an :func:`iris.save` call is made, with a NetCDF file output and the additional + keyword ``compute=False``. + This is currently *only* available when saving to NetCDF, so it is documented in + the Iris NetCDF file format API. See: :func:`iris.fileformats.netcdf.save`. + + 2. the call creates the output file, but does not write the data of any variable + whose data is a lazy array in the Iris object. Instead, these variables are + initially created "empty". + + 3. the :func:`iris.save` call returns a ``result`` which is a + :class:`~dask.delayed.Delayed` object. + + 4. the save can be completed later by calling ``result.compute()``, or by passing it + to the :func:`dask.compute` call. + +The benefit of this is that costly data transfer operations can be performed in +parallel with writes to other data files. Also, where array contents are calculated +from shared lazy input data, these can be computed in parallel efficiently by Dask +(i.e. without re-fetching), similar to what :meth:`iris.cube.CubeList.realise_data` +can do. + +.. note:: + This feature does **not** enable parallel writes to the *same* NetCDF output file. + That can only be done on certain operating systems, with a specially configured + build of the NetCDF C library, and is not supported by Iris at present.
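As a usage sketch of the scheme just described (the input and output filenames here are purely illustrative):

    import dask
    import iris

    # Create two output files, leaving all lazy content unwritten.
    delayed_a = iris.save(iris.load("inputs_a.nc"), "output_a.nc", compute=False)
    delayed_b = iris.save(iris.load("inputs_b.nc"), "output_b.nc", compute=False)

    # Complete both saves afterwards, in a single parallel computation.
    dask.compute(delayed_a, delayed_b)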
diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index ef27895e8d..6b4189d922 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -30,7 +30,10 @@ This document explains the changes made to Iris for this release ✨ Features =========== -#. N/A +#. `@pp-mo`_ and `@lbdreyer`_ supported delayed saving of lazy data when writing to + the netCDF file format. See: :ref:`delayed netCDF saves <delayed_netcdf_save>`. + Also with significant input from `@fnattino`_. + (:pull:`5191`) 🐛 Bugs Fixed @@ -60,7 +63,11 @@ This document explains the changes made to Iris for this release 🔗 Dependencies =============== -#. N/A +#. `@rcomer`_ and `@bjlittle`_ (reviewer) added testing support for Python + 3.11. (:pull:`5226`) + +#. `@rcomer`_ dropped support for Python 3.8, in accordance with the NEP29_ + recommendations. (:pull:`5226`) 📚 Documentation @@ -84,12 +91,26 @@ This document explains the changes made to Iris for this release #. `@bjlittle`_ added the `codespell`_ `pre-commit`_ ``git-hook`` to automate spell checking within the code-base. (:pull:`5186`) +#. `@bjlittle`_ and `@trexfeathers`_ (reviewer) added a `check-manifest`_ + GitHub Action and `pre-commit`_ ``git-hook`` to automate verification + of assets bundled within a ``sdist`` and binary ``wheel`` of our + `scitools-iris`_ PyPI package. (:pull:`5259`) + +#. `@rcomer`_ removed a now redundant copying workaround from Resolve testing. + (:pull:`5267`) + +#. `@bjlittle`_ and `@trexfeathers`_ (reviewer) migrated ``setup.cfg`` to + ``pyproject.toml``, as motivated by `PEP-0621`_. (:pull:`5262`) + +#. `@bjlittle`_ adopted `pypa/build`_ recommended best practice to build a + binary ``wheel`` from the ``sdist``. (:pull:`5266`) + .. comment Whatsnew author names (@github name) in alphabetical order. Note that, core dev names are automatically included by the common_links.inc: - +.. _@fnattino: https://github.com/fnattino .. comment @@ -97,3 +118,7 @@ This document explains the changes made to Iris for this release .. _sphinx-panels: https://github.com/executablebooks/sphinx-panels .. _sphinx-design: https://github.com/executablebooks/sphinx-design +.. _check-manifest: https://github.com/mgedmin/check-manifest +.. _PEP-0621: https://peps.python.org/pep-0621/ +.. _pypa/build: https://pypa-build.readthedocs.io/en/stable/ +.. _NEP29: https://numpy.org/neps/nep-0029-deprecation_policy.html diff --git a/lib/iris/fileformats/_nc_load_rules/helpers.py b/lib/iris/fileformats/_nc_load_rules/helpers.py index 8e6161bac5..bbf9c660c5 100644 --- a/lib/iris/fileformats/_nc_load_rules/helpers.py +++ b/lib/iris/fileformats/_nc_load_rules/helpers.py @@ -13,6 +13,8 @@ build routines, and which it does not use. """ +import re +from typing import List import warnings import cf_units @@ -28,10 +30,6 @@ import iris.exceptions import iris.fileformats.cf as cf import iris.fileformats.netcdf -from iris.fileformats.netcdf import ( - UnknownCellMethodWarning, - parse_cell_methods, -) from iris.fileformats.netcdf.loader import _get_cf_var_data import iris.std_names import iris.util @@ -184,6 +182,210 @@ CF_VALUE_STD_NAME_PROJ_Y = "projection_y_coordinate" + +################################################################################ +# Handling of cell-methods.
+ +_CM_COMMENT = "comment" +_CM_EXTRA = "extra" +_CM_INTERVAL = "interval" +_CM_METHOD = "method" +_CM_NAME = "name" +_CM_PARSE_NAME = re.compile(r"([\w_]+\s*?:\s+)+") +_CM_PARSE = re.compile( + r""" + (?P<name>([\w_]+\s*?:\s+)+) + (?P<method>[\w_\s]+(?![\w_]*\s*?:))\s* + (?: + \(\s* + (?P<extra>.+) + \)\s* + )? + """, + re.VERBOSE, +) + +# Cell methods. +_CM_KNOWN_METHODS = [ + "point", + "sum", + "mean", + "maximum", + "minimum", + "mid_range", + "standard_deviation", + "variance", + "mode", + "median", +] + + +def _split_cell_methods(nc_cell_methods: str) -> List[re.Match]: + """ + Split a CF cell_methods attribute string into a list of zero or more cell + methods, each of which is then parsed with a regex to return a list of match + objects. + + Args: + + * nc_cell_methods: The value of the cell methods attribute to be split. + + Returns: + + * nc_cell_methods_matches: A list of the re.Match objects associated with + each parsed cell method + + Splitting is done based on words followed by colons outside of any brackets. + Validation of anything other than being laid out in the expected format is + left to the calling function. + """ + + # Find name candidates + name_start_inds = [] + for m in _CM_PARSE_NAME.finditer(nc_cell_methods): + name_start_inds.append(m.start()) + + # Remove those that fall inside brackets + bracket_depth = 0 + for ind, cha in enumerate(nc_cell_methods): + if cha == "(": + bracket_depth += 1 + elif cha == ")": + bracket_depth -= 1 + if bracket_depth < 0: + msg = ( + "Cell methods may be incorrectly parsed due to mismatched " + "brackets" + ) + warnings.warn(msg, UserWarning, stacklevel=2) + if bracket_depth > 0 and ind in name_start_inds: + name_start_inds.remove(ind) + + # List tuples of indices of starts and ends of the cell methods in the string + method_indices = [] + for ii in range(len(name_start_inds) - 1): + method_indices.append((name_start_inds[ii], name_start_inds[ii + 1])) + method_indices.append((name_start_inds[-1], len(nc_cell_methods))) + + # Index the string and match against each substring + nc_cell_methods_matches = [] + for start_ind, end_ind in method_indices: + nc_cell_method_str = nc_cell_methods[start_ind:end_ind] + nc_cell_method_match = _CM_PARSE.match(nc_cell_method_str.strip()) + if not nc_cell_method_match: + msg = ( + f"Failed to fully parse cell method string: {nc_cell_methods}" + ) + warnings.warn(msg, UserWarning, stacklevel=2) + continue + nc_cell_methods_matches.append(nc_cell_method_match) + + return nc_cell_methods_matches + + +class UnknownCellMethodWarning(Warning): + pass + + +def parse_cell_methods(nc_cell_methods): + """ + Parse a CF cell_methods attribute string into a tuple of zero or + more CellMethod instances. + + Args: + + * nc_cell_methods (str): + The value of the cell methods attribute to be parsed. + + Returns: + + * cell_methods + An iterable of :class:`iris.coords.CellMethod`. + + Multiple coordinates, intervals and comments are supported. + If a method has a non-standard name a warning will be issued, but the + results are not affected. + + """ + + cell_methods = [] + if nc_cell_methods is not None: + for m in _split_cell_methods(nc_cell_methods): + d = m.groupdict() + method = d[_CM_METHOD] + method = method.strip() + # Check validity of method, allowing for multi-part methods + # e.g. mean over years. + method_words = method.split() + if method_words[0].lower() not in _CM_KNOWN_METHODS: + msg = "NetCDF variable contains unknown cell method {!r}" + warnings.warn( + msg.format("{}".format(method_words[0])), + UnknownCellMethodWarning, + ) + d[_CM_METHOD] = method + name = d[_CM_NAME] + name = name.replace(" ", "") + name = name.rstrip(":") + d[_CM_NAME] = tuple([n for n in name.split(":")]) + interval = [] + comment = [] + if d[_CM_EXTRA] is not None: + # + # tokenise the key words and field colon marker + # + d[_CM_EXTRA] = d[_CM_EXTRA].replace( + "comment:", "<<comment>><<:>>" + ) + d[_CM_EXTRA] = d[_CM_EXTRA].replace( + "interval:", "<<interval>><<:>>" + ) + d[_CM_EXTRA] = d[_CM_EXTRA].split("<<:>>") + if len(d[_CM_EXTRA]) == 1: + comment.extend(d[_CM_EXTRA]) + else: + next_field_type = comment + for field in d[_CM_EXTRA]: + field_type = next_field_type + index = field.rfind("<<interval>>") + if index == 0: + next_field_type = interval + continue + elif index > 0: + next_field_type = interval + else: + index = field.rfind("<<comment>>") + if index == 0: + next_field_type = comment + continue + elif index > 0: + next_field_type = comment + if index != -1: + field = field[:index] + field_type.append(field.strip()) + # + # cater for a shared interval over multiple axes + # + if len(interval): + if len(d[_CM_NAME]) != len(interval) and len(interval) == 1: + interval = interval * len(d[_CM_NAME]) + # + # cater for a shared comment over multiple axes + # + if len(comment): + if len(d[_CM_NAME]) != len(comment) and len(comment) == 1: + comment = comment * len(d[_CM_NAME]) + d[_CM_INTERVAL] = tuple(interval) + d[_CM_COMMENT] = tuple(comment) + cell_method = iris.coords.CellMethod( + d[_CM_METHOD], + coords=d[_CM_NAME], + intervals=d[_CM_INTERVAL], + comments=d[_CM_COMMENT], + ) + cell_methods.append(cell_method) + return tuple(cell_methods)
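To make the parsing behaviour concrete, a sketch of what the routine above yields for a simple, made-up cell_methods string (the attribute value is illustrative; per the __init__ changes later in this patch, `parse_cell_methods` is also re-exported from `iris.fileformats.netcdf`):

    from iris.fileformats.netcdf import parse_cell_methods

    (cm,) = parse_cell_methods("time: mean (interval: 1 hr comment: sampled)")
    print(cm.method)       # 'mean'
    print(cm.coord_names)  # ('time',)
    print(cm.intervals)    # ('1 hr',)
    print(cm.comments)     # ('sampled',)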
+ + ################################################################################ def build_cube_metadata(engine): """Add the standard meta data to the cube.""" diff --git a/lib/iris/fileformats/netcdf/__init__.py b/lib/iris/fileformats/netcdf/__init__.py index 505e173b0b..b696b200ff 100644 --- a/lib/iris/fileformats/netcdf/__init__.py +++ b/lib/iris/fileformats/netcdf/__init__.py @@ -18,6 +18,11 @@ # Note: *must* be done before importing from submodules, as they also use this ! logger = iris.config.get_logger(__name__) +# Note: these probably shouldn't be public, but for now they are. +from .._nc_load_rules.helpers import ( + UnknownCellMethodWarning, + parse_cell_methods, +) from .loader import DEBUG, NetCDFDataProxy, load_cubes from .saver import ( CF_CONVENTIONS_VERSION, @@ -25,8 +30,6 @@ SPATIO_TEMPORAL_AXES, CFNameCoordMap, Saver, - UnknownCellMethodWarning, - parse_cell_methods, save, ) diff --git a/lib/iris/fileformats/netcdf/_dask_locks.py b/lib/iris/fileformats/netcdf/_dask_locks.py new file mode 100644 index 0000000000..15ac117a8b --- /dev/null +++ b/lib/iris/fileformats/netcdf/_dask_locks.py @@ -0,0 +1,140 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Module containing code to create locks enabling dask workers to co-operate. + +This matter is complicated by needing different solutions for different dask scheduler +types, i.e. local 'threads' scheduler, local 'processes' or distributed.
+ +In any case, an "iris.fileformats.netcdf.saver.Saver" object contains a netCDF4.Dataset +targeting an output file, and creates a Saver.file_write_lock object to serialise +write-accesses to the file from dask tasks: all dask-task file writes go via a +"iris.fileformats.netcdf.saver.NetCDFWriteProxy" object, which also contains a link +to the Saver.file_write_lock, and uses it to prevent workers from fouling each other. + +For each chunk written, the NetCDFWriteProxy acquires the common per-file lock; +opens a Dataset on the file; performs a write to the relevant variable; closes the +Dataset and then releases the lock. This process is obviously very similar to what the +NetCDFDataProxy does for reading lazy chunks. + +For a threaded scheduler, the Saver.file_write_lock is a simple threading.Lock(). The workers +(threads) execute tasks which contain a NetCDFWriteProxy, as above. All of those +contain the common lock, and this is simply **the same object** for all workers, since +they share an address space. + +For a distributed scheduler, the Saver.file_write_lock is a `distributed.Lock()` which is +identified with the output filepath. This is distributed to the workers by +serialising the task function arguments, which will include the NetCDFWriteProxy. +A worker behaves like a process, though it may execute on a remote machine. When a +distributed.Lock is deserialised to reconstruct the worker task, this creates an object +that communicates with the scheduler. These objects behave as a single common lock, +as they all have the same string 'identity', so the scheduler implements inter-process +communication so that they can mutually exclude each other. + +It is also *conceivable* that multiple processes could write to the same file in +parallel, if the operating system supports it. However, this also requires that the +libnetcdf C library is built with the parallel-access option, which is not common. +With the "ordinary" libnetcdf build, a process which attempts to open for writing a file +which is _already_ open for writing simply raises an access error. +In any case, the Iris netcdf saver does not support this mode of operation, at present. + +We don't currently support a local "processes" type scheduler. If we did, the +behaviour should be very similar to a distributed scheduler. It would need to use some +other serialisable shared-lock solution in place of 'distributed.Lock', which requires +a distributed scheduler to function. + +""" +import threading + +import dask.array +import dask.base +import dask.multiprocessing +import dask.threaded + + +# A dedicated error class, allowing filtering and testing of errors raised here. +class DaskSchedulerTypeError(ValueError): + pass + + +def dask_scheduler_is_distributed(): + """Return whether a distributed.Client is active.""" + # NOTE: this replicates logic in `dask.base.get_scheduler`: if a distributed client + # has been created + is still active, then the default scheduler will always be + # "distributed". + is_distributed = False + # NOTE: must still work when 'distributed' is not available. + try: + import distributed + + client = distributed.get_client() + is_distributed = client is not None + except (ImportError, ValueError): + pass + return is_distributed
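The scheduler detection in the next function leans on `dask.base.get_scheduler` returning the actual `get` callable that a compute would use. A minimal sketch of that behaviour (assuming no distributed client is active):

    import dask
    import dask.array as da
    import dask.base
    import dask.multiprocessing
    import dask.threaded

    arr = da.zeros(1)
    # By default, dask arrays compute with the threaded scheduler ...
    assert dask.base.get_scheduler(collections=[arr]) == dask.threaded.get
    # ... but an explicitly configured scheduler is respected.
    with dask.config.set(scheduler="processes"):
        assert dask.base.get_scheduler(collections=[arr]) == dask.multiprocessing.get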
+ + +def get_dask_array_scheduler_type(): + """ + Work out what type of scheduler an array.compute*() will use. + + Returns one of 'distributed', 'threads', 'processes' or 'single-threaded'. + The return value is a valid argument for dask.config.set(scheduler=). + This cannot distinguish between distributed local and remote clusters -- both of + those simply return 'distributed'. + + NOTE: this takes account of how dask is *currently* configured. It will be wrong + if the config changes before the compute actually occurs. + + """ + if dask_scheduler_is_distributed(): + result = "distributed" + else: + # Call 'get_scheduler', which respects the config settings, but pass an array + # so we default to the default scheduler for that type of object. + trial_dask_array = dask.array.zeros(1) + get_function = dask.base.get_scheduler(collections=[trial_dask_array]) + # Detect the ones which we recognise. + if get_function == dask.threaded.get: + result = "threads" + elif get_function == dask.local.get_sync: + result = "single-threaded" + elif get_function == dask.multiprocessing.get: + result = "processes" + else: + msg = f"Dask default scheduler for arrays is unrecognised : {get_function}" + raise DaskSchedulerTypeError(msg) + + return result + + +def get_worker_lock(identity: str): + """ + Return a mutex Lock which can be shared by multiple Dask workers. + + The type of Lock generated depends on the dask scheduler type, which must therefore + be set up before this is called. + + """ + scheduler_type = get_dask_array_scheduler_type() + if scheduler_type in ("threads", "single-threaded"): + # N.B. the "identity" string is never used in this case, as the same actual + # lock object is used by all workers. + lock = threading.Lock() + elif scheduler_type == "distributed": + from dask.distributed import Lock as DistributedLock + + lock = DistributedLock(identity) + else: + msg = ( + "The configured dask array scheduler type is " + f'"{scheduler_type}", ' + "which is not supported by the Iris netcdf saver." + ) + raise DaskSchedulerTypeError(msg) + + # NOTE: not supporting 'processes' scheduler, for now. + return lock
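These locks matter because any object with a `__setitem__` can act as a `dask.array.store` target, which is how the `NetCDFWriteProxy` in the next file below is driven. A toy stand-in (illustrative only, not Iris code), assuming a threaded scheduler, shows the pattern:

    import threading

    import dask.array as da
    import numpy as np

    class ToyWriteTarget:
        # Stands in for NetCDFWriteProxy: serialise chunk writes with a lock.
        def __init__(self, shape, dtype, lock):
            self.array = np.zeros(shape, dtype=dtype)
            self.lock = lock

        def __setitem__(self, keys, chunk):
            with self.lock:  # one chunk write at a time
                self.array[keys] = chunk

    data = da.arange(16, chunks=4).reshape(4, 4)
    target = ToyWriteTarget(data.shape, data.dtype, threading.Lock())
    da.store([data], [target], lock=False)  # each chunk arrives via __setitem__
    assert (target.array == np.arange(16).reshape(4, 4)).all()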
diff --git a/lib/iris/fileformats/netcdf/_thread_safe_nc.py b/lib/iris/fileformats/netcdf/_thread_safe_nc.py index decca1535f..709696087b 100644 --- a/lib/iris/fileformats/netcdf/_thread_safe_nc.py +++ b/lib/iris/fileformats/netcdf/_thread_safe_nc.py @@ -340,3 +340,44 @@ def __getstate__(self): def __setstate__(self, state): for key, value in state.items(): setattr(self, key, value) + + +class NetCDFWriteProxy: + """ + The "opposite" of a NetCDFDataProxy: an object mimicking the data access of a + netCDF4.Variable, but where the data is to be ***written to***. + + It encapsulates the netcdf file and variable which are actually to be written to. + This opens the file each time, to enable writing the data chunk, then closes it. + TODO: could be improved with a caching scheme, but this just about works. + """ + + def __init__(self, filepath, cf_var, file_write_lock): + self.path = filepath + self.varname = cf_var.name + self.lock = file_write_lock + + def __setitem__(self, keys, array_data): + # Write to the variable. + # First acquire a file-specific lock for all workers writing to this file. + self.lock.acquire() + # Open the file for writing + write to the specific file variable. + # Exactly as above, in NetCDFDataProxy: a DatasetWrapper causes problems with + # invalid ID's and the netCDF4 library, for so-far unknown reasons. + # Instead, use _GLOBAL_NETCDF4_LOCK, and netCDF4 _directly_. + with _GLOBAL_NETCDF4_LOCK: + dataset = None + try: + dataset = netCDF4.Dataset(self.path, "r+") + var = dataset.variables[self.varname] + var[keys] = array_data + finally: + try: + if dataset: + dataset.close() + finally: + # *ALWAYS* let go ! + self.lock.release() + + def __repr__(self): + return f"<{self.__class__.__name__} path={self.path!r} var={self.varname!r}>" diff --git a/lib/iris/fileformats/netcdf/saver.py b/lib/iris/fileformats/netcdf/saver.py index e5e696d3c3..5c11d804db 100644 --- a/lib/iris/fileformats/netcdf/saver.py +++ b/lib/iris/fileformats/netcdf/saver.py @@ -23,9 +23,10 @@ import warnings import cf_units +import dask import dask.array as da +from dask.delayed import Delayed import numpy as np -import numpy.ma as ma from iris._lazy_data import _co_realise_lazy_arrays, is_lazy_data from iris.aux_factory import ( @@ -44,7 +45,7 @@ from iris.coords import AncillaryVariable, AuxCoord, CellMeasure, DimCoord import iris.exceptions import iris.fileformats.cf -from iris.fileformats.netcdf import _thread_safe_nc +from iris.fileformats.netcdf import _dask_locks, _thread_safe_nc import iris.io import iris.util @@ -156,207 +157,6 @@ } - -# Cell methods. -_CM_KNOWN_METHODS = [ - "point", - "sum", - "mean", - "maximum", - "minimum", - "mid_range", - "standard_deviation", - "variance", - "mode", - "median", -] - -_CM_COMMENT = "comment" -_CM_EXTRA = "extra" -_CM_INTERVAL = "interval" -_CM_METHOD = "method" -_CM_NAME = "name" -_CM_PARSE_NAME = re.compile(r"([\w_]+\s*?:\s+)+") -_CM_PARSE = re.compile( - r""" - (?P<name>([\w_]+\s*?:\s+)+) - (?P<method>[\w_\s]+(?![\w_]*\s*?:))\s* - (?: - \(\s* - (?P<extra>.+) - \)\s* - )? - """, - re.VERBOSE, -) - - -class UnknownCellMethodWarning(Warning): - pass - - -def _split_cell_methods(nc_cell_methods: str) -> List[re.Match]: - """ - Split a CF cell_methods attribute string into a list of zero or more cell - methods, each of which is then parsed with a regex to return a list of match - objects. - - Args: - - * nc_cell_methods: The value of the cell methods attribute to be split. - - Returns: - - * nc_cell_methods_matches: A list of the re.Match objects associated with - each parsed cell method - - Splitting is done based on words followed by colons outside of any brackets. - Validation of anything other than being laid out in the expected format is - left to the calling function.
- """ - - # Find name candidates - name_start_inds = [] - for m in _CM_PARSE_NAME.finditer(nc_cell_methods): - name_start_inds.append(m.start()) - - # Remove those that fall inside brackets - bracket_depth = 0 - for ind, cha in enumerate(nc_cell_methods): - if cha == "(": - bracket_depth += 1 - elif cha == ")": - bracket_depth -= 1 - if bracket_depth < 0: - msg = ( - "Cell methods may be incorrectly parsed due to mismatched " - "brackets" - ) - warnings.warn(msg, UserWarning, stacklevel=2) - if bracket_depth > 0 and ind in name_start_inds: - name_start_inds.remove(ind) - - # List tuples of indices of starts and ends of the cell methods in the string - method_indices = [] - for ii in range(len(name_start_inds) - 1): - method_indices.append((name_start_inds[ii], name_start_inds[ii + 1])) - method_indices.append((name_start_inds[-1], len(nc_cell_methods))) - - # Index the string and match against each substring - nc_cell_methods_matches = [] - for start_ind, end_ind in method_indices: - nc_cell_method_str = nc_cell_methods[start_ind:end_ind] - nc_cell_method_match = _CM_PARSE.match(nc_cell_method_str.strip()) - if not nc_cell_method_match: - msg = ( - f"Failed to fully parse cell method string: {nc_cell_methods}" - ) - warnings.warn(msg, UserWarning, stacklevel=2) - continue - nc_cell_methods_matches.append(nc_cell_method_match) - - return nc_cell_methods_matches - - -def parse_cell_methods(nc_cell_methods): - """ - Parse a CF cell_methods attribute string into a tuple of zero or - more CellMethod instances. - - Args: - - * nc_cell_methods (str): - The value of the cell methods attribute to be parsed. - - Returns: - - * cell_methods - An iterable of :class:`iris.coords.CellMethod`. - - Multiple coordinates, intervals and comments are supported. - If a method has a non-standard name a warning will be issued, but the - results are not affected. - - """ - - cell_methods = [] - if nc_cell_methods is not None: - for m in _split_cell_methods(nc_cell_methods): - d = m.groupdict() - method = d[_CM_METHOD] - method = method.strip() - # Check validity of method, allowing for multi-part methods - # e.g. mean over years. 
- method_words = method.split() - if method_words[0].lower() not in _CM_KNOWN_METHODS: - msg = "NetCDF variable contains unknown cell method {!r}" - warnings.warn( - msg.format("{}".format(method_words[0])), - UnknownCellMethodWarning, - ) - d[_CM_METHOD] = method - name = d[_CM_NAME] - name = name.replace(" ", "") - name = name.rstrip(":") - d[_CM_NAME] = tuple([n for n in name.split(":")]) - interval = [] - comment = [] - if d[_CM_EXTRA] is not None: - # - # tokenise the key words and field colon marker - # - d[_CM_EXTRA] = d[_CM_EXTRA].replace( - "comment:", "<<comment>><<:>>" - ) - d[_CM_EXTRA] = d[_CM_EXTRA].replace( - "interval:", "<<interval>><<:>>" - ) - d[_CM_EXTRA] = d[_CM_EXTRA].split("<<:>>") - if len(d[_CM_EXTRA]) == 1: - comment.extend(d[_CM_EXTRA]) - else: - next_field_type = comment - for field in d[_CM_EXTRA]: - field_type = next_field_type - index = field.rfind("<<interval>>") - if index == 0: - next_field_type = interval - continue - elif index > 0: - next_field_type = interval - else: - index = field.rfind("<<comment>>") - if index == 0: - next_field_type = comment - continue - elif index > 0: - next_field_type = comment - if index != -1: - field = field[:index] - field_type.append(field.strip()) - # - # cater for a shared interval over multiple axes - # - if len(interval): - if len(d[_CM_NAME]) != len(interval) and len(interval) == 1: - interval = interval * len(d[_CM_NAME]) - # - # cater for a shared comment over multiple axes - # - if len(comment): - if len(d[_CM_NAME]) != len(comment) and len(comment) == 1: - comment = comment * len(d[_CM_NAME]) - d[_CM_INTERVAL] = tuple(interval) - d[_CM_COMMENT] = tuple(comment) - cell_method = iris.coords.CellMethod( - d[_CM_METHOD], - coords=d[_CM_NAME], - intervals=d[_CM_INTERVAL], - comments=d[_CM_COMMENT], - ) - cell_methods.append(cell_method) - return tuple(cell_methods) - - class CFNameCoordMap: """Provide a simple CF name to CF coordinate mapping.""" @@ -467,61 +267,139 @@ def _setncattr(variable, name, attribute): return variable.setncattr(name, attribute) -class _FillValueMaskCheckAndStoreTarget: - """ - To be used with da.store. Remembers whether any element was equal to a - given value and whether it was masked, before passing the chunk to the - given target. +# NOTE : this matches :class:`iris.experimental.ugrid.mesh.Mesh.ELEMENTS`, +# but in the preferred order for coord/connectivity variables in the file. +MESH_ELEMENTS = ("node", "edge", "face") + + +_FillvalueCheckInfo = collections.namedtuple( + "_FillvalueCheckInfo", ["user_value", "check_value", "dtype", "varname"] +) + - NOTE: target needs to be a _thread_safe_nc._ThreadSafeWrapper subclass. +def _data_fillvalue_check(arraylib, data, check_value): + """ + Check whether an array is masked, and whether it contains a fill-value. + + Parameters + ---------- + arraylib : module + Either numpy or dask.array : When dask, results are lazy computations. + data : array-like + Array to check (numpy or dask) + check_value : number or None + If not None, fill-value to check for existence in the array. + If None, do not do value-in-array check + + Returns + ------- + is_masked : bool + True if array has any masked points. + contains_value : bool + True if array contains check_value. + Always False if check_value is None.
""" + is_masked = arraylib.any(arraylib.ma.getmaskarray(data)) + if check_value is None: + contains_value = False + else: + contains_value = arraylib.any(data == check_value) + return is_masked, contains_value - def __init__(self, target, fill_value=None): - assert hasattr(target, "THREAD_SAFE_FLAG") - self.target = target - self.fill_value = fill_value - self.contains_value = False - self.is_masked = False - def __setitem__(self, keys, arr): - if self.fill_value is not None: - self.contains_value = self.contains_value or self.fill_value in arr - self.is_masked = self.is_masked or ma.is_masked(arr) - self.target[keys] = arr +class SaverFillValueWarning(UserWarning): + pass -# NOTE : this matches :class:`iris.experimental.ugrid.mesh.Mesh.ELEMENTS`, -# but in the preferred order for coord/connectivity variables in the file. -MESH_ELEMENTS = ("node", "edge", "face") +def _fillvalue_report(fill_info, is_masked, contains_fill_value, warn=False): + """ + From the given information, work out whether there was a possible or actual + fill-value collision, and if so construct a warning. + + Parameters + ---------- + fill_info : _FillvalueCheckInfo + A named-tuple containing the context of the fill-value check + is_masked : bool + whether the data array was masked + contains_fill_value : bool + whether the data array contained the fill-value + warn : bool + if True, also issue any resulting warning immediately. + + Returns + ------- + None or :class:`Warning` + If not None, indicates a known or possible problem with filling + + """ + varname = fill_info.varname + user_value = fill_info.user_value + check_value = fill_info.check_value + is_byte_data = fill_info.dtype.itemsize == 1 + result = None + if is_byte_data and is_masked and user_value is None: + result = SaverFillValueWarning( + f"CF var '{varname}' contains byte data with masked points, but " + "no fill_value keyword was given. As saved, these " + "points will read back as valid values. To save as " + "masked byte data, `_FillValue` needs to be explicitly " + "set. For Cube data this can be done via the 'fill_value' " + "keyword during saving, otherwise use ncedit/equivalent." + ) + elif contains_fill_value: + result = SaverFillValueWarning( + f"CF var '{varname}' contains unmasked data points equal to the " + f"fill-value, {check_value}. As saved, these points will read back " + "as missing data. To save these as normal values, " + "`_FillValue` needs to be set to not equal any valid data " + "points. For Cube data this can be done via the 'fill_value' " + "keyword during saving, otherwise use ncedit/equivalent." + ) + + if warn and result is not None: + warnings.warn(result) + return result class Saver: """A manager for saving netcdf files.""" - def __init__(self, filename, netcdf_format): + def __init__(self, filename, netcdf_format, compute=True): """ A manager for saving netcdf files. - Args: - - * filename (string): + Parameters + ---------- + filename : string Name of the netCDF file to save the cube. - * netcdf_format (string): + netcdf_format : string Underlying netCDF file format, one of 'NETCDF4', 'NETCDF4_CLASSIC', 'NETCDF3_CLASSIC' or 'NETCDF3_64BIT'. Default is 'NETCDF4' format. - Returns: - None. - - For example:: - - # Initialise Manager for saving - with Saver(filename, netcdf_format) as sman: - # Iterate through the cubelist. 
- for cube in cubes: - sman.write(cube) + compute : bool, default=True + If True, delayed variable saves will be completed on exit from the Saver + context (after first closing the target file), equivalent to + :meth:`complete()`. + If False, the file is created and closed without writing the data of + variables for which the source data was lazy. These writes can be + completed later, see :meth:`delayed_completion`. + + Returns + ------- + None + + Example + ------- + >>> import iris + >>> from iris.fileformats.netcdf.saver import Saver + >>> cubes = iris.load(iris.sample_data_path('atlantic_profiles.nc')) + >>> with Saver("tmp.nc", "NETCDF4") as sman: + ... # Iterate through the cubelist. + ... for cube in cubes: + ... sman.write(cube) """ if netcdf_format not in [ @@ -548,18 +426,30 @@ def __init__(self, filename, netcdf_format): self._mesh_dims = {} #: A dictionary, mapping formula terms to owner cf variable name self._formula_terms_cache = {} + #: Target filepath + self.filepath = os.path.abspath(filename) + #: A list of delayed writes for lazy saving + self._delayed_writes = ( + [] + ) # a list of triples (source, target, fill-info) + #: Whether to complete delayed saves on exit (and raise associated warnings). + self.compute = compute + # N.B. the file-write-lock *type* actually depends on the dask scheduler type. + #: A per-file write lock to prevent dask attempting overlapping writes. + self.file_write_lock = _dask_locks.get_worker_lock(self.filepath) #: NetCDF dataset + self._dataset = None try: self._dataset = _thread_safe_nc.DatasetWrapper( - filename, mode="w", format=netcdf_format + self.filepath, mode="w", format=netcdf_format ) except RuntimeError: - dir_name = os.path.dirname(filename) + dir_name = os.path.dirname(self.filepath) if not os.path.isdir(dir_name): msg = "No such file or directory: {}".format(dir_name) raise IOError(msg) if not os.access(dir_name, os.R_OK | os.W_OK): - msg = "Permission denied: {}".format(filename) + msg = "Permission denied: {}".format(self.filepath) raise IOError(msg) else: raise @@ -572,6 +462,8 @@ def __exit__(self, type, value, traceback): self._dataset.sync() self._dataset.close() + if self.compute: + self.complete() def write( self, @@ -2444,8 +2336,7 @@ def _increment_name(self, varname): return "{}_{}".format(varname, num) - @staticmethod - def _lazy_stream_data(data, fill_value, fill_warn, cf_var): + def _lazy_stream_data(self, data, fill_value, fill_warn, cf_var): if hasattr(data, "shape") and data.shape == (1,) + cf_var.shape: # (Don't do this check for string data). # Reduce dimensionality where the data array has an extra dimension @@ -2454,25 +2345,8 @@ def _lazy_stream_data(data, fill_value, fill_warn, cf_var): # contains just 1 row, so the cf_var is 1D. data = data.squeeze(axis=0) - if is_lazy_data(data): - - def store(data, cf_var, fill_value): - # Store lazy data and check whether it is masked and contains - # the fill value - target = _FillValueMaskCheckAndStoreTarget(cf_var, fill_value) - da.store([data], [target]) - return target.is_masked, target.contains_value - - else: - - def store(data, cf_var, fill_value): - cf_var[:] = data - is_masked = np.ma.is_masked(data) - contains_value = fill_value is not None and fill_value in data - return is_masked, contains_value - + # Decide whether we are checking for fill-value collisions. dtype = cf_var.dtype - # fill_warn allows us to skip warning if packing attributes have been # specified. 
It would require much more complex operations to work out # what the values and fill_value _would_ be in such a case. @@ -2480,38 +2354,166 @@ def store(data, cf_var, fill_value): if fill_value is not None: fill_value_to_check = fill_value else: + # Retain 'fill_value == None', to show that no specific value was given. + # But set 'fill_value_to_check' to a calculated value fill_value_to_check = _thread_safe_nc.default_fillvals[ dtype.str[1:] ] + # Cast the check-value to the correct dtype. + # NOTE: In the case of 'S1' dtype (at least), the default (Python) value + # does not have a compatible type. This causes a deprecation warning at + # numpy 1.24, *and* was preventing correct fill-value checking of character + # data, since they are actually bytes (dtype 'S1'). + fill_value_to_check = np.array(fill_value_to_check, dtype=dtype) else: + # A None means we will NOT check for collisions. fill_value_to_check = None - # Store the data and check if it is masked and contains the fill value. - is_masked, contains_fill_value = store( - data, cf_var, fill_value_to_check + fill_info = _FillvalueCheckInfo( + user_value=fill_value, + check_value=fill_value_to_check, + dtype=dtype, + varname=cf_var.name, ) - if dtype.itemsize == 1 and fill_value is None: - if is_masked: - msg = ( - "CF var '{}' contains byte data with masked points, but " - "no fill_value keyword was given. As saved, these " - "points will read back as valid values. To save as " - "masked byte data, `_FillValue` needs to be explicitly " - "set. For Cube data this can be done via the 'fill_value' " - "keyword during saving, otherwise use ncedit/equivalent." + doing_delayed_save = is_lazy_data(data) + if doing_delayed_save: + # save lazy data with a delayed operation. For now, we just record the + # necessary information -- a single, complete delayed action is constructed + # later by a call to delayed_completion(). + def store(data, cf_var, fill_info): + # Create a data-writeable object that we can stream into, which + # encapsulates the file to be opened + variable to be written. + write_wrapper = _thread_safe_nc.NetCDFWriteProxy( + self.filepath, cf_var, self.file_write_lock ) - warnings.warn(msg.format(cf_var.name)) - elif contains_fill_value: + # Add to the list of delayed writes, used in delayed_completion(). + self._delayed_writes.append((data, write_wrapper, fill_info)) + # In this case, fill-value checking is done later. But return 2 dummy + # values, to be consistent with the non-streamed "store" signature. + is_masked, contains_value = False, False + return is_masked, contains_value + + else: + # Real data is always written directly, i.e. not via lazy save. + # We also check it immediately for any fill-value problems. + def store(data, cf_var, fill_info): + cf_var[:] = data + return _data_fillvalue_check(np, data, fill_info.check_value) + + # Store the data and check if it is masked and contains the fill value. + is_masked, contains_fill_value = store(data, cf_var, fill_info) + + if not doing_delayed_save: + # Issue a fill-value warning immediately, if appropriate. + _fillvalue_report( + fill_info, is_masked, contains_fill_value, warn=True + ) + + def delayed_completion(self) -> Delayed: + """ + Create and return a :class:`dask.delayed.Delayed` to perform file completion + for delayed saves. + + This contains all the delayed writes, which complete the file by filling out + the data of variables initially created empty, and also the checks for + potential fill-value collisions. 
+ When computed, it returns a list of any warnings which were generated in the + save operation. + + Returns + ------- + completion : :class:`dask.delayed.Delayed` + + Notes + ----- + The dataset *must* be closed (saver has exited its context) before the + result can be computed, otherwise computation will hang (never return). + """ + if self._delayed_writes: + # Create a single delayed da.store operation to complete the file. + sources, targets, fill_infos = zip(*self._delayed_writes) + store_op = da.store(sources, targets, compute=False, lock=False) + + # Construct a delayed fill-check operation for each (lazy) source array. + delayed_fillvalue_checks = [ + # NB with arraylib=dask.array, this routine does lazy array computation + _data_fillvalue_check(da, source, fillinfo.check_value) + for source, fillinfo in zip(sources, fill_infos) + ] + + # Return a single delayed object which completes the delayed saves and + # returns a list of any fill-value warnings. + @dask.delayed + def compute_and_return_warnings(store_op, fv_infos, fv_checks): + # Note: we don't actually *do* anything with the 'store_op' argument, + # but including it here ensures that dask will compute it (thus + # performing all the delayed saves), before calling this function. + results = [] + # Pair each fill_check result (is_masked, contains_value) with its + # fillinfo and construct a suitable Warning if needed. + for fillinfo, (is_masked, contains_value) in zip( + fv_infos, fv_checks + ): + fv_warning = _fillvalue_report( + fill_info=fillinfo, + is_masked=is_masked, + contains_fill_value=contains_value, + ) + if fv_warning is not None: + # Collect the warnings and return them. + results.append(fv_warning) + return results + + result = compute_and_return_warnings( + store_op, + fv_infos=fill_infos, + fv_checks=delayed_fillvalue_checks, + ) + + else: + # Return a delayed, which returns an empty list, for usage consistency. + @dask.delayed + def no_op(): + return [] + + result = no_op() + + return result + + def complete(self, issue_warnings=True) -> List[Warning]: + """ + Complete file by computing any delayed variable saves. + + This requires that the Saver has closed the dataset (exited its context). + + Parameters + ---------- + issue_warnings : bool, default = True + If true, issue all the resulting warnings with :func:`warnings.warn`. + + Returns + ------- + warnings : list of Warning + Any warnings that were raised while writing delayed data. + + """ + if self._dataset.isopen(): msg = ( - "CF var '{}' contains unmasked data points equal to the " - "fill-value, {}. As saved, these points will read back " - "as missing data. To save these as normal values, " - "`_FillValue` needs to be set to not equal any valid data " - "points. For Cube data this can be done via the 'fill_value' " - "keyword during saving, otherwise use ncedit/equivalent." + "Cannot call Saver.complete() until its dataset is closed, " + "i.e. the saver's context has exited." ) - warnings.warn(msg.format(cf_var.name, fill_value)) + raise ValueError(msg) + + delayed_write = self.delayed_completion() + # Complete the saves now, and handle any delayed warnings that occurred + result_warnings = delayed_write.compute() + if issue_warnings: + # Issue any delayed warnings from the compute. 
+ for delayed_warning in result_warnings: + warnings.warn(delayed_warning) + + return result_warnings + + def save( @@ -2530,6 +2532,7 @@ def save( least_significant_digit=None, packing=None, fill_value=None, + compute=True, ): """ Save cube(s) to a netCDF file, given the cube and the filename. @@ -2652,8 +2655,24 @@ def save( `:class:`iris.cube.CubeList`, or a single element, and each element of this argument will be applied to each cube separately. + * compute (bool): + When False, create the output file but don't write any lazy array content to + its variables, such as lazy cube data or aux-coord points and bounds. + + Instead return a :class:`dask.delayed.Delayed` which, when computed, will + stream all the lazy content via :func:`dask.array.store`, to complete the file. + Several such data saves can be performed in parallel, by passing a list of them + into a :func:`dask.compute` call. + + Default is ``True``, meaning complete the file immediately, and return ``None``. + + .. Note:: + when computed, the returned :class:`dask.delayed.Delayed` object returns + a list of :class:`Warning`: these are any warnings which *would* have + been issued in the save call, if ``compute`` had been ``True``. + Returns: - None. + A :class:`dask.delayed.Delayed`, if ``compute=False``, otherwise ``None``. .. note:: @@ -2752,7 +2771,9 @@ def is_valid_packspec(p): raise ValueError(msg) # Initialise Manager for saving - with Saver(filename, netcdf_format) as sman: + # N.B. pass through our own 'compute' setting : when False, we retain control + # over creation of the delayed-completion object. + with Saver(filename, netcdf_format, compute=compute) as sman: # Iterate through the cubelist. for cube, packspec, fill_value in zip(cubes, packspecs, fill_values): sman.write( @@ -2797,3 +2818,12 @@ def is_valid_packspec(p): # Add conventions attribute. sman.update_global_attributes(Conventions=conventions) + + if compute: + # No more to do, since we used Saver(compute=True). + result = None + else: + # Return a delayed completion object. + result = sman.delayed_completion() + + return result diff --git a/lib/iris/io/__init__.py b/lib/iris/io/__init__.py index a4f700cb51..7680d9bac6 100644 --- a/lib/iris/io/__init__.py +++ b/lib/iris/io/__init__.py @@ -454,7 +454,7 @@ def save(source, target, saver=None, **kwargs): # Single cube? if isinstance(source, Cube): - saver(source, target, **kwargs) + result = saver(source, target, **kwargs) # CubeList or sequence of cubes? elif isinstance(source, CubeList) or ( @@ -477,9 +477,13 @@ def save(source, target, saver=None, **kwargs): if i != 0: kwargs["append"] = True saver(cube, target, **kwargs) + + result = None # Netcdf saver. else: - saver(source, target, **kwargs) + result = saver(source, target, **kwargs) else: raise ValueError("Cannot save; non Cube found in source") + + return result
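The same mechanics at the Saver level, as a sketch, where ``cube`` stands for any cube with lazy data and the filename is a placeholder:

    from iris.fileformats.netcdf.saver import Saver

    # Create the file, leaving lazy variable data unwritten.
    with Saver("tmp.nc", "NETCDF4", compute=False) as sman:
        sman.write(cube)

    # The dataset is closed on exiting the context, so the deferred writes can
    # now be performed: complete() computes them and returns (and, by default,
    # also issues) any fill-value warnings.
    fill_warnings = sman.complete()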
diff --git a/lib/iris/tests/integration/netcdf/test__dask_locks.py b/lib/iris/tests/integration/netcdf/test__dask_locks.py new file mode 100644 index 0000000000..c41af1b356 --- /dev/null +++ b/lib/iris/tests/integration/netcdf/test__dask_locks.py @@ -0,0 +1,115 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for the :mod:`iris.fileformats.netcdf._dask_locks` package. + +Note: these integration tests replace any unit testing of this module, due to its total +dependence on Dask, and even on Dask's implementation details rather than supported +and documented API and behaviour. +So (a) it is essential to check the module's behaviour against actual Dask operation, +and (b) mock-ist testing of the implementation code in isolation would not add anything +of much value. +""" +import dask +import dask.config +import distributed +import pytest + +from iris.fileformats.netcdf._dask_locks import ( + DaskSchedulerTypeError, + dask_scheduler_is_distributed, + get_dask_array_scheduler_type, + get_worker_lock, +) + + +@pytest.fixture( + params=[ + "UnspecifiedScheduler", + "ThreadedScheduler", + "SingleThreadScheduler", + "ProcessScheduler", + "DistributedScheduler", + ] +) +def dask_scheduler(request): + # Control Dask to enable a specific scheduler type. + sched_typename = request.param + if sched_typename == "UnspecifiedScheduler": + config_name = None + elif sched_typename == "SingleThreadScheduler": + config_name = "single-threaded" + elif sched_typename == "ThreadedScheduler": + config_name = "threads" + elif sched_typename == "ProcessScheduler": + config_name = "processes" + else: + assert sched_typename == "DistributedScheduler" + config_name = "distributed" + + if config_name == "distributed": + _distributed_client = distributed.Client() + + if config_name is None: + context = None + else: + context = dask.config.set(scheduler=config_name) + context.__enter__() + + yield sched_typename + + if context: + context.__exit__(None, None, None) + + if config_name == "distributed": + _distributed_client.close() + + +def test_dask_scheduler_is_distributed(dask_scheduler): + result = dask_scheduler_is_distributed() + # Should return 'True' only with a distributed scheduler. + expected = dask_scheduler == "DistributedScheduler" + assert result == expected + + +def test_get_dask_array_scheduler_type(dask_scheduler): + result = get_dask_array_scheduler_type() + expected = { + "UnspecifiedScheduler": "threads", + "ThreadedScheduler": "threads", + "ProcessScheduler": "processes", + "SingleThreadScheduler": "single-threaded", + "DistributedScheduler": "distributed", + }[dask_scheduler] + assert result == expected + + +def test_get_worker_lock(dask_scheduler): + test_identity = "" + error = None + try: + result = get_worker_lock(test_identity) + except DaskSchedulerTypeError as err: + error = err + result = None + + if dask_scheduler == "ProcessScheduler": + assert result is None + assert isinstance(error, DaskSchedulerTypeError) + msg = 'scheduler type is "processes", which is not supported' + assert msg in error.args[0] + else: + assert error is None + assert result is not None + if dask_scheduler == "DistributedScheduler": + assert isinstance(result, distributed.Lock) + assert result.name == test_identity + else: + # low-level object doesn't have a readily available class for isinstance + assert all( + hasattr(result, att) + for att in ("acquire", "release", "locked") + ) diff --git a/lib/iris/tests/integration/netcdf/test_coord_systems.py b/lib/iris/tests/integration/netcdf/test_coord_systems.py index 8576f5ffe8..3175664b4c 100644 --- a/lib/iris/tests/integration/netcdf/test_coord_systems.py +++ b/lib/iris/tests/integration/netcdf/test_coord_systems.py @@ -18,7 +18,7 @@ from iris.cube import Cube from iris.tests import stock as stock from iris.tests.stock.netcdf import ncgen_from_cdl -from iris.tests.unit.fileformats.netcdf import test_load_cubes as tlc +from iris.tests.unit.fileformats.netcdf.loader import test_load_cubes as tlc @tests.skip_data diff --git a/lib/iris/tests/integration/netcdf/test_delayed_save.py b/lib/iris/tests/integration/netcdf/test_delayed_save.py new file
mode 100644 index 0000000000..616feb3b0e --- /dev/null +++ b/lib/iris/tests/integration/netcdf/test_delayed_save.py @@ -0,0 +1,339 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Integration tests for delayed saving. +""" +import warnings + +from cf_units import Unit +import dask.array as da +import dask.config +from dask.delayed import Delayed +import distributed +import numpy as np +import pytest + +import iris +from iris.fileformats.netcdf._thread_safe_nc import default_fillvals +from iris.fileformats.netcdf.saver import SaverFillValueWarning +import iris.tests +from iris.tests.stock import realistic_4d + + +class Test__lazy_stream_data: + @pytest.fixture(autouse=True) + def output_path(self, tmp_path): + # A temporary output netcdf-file path, **unique to each test call**. + self.temp_output_filepath = tmp_path / "tmp.nc" + yield self.temp_output_filepath + + @pytest.fixture(autouse=True, scope="module") + def all_vars_lazy(self): + # For the operation of these tests, we want to force all netcdf variables + # to load as lazy data, i.e. **don't** use real data for 'small' ones. + old_value = iris.fileformats.netcdf.loader._LAZYVAR_MIN_BYTES + iris.fileformats.netcdf.loader._LAZYVAR_MIN_BYTES = 0 + yield + iris.fileformats.netcdf.loader._LAZYVAR_MIN_BYTES = old_value + + @staticmethod + @pytest.fixture(params=[False, True], ids=["SaveImmediate", "SaveDelayed"]) + def save_is_delayed(request): + return request.param + + @staticmethod + def make_testcube( + include_lazy_content=True, + ensure_fillvalue_collision=False, + data_is_maskedbytes=False, + include_extra_coordlikes=False, + ): + cube = realistic_4d() + + def fix_array(array): + """ + Make a new, custom array to replace the provided cube/coord data. + Optionally provide default-fill-value collisions, and/or replace with lazy + content. + """ + if array is not None: + if data_is_maskedbytes: + dmin, dmax = 0, 255 + else: + dmin, dmax = array.min(), array.max() + array = np.random.uniform(dmin, dmax, size=array.shape) + + if data_is_maskedbytes: + array = array.astype("u1") + array = np.ma.masked_array(array) + # To trigger, it must also have at least one *masked point*. + array[tuple([0] * array.ndim)] = np.ma.masked + + if ensure_fillvalue_collision: + # Set point at midpoint index = default-fill-value + fill_value = default_fillvals[array.dtype.str[1:]] + inds = tuple(dim // 2 for dim in array.shape) + array[inds] = fill_value + + if include_lazy_content: + # Make the array lazy. + # Ensure we always have multiple chunks (relatively small ones). + chunks = list(array.shape) + chunks[0] = 1 + array = da.from_array(array, chunks=chunks) + + return array + + # Replace the cube data, and one aux-coord, according to the control settings. + cube.data = fix_array(cube.data) + auxcoord = cube.coord("surface_altitude") + auxcoord.points = fix_array(auxcoord.points) + + if include_extra_coordlikes: + # Also concoct + attach an ancillary variable and a cell-measure, so we can + # check that they behave the same as coordinates. 
+ ancil_dims = [0, 2] + cm_dims = [0, 3] + ancil_shape = [cube.shape[idim] for idim in ancil_dims] + cm_shape = [cube.shape[idim] for idim in cm_dims] + from iris.coords import AncillaryVariable, CellMeasure + + ancil = AncillaryVariable( + fix_array(np.zeros(ancil_shape)), long_name="sample_ancil" + ) + cube.add_ancillary_variable(ancil, ancil_dims) + cm = CellMeasure( + fix_array(np.zeros(cm_shape)), long_name="sample_cm" + ) + cube.add_cell_measure(cm, cm_dims) + return cube + + def test_realfile_loadsave_equivalence(self, save_is_delayed, output_path): + input_filepath = iris.tests.get_data_path( + ["NetCDF", "global", "xyz_t", "GEMS_CO2_Apr2006.nc"] + ) + original_cubes = iris.load(input_filepath) + + # Preempt some standard changes that an iris save will impose. + for cube in original_cubes: + if cube.units == Unit("-"): + # replace 'unknown unit' with 'no unit'. + cube.units = Unit("?") + # Fix conventions attribute to what iris.save outputs. + cube.attributes["Conventions"] = "CF-1.7" + + original_cubes = sorted(original_cubes, key=lambda cube: cube.name()) + result = iris.save( + original_cubes, output_path, compute=not save_is_delayed + ) + if save_is_delayed: + # In this case, must also "complete" the save. + result.compute() + reloaded_cubes = iris.load(output_path) + reloaded_cubes = sorted(reloaded_cubes, key=lambda cube: cube.name()) + assert reloaded_cubes == original_cubes + # NOTE: it might be nicer to use assertCDL, but unfortunately importing + # unittest.TestCase seems to lose us the ability to use fixtures. + + @classmethod + @pytest.fixture( + params=[ + "ThreadedScheduler", + "DistributedScheduler", + "SingleThreadScheduler", + ] + ) + def scheduler_type(cls, request): + sched_typename = request.param + if sched_typename == "ThreadedScheduler": + config_name = "threads" + elif sched_typename == "SingleThreadScheduler": + config_name = "single-threaded" + else: + assert sched_typename == "DistributedScheduler" + config_name = "distributed" + + if config_name == "distributed": + _distributed_client = distributed.Client() + + with dask.config.set(scheduler=config_name): + yield sched_typename + + if config_name == "distributed": + _distributed_client.close() + + def test_scheduler_types( + self, output_path, scheduler_type, save_is_delayed + ): + # Check operation works and behaves the same with different schedulers, + # especially including distributed. + + # Just check that the dask scheduler is setup as 'expected'. + if scheduler_type == "ThreadedScheduler": + expected_dask_scheduler = "threads" + elif scheduler_type == "SingleThreadScheduler": + expected_dask_scheduler = "single-threaded" + else: + assert scheduler_type == "DistributedScheduler" + expected_dask_scheduler = "distributed" + + assert dask.config.get("scheduler") == expected_dask_scheduler + + # Use a testcase that produces delayed warnings (and check those too). 
+ cube = self.make_testcube( + include_lazy_content=True, ensure_fillvalue_collision=True + ) + with warnings.catch_warnings(record=True) as logged_warnings: + result = iris.save(cube, output_path, compute=not save_is_delayed) + + if not save_is_delayed: + assert result is None + assert len(logged_warnings) == 2 + issued_warnings = [log.message for log in logged_warnings] + else: + assert result is not None + assert len(logged_warnings) == 0 + warnings.simplefilter("error") + issued_warnings = result.compute() + + assert len(issued_warnings) == 2 + expected_msg = "contains unmasked data points equal to the fill-value" + assert all( + expected_msg in warning.args[0] for warning in issued_warnings + ) + + def test_time_of_writing( + self, save_is_delayed, output_path, scheduler_type + ): + # Check when lazy data is *actually* written : + # - in 'immediate' mode, on initial file write + # - in 'delayed' mode, only when the delayed-write is computed. + original_cube = self.make_testcube(include_extra_coordlikes=True) + assert original_cube.has_lazy_data() + assert original_cube.coord("surface_altitude").has_lazy_points() + assert original_cube.cell_measure("sample_cm").has_lazy_data() + assert original_cube.ancillary_variable("sample_ancil").has_lazy_data() + + result = iris.save( + original_cube, + output_path, + compute=not save_is_delayed, + ) + assert save_is_delayed == (result is not None) + + # Read back : NOTE avoid loading the separate surface-altitude cube. + readback_cube = iris.load_cube( + output_path, "air_potential_temperature" + ) + # Check the components to be tested *are* lazy. See: self.all_vars_lazy(). + assert readback_cube.has_lazy_data() + assert readback_cube.coord("surface_altitude").has_lazy_points() + assert readback_cube.cell_measure("sample_cm").has_lazy_data() + assert readback_cube.ancillary_variable("sample_ancil").has_lazy_data() + + # If 'delayed', the lazy content should all be masked, otherwise none of it. + def getmask(cube_or_coord): + cube_or_coord = ( + cube_or_coord.copy() + ) # avoid realising the original + if hasattr(cube_or_coord, "points"): + data = cube_or_coord.points + else: + data = cube_or_coord.data + return np.ma.getmaskarray(data) + + test_components = [ + readback_cube, + readback_cube.coord("surface_altitude"), + readback_cube.ancillary_variable("sample_ancil"), + readback_cube.cell_measure("sample_cm"), + ] + + def fetch_masks(): + data_mask, coord_mask, ancil_mask, cm_mask = [ + getmask(data) for data in test_components + ] + return data_mask, coord_mask, ancil_mask, cm_mask + + data_mask, coord_mask, ancil_mask, cm_mask = fetch_masks() + if save_is_delayed: + assert np.all(data_mask) + assert np.all(coord_mask) + assert np.all(ancil_mask) + assert np.all(cm_mask) + else: + assert np.all(~data_mask) + assert np.all(~coord_mask) + assert np.all(~ancil_mask) + assert np.all(~cm_mask) + + if save_is_delayed: + # Complete the write. + result.compute() + + # Re-fetch the lazy arrays. The data should now **not be masked**. + data_mask, coord_mask, ancil_mask, cm_mask = fetch_masks() + # All written now ? + assert np.all(~data_mask) + assert np.all(~coord_mask) + assert np.all(~ancil_mask) + assert np.all(~cm_mask) + + @pytest.mark.parametrize( + "warning_type", ["WarnMaskedBytes", "WarnFillvalueCollision"] + ) + def test_fill_warnings(self, warning_type, output_path, save_is_delayed): + # Test collision warnings for data with fill-value collisions, or for masked + # byte data. 
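+        # Both parametrised cases should produce a SaverFillValueWarning :
+        #   * "WarnFillvalueCollision" : unmasked points equal to the fill-value
+        #   * "WarnMaskedBytes" : byte data containing masked points
+        # The matching message fragments are selected just below.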
+        if warning_type == "WarnFillvalueCollision":
+            make_fv_collide = True
+            make_maskedbytes = False
+            expected_msg = (
+                "contains unmasked data points equal to the fill-value"
+            )
+        else:
+            assert warning_type == "WarnMaskedBytes"
+            make_fv_collide = False
+            make_maskedbytes = True
+            expected_msg = "contains byte data with masked points"
+
+        cube = self.make_testcube(
+            include_lazy_content=True,
+            ensure_fillvalue_collision=make_fv_collide,
+            data_is_maskedbytes=make_maskedbytes,
+        )
+        with warnings.catch_warnings(record=True) as logged_warnings:
+            result = iris.save(cube, output_path, compute=not save_is_delayed)
+
+        result_warnings = [
+            log.message
+            for log in logged_warnings
+            if isinstance(log.message, SaverFillValueWarning)
+        ]
+
+        if save_is_delayed:
+            # Should have had *no* fill-warnings in the initial save.
+            assert len(result_warnings) == 0
+            # Complete the operation now.
+            with warnings.catch_warnings():
+                # NOTE: warnings should *not* be issued here, instead they are returned.
+                warnings.simplefilter("error", category=SaverFillValueWarning)
+                result_warnings = result.compute()
+
+        # Either way, we should now have 2 similar warnings.
+        assert len(result_warnings) == 2
+        assert all(
+            expected_msg in warning.args[0] for warning in result_warnings
+        )
+
+    def test_no_delayed_writes(self, output_path):
+        # Just check that a delayed save returns a usable 'delayed' object, even
+        # when there is no lazy content, i.e. no delayed writes to perform.
+        cube = self.make_testcube(include_lazy_content=False)
+        warnings.simplefilter("error")
+        result = iris.save(cube, output_path, compute=False)
+        assert isinstance(result, Delayed)
+        assert result.compute() == []
diff --git a/lib/iris/tests/integration/netcdf/test_thread_safety.py b/lib/iris/tests/integration/netcdf/test_thread_safety.py
index 280e0f8418..5ed32d0671 100644
--- a/lib/iris/tests/integration/netcdf/test_thread_safety.py
+++ b/lib/iris/tests/integration/netcdf/test_thread_safety.py
@@ -98,6 +98,21 @@ def test_stream_multisource(get_cubes_from_netcdf, save_common):
     save_common(final_cube)  # Any problems are expected here.
 
 
+def test_stream_multisource__manychunks(
+    tiny_chunks, get_cubes_from_netcdf, save_common
+):
+    """
+    As above, but with many more small chunks.
+
+    This previously showed additional, sporadic problems which only emerge
+    (statistically) with larger numbers of chunks.
+
+    """
+    cubes = get_cubes_from_netcdf
+    final_cube = sum(cubes)
+    save_common(final_cube)  # Any problems are expected here.
+
+
 def test_comparison(get_cubes_from_netcdf):
     """
     Comparing multiple loaded files forces co-realisation.
diff --git a/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/endian.cdl b/lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/endian.cdl similarity index 100% rename from lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/endian.cdl rename to lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/endian.cdl diff --git a/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/mercator.cdl b/lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/mercator.cdl similarity index 100% rename from lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/mercator.cdl rename to lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/mercator.cdl diff --git a/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/mercator_no_ellipsoid.cdl b/lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/mercator_no_ellipsoid.cdl similarity index 100% rename from lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/mercator_no_ellipsoid.cdl rename to lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/mercator_no_ellipsoid.cdl diff --git a/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/stereographic.cdl b/lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/stereographic.cdl similarity index 100% rename from lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/stereographic.cdl rename to lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/stereographic.cdl diff --git a/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/stereographic_no_ellipsoid.cdl b/lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/stereographic_no_ellipsoid.cdl similarity index 100% rename from lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/stereographic_no_ellipsoid.cdl rename to lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/stereographic_no_ellipsoid.cdl diff --git a/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/stereographic_scale_factor.cdl b/lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/stereographic_scale_factor.cdl similarity index 100% rename from lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/stereographic_scale_factor.cdl rename to lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/stereographic_scale_factor.cdl diff --git a/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/transverse_mercator.cdl b/lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/transverse_mercator.cdl similarity index 100% rename from lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/transverse_mercator.cdl rename to lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/transverse_mercator.cdl diff --git a/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/transverse_mercator_no_ellipsoid.cdl b/lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/transverse_mercator_no_ellipsoid.cdl similarity index 100% rename from lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/transverse_mercator_no_ellipsoid.cdl rename to lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/transverse_mercator_no_ellipsoid.cdl diff --git a/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/with_climatology.cdl b/lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/with_climatology.cdl similarity index 100% rename from lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/with_climatology.cdl rename to 
lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver/write/with_climatology.cdl diff --git a/lib/iris/tests/results/unit/fileformats/netcdf/Saver__ugrid/TestSaveUgrid__cube/basic_mesh.cdl b/lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver__ugrid/TestSaveUgrid__cube/basic_mesh.cdl similarity index 100% rename from lib/iris/tests/results/unit/fileformats/netcdf/Saver__ugrid/TestSaveUgrid__cube/basic_mesh.cdl rename to lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver__ugrid/TestSaveUgrid__cube/basic_mesh.cdl diff --git a/lib/iris/tests/test_coding_standards.py b/lib/iris/tests/test_coding_standards.py index e3786f5cd5..bef921b386 100644 --- a/lib/iris/tests/test_coding_standards.py +++ b/lib/iris/tests/test_coding_standards.py @@ -71,15 +71,15 @@ def test_python_versions(): This test is designed to fail whenever Iris' supported Python versions are updated, insisting that versions are updated EVERYWHERE in-sync. """ - latest_supported = "3.10" - all_supported = ["3.8", "3.9", latest_supported] + latest_supported = "3.11" + all_supported = ["3.9", "3.10", latest_supported] root_dir = Path(__file__).parents[3] workflows_dir = root_dir / ".github" / "workflows" benchmarks_dir = root_dir / "benchmarks" # Places that are checked: - setup_cfg_file = root_dir / "setup.cfg" + pyproject_toml_file = root_dir / "pyproject.toml" requirements_dir = root_dir / "requirements" nox_file = root_dir / "noxfile.py" ci_wheels_file = workflows_dir / "ci-wheels.yml" @@ -89,10 +89,10 @@ def test_python_versions(): text_searches: List[Tuple[Path, str]] = [ ( - setup_cfg_file, + pyproject_toml_file, "\n ".join( [ - "Programming Language :: Python :: " + ver + f'"Programming Language :: Python :: {ver}",' for ver in all_supported ] ), diff --git a/lib/iris/tests/unit/common/resolve/test_Resolve.py b/lib/iris/tests/unit/common/resolve/test_Resolve.py index 840f65db01..db1759c5fc 100644 --- a/lib/iris/tests/unit/common/resolve/test_Resolve.py +++ b/lib/iris/tests/unit/common/resolve/test_Resolve.py @@ -825,20 +825,6 @@ def setUp(self): ), ] - def _copy(self, items): - # Due to a bug in python 3.6.x, performing a deepcopy of a mock.sentinel - # will yield an object that is not equivalent to its parent, so this - # is a work-around until we drop support for python 3.6.x. 
- import sys - - version = sys.version_info - major, minor = version.major, version.minor - result = deepcopy(items) - if major == 3 and minor <= 6: - for i, item in enumerate(items): - result[i] = result[i]._replace(metadata=item.metadata) - return result - def test_no_mapping(self): result = Resolve._aux_mapping(self.src_coverage, self.tgt_coverage) self.assertEqual(dict(), result) @@ -852,7 +838,7 @@ def test_full_mapping(self): def test_transpose_mapping(self): self.src_coverage.common_items_aux.extend(self.items) - items = self._copy(self.items) + items = deepcopy(self.items) items[0].dims[0] = 2 items[2].dims[0] = 0 self.tgt_coverage.common_items_aux.extend(items) @@ -863,7 +849,7 @@ def test_transpose_mapping(self): def test_partial_mapping__transposed(self): _ = self.items.pop(1) self.src_coverage.common_items_aux.extend(self.items) - items = self._copy(self.items) + items = deepcopy(self.items) items[0].dims[0] = 2 items[1].dims[0] = 0 self.tgt_coverage.common_items_aux.extend(items) @@ -872,7 +858,7 @@ def test_partial_mapping__transposed(self): self.assertEqual(expected, result) def test_mapping__match_multiple_src_metadata(self): - items = self._copy(self.items) + items = deepcopy(self.items) _ = self.items.pop(1) self.src_coverage.common_items_aux.extend(self.items) items[1] = items[0] @@ -882,7 +868,7 @@ def test_mapping__match_multiple_src_metadata(self): self.assertEqual(expected, result) def test_mapping__skip_match_multiple_src_metadata(self): - items = self._copy(self.items) + items = deepcopy(self.items) _ = self.items.pop(1) self.tgt_coverage.common_items_aux.extend(self.items) items[1] = items[0]._replace(dims=[1]) @@ -892,7 +878,7 @@ def test_mapping__skip_match_multiple_src_metadata(self): self.assertEqual(expected, result) def test_mapping__skip_different_rank(self): - items = self._copy(self.items) + items = deepcopy(self.items) self.src_coverage.common_items_aux.extend(self.items) items[2] = items[2]._replace(dims=[1, 2]) self.tgt_coverage.common_items_aux.extend(items) @@ -902,7 +888,7 @@ def test_mapping__skip_different_rank(self): def test_bad_metadata_mapping(self): self.src_coverage.common_items_aux.extend(self.items) - items = self._copy(self.items) + items = deepcopy(self.items) items[0] = items[0]._replace(metadata=sentinel.bad) self.tgt_coverage.common_items_aux.extend(items) emsg = "Failed to map common aux coordinate metadata" diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Connectivity.py b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Connectivity.py index 9a81c79d44..f343f4be24 100644 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Connectivity.py +++ b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Connectivity.py @@ -9,10 +9,12 @@ # importing anything else. 
import iris.tests as tests # isort:skip +from platform import python_version from xml.dom import minidom import numpy as np from numpy import ma +from pkg_resources import parse_version from iris._lazy_data import as_lazy_data, is_lazy_data from iris.experimental.ugrid.mesh import Connectivity @@ -61,10 +63,14 @@ def test_indices(self): def test_read_only(self): attributes = ("indices", "cf_role", "start_index", "location_axis") + if parse_version(python_version()) >= parse_version("3.11"): + msg = "object has no setter" + else: + msg = "can't set attribute" for attribute in attributes: self.assertRaisesRegex( AttributeError, - "can't set attribute", + msg, setattr, self.connectivity, attribute, diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py b/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py index dda0b9a5a9..cb90c176b6 100644 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py +++ b/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py @@ -11,11 +11,13 @@ # importing anything else. import iris.tests as tests # isort:skip +from platform import python_version import re import unittest.mock as mock import dask.array as da import numpy as np +from pkg_resources import parse_version import pytest from iris._lazy_data import as_lazy_data, is_lazy_data @@ -77,8 +79,12 @@ def setUp(self): def test_fixed_metadata(self): # Check that you cannot set any of these on an existing MeshCoord. meshcoord = self.meshcoord + if parse_version(python_version()) >= parse_version("3.11"): + msg = "object has no setter" + else: + msg = "can't set attribute" for prop in ("mesh", "location", "axis"): - with self.assertRaisesRegex(AttributeError, "can't set"): + with self.assertRaisesRegex(AttributeError, msg): setattr(meshcoord, prop, mock.sentinel.odd) def test_coord_system(self): diff --git a/lib/iris/tests/unit/fileformats/netcdf/test_parse_cell_methods.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_parse_cell_methods.py similarity index 99% rename from lib/iris/tests/unit/fileformats/netcdf/test_parse_cell_methods.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_parse_cell_methods.py index bbde2d0a2d..729a2d8b14 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/test_parse_cell_methods.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_parse_cell_methods.py @@ -15,7 +15,7 @@ from unittest import mock from iris.coords import CellMethod -from iris.fileformats.netcdf import parse_cell_methods +from iris.fileformats._nc_load_rules.helpers import parse_cell_methods class Test(tests.IrisTest): diff --git a/lib/iris/tests/unit/fileformats/netcdf/test_load_cubes.py b/lib/iris/tests/unit/fileformats/netcdf/loader/test_load_cubes.py similarity index 99% rename from lib/iris/tests/unit/fileformats/netcdf/test_load_cubes.py rename to lib/iris/tests/unit/fileformats/netcdf/loader/test_load_cubes.py index 39992d03a0..1a2ef1d29d 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/test_load_cubes.py +++ b/lib/iris/tests/unit/fileformats/netcdf/loader/test_load_cubes.py @@ -25,7 +25,8 @@ from iris.coords import AncillaryVariable, CellMeasure from iris.experimental.ugrid.load import PARSE_UGRID_ON_LOAD from iris.experimental.ugrid.mesh import MeshCoord -from iris.fileformats.netcdf import load_cubes, logger +from iris.fileformats.netcdf import logger +from iris.fileformats.netcdf.loader import load_cubes from iris.tests.stock.netcdf import ncgen_from_cdl diff --git 
a/lib/iris/tests/unit/fileformats/netcdf/test_Saver.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py similarity index 97% rename from lib/iris/tests/unit/fileformats/netcdf/test_Saver.py rename to lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py index 93a1537ea4..12af318c01 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/test_Saver.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py @@ -3,7 +3,7 @@ # This file is part of Iris and is released under the LGPL license. # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. -"""Unit tests for the `iris.fileformats.netcdf.Saver` class.""" +"""Unit tests for the :class:`iris.fileformats.netcdf.Saver` class.""" # Import iris.tests first so that some things can be initialised before # importing anything else. @@ -205,7 +205,13 @@ def test_zlib(self): api = self.patch("iris.fileformats.netcdf.saver._thread_safe_nc") # Define mocked default fill values to prevent deprecation warning (#4374). api.default_fillvals = collections.defaultdict(lambda: -99.0) - with Saver("/dummy/path", "NETCDF4") as saver: + # Mock the apparent dtype of mocked variables, to avoid an error. + ref = api.DatasetWrapper.return_value + ref = ref.createVariable.return_value + ref.dtype = np.dtype(np.float32) + # NOTE: use compute=False as otherwise it gets in a pickle trying to construct + # a fill-value report on a non-compliant variable in a non-file (!) + with Saver("/dummy/path", "NETCDF4", compute=False) as saver: saver.write(cube, zlib=True) dataset = api.DatasetWrapper.return_value create_var_call = mock.call( @@ -646,8 +652,16 @@ def setUp(self): self.container = mock.Mock(name="container", attributes={}) self.data_dtype = np.dtype("int32") + # We need to create mock datasets which look like they are closed. + dataset_class = mock.Mock( + return_value=mock.Mock( + # Mock dataset : the isopen() call should return 0. 
+                isopen=mock.Mock(return_value=0)
+            )
+        )
         patch = mock.patch(
-            "iris.fileformats.netcdf._thread_safe_nc.DatasetWrapper"
+            "iris.fileformats.netcdf._thread_safe_nc.DatasetWrapper",
+            dataset_class,
         )
         _ = patch.start()
         self.addCleanup(patch.stop)
@@ -662,7 +676,7 @@ def assertAttribute(self, value):
 
     def check_attribute_compliance_call(self, value):
         self.set_attribute(value)
-        with Saver(mock.Mock(), "NETCDF4") as saver:
+        with Saver("nonexistent test file", "NETCDF4") as saver:
             saver.check_attribute_compliance(self.container, self.data_dtype)
@@ -771,7 +785,7 @@ def test_valid_range_and_valid_min_valid_max_provided(self):
         self.container.attributes["valid_range"] = [1, 2]
         self.container.attributes["valid_min"] = [1]
         msg = 'Both "valid_range" and "valid_min"'
-        with Saver(mock.Mock(), "NETCDF4") as saver:
+        with Saver("nonexistent test file", "NETCDF4") as saver:
             with self.assertRaisesRegex(ValueError, msg):
                 saver.check_attribute_compliance(
                     self.container, self.data_dtype
diff --git a/lib/iris/tests/unit/fileformats/netcdf/test_Saver__lazy.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy.py
similarity index 98%
rename from lib/iris/tests/unit/fileformats/netcdf/test_Saver__lazy.py
rename to lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy.py
index 53e1f9a652..e1211dc276 100644
--- a/lib/iris/tests/unit/fileformats/netcdf/test_Saver__lazy.py
+++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy.py
@@ -14,7 +14,7 @@
 from iris.coords import AuxCoord
 from iris.fileformats.netcdf import Saver
 from iris.tests import stock
-from iris.tests.unit.fileformats.netcdf import test_Saver
+from iris.tests.unit.fileformats.netcdf.saver import test_Saver
 
 
 class LazyMixin(tests.IrisTest):
diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py
new file mode 100644
index 0000000000..6fa40a14fe
--- /dev/null
+++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py
@@ -0,0 +1,132 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.
+"""
+Unit tests for :meth:`iris.fileformats.netcdf.saver.Saver._lazy_stream_data`.
+
+The behaviour of this method is complex, and this only tests certain aspects.
+The testing of the dask delayed operations and file writing is instead covered
+by integration tests.
+
+"""
+from unittest import mock
+import warnings
+
+import dask.array as da
+import numpy as np
+import pytest
+
+import iris.fileformats.netcdf._thread_safe_nc as nc_threadsafe
+from iris.fileformats.netcdf.saver import Saver, _FillvalueCheckInfo
+
+
+class Test__lazy_stream_data:
+    @staticmethod
+    @pytest.fixture(autouse=True)
+    def saver_patch():
+        # Install patches, so we can create a Saver without opening a real output file.
+        # Mock just enough of Dataset behaviour to allow a 'Saver.complete()' call.
+        mock_dataset = mock.MagicMock()
+        mock_dataset_class = mock.Mock(return_value=mock_dataset)
+        # Mock the wrapper within the netcdf saver.
+        target1 = (
+            "iris.fileformats.netcdf.saver._thread_safe_nc.DatasetWrapper"
+        )
+        # Mock the real netCDF4.Dataset within the threadsafe-nc module, as this is
+        # used by NetCDFDataProxy and NetCDFWriteProxy.
+ target2 = "iris.fileformats.netcdf._thread_safe_nc.netCDF4.Dataset" + with mock.patch(target1, mock_dataset_class): + with mock.patch(target2, mock_dataset_class): + yield + + # A fixture to parametrise tests over delayed and non-delayed Saver type. + # NOTE: this only affects the saver context-exit, which we do not test here, so + # should make ***no difference to any of these tests***. + @staticmethod + @pytest.fixture(params=[False, True], ids=["nocompute", "compute"]) + def compute(request) -> bool: + yield request.param + + # A fixture to parametrise tests over real and lazy-type data. + @staticmethod + @pytest.fixture(params=[False, True], ids=["realdata", "lazydata"]) + def data_is_lazy(request) -> bool: + yield request.param + + @staticmethod + def saver(compute) -> Saver: + # Create a test Saver object + return Saver( + filename="", netcdf_format="NETCDF4", compute=compute + ) + + @staticmethod + def mock_var(shape): + # Create a test cf_var object + return mock.MagicMock(shape=tuple(shape), dtype=np.dtype(np.float32)) + + def test_data_save(self, compute, data_is_lazy): + """Real data is transferred immediately, lazy data creates a delayed write.""" + saver = self.saver(compute=compute) + data = np.arange(5.0) + if data_is_lazy: + data = da.from_array(data) + fill_value = -1.0 # not occurring in data + cf_var = self.mock_var(data.shape) + saver._lazy_stream_data( + data=data, fill_value=fill_value, fill_warn=True, cf_var=cf_var + ) + assert cf_var.__setitem__.call_count == (0 if data_is_lazy else 1) + assert len(saver._delayed_writes) == (1 if data_is_lazy else 0) + if data_is_lazy: + result_data, result_writer, fill_info = saver._delayed_writes[0] + assert result_data is data + assert isinstance(result_writer, nc_threadsafe.NetCDFWriteProxy) + assert isinstance(fill_info, _FillvalueCheckInfo) + else: + cf_var.__setitem__.assert_called_once_with(slice(None), data) + + def test_warnings(self, compute, data_is_lazy): + """ + For real data, fill-value warnings are issued immediately. For lazy data, + warnings are returned from computing a delayed completion. + + N.B. The 'compute' keyword has **no effect** on this : It only causes delayed + writes to be automatically actioned on exiting a Saver context. + Streaming *always* creates delayed writes for lazy data, since this is required + to make dask distributed operation work. + """ + saver = self.saver(compute=compute) + data = np.arange(5.0) + if data_is_lazy: + data = da.from_array(data) + fill_value = 2.0 # IS occurring in data + cf_var = self.mock_var(data.shape) + + # Do initial save. When compute=True, this issues warnings + with warnings.catch_warnings(record=True) as logged_warnings: + saver._lazy_stream_data( + data=data, fill_value=fill_value, fill_warn=True, cf_var=cf_var + ) + + issued_warnings = [log.message for log in logged_warnings] + + n_expected_warnings = 0 if data_is_lazy else 1 + assert len(issued_warnings) == n_expected_warnings + + # Complete the write : any delayed warnings should be *returned*. + # NOTE: + # (1) this still works when there are no delayed writes. + # (2) the Saver 'compute' keyword makes no difference to this usage, as it + # *only* affects what happens when the saver context exits. + result2 = saver.delayed_completion().compute() + issued_warnings += list(result2) + + # Either way, a suitable warning should have been produced. 
+ assert len(issued_warnings) == 1 + warning = issued_warnings[0] + msg = "contains unmasked data points equal to the fill-value, 2.0" + assert isinstance(warning, UserWarning) + assert msg in warning.args[0] diff --git a/lib/iris/tests/unit/fileformats/netcdf/test_Saver__ugrid.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py similarity index 100% rename from lib/iris/tests/unit/fileformats/netcdf/test_Saver__ugrid.py rename to lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test__FillValueMaskCheckAndStoreTarget.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test__data_fillvalue_check.py similarity index 69% rename from lib/iris/tests/unit/fileformats/netcdf/saver/test__FillValueMaskCheckAndStoreTarget.py rename to lib/iris/tests/unit/fileformats/netcdf/saver/test__data_fillvalue_check.py index 77209efafc..95a518e4e5 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test__FillValueMaskCheckAndStoreTarget.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test__data_fillvalue_check.py @@ -4,39 +4,48 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """ -Unit tests for the `iris.fileformats.netcdf._FillValueMaskCheckAndStoreTarget` -class. +Unit tests for :func:`iris.fileformats.netcdf.saver._data_fillvalue_check`. + +Note: now runs all testcases on both real + lazy data. """ # Import iris.tests first so that some things can be initialised before # importing anything else. import iris.tests as tests # isort:skip +import collections -from unittest import mock - +import dask.array as da import numpy as np -from iris.fileformats.netcdf.saver import _FillValueMaskCheckAndStoreTarget +from iris.fileformats.netcdf.saver import _data_fillvalue_check -class Test__FillValueMaskCheckAndStoreTarget(tests.IrisTest): +class Check__fillvalueandmasking: def _call_target(self, fill_value, keys, vals): - inner_target = mock.MagicMock() - target = _FillValueMaskCheckAndStoreTarget( - inner_target, fill_value=fill_value - ) + data = np.zeros(20, dtype=np.float32) + if any(np.ma.isMaskedArray(val) for val in vals): + # N.B. array is masked if "vals" is, but has no masked points initially. + data = np.ma.masked_array(data, mask=np.zeros_like(data)) for key, val in zip(keys, vals): - target[key] = val + data[key] = val - calls = [mock.call(key, val) for key, val in zip(keys, vals)] - inner_target.__setitem__.assert_has_calls(calls) + if hasattr(self.arraylib, "compute"): + data = da.from_array(data, chunks=-1) + + results = _data_fillvalue_check( + arraylib=self.arraylib, data=data, check_value=fill_value + ) - return target + if hasattr(results, "compute"): + results = results.compute() - def test___setitem__(self): - self._call_target(None, [1], [2]) + # Return a named tuple, for named-property access to the 2 result values. 
+ result = collections.namedtuple("_", ["is_masked", "contains_value"])( + *results + ) + return result def test_no_fill_value_not_masked(self): # Test when the fill value is not present and the data is not masked @@ -90,3 +99,11 @@ def test_contains_masked_fill_value(self): target = self._call_target(fill_value, keys, vals) self.assertFalse(target.contains_value) self.assertTrue(target.is_masked) + + +class Test__real(Check__fillvalueandmasking, tests.IrisTest): + arraylib = np + + +class Test__lazy(Check__fillvalueandmasking, tests.IrisTest): + arraylib = da diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test__fillvalue_report.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test__fillvalue_report.py new file mode 100644 index 0000000000..b2e4b63e3a --- /dev/null +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test__fillvalue_report.py @@ -0,0 +1,119 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for :func:`iris.fileformats.netcdf.saver._fillvalue_report`. +""" +import warnings + +import numpy as np +import pytest + +from iris.fileformats.netcdf._thread_safe_nc import default_fillvals +from iris.fileformats.netcdf.saver import ( + SaverFillValueWarning, + _fillvalue_report, + _FillvalueCheckInfo, +) + + +class Test__fillvaluereport: + @pytest.mark.parametrize( + "is_bytes", [True, False], ids=["ByteData", "NonbyteData"] + ) + @pytest.mark.parametrize( + "is_masked", [True, False], ids=["MaskedData", "NonmaskedData"] + ) + @pytest.mark.parametrize( + "contains_fv", [True, False], ids=["FillInData", "NofillInData"] + ) + @pytest.mark.parametrize( + "given_user_fv", [True, False], ids=["WithUserfill", "NoUserfill"] + ) + def test_fillvalue_checking( + self, is_bytes, is_masked, contains_fv, given_user_fv + ): + dtype_code = "u1" if is_bytes else "f4" + dtype = np.dtype(dtype_code) + if given_user_fv: + user_fill = 123 if is_bytes else 1.234 + check_value = user_fill + else: + user_fill = None + check_value = default_fillvals[dtype_code] + + fill_info = _FillvalueCheckInfo( + user_value=user_fill, + check_value=check_value, + dtype=dtype, + varname="", + ) + + # Work out expected action, according to intended logic. + if is_bytes and is_masked and not given_user_fv: + msg_fragment = "'' contains byte data with masked points" + elif contains_fv: + msg_fragment = "'' contains unmasked data points equal to the fill-value" + else: + msg_fragment = None + + # Trial the action + result = _fillvalue_report( + fill_info, + is_masked=is_masked, + contains_fill_value=contains_fv, + warn=False, + ) + + # Check the result + if msg_fragment is None: + assert result is None + else: + assert isinstance(result, Warning) + assert msg_fragment in result.args[0] + + @pytest.mark.parametrize( + "has_collision", + [True, False], + ids=["WithFvCollision", "NoFvCollision"], + ) + def test_warn(self, has_collision): + fill_info = _FillvalueCheckInfo( + user_value=1.23, + check_value=1.23, + dtype=np.float32, + varname="", + ) + + # Check results + if has_collision: + # Check that we get the expected warning + expected_msg = "'' contains unmasked data points equal to the fill-value" + # Enter a warnings context that checks for the error. + warning_context = pytest.warns( + SaverFillValueWarning, match=expected_msg + ) + warning_context.__enter__() + else: + # Check that we get NO warning of the expected type. 
+ warnings.filterwarnings("error", category=SaverFillValueWarning) + + # Do call: it should raise AND return a warning, ONLY IF there was a collision. + result = _fillvalue_report( + fill_info, + is_masked=True, + contains_fill_value=has_collision, + warn=True, + ) + + # Check result + if has_collision: + # Fail if no warning was raised .. + warning_context.__exit__(None, None, None) + # .. or result does not have the expected message content + assert expected_msg in result.args[0] + else: + # Fail if any warning result was produced. + assert result is None diff --git a/lib/iris/tests/unit/fileformats/netcdf/test_save.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_save.py similarity index 78% rename from lib/iris/tests/unit/fileformats/netcdf/test_save.py rename to lib/iris/tests/unit/fileformats/netcdf/saver/test_save.py index b274a8be0d..68049b57fc 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/test_save.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_save.py @@ -3,8 +3,7 @@ # This file is part of Iris and is released under the LGPL license. # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. -"""Unit tests for the `iris.fileformats.netcdf.save` function.""" - +"""Unit tests for the :func:`iris.fileformats.netcdf.save` function.""" # Import iris.tests first so that some things can be initialised before # importing anything else. import iris.tests as tests # isort:skip @@ -15,6 +14,7 @@ from unittest import mock import numpy as np +import pytest import iris from iris.coords import AuxCoord, DimCoord @@ -22,6 +22,7 @@ from iris.experimental.ugrid import PARSE_UGRID_ON_LOAD from iris.fileformats.netcdf import ( CF_CONVENTIONS_VERSION, + Saver, _thread_safe_nc, save, ) @@ -359,5 +360,104 @@ def test_connectivity_dim_varname_collision(self): self._check_save_and_reload([cube_1, cube_2]) +class Test_compute_usage: + """ + Test the operation of the save function 'compute' keyword. + + In actual use, this keyword controls 'delayed saving'. That is tested elsewhere, + in testing the 'Saver' class itself. + """ + + # A fixture to mock out Saver object creation in a 'save' call. + @staticmethod + @pytest.fixture + def mock_saver_creation(): + # A mock for a Saver object. + mock_saver = mock.MagicMock(spec=Saver) + # make an __enter__ call return the object itself (as the real Saver does). + mock_saver.__enter__ = mock.Mock(return_value=mock_saver) + # A mock for the Saver() constructor call. + mock_new_saver_call = mock.Mock(return_value=mock_saver) + + # Replace the whole Saver class with a simple function, which thereby emulates + # the constructor call. This avoids complications due to the fact that Mock + # patching does not work in the usual way for __init__ and __new__ methods. + def mock_saver_class_create(*args, **kwargs): + return mock_new_saver_call(*args, **kwargs) + + # Patch the Saver() creation to return our mock Saver object. + with mock.patch( + "iris.fileformats.netcdf.saver.Saver", mock_saver_class_create + ): + # Return mocks for both constructor call, and Saver object. + yield mock_new_saver_call, mock_saver + + # A fixture to provide some mock args for 'Saver' creation. 
+ @staticmethod + @pytest.fixture + def mock_saver_args(): + from collections import namedtuple + + # A special object for the cube, since cube.attributes must be indexable + mock_cube = mock.MagicMock() + args = namedtuple( + "saver_args", ["cube", "filename", "format", "compute"] + )( + cube=mock_cube, + filename=mock.sentinel.filepath, + format=mock.sentinel.netcdf4, + compute=mock.sentinel.compute, + ) + return args + + def test_saver_creation(self, mock_saver_creation, mock_saver_args): + # Check that 'save' creates a Saver, passing the 'compute' keyword. + mock_saver_new, mock_saver = mock_saver_creation + args = mock_saver_args + save( + cube=args.cube, + filename=args.filename, + netcdf_format=args.format, + compute=args.compute, + ) + # Check the Saver create call it made, in particular that the compute arg is + # passed in. + mock_saver_new.assert_called_once_with( + args.filename, args.format, compute=args.compute + ) + + def test_compute_true(self, mock_saver_creation, mock_saver_args): + # Check operation when compute=True. + mock_saver_new, mock_saver = mock_saver_creation + args = mock_saver_args + result = save( + cube=args.cube, + filename=args.filename, + netcdf_format=args.format, + compute=True, + ) + # It should NOT have called 'delayed_completion' + assert mock_saver.delayed_completion.call_count == 0 + # Result should be None + assert result is None + + def test_compute_false_result_delayed( + self, mock_saver_creation, mock_saver_args + ): + # Check operation when compute=False. + mock_saver_new, mock_saver = mock_saver_creation + args = mock_saver_args + result = save( + cube=args.cube, + filename=args.filename, + netcdf_format=args.format, + compute=False, + ) + # It should have called 'delayed_completion' .. + assert mock_saver.delayed_completion.call_count == 1 + # .. and should return the result of that. 
+ assert result is mock_saver.delayed_completion.return_value + + if __name__ == "__main__": tests.main() diff --git a/noxfile.py b/noxfile.py index 8f9f8b72f9..601a1d576e 100755 --- a/noxfile.py +++ b/noxfile.py @@ -16,7 +16,7 @@ nox.options.reuse_existing_virtualenvs = True #: Python versions we can run sessions under -_PY_VERSIONS_ALL = ["3.8", "3.9", "3.10"] +_PY_VERSIONS_ALL = ["3.9", "3.10", "3.11"] _PY_VERSION_LATEST = _PY_VERSIONS_ALL[-1] #: One specific python version for docs builds diff --git a/pyproject.toml b/pyproject.toml index 232ddb7c5a..4f9ade1351 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,6 +8,68 @@ requires = [ # Defined by PEP 517 build-backend = "setuptools.build_meta" +[project] +authors = [ + {name = "Iris Contributors", email = "scitools.pub@gmail.com"} +] +classifiers = [ + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)", + "Operating System :: MacOS", + "Operating System :: POSIX", + "Operating System :: POSIX :: Linux", + "Operating System :: Unix", + "Programming Language :: Python", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: Implementation :: CPython", + "Topic :: Scientific/Engineering", + "Topic :: Scientific/Engineering :: Atmospheric Science", + "Topic :: Scientific/Engineering :: Visualization", +] +dynamic = [ + "dependencies", + "readme", + "version", +] +description = "A powerful, format-agnostic, community-driven Python package for analysing and visualising Earth science data" +keywords = [ + "cf-metadata", + "data-analysis", + "earth-science", + "grib", + "netcdf", + "meteorology", + "oceanography", + "space-weather", + "ugrid", + "visualisation", +] +license = {text = "LGPL-3.0-or-later"} +name = "scitools-iris" +requires-python = ">=3.9" + +[project.urls] +Code = "https://github.com/SciTools/iris" +Discussions = "https://github.com/SciTools/iris/discussions" +Documentation = "https://scitools-iris.readthedocs.io/en/stable/" +Issues = "https://github.com/SciTools/iris/issues" + +[tool.setuptools] +license-files = ["COPYING", "COPYING.LESSER"] +zip-safe = false + +[tool.setuptools.dynamic] +dependencies = {file = "requirements/pypi-core.txt"} +readme = {file = "README.md", content-type = "text/markdown"} + +[tool.setuptools.packages.find] +include = ["iris*"] +where = ["lib"] + [tool.setuptools_scm] write_to = "lib/iris/_version.py" local_scheme = "dirty-tag" @@ -15,7 +77,7 @@ version_scheme = "release-branch-semver" [tool.black] line-length = 79 -target-version = ['py38'] +target-version = ['py39'] include = '\.pyi?$' extend-exclude = ''' ( diff --git a/requirements/README.md b/requirements/README.md new file mode 100644 index 0000000000..9d9368b9c2 --- /dev/null +++ b/requirements/README.md @@ -0,0 +1,8 @@ +# ⚠️ + +This directory contains: + +- The `locks` directory which contains auto-generated `conda-lock` environment files for each `python` distribution and `platform` supported by `iris`. +- The **top-level** `conda` environment `*.yml` files for each `python` distribution supported by `iris`. +- The `pip` core package dependencies (`pypi-core.txt`) for the [scitools-iris](https://pypi.org/project/scitools-iris/) package on PyPI. Please reference the `pyproject.toml` in the repository root directory for further details. 
+ diff --git a/requirements/iris.yml b/requirements/iris.yml index 1e473d36d5..b0c50b8bfd 120000 --- a/requirements/iris.yml +++ b/requirements/iris.yml @@ -1 +1 @@ -py310.yml \ No newline at end of file +py311.yml \ No newline at end of file diff --git a/requirements/locks/py310-linux-64.lock b/requirements/locks/py310-linux-64.lock index 3475b8bfd5..21c6cacb30 100644 --- a/requirements/locks/py310-linux-64.lock +++ b/requirements/locks/py310-linux-64.lock @@ -11,7 +11,6 @@ https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.ta https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h41732ed_0.conda#7aca3059a1729aa76c597603f10b0dd3 https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-12.2.0-h337968e_19.tar.bz2#164b4b1acaedc47ee7e658ae6b308ca3 https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-12.2.0-h46fd767_19.tar.bz2#1030b1f38c129f2634eae026f704fe60 -https://conda.anaconda.org/conda-forge/linux-64/mpi-1.0-mpich.tar.bz2#c1fcff3417b5a22bbc4cf6e8c23648cf https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.10-3_cp310.conda#4eb33d14d794b0f4be116443ffed3853 https://conda.anaconda.org/conda-forge/noarch/tzdata-2023c-h71feb2d_0.conda#939e3e74d8be4dac89ce83b20de2492a https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 @@ -32,7 +31,6 @@ https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.1-h0b41bf4_3.conda#96 https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h58526e2_1001.tar.bz2#8c54672728e8ec6aa6db90cf2806d220 https://conda.anaconda.org/conda-forge/linux-64/gstreamer-orc-0.4.33-h166bdaf_0.tar.bz2#879c93426c9d0b84a9de4513fbce5f4f https://conda.anaconda.org/conda-forge/linux-64/icu-70.1-h27087fc_0.tar.bz2#87473a15119779e021c314249d4b4aed -https://conda.anaconda.org/conda-forge/linux-64/jbig-2.1-h7f98852_2003.tar.bz2#1aa0cee79792fa97b7ff4545110b60bf https://conda.anaconda.org/conda-forge/linux-64/jpeg-9e-h0b41bf4_3.conda#c7a069243e1fbe9a556ed2ec030e6407 https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2#30186d27e2c9fa62b45fb1476b7200e3 https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2#a8832b479f93521a9e7b5b743803be51 @@ -56,11 +54,9 @@ https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.2.4-h166bdaf_0.ta https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-h166bdaf_4.tar.bz2#f3f9de449d32ca9b9c66a22863c96f41 https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.31.3-hcb278e6_0.conda#141a126675b6d1a4eabb111a4a353898 -https://conda.anaconda.org/conda-forge/linux-64/mpich-4.0.1-h846660c_100.tar.bz2#4b85205b094808088bb0862e08251653 https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.3-h27087fc_1.tar.bz2#4acfc691e64342b9dae57cf2adc63238 https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda#da0ec11a6454ae19bff5b02ed881a2b1 https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.0-h0b41bf4_0.conda#2d833be81a21128e317325a01326d36f -https://conda.anaconda.org/conda-forge/linux-64/pcre-8.45-h9c3ff4c_0.tar.bz2#c05d1820a6d34ff07aaaab7a9b7eddaa https://conda.anaconda.org/conda-forge/linux-64/pixman-0.40.0-h36c2ea0_0.tar.bz2#660e72c82f2e75a6b3fe6a6e75c79f19 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 
https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.38-h0b41bf4_0.conda#9ac34337e5101a87e5d91da05d84aa48 @@ -84,7 +80,6 @@ https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2. https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.10-h28343ad_4.tar.bz2#4a049fc560e00e43151dc51368915fdd https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.2-h27087fc_0.tar.bz2#7daf72d8e2a8e848e11d63ed6d1026e0 https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.46-h620e276_0.conda#27e745f6f2e4b757e95dd7225fbe6bdb -https://conda.anaconda.org/conda-forge/linux-64/libllvm13-13.0.1-hf817b99_2.tar.bz2#47da3ce0d8b2e65ccb226c186dd91eba https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.52.0-h61bc06f_0.conda#613955a50485812985c059e7b269f42e https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.40.0-h753d276_0.tar.bz2#2e5f9a37d487e1019fd4d8113adb2f9f @@ -159,11 +154,9 @@ https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-253-h8c4010b_1.conda https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.2.4-h1daa5a0_1.conda#77003f63d1763c1e6569a02c1742c9f4 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.2-py310h1fa729e_0.conda#a1f0db6709778b77b5903541eeac4032 -https://conda.anaconda.org/conda-forge/linux-64/mpi4py-3.1.3-py38he865349_0.tar.bz2#b1b3d6847a68251a1465206ab466b475 +https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.5-py310hdf3cbec_0.conda#5311a49aaea44b73935c84a6d9a68e5f https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 -https://conda.anaconda.org/conda-forge/noarch/nose-1.3.7-py_1006.tar.bz2#382019d5f8e9362ef6f60a8d4e7bce8f https://conda.anaconda.org/conda-forge/linux-64/numpy-1.24.2-py310h8deb116_0.conda#b7085457309e206174b8e234d90a7605 -https://conda.anaconda.org/conda-forge/noarch/olefile-0.46-pyh9f0ad1d_1.tar.bz2#0b2e68acc8c78c8cc392b90983481f58 https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-hfec8fc6_2.conda#5ce6a42505c6e9e6151c54c3ec8d68ea https://conda.anaconda.org/conda-forge/noarch/packaging-23.1-pyhd8ed1ab_0.conda#91cda59e66e1e4afe9476f8ef98f5c30 https://conda.anaconda.org/conda-forge/noarch/pluggy-1.0.0-pyhd8ed1ab_5.tar.bz2#7d301a0d25f424d96175f810935f0da9 @@ -178,9 +171,10 @@ https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2023.3-pyhd8ed1ab_0. 
https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.2.0-py310h1fa729e_0.conda#8d155ac95b1dfe585bcb6bec6a91c73b https://conda.anaconda.org/conda-forge/noarch/pytz-2023.3-pyhd8ed1ab_0.conda#d3076b483092a435832603243567bc31 https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0-py310h5764c6d_5.tar.bz2#9e68d2ff6d98737c855b65f48dd3c597 -https://conda.anaconda.org/conda-forge/noarch/setuptools-67.6.1-pyhd8ed1ab_0.conda#6c443cccff3daa3d83b2b807b0a298ce +https://conda.anaconda.org/conda-forge/noarch/setuptools-67.7.1-pyhd8ed1ab_0.conda#82bd3ef4e96ced7384f34ab01ece65b6 https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e +https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.3.2.post1-pyhd8ed1ab_0.tar.bz2#146f4541d643d48fc8a75cacf69f03ae https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.4-pyhd8ed1ab_0.conda#5a31a7d564f551d0e6dff52fd8cb5b16 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.2-py_0.tar.bz2#68e01cac9d38d0e717cd5c87bc3d2cc9 @@ -188,6 +182,7 @@ https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.1-pyhd8 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-py_0.tar.bz2#67cd9d9c0382d37479b4d306c369a2d4 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.3-py_0.tar.bz2#d01180388e6d1838c3e1ad029590aa7a https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.5-pyhd8ed1ab_2.tar.bz2#9ff55a0901cf952f05c654394de76bf7 +https://conda.anaconda.org/conda-forge/noarch/tblib-1.7.0-pyhd8ed1ab_0.tar.bz2#3d4afc31302aa7be471feb6be048ed76 https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96 https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 @@ -198,6 +193,7 @@ https://conda.anaconda.org/conda-forge/noarch/wheel-0.40.0-pyhd8ed1ab_0.conda#49 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h166bdaf_0.tar.bz2#c9b568bd804cb2903c6be6f5f68182e4 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda#82b6df12252e6f32402b96dacc656fec https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.10-h7f98852_1003.tar.bz2#f59c1242cc1dd93e72c2ee2b360979eb +https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30c2c15b82aacb07f9c09e28ff2275 https://conda.anaconda.org/conda-forge/noarch/zipp-3.15.0-pyhd8ed1ab_0.conda#13018819ca8f5b7cc675a8faf1f5fedf https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.4-pyhd8ed1ab_0.conda#46a2e6e3dfa718ce3492018d5a110dd6 https://conda.anaconda.org/conda-forge/noarch/babel-2.12.1-pyhd8ed1ab_1.conda#ac432e732804a81ddcf29c92ead57cde @@ -208,6 +204,7 @@ https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py310hde88566_1.tar https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.0.7-py310hdf3cbec_0.conda#7bf9d8c765b6b04882c719509652c6d6 https://conda.anaconda.org/conda-forge/linux-64/coverage-7.2.3-py310h1fa729e_0.conda#3eb11d1ed20480b4515094af8ae24c64 
https://conda.anaconda.org/conda-forge/linux-64/curl-8.0.1-h588be90_0.conda#69691e828381dd12df671c26b680f1b0 +https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.0-py310h5764c6d_1.tar.bz2#fd18cd597d23b2b5ddde23bd5b7aec32 https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.39.3-py310h1fa729e_0.conda#4f39f656d6ff2761d698e69af952be82 https://conda.anaconda.org/conda-forge/linux-64/glib-2.76.1-h3eb15da_0.conda#a7db5e3525875444b5a5868f553ab39a https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.12.2-nompi_h4df4325_101.conda#162a25904af6586b234b2dd52ee99c61 @@ -220,7 +217,6 @@ https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.7.0-pyhd8ed1ab_0.tar.bz2 https://conda.anaconda.org/conda-forge/noarch/partd-1.4.0-pyhd8ed1ab_0.conda#721dab5803ea92ce02ddc4ee50aa0c48 https://conda.anaconda.org/conda-forge/linux-64/pillow-9.4.0-py310h023d228_1.conda#bbea829b541aa15df5c65bd40b8c1981 https://conda.anaconda.org/conda-forge/noarch/pip-23.1-pyhd8ed1ab_0.conda#9ccbacfd1cbfa0be00cc345fe5ad8816 -https://conda.anaconda.org/conda-forge/noarch/pockets-0.9.1-py_0.tar.bz2#1b52f0c42e8077e5a33e00fe72269364 https://conda.anaconda.org/conda-forge/linux-64/proj-9.2.0-h8ffa02c_0.conda#8b9dcfabec5c6bcac98e89889fffa64e https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-h5195f5e_3.conda#caeb3302ef1dc8b342b20c710a86f8a9 https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 @@ -245,7 +241,6 @@ https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.5.0-py310hb814896_1.con https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.11.0-py310heca2aa9_3.conda#3b1946b676534472ce65181dda0b9554 https://conda.anaconda.org/conda-forge/noarch/pytest-7.3.1-pyhd8ed1ab_0.conda#547c7de697ec99b494a28ddde185b5a4 https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-7.1.0-pyhd8ed1ab_0.conda#6613dbb3b25cc648a107f33ca9f80fc1 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-napoleon-0.7-py_0.tar.bz2#0bc25ff6f2e34af63ded59692df5f749 https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py310hbf28c38_3.tar.bz2#703ff1ac7d1b27fb5944b8052b5d1edb https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.4.0-pyhd8ed1ab_0.conda#afe2978fcd8f15149452cdad37aebbfa https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.0-h4243ec0_2.conda#0d0c6604c8ac4ad5e51efa7bb58da05c @@ -264,25 +259,21 @@ https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.0-nompi_hdb2cfa9_4.cond https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.54.4-h7abd40a_0.tar.bz2#921e53675ed5ea352f022b79abab076a https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.2.2-pyha770c72_0.conda#c4aab94cab4ddeb340e36d4c670a5f24 -https://conda.anaconda.org/conda-forge/linux-64/qt-5.12.9-h1304e3e_6.tar.bz2#f2985d160b8c43dd427923c04cd732fe https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-h5d23da1_6.conda#59c73debd9405771690ddbbad6c57b69 https://conda.anaconda.org/conda-forge/noarch/urllib3-1.26.15-pyhd8ed1ab_0.conda#27db656619a55d727eaf5a6ece3d2fd6 +https://conda.anaconda.org/conda-forge/noarch/distributed-2023.4.0-pyhd8ed1ab_0.conda#78e6f14161ba76ae48ac3e82e1f4bf13 https://conda.anaconda.org/conda-forge/linux-64/esmpy-8.4.0-nompi_py310h4c636dd_2.conda#00383e95a1a8d1d5b21af8535cd2ac43 https://conda.anaconda.org/conda-forge/linux-64/graphviz-7.1.0-h2e5815a_0.conda#e7ecda996c443142a0e9c379f3b28e48 
https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.7-py310hab646b1_3.conda#d049da3204bf5ecb54a852b622f2d7d2
-https://conda.anaconda.org/conda-forge/linux-64/pyqt-impl-5.12.3-py38h0ffb2e6_8.tar.bz2#acfc7625a212c27f7decdca86fdb2aba
https://conda.anaconda.org/conda-forge/noarch/requests-2.28.2-pyhd8ed1ab_1.conda#3bfbd6ead1d7299ed46dab3a7bf0bc8c
https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.7.1-py310hff52083_0.conda#c2b60c44d38d32779006a15c2581f0d1
https://conda.anaconda.org/conda-forge/noarch/pooch-1.7.0-pyha770c72_3.conda#5936894aade8240c867d292aa0d980c6
-https://conda.anaconda.org/conda-forge/linux-64/pyqtchart-5.12-py38h7400c14_8.tar.bz2#78a2a6cb4ef31f997c1bee8223a9e579
-https://conda.anaconda.org/conda-forge/linux-64/pyqtwebengine-5.12.1-py38h7400c14_8.tar.bz2#857894ea9c5e53c962c3a0932efa71ea
https://conda.anaconda.org/conda-forge/noarch/sphinx-5.3.0-pyhd8ed1ab_0.tar.bz2#f9e1fcfe235d655900bfeb6aee426472
https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.13.3-pyhd8ed1ab_0.conda#07aca5f2dea315dcc16680d6891e9056
https://conda.anaconda.org/conda-forge/linux-64/scipy-1.10.1-py310h8deb116_0.conda#4c9604c5ec179c21f8f0a09e3c164480
https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_0.conda#ac832cc43adc79118cf6e23f1f9b8995
https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.4.1-pyhd8ed1ab_0.conda#14a64286fe896fe7e1a485fc91ccd022
https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.13.0-pyhd8ed1ab_0.conda#26c51b97ce59bbcce6a35ff45bc5c900
-https://conda.anaconda.org/conda-forge/noarch/sphinx-panels-0.6.0-pyhd8ed1ab_0.tar.bz2#6eec6480601f5d15babf9c3b3987f34a
https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.21.1-py310h7eb24ba_1.conda#e727db22a14344608c2caeccaa9e9d2b
https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a
diff --git a/requirements/locks/py38-linux-64.lock b/requirements/locks/py311-linux-64.lock
similarity index 89%
rename from requirements/locks/py38-linux-64.lock
rename to requirements/locks/py311-linux-64.lock
index 8e05a88d2d..e369894f05 100644
--- a/requirements/locks/py38-linux-64.lock
+++ b/requirements/locks/py311-linux-64.lock
@@ -11,8 +11,8 @@ https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.ta
https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h41732ed_0.conda#7aca3059a1729aa76c597603f10b0dd3
https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-12.2.0-h337968e_19.tar.bz2#164b4b1acaedc47ee7e658ae6b308ca3
https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-12.2.0-h46fd767_19.tar.bz2#1030b1f38c129f2634eae026f704fe60
-https://conda.anaconda.org/conda-forge/linux-64/mpi-1.0-mpich.tar.bz2#c1fcff3417b5a22bbc4cf6e8c23648cf
-https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.8-3_cp38.conda#2f3f7af062b42d664117662612022204
+https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.11-3_cp311.conda#c2e2630ddb68cf52eec74dc7dfab20b5
+https://conda.anaconda.org/conda-forge/noarch/tzdata-2023c-h71feb2d_0.conda#939e3e74d8be4dac89ce83b20de2492a
https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29
https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-12.2.0-h69a702a_19.tar.bz2#cd7a806282c16e1f2d39a7e80d3a3e0d
https://conda.anaconda.org/conda-forge/linux-64/libgomp-12.2.0-h65d4601_19.tar.bz2#cedcee7c064c01c403f962c9e8d3c373
@@ -31,7 +31,6 @@ https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.1-h0b41bf4_3.conda#96
https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h58526e2_1001.tar.bz2#8c54672728e8ec6aa6db90cf2806d220
https://conda.anaconda.org/conda-forge/linux-64/gstreamer-orc-0.4.33-h166bdaf_0.tar.bz2#879c93426c9d0b84a9de4513fbce5f4f
https://conda.anaconda.org/conda-forge/linux-64/icu-70.1-h27087fc_0.tar.bz2#87473a15119779e021c314249d4b4aed
-https://conda.anaconda.org/conda-forge/linux-64/jbig-2.1-h7f98852_2003.tar.bz2#1aa0cee79792fa97b7ff4545110b60bf
https://conda.anaconda.org/conda-forge/linux-64/jpeg-9e-h0b41bf4_3.conda#c7a069243e1fbe9a556ed2ec030e6407
https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2#30186d27e2c9fa62b45fb1476b7200e3
https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2#a8832b479f93521a9e7b5b743803be51
@@ -55,11 +54,9 @@ https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.2.4-h166bdaf_0.ta
https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-h166bdaf_4.tar.bz2#f3f9de449d32ca9b9c66a22863c96f41
https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0
https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.31.3-hcb278e6_0.conda#141a126675b6d1a4eabb111a4a353898
-https://conda.anaconda.org/conda-forge/linux-64/mpich-4.0.1-h846660c_100.tar.bz2#4b85205b094808088bb0862e08251653
https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.3-h27087fc_1.tar.bz2#4acfc691e64342b9dae57cf2adc63238
https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda#da0ec11a6454ae19bff5b02ed881a2b1
https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.0-h0b41bf4_0.conda#2d833be81a21128e317325a01326d36f
-https://conda.anaconda.org/conda-forge/linux-64/pcre-8.45-h9c3ff4c_0.tar.bz2#c05d1820a6d34ff07aaaab7a9b7eddaa
https://conda.anaconda.org/conda-forge/linux-64/pixman-0.40.0-h36c2ea0_0.tar.bz2#660e72c82f2e75a6b3fe6a6e75c79f19
https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036
https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.38-h0b41bf4_0.conda#9ac34337e5101a87e5d91da05d84aa48
@@ -83,7 +80,6 @@ https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.
https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.10-h28343ad_4.tar.bz2#4a049fc560e00e43151dc51368915fdd
https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.2-h27087fc_0.tar.bz2#7daf72d8e2a8e848e11d63ed6d1026e0
https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.46-h620e276_0.conda#27e745f6f2e4b757e95dd7225fbe6bdb
-https://conda.anaconda.org/conda-forge/linux-64/libllvm13-13.0.1-hf817b99_2.tar.bz2#47da3ce0d8b2e65ccb226c186dd91eba
https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.52.0-h61bc06f_0.conda#613955a50485812985c059e7b269f42e
https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416
https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.40.0-h753d276_0.tar.bz2#2e5f9a37d487e1019fd4d8113adb2f9f
@@ -114,7 +110,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libudev1-253-h0b41bf4_1.conda#bb
https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.5.0-h79f4944_1.conda#04a39cdd663f295653fc143851830563
https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.32-hd7da12d_1.conda#a69fa6f218cfed8e2d61753eeacaf034
https://conda.anaconda.org/conda-forge/linux-64/nss-3.89-he45b914_0.conda#2745719a58eeaab6657256a3f142f099
-https://conda.anaconda.org/conda-forge/linux-64/python-3.8.16-he550d4f_1_cpython.conda#9de84cccfbc5f8350a3667bb6ef6fc30
+https://conda.anaconda.org/conda-forge/linux-64/python-3.11.3-h2755cc3_0_cpython.conda#37005ea5f68df6a8a381b70cf4d4a160
https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.40.0-h4ff8645_0.tar.bz2#bb11803129cbbb53ed56f9506ff74145
https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-hc3e0081_0.tar.bz2#d4c341e0379c31e9e781d4f204726867
https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.0-h166bdaf_0.tar.bz2#384e7fcb3cd162ba3e4aed4b687df566
@@ -123,7 +119,7 @@ https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.9-h166bd
https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.1-h166bdaf_0.tar.bz2#0a8e20a8aef954390b9481a527421a8c
https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.4-h0b41bf4_0.conda#ea8fbfeb976ac49cbeb594e985393514
https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.13-pyhd8ed1ab_0.conda#06006184e203b61d3525f90de394471e
-https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py38h578d9bd_1003.tar.bz2#db8b471d9a764f561a129f94ea215c0a
+https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py311h38be061_1003.tar.bz2#0ab8f8f0cae99343907fe68cda11baea
https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-hd4edc92_1.tar.bz2#6c72ec3e660a51736913ef6ea68c454b
https://conda.anaconda.org/conda-forge/linux-64/brotli-1.0.9-h166bdaf_8.tar.bz2#2ff08978892a3e8b954397c461f18418
https://conda.anaconda.org/conda-forge/noarch/certifi-2022.12.7-pyhd8ed1ab_0.conda#fb9addc3db06e56abe03e0e9f21a63e6
@@ -135,7 +131,7 @@ https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz
https://conda.anaconda.org/conda-forge/noarch/cycler-0.11.0-pyhd8ed1ab_0.tar.bz2#a50559fad0affdbb33729a68669ca1cb
https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d
https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.6-pyhd8ed1ab_0.tar.bz2#b65b4d50dbd2d50fa0aeac367ec9eed7
-https://conda.anaconda.org/conda-forge/linux-64/docutils-0.19-py38h578d9bd_1.tar.bz2#3746b24949251f1a00ae0d616d4cdc1b
+https://conda.anaconda.org/conda-forge/linux-64/docutils-0.19-py311h38be061_1.tar.bz2#599159b0740e9b82e7eef0e8471be3c2
https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.1-pyhd8ed1ab_0.conda#7312299d7a0ea4993159229b7d2dceb2
https://conda.anaconda.org/conda-forge/noarch/execnet-1.9.0-pyhd8ed1ab_0.tar.bz2#0e521f7a5e60d508b121d38b04874fb2
https://conda.anaconda.org/conda-forge/noarch/filelock-3.12.0-pyhd8ed1ab_0.conda#650f18a56f366dbf419c15b543592c2d
@@ -148,7 +144,7 @@ https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2#3427
https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352
https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda#f800d2da156d08e289b14e87e43c1ae5
https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9
-https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.4-py38h43d8883_1.tar.bz2#41ca56d5cac7bfc7eb4fcdbee878eb84
+https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.4-py311h4dd048b_1.tar.bz2#46d451f575392c01dc193069bd89766d
https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-hfd0df8a_0.conda#aa8840cdf17ef0c6084d1e24abc7a28b
https://conda.anaconda.org/conda-forge/linux-64/libclang13-15.0.7-default_h3e3d535_1.conda#a3a0f7a6f0885f5e1e0ec691566afb77
https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h36d4200_3.conda#c9f4416a34bc91e0eb029f912c68f81f
@@ -157,12 +153,10 @@ https://conda.anaconda.org/conda-forge/linux-64/libpq-15.2-hb675445_0.conda#4654
https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-253-h8c4010b_1.conda#9176b1e2cb8beca37a7510b0e801e38f
https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.2.4-h1daa5a0_1.conda#77003f63d1763c1e6569a02c1742c9f4
https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4
-https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.2-py38h1de0b5d_0.conda#6d97b5d6f06933ab653f1862ddf6e33e
-https://conda.anaconda.org/conda-forge/linux-64/mpi4py-3.1.3-py38he865349_0.tar.bz2#b1b3d6847a68251a1465206ab466b475
+https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.2-py311h2582759_0.conda#adb20bd57069614552adac60a020c36d
+https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.5-py311ha3edf6b_0.conda#7415f24f8c44e44152623d93c5015000
https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19
-https://conda.anaconda.org/conda-forge/noarch/nose-1.3.7-py_1006.tar.bz2#382019d5f8e9362ef6f60a8d4e7bce8f
-https://conda.anaconda.org/conda-forge/linux-64/numpy-1.24.2-py38h10c12cc_0.conda#05592c85b9f6931dc2df1e80c0d56294
-https://conda.anaconda.org/conda-forge/noarch/olefile-0.46-pyh9f0ad1d_1.tar.bz2#0b2e68acc8c78c8cc392b90983481f58
+https://conda.anaconda.org/conda-forge/linux-64/numpy-1.24.2-py311h8e6699e_0.conda#90db8cc0dfa20853329bfc6642f887aa
https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-hfec8fc6_2.conda#5ce6a42505c6e9e6151c54c3ec8d68ea
https://conda.anaconda.org/conda-forge/noarch/packaging-23.1-pyhd8ed1ab_0.conda#91cda59e66e1e4afe9476f8ef98f5c30
https://conda.anaconda.org/conda-forge/noarch/pluggy-1.0.0-pyhd8ed1ab_5.tar.bz2#7d301a0d25f424d96175f810935f0da9
@@ -174,12 +168,13 @@ https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.0.9-pyhd8ed1ab_0.tar.b
https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174
https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2#2a7de29fb590ca14b5243c4c812c8025
https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2023.3-pyhd8ed1ab_0.conda#2590495f608a63625e165915fb4e2e34
-https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.2.0-py38h1de0b5d_0.conda#7db73572d4f7e10a759bad609a228ad0
+https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.2.0-py311h2582759_0.conda#dfcc3e6e30d6ec2b2bb416fcd8ff4dc1
https://conda.anaconda.org/conda-forge/noarch/pytz-2023.3-pyhd8ed1ab_0.conda#d3076b483092a435832603243567bc31
-https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0-py38h0a891b7_5.tar.bz2#0856c59f9ddb710c640dc0428d66b1b7
-https://conda.anaconda.org/conda-forge/noarch/setuptools-67.6.1-pyhd8ed1ab_0.conda#6c443cccff3daa3d83b2b807b0a298ce
+https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0-py311hd4cff14_5.tar.bz2#da8769492e423103c59f469f4f17f8d9
+https://conda.anaconda.org/conda-forge/noarch/setuptools-67.7.1-pyhd8ed1ab_0.conda#82bd3ef4e96ced7384f34ab01ece65b6
https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2
https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e
+https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d
https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.3.2.post1-pyhd8ed1ab_0.tar.bz2#146f4541d643d48fc8a75cacf69f03ae
https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.4-pyhd8ed1ab_0.conda#5a31a7d564f551d0e6dff52fd8cb5b16
https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.2-py_0.tar.bz2#68e01cac9d38d0e717cd5c87bc3d2cc9
@@ -187,24 +182,26 @@ https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.1-pyhd8
https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-py_0.tar.bz2#67cd9d9c0382d37479b4d306c369a2d4
https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.3-py_0.tar.bz2#d01180388e6d1838c3e1ad029590aa7a
https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.5-pyhd8ed1ab_2.tar.bz2#9ff55a0901cf952f05c654394de76bf7
+https://conda.anaconda.org/conda-forge/noarch/tblib-1.7.0-pyhd8ed1ab_0.tar.bz2#3d4afc31302aa7be471feb6be048ed76
https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095
https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96
https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36
https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3-py38h1de0b5d_0.conda#1371a9ace5486b295a373924803acaba
https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.5.0-pyha770c72_0.conda#43e7d9e50261fb11deb76e17d8431aac
-https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.0.0-py38h0a891b7_0.tar.bz2#44421904760e9f5ae2035193e04360f0
https://conda.anaconda.org/conda-forge/noarch/wheel-0.40.0-pyhd8ed1ab_0.conda#49bb0d9e60ce1db25e151780331bb5f3
https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h166bdaf_0.tar.bz2#c9b568bd804cb2903c6be6f5f68182e4
https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda#82b6df12252e6f32402b96dacc656fec
https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.10-h7f98852_1003.tar.bz2#f59c1242cc1dd93e72c2ee2b360979eb
+https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30c2c15b82aacb07f9c09e28ff2275
https://conda.anaconda.org/conda-forge/noarch/zipp-3.15.0-pyhd8ed1ab_0.conda#13018819ca8f5b7cc675a8faf1f5fedf
https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.4-pyhd8ed1ab_0.conda#46a2e6e3dfa718ce3492018d5a110dd6
https://conda.anaconda.org/conda-forge/noarch/babel-2.12.1-pyhd8ed1ab_1.conda#ac432e732804a81ddcf29c92ead57cde
https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.2-pyha770c72_0.conda#a362ff7d976217f8fa78c0f1c4f59717
https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-ha61ee94_1014.tar.bz2#d1a88f3ed5b52e1024b80d4bcd26a7a0
-https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py38h4a40e3a_3.conda#3ac112151c6b6cfe457e976de41af0c5
-https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py38h26c90d9_1.tar.bz2#dcc025a7bb54374979c500c2e161fac9
-https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.0.7-py38hfbd4bf9_0.conda#638537863b298151635c05c762a997ab
+https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py311h409f033_3.conda#9025d0786dbbe4bc91fd8e85502decce
+https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py311h4c7f6c3_1.tar.bz2#c7e54004ffd03f8db0a58ab949f2a00b
+https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.0.7-py311ha3edf6b_0.conda#e7548e7f58965a2fe97a95950a5fedc6
+https://conda.anaconda.org/conda-forge/linux-64/coverage-7.2.3-py311h2582759_0.conda#d34c18fc691a04471ff3460b2d15d19e
https://conda.anaconda.org/conda-forge/linux-64/curl-8.0.1-h588be90_0.conda#69691e828381dd12df671c26b680f1b0
https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.39.3-py38h1de0b5d_0.conda#34449fe6e3949956fac2236c9a9a3d3b
https://conda.anaconda.org/conda-forge/linux-64/glib-2.76.1-h3eb15da_0.conda#a7db5e3525875444b5a5868f553ab39a
@@ -214,12 +211,11 @@ https://conda.anaconda.org/conda-forge/noarch/importlib_resources-5.12.0-pyhd8ed
https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37
https://conda.anaconda.org/conda-forge/linux-64/libclang-15.0.7-default_had23c3d_1.conda#36c65ed73b7c92589bd9562ef8a6023d
https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h5aea950_4.conda#82ef57611ace65b59db35a9687264572
-https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py38h26c90d9_1008.tar.bz2#6bc8cd29312f4fc77156b78124e165cd
+https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py311h4c7f6c3_1008.tar.bz2#5998dff78c3b82a07ad77f2ae1ec1c44
https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.7.0-pyhd8ed1ab_0.tar.bz2#fbe1182f650c04513046d6894046cd6c
https://conda.anaconda.org/conda-forge/noarch/partd-1.4.0-pyhd8ed1ab_0.conda#721dab5803ea92ce02ddc4ee50aa0c48
-https://conda.anaconda.org/conda-forge/linux-64/pillow-9.4.0-py38hde6dc18_1.conda#3de5619d3f556f966189e5251a266125
+https://conda.anaconda.org/conda-forge/linux-64/pillow-9.4.0-py311h50def17_1.conda#8b5d1da23907114bd7aa3d562150ff36
https://conda.anaconda.org/conda-forge/noarch/pip-23.1-pyhd8ed1ab_0.conda#9ccbacfd1cbfa0be00cc345fe5ad8816
-https://conda.anaconda.org/conda-forge/noarch/pockets-0.9.1-py_0.tar.bz2#1b52f0c42e8077e5a33e00fe72269364
https://conda.anaconda.org/conda-forge/linux-64/proj-9.2.0-h8ffa02c_0.conda#8b9dcfabec5c6bcac98e89889fffa64e
https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-h5195f5e_3.conda#caeb3302ef1dc8b342b20c710a86f8a9
https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984
@@ -240,48 +236,43 @@ https://conda.anaconda.org/conda-forge/linux-64/pandas-2.0.0-py38hdc8b05c_0.cond
https://conda.anaconda.org/conda-forge/noarch/pbr-5.11.1-pyhd8ed1ab_0.conda#5bde4ebca51438054099b9527c904ecb
https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.2.0-pyhd8ed1ab_0.conda#f10c2cf447ca96f12a326b83c75b8e33
https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-daemon-16.1-ha8d29e2_3.conda#34d9d75ca896f5919c372a34e25f23ea
-https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.5.0-py38h2f62729_1.conda#00785fd9270728fbfb82c80fc0229dc6
-https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.11.0-py38h8dc9893_3.conda#7bb0328b4a0f857aeb432426b9a5f908
+https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.5.0-py311h1850bce_1.conda#572159a946b809df471b11db4995c708
+https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.11.0-py311hcafe171_3.conda#0d79df2a96f6572fed2883374400b235
https://conda.anaconda.org/conda-forge/noarch/pytest-7.3.1-pyhd8ed1ab_0.conda#547c7de697ec99b494a28ddde185b5a4
https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-7.1.0-pyhd8ed1ab_0.conda#6613dbb3b25cc648a107f33ca9f80fc1
-https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-napoleon-0.7-py_0.tar.bz2#0bc25ff6f2e34af63ded59692df5f749
-https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py38h43d8883_3.tar.bz2#82b3797d08a43a101b645becbb938e65
+https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py311h4dd048b_3.tar.bz2#dbfea4376856bf7bd2121e719cf816e5
https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.4.0-pyhd8ed1ab_0.conda#afe2978fcd8f15149452cdad37aebbfa
https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.0-h4243ec0_2.conda#0d0c6604c8ac4ad5e51efa7bb58da05c
https://conda.anaconda.org/conda-forge/noarch/identify-2.5.22-pyhd8ed1ab_0.conda#b8d16e273396a0115199a83769a39246
-https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.7.1-py38hd6c3c57_0.conda#3b8ba76acae09fbd4b2247c4ee4c0324
+https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369
https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.0-nompi_he1eeb6f_102.conda#d9679b28fcc2154fa63e814c5acdce57
-https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.2-nompi_py38h2250339_100.tar.bz2#dd97e93b1f64f1cc58879d53c23ec93f
+https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.2-nompi_py311hc6fcf29_100.tar.bz2#1ef39f477192bf05df04fb5ad594e82d
https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-hd33c08f_0.conda#a8b9e35dd7be2c945b0de4fe19a7c3a9
https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-16.1-hcb278e6_3.conda#8b452ab959166d91949af4c2d28f81db
https://conda.anaconda.org/conda-forge/noarch/pyopenssl-23.1.1-pyhd8ed1ab_0.conda#0b34aa3ab7e7ccb1765a03dd9ed29938
+https://conda.anaconda.org/conda-forge/noarch/pytest-cov-4.0.0-pyhd8ed1ab_0.tar.bz2#c9e3f8bfdb9bfc34aa1836a6ed4b25d7
https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.2.1-pyhd8ed1ab_0.conda#6fe4c2689d1b10fec1ee65819f0c4fd5
https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860
https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.22.0-pyhd8ed1ab_0.conda#054007ab693cb77a029ea4f1f12f34a7
https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.0-nompi_hdb2cfa9_4.conda#3e2e1d0cd06d1b64c9c2800c0eb0cde6
https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422
https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.54.4-h7abd40a_0.tar.bz2#921e53675ed5ea352f022b79abab076a
-https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369
https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.2.2-pyha770c72_0.conda#c4aab94cab4ddeb340e36d4c670a5f24
-https://conda.anaconda.org/conda-forge/linux-64/qt-5.12.9-h1304e3e_6.tar.bz2#f2985d160b8c43dd427923c04cd732fe
https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-h5d23da1_6.conda#59c73debd9405771690ddbbad6c57b69
https://conda.anaconda.org/conda-forge/noarch/urllib3-1.26.15-pyhd8ed1ab_0.conda#27db656619a55d727eaf5a6ece3d2fd6
-https://conda.anaconda.org/conda-forge/linux-64/esmpy-8.4.0-nompi_py38h2b78397_2.conda#03c291af8938218972bfba0b0618d3e9
+https://conda.anaconda.org/conda-forge/noarch/distributed-2023.4.0-pyhd8ed1ab_0.conda#78e6f14161ba76ae48ac3e82e1f4bf13
+https://conda.anaconda.org/conda-forge/linux-64/esmpy-8.4.0-nompi_py311h8e2db7d_2.conda#18fa0582166979a77413859eed97d667
https://conda.anaconda.org/conda-forge/linux-64/graphviz-7.1.0-h2e5815a_0.conda#e7ecda996c443142a0e9c379f3b28e48
-https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.7-py38ha0d8c90_3.conda#e965dc172d67920d058ac2b3a0e27565
-https://conda.anaconda.org/conda-forge/linux-64/pyqt-impl-5.12.3-py38h0ffb2e6_8.tar.bz2#acfc7625a212c27f7decdca86fdb2aba
+https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.7-py311ha74522f_3.conda#ad6dd0bed0cdf5f2d4eb2b989d6253b3
https://conda.anaconda.org/conda-forge/noarch/requests-2.28.2-pyhd8ed1ab_1.conda#3bfbd6ead1d7299ed46dab3a7bf0bc8c
-https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.7.1-py38h578d9bd_0.conda#50ff9e0a3dd459a0ca365741072bf9a2
+https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.7.1-py311h38be061_0.conda#8fd462c8bcbba5a3affcb2d04e387476
https://conda.anaconda.org/conda-forge/noarch/pooch-1.7.0-pyha770c72_3.conda#5936894aade8240c867d292aa0d980c6
-https://conda.anaconda.org/conda-forge/linux-64/pyqtchart-5.12-py38h7400c14_8.tar.bz2#78a2a6cb4ef31f997c1bee8223a9e579
-https://conda.anaconda.org/conda-forge/linux-64/pyqtwebengine-5.12.1-py38h7400c14_8.tar.bz2#857894ea9c5e53c962c3a0932efa71ea
https://conda.anaconda.org/conda-forge/noarch/sphinx-5.3.0-pyhd8ed1ab_0.tar.bz2#f9e1fcfe235d655900bfeb6aee426472
https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.13.3-pyhd8ed1ab_0.conda#07aca5f2dea315dcc16680d6891e9056
https://conda.anaconda.org/conda-forge/linux-64/scipy-1.10.1-py38h59f1b5f_0.conda#a55546608e09f3bca435ec07ed08a768
https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_0.conda#ac832cc43adc79118cf6e23f1f9b8995
https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.4.1-pyhd8ed1ab_0.conda#14a64286fe896fe7e1a485fc91ccd022
https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.13.0-pyhd8ed1ab_0.conda#26c51b97ce59bbcce6a35ff45bc5c900
-https://conda.anaconda.org/conda-forge/noarch/sphinx-panels-0.6.0-pyhd8ed1ab_0.tar.bz2#6eec6480601f5d15babf9c3b3987f34a
-https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.21.1-py38h1abf878_1.conda#4d102cd1e6db10034a6c97df4444833f
+https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.21.1-py311hd88b842_1.conda#f19feb9440890ccb806a367ea9ae0654
https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a
diff --git a/requirements/locks/py39-linux-64.lock b/requirements/locks/py39-linux-64.lock
index c74ebf4394..da02056cff 100644
--- a/requirements/locks/py39-linux-64.lock
+++ b/requirements/locks/py39-linux-64.lock
@@ -11,7 +11,6 @@ https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.ta
https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h41732ed_0.conda#7aca3059a1729aa76c597603f10b0dd3
https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-12.2.0-h337968e_19.tar.bz2#164b4b1acaedc47ee7e658ae6b308ca3
https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-12.2.0-h46fd767_19.tar.bz2#1030b1f38c129f2634eae026f704fe60
-https://conda.anaconda.org/conda-forge/linux-64/mpi-1.0-mpich.tar.bz2#c1fcff3417b5a22bbc4cf6e8c23648cf
https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.9-3_cp39.conda#0dd193187d54e585cac7eab942a8847e
https://conda.anaconda.org/conda-forge/noarch/tzdata-2023c-h71feb2d_0.conda#939e3e74d8be4dac89ce83b20de2492a
https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29
@@ -32,7 +31,6 @@ https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.1-h0b41bf4_3.conda#96
https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h58526e2_1001.tar.bz2#8c54672728e8ec6aa6db90cf2806d220
https://conda.anaconda.org/conda-forge/linux-64/gstreamer-orc-0.4.33-h166bdaf_0.tar.bz2#879c93426c9d0b84a9de4513fbce5f4f
https://conda.anaconda.org/conda-forge/linux-64/icu-70.1-h27087fc_0.tar.bz2#87473a15119779e021c314249d4b4aed
-https://conda.anaconda.org/conda-forge/linux-64/jbig-2.1-h7f98852_2003.tar.bz2#1aa0cee79792fa97b7ff4545110b60bf
https://conda.anaconda.org/conda-forge/linux-64/jpeg-9e-h0b41bf4_3.conda#c7a069243e1fbe9a556ed2ec030e6407
https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2#30186d27e2c9fa62b45fb1476b7200e3
https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2#a8832b479f93521a9e7b5b743803be51
@@ -56,11 +54,9 @@ https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.2.4-h166bdaf_0.ta
https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-h166bdaf_4.tar.bz2#f3f9de449d32ca9b9c66a22863c96f41
https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0
https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.31.3-hcb278e6_0.conda#141a126675b6d1a4eabb111a4a353898
-https://conda.anaconda.org/conda-forge/linux-64/mpich-4.0.1-h846660c_100.tar.bz2#4b85205b094808088bb0862e08251653
https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.3-h27087fc_1.tar.bz2#4acfc691e64342b9dae57cf2adc63238
https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda#da0ec11a6454ae19bff5b02ed881a2b1
https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.0-h0b41bf4_0.conda#2d833be81a21128e317325a01326d36f
-https://conda.anaconda.org/conda-forge/linux-64/pcre-8.45-h9c3ff4c_0.tar.bz2#c05d1820a6d34ff07aaaab7a9b7eddaa
https://conda.anaconda.org/conda-forge/linux-64/pixman-0.40.0-h36c2ea0_0.tar.bz2#660e72c82f2e75a6b3fe6a6e75c79f19
https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036
https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.38-h0b41bf4_0.conda#9ac34337e5101a87e5d91da05d84aa48
@@ -84,7 +80,6 @@ https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.
https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.10-h28343ad_4.tar.bz2#4a049fc560e00e43151dc51368915fdd
https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.2-h27087fc_0.tar.bz2#7daf72d8e2a8e848e11d63ed6d1026e0
https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.46-h620e276_0.conda#27e745f6f2e4b757e95dd7225fbe6bdb
-https://conda.anaconda.org/conda-forge/linux-64/libllvm13-13.0.1-hf817b99_2.tar.bz2#47da3ce0d8b2e65ccb226c186dd91eba
https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.52.0-h61bc06f_0.conda#613955a50485812985c059e7b269f42e
https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416
https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.40.0-h753d276_0.tar.bz2#2e5f9a37d487e1019fd4d8113adb2f9f
@@ -159,11 +154,9 @@ https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-253-h8c4010b_1.conda
https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.2.4-h1daa5a0_1.conda#77003f63d1763c1e6569a02c1742c9f4
https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4
https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.2-py39h72bdee0_0.conda#35514f5320206df9f4661c138c02e1c1
-https://conda.anaconda.org/conda-forge/linux-64/mpi4py-3.1.3-py38he865349_0.tar.bz2#b1b3d6847a68251a1465206ab466b475
+https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.5-py39h4b4f3f3_0.conda#413374bab5022a5199c5dd89aef75df5
https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19
-https://conda.anaconda.org/conda-forge/noarch/nose-1.3.7-py_1006.tar.bz2#382019d5f8e9362ef6f60a8d4e7bce8f
https://conda.anaconda.org/conda-forge/linux-64/numpy-1.24.2-py39h7360e5f_0.conda#757070dc7cc33003254888808cd34f1e
-https://conda.anaconda.org/conda-forge/noarch/olefile-0.46-pyh9f0ad1d_1.tar.bz2#0b2e68acc8c78c8cc392b90983481f58
https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-hfec8fc6_2.conda#5ce6a42505c6e9e6151c54c3ec8d68ea
https://conda.anaconda.org/conda-forge/noarch/packaging-23.1-pyhd8ed1ab_0.conda#91cda59e66e1e4afe9476f8ef98f5c30
https://conda.anaconda.org/conda-forge/noarch/pluggy-1.0.0-pyhd8ed1ab_5.tar.bz2#7d301a0d25f424d96175f810935f0da9
@@ -178,9 +171,10 @@ https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2023.3-pyhd8ed1ab_0.
https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.2.0-py39h72bdee0_0.conda#18927f971926b7271600368de71de557
https://conda.anaconda.org/conda-forge/noarch/pytz-2023.3-pyhd8ed1ab_0.conda#d3076b483092a435832603243567bc31
https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0-py39hb9d737c_5.tar.bz2#ef9db3c38ae7275f6b14491cfe61a248
-https://conda.anaconda.org/conda-forge/noarch/setuptools-67.6.1-pyhd8ed1ab_0.conda#6c443cccff3daa3d83b2b807b0a298ce
+https://conda.anaconda.org/conda-forge/noarch/setuptools-67.7.1-pyhd8ed1ab_0.conda#82bd3ef4e96ced7384f34ab01ece65b6
https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2
https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e
+https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d
https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.3.2.post1-pyhd8ed1ab_0.tar.bz2#146f4541d643d48fc8a75cacf69f03ae
https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.4-pyhd8ed1ab_0.conda#5a31a7d564f551d0e6dff52fd8cb5b16
https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.2-py_0.tar.bz2#68e01cac9d38d0e717cd5c87bc3d2cc9
@@ -188,6 +182,7 @@ https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.1-pyhd8
https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-py_0.tar.bz2#67cd9d9c0382d37479b4d306c369a2d4
https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.3-py_0.tar.bz2#d01180388e6d1838c3e1ad029590aa7a
https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.5-pyhd8ed1ab_2.tar.bz2#9ff55a0901cf952f05c654394de76bf7
+https://conda.anaconda.org/conda-forge/noarch/tblib-1.7.0-pyhd8ed1ab_0.tar.bz2#3d4afc31302aa7be471feb6be048ed76
https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095
https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96
https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36
@@ -198,6 +193,7 @@ https://conda.anaconda.org/conda-forge/noarch/wheel-0.40.0-pyhd8ed1ab_0.conda#49
https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h166bdaf_0.tar.bz2#c9b568bd804cb2903c6be6f5f68182e4
https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda#82b6df12252e6f32402b96dacc656fec
https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.10-h7f98852_1003.tar.bz2#f59c1242cc1dd93e72c2ee2b360979eb
+https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30c2c15b82aacb07f9c09e28ff2275
https://conda.anaconda.org/conda-forge/noarch/zipp-3.15.0-pyhd8ed1ab_0.conda#13018819ca8f5b7cc675a8faf1f5fedf
https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.4-pyhd8ed1ab_0.conda#46a2e6e3dfa718ce3492018d5a110dd6
https://conda.anaconda.org/conda-forge/noarch/babel-2.12.1-pyhd8ed1ab_1.conda#ac432e732804a81ddcf29c92ead57cde
@@ -207,6 +203,7 @@ https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py39he91dace_3.conda
https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py39h2ae25f5_1.tar.bz2#c943fb9a2818ecc5be1e0ecc8b7738f1
https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.0.7-py39h4b4f3f3_0.conda#c5387f3fb1f5b8b71e1c865fc55f4951
https://conda.anaconda.org/conda-forge/linux-64/curl-8.0.1-h588be90_0.conda#69691e828381dd12df671c26b680f1b0
+https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.0-py39hb9d737c_1.tar.bz2#eb31327ace8dac15c2df243d9505a132
https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.39.3-py39h72bdee0_0.conda#9232b3b2cc83a304c8210a092e8ba4a5
https://conda.anaconda.org/conda-forge/linux-64/glib-2.76.1-h3eb15da_0.conda#a7db5e3525875444b5a5868f553ab39a
https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.12.2-nompi_h4df4325_101.conda#162a25904af6586b234b2dd52ee99c61
@@ -220,7 +217,6 @@ https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.7.0-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/partd-1.4.0-pyhd8ed1ab_0.conda#721dab5803ea92ce02ddc4ee50aa0c48
https://conda.anaconda.org/conda-forge/linux-64/pillow-9.4.0-py39h2320bf1_1.conda#d2f79132b9c8e416058a4cd84ef27b3d
https://conda.anaconda.org/conda-forge/noarch/pip-23.1-pyhd8ed1ab_0.conda#9ccbacfd1cbfa0be00cc345fe5ad8816
-https://conda.anaconda.org/conda-forge/noarch/pockets-0.9.1-py_0.tar.bz2#1b52f0c42e8077e5a33e00fe72269364
https://conda.anaconda.org/conda-forge/linux-64/proj-9.2.0-h8ffa02c_0.conda#8b9dcfabec5c6bcac98e89889fffa64e
https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-h5195f5e_3.conda#caeb3302ef1dc8b342b20c710a86f8a9
https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984
@@ -245,7 +241,6 @@ https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.5.0-py39h718ffca_1.cond
https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.11.0-py39h227be39_3.conda#9e381db00691e26bcf670c3586397be1
https://conda.anaconda.org/conda-forge/noarch/pytest-7.3.1-pyhd8ed1ab_0.conda#547c7de697ec99b494a28ddde185b5a4
https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-7.1.0-pyhd8ed1ab_0.conda#6613dbb3b25cc648a107f33ca9f80fc1
-https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-napoleon-0.7-py_0.tar.bz2#0bc25ff6f2e34af63ded59692df5f749
https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py39hf939315_3.tar.bz2#0f11bcdf9669a5ae0f39efd8c830209a
https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.4.0-pyhd8ed1ab_0.conda#afe2978fcd8f15149452cdad37aebbfa
https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.0-h4243ec0_2.conda#0d0c6604c8ac4ad5e51efa7bb58da05c
@@ -264,25 +259,21 @@ https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#
https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.54.4-h7abd40a_0.tar.bz2#921e53675ed5ea352f022b79abab076a
https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369
https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.2.2-pyha770c72_0.conda#c4aab94cab4ddeb340e36d4c670a5f24
-https://conda.anaconda.org/conda-forge/linux-64/qt-5.12.9-h1304e3e_6.tar.bz2#f2985d160b8c43dd427923c04cd732fe
https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-h5d23da1_6.conda#59c73debd9405771690ddbbad6c57b69
https://conda.anaconda.org/conda-forge/noarch/urllib3-1.26.15-pyhd8ed1ab_0.conda#27db656619a55d727eaf5a6ece3d2fd6
+https://conda.anaconda.org/conda-forge/noarch/distributed-2023.4.0-pyhd8ed1ab_0.conda#78e6f14161ba76ae48ac3e82e1f4bf13
https://conda.anaconda.org/conda-forge/linux-64/esmpy-8.4.0-nompi_py39h95eafd8_2.conda#f04f8970f741b2f78af7e5b7112d17d6
https://conda.anaconda.org/conda-forge/linux-64/graphviz-7.1.0-h2e5815a_0.conda#e7ecda996c443142a0e9c379f3b28e48
https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.7-py39h5c7b992_3.conda#19e30314fe824605750da905febb8ee6
-https://conda.anaconda.org/conda-forge/linux-64/pyqt-impl-5.12.3-py38h0ffb2e6_8.tar.bz2#acfc7625a212c27f7decdca86fdb2aba
https://conda.anaconda.org/conda-forge/noarch/requests-2.28.2-pyhd8ed1ab_1.conda#3bfbd6ead1d7299ed46dab3a7bf0bc8c
https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.7.1-py39hf3d152e_0.conda#682772fa385911fb5efffbce21b269c5
https://conda.anaconda.org/conda-forge/noarch/pooch-1.7.0-pyha770c72_3.conda#5936894aade8240c867d292aa0d980c6
-https://conda.anaconda.org/conda-forge/linux-64/pyqtchart-5.12-py38h7400c14_8.tar.bz2#78a2a6cb4ef31f997c1bee8223a9e579
-https://conda.anaconda.org/conda-forge/linux-64/pyqtwebengine-5.12.1-py38h7400c14_8.tar.bz2#857894ea9c5e53c962c3a0932efa71ea
https://conda.anaconda.org/conda-forge/noarch/sphinx-5.3.0-pyhd8ed1ab_0.tar.bz2#f9e1fcfe235d655900bfeb6aee426472
https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.13.3-pyhd8ed1ab_0.conda#07aca5f2dea315dcc16680d6891e9056
https://conda.anaconda.org/conda-forge/linux-64/scipy-1.10.1-py39he83f1e1_0.conda#0fb6a78da33e1aca1fcea7fe02e7c179
https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_0.conda#ac832cc43adc79118cf6e23f1f9b8995
https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.4.1-pyhd8ed1ab_0.conda#14a64286fe896fe7e1a485fc91ccd022
https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.13.0-pyhd8ed1ab_0.conda#26c51b97ce59bbcce6a35ff45bc5c900
-https://conda.anaconda.org/conda-forge/noarch/sphinx-panels-0.6.0-pyhd8ed1ab_0.tar.bz2#6eec6480601f5d15babf9c3b3987f34a
https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.21.1-py39h4bd5d67_1.conda#a60d65263a8ddbff5381ed91d4f6953e
https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a
diff --git a/requirements/py310.yml b/requirements/py310.yml
index 2f59b6354d..a164e3b055 100644
--- a/requirements/py310.yml
+++ b/requirements/py310.yml
@@ -35,6 +35,7 @@ dependencies:
  - python-stratify

# Test dependencies.
+  - distributed
  - filelock
  - imagehash >=4.0
  - pre-commit
diff --git a/requirements/py38.yml b/requirements/py311.yml
similarity index 94%
rename from requirements/py38.yml
rename to requirements/py311.yml
index 56b7a2de69..a8baf58164 100644
--- a/requirements/py38.yml
+++ b/requirements/py311.yml
@@ -4,7 +4,7 @@ channels:
  - conda-forge

dependencies:
-  - python =3.8
+  - python =3.11

# Setup dependencies.
  - setuptools >=64
@@ -35,11 +35,13 @@ dependencies:
  - python-stratify

# Test dependencies.
+  - distributed
  - filelock
  - imagehash >=4.0
  - pre-commit
  - psutil
  - pytest
+  - pytest-cov
  - pytest-xdist
  - requests
diff --git a/requirements/py39.yml b/requirements/py39.yml
index 8bcf8046f7..4e8bab8adb 100644
--- a/requirements/py39.yml
+++ b/requirements/py39.yml
@@ -35,6 +35,7 @@ dependencies:
  - python-stratify

# Test dependencies.
+  - distributed
  - filelock
  - imagehash >=4.0
  - pre-commit
diff --git a/requirements/pypi-core.txt b/requirements/pypi-core.txt
new file mode 100644
index 0000000000..f24002a16e
--- /dev/null
+++ b/requirements/pypi-core.txt
@@ -0,0 +1,12 @@
+cartopy>=0.21
+cf-units>=3.1
+cftime>=1.5.0
+dask[array]>=2022.9.0
+# libnetcdf<4.9 (not available on PyPI)
+matplotlib>=3.5
+netcdf4
+numpy>=1.19
+pyproj
+scipy
+shapely!=1.8.3
+xxhash
\ No newline at end of file
diff --git a/setup.cfg b/setup.cfg
deleted file mode 100644
index 435fe59c1b..0000000000
--- a/setup.cfg
+++ /dev/null
@@ -1,140 +0,0 @@
-[metadata]
-author = SciTools Developers
-author_email = scitools.pub@gmail.com
-classifiers =
-    Development Status :: 5 - Production/Stable
-    Intended Audience :: Science/Research
-    License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)
-    Operating System :: MacOS
-    Operating System :: POSIX
-    Operating System :: POSIX :: Linux
-    Operating System :: Unix
-    Programming Language :: Python
-    Programming Language :: Python :: 3 :: Only
-    Programming Language :: Python :: 3.8
-    Programming Language :: Python :: 3.9
-    Programming Language :: Python :: 3.10
-    Programming Language :: Python :: Implementation :: CPython
-    Topic :: Scientific/Engineering
-    Topic :: Scientific/Engineering :: Atmospheric Science
-    Topic :: Scientific/Engineering :: Visualization
-description = A powerful, format-agnostic, community-driven Python package for analysing and visualising Earth science data
-download_url = https://github.com/SciTools/iris
-keywords =
-    cf-metadata
-    data-analysis
-    earth-science
-    grib
-    netcdf
-    meteorology
-    oceanography
-    space-weather
-    ugrid
-    visualisation
-license = LGPL-3.0-or-later
-license_files = COPYING.LESSER
-long_description = file: README.md
-long_description_content_type = text/markdown
-name = scitools-iris
-project_urls =
-    Code = https://github.com/SciTools/iris
-    Discussions = https://github.com/SciTools/iris/discussions
-    Documentation = https://scitools-iris.readthedocs.io/en/stable/
-    Issues = https://github.com/SciTools/iris/issues
-url = https://github.com/SciTools/iris
-version = attr: iris.__version__
-
-[options]
-include_package_data = True
-install_requires =
-    cartopy>=0.21
-    cf-units>=3.1
-    cftime>=1.5.0
-    dask[array]>=2022.9.0
-    matplotlib>=3.5
-    netcdf4
-    numpy>=1.19
-    scipy
-    shapely!=1.8.3
-    xxhash
-packages = find_namespace:
-package_dir =
-    =lib
-python_requires =
-    >=3.8
-zip_safe = False
-
-[options.packages.find]
-where = lib
-
-[options.extras_require]
-docs =
-    sphinx<=5.3
-    sphinx-copybutton
-    sphinx-gallery>=0.11.0
-    sphinx-design
-    pydata-sphinx-theme>=0.13.0
-test =
-    filelock
-    imagehash>=4.0
-    pre-commit
-    requests
-    pytest
-    pytest-xdist
-all =
-    mo_pack
-    nc-time-axis>=1.4
-    pandas
-    stratify
-    %(docs)s
-    %(test)s
-
-[flake8]
-# References:
-# https://flake8.readthedocs.io/en/latest/user/configuration.html
-# https://flake8.readthedocs.io/en/latest/user/error-codes.html
-# https://pycodestyle.readthedocs.io/en/latest/intro.html#error-codes
-
-max-line-length = 80
-max-complexity = 50
-select = C,E,F,W,B,B950
-ignore =
-    # E203: whitespace before ':'
-    E203,
-    # E226: missing whitespace around arithmetic operator
-    E226,
-    # E231: missing whitespace after ',', ';', or ':'
-    E231,
-    # E402: module level imports on one line
-    E402,
-    # E501: line too long
-    E501,
-    # E731: do not assign a lambda expression, use a def
-    E731,
-    # W503: line break before binary operator
-    W503,
-    # W504: line break after binary operator
-    W504,
-exclude =
-    #
-    # ignore the following directories
-    #
-    .eggs,
-    build,
-    docs/src/sphinxext/*,
-    tools/*,
-    benchmarks/*,
-    #
-    # ignore auto-generated files
-    #
-    _ff_cross_refrences.py,
-    std_names.py,
-    um_cf_map.py,
-    #
-    # ignore third-party files
-    #
-    gitwash_dumper.py,
-    #
-    # convenience imports
-    #
-    lib/iris/common/__init__.py