
Commit

Merge pull request #4296 from trexfeathers/mesh-data-model_update
`mesh-data-model` update
stephenworsley committed Aug 31, 2021
2 parents 2db5228 + f5fd585 commit 4b18209
Showing 63 changed files with 13,752 additions and 526 deletions.
2 changes: 1 addition & 1 deletion .cirrus.yml
@@ -38,7 +38,7 @@ env:
# Conda packages to be installed.
CONDA_CACHE_PACKAGES: "nox pip"
# Git commit hash for iris test data.
IRIS_TEST_DATA_VERSION: "2.0.0"
IRIS_TEST_DATA_VERSION: "2.4"
# Base directory for the iris-test-data.
IRIS_TEST_DATA_DIR: ${HOME}/iris-test-data

1 change: 1 addition & 0 deletions MANIFEST.in
@@ -4,6 +4,7 @@ include CHANGES COPYING COPYING.LESSER
# Files from setup.py package_data that are not automatically added to source distributions
recursive-include lib/iris/tests/results *.cml *.cdl *.txt *.xml *.json
recursive-include lib/iris/etc *
include lib/iris/tests/stock/file_headers/*

recursive-include requirements *

2 changes: 1 addition & 1 deletion docs/src/sphinxext/generate_package_rst.py
@@ -13,7 +13,7 @@
# list of tuples for modules to exclude. Useful if the documentation throws
# warnings, especially for experimental modules.
exclude_modules = [
("experimental/raster", "iris.experimental.raster") # gdal conflicts
("experimental/raster", "iris.experimental.raster"), # gdal conflicts
]


24 changes: 22 additions & 2 deletions docs/src/whatsnew/latest.rst
@@ -16,7 +16,8 @@ This document explains the changes made to Iris for this release

The highlights for this minor release of Iris include:

* N/A
* We've added support for `UGRID`_ meshes, which can now be loaded and attached
to a cube.

And finally, get in touch with us on `GitHub`_ if you have any issues or
feature requests for improving Iris. Enjoy!
@@ -31,7 +32,25 @@ This document explains the changes made to Iris for this release
✨ Features
===========

#. N/A
#. `@bjlittle`_, `@pp-mo`_ and `@trexfeathers`_ added support for unstructured
meshes, as described by `UGRID`_. This involved adding a data model (:pull:`3968`,
:pull:`4014`, :pull:`4027`, :pull:`4036`, :pull:`4053`) and API (:pull:`4063`,
:pull:`4064`), and supporting representation (:pull:`4033`, :pull:`4054`) and
loading (:pull:`4058`) of data on meshes.
Most of this new API can be found in :mod:`iris.experimental.ugrid`. The key
objects introduced are :class:`iris.experimental.ugrid.Mesh`,
:class:`iris.experimental.ugrid.MeshCoord` and
:obj:`iris.experimental.ugrid.PARSE_UGRID_ON_LOAD`.
A :class:`iris.experimental.ugrid.Mesh` contains a full description of a UGRID
type mesh. :class:`~iris.experimental.ugrid.MeshCoord`\ s are coordinates that
reference and represent a :class:`~iris.experimental.ugrid.Mesh` for use
on a :class:`~iris.cube.Cube`. :class:`~iris.cube.Cube`\ s are also given the
property :attr:`~iris.cube.Cube.mesh` which returns a
:class:`~iris.experimental.ugrid.Mesh` if one is attached to the
:class:`~iris.cube.Cube` via a :class:`~iris.experimental.ugrid.MeshCoord`.
Finally, the context manager :obj:`~iris.experimental.ugrid.PARSE_UGRID_ON_LOAD`
provides a way to load UGRID files so that :class:`~iris.cube.Cube`\ s can be
returned with a :class:`~iris.experimental.ugrid.Mesh` attached.
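
A quick sketch of the new loading workflow described above (the file path and
phenomenon name here are hypothetical, for illustration only)::

    import iris
    from iris.experimental.ugrid import PARSE_UGRID_ON_LOAD

    # Inside this context, UGRID mesh variables in the file are parsed and
    # attached to the returned cube as a Mesh, via MeshCoord coordinates.
    with PARSE_UGRID_ON_LOAD.context():
        cube = iris.load_cube("mesh_data.nc", "air_temperature")

    # The new Cube.mesh property returns the attached Mesh, or None.
    print(cube.mesh)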


🐛 Bugs Fixed
@@ -87,3 +106,4 @@ This document explains the changes made to Iris for this release
Whatsnew resources in alphabetical order:
.. _GitHub: https://github.com/SciTools/iris/issues/new/choose
.. _UGRID: https://ugrid-conventions.github.io/ugrid-conventions/
183 changes: 171 additions & 12 deletions lib/iris/common/metadata.py
@@ -26,22 +26,26 @@
from .lenient import _qualname as qualname

__all__ = [
"SERVICES_COMBINE",
"SERVICES_DIFFERENCE",
"SERVICES_EQUAL",
"SERVICES",
"AncillaryVariableMetadata",
"BaseMetadata",
"CellMeasureMetadata",
"CoordMetadata",
"CubeMetadata",
"DimCoordMetadata",
"hexdigest",
"metadata_filter",
"metadata_manager_factory",
"SERVICES",
"SERVICES_COMBINE",
"SERVICES_DIFFERENCE",
"SERVICES_EQUAL",
]


# https://www.unidata.ucar.edu/software/netcdf/docs/netcdf_data_set_components.html#object_name

from ..util import guess_coord_axis

_TOKEN_PARSE = re.compile(r"""^[a-zA-Z0-9][\w\.\+\-@]*$""")

# Configure the logger.
@@ -193,9 +197,19 @@ def func(field):
return result

# Note that, for strict we use "_fields" not "_members".
# The "circular" member does not participate in strict equivalence.
# TODO: refactor so that 'non-participants' can be held in their specific subclasses.
# Certain members never participate in strict equivalence, so
# are filtered out.
fields = filter(
lambda field: field != "circular", self._fields
lambda field: field
not in (
"circular",
"src_dim",
"node_dimension",
"edge_dimension",
"face_dimension",
),
self._fields,
)
result = all([func(field) for field in fields])

@@ -1338,6 +1352,149 @@ def equal(self, other, lenient=None):
return super().equal(other, lenient=lenient)


def metadata_filter(
instances,
item=None,
standard_name=None,
long_name=None,
var_name=None,
attributes=None,
axis=None,
):
"""
Filter a collection of objects by their metadata to fit the given metadata
criteria.
Criteria can be either specific properties or other objects with metadata
to be matched.
Args:
* instances:
One or more objects to be filtered.
Kwargs:
* item:
Either,
* a :attr:`~iris.common.mixin.CFVariableMixin.standard_name`,
:attr:`~iris.common.mixin.CFVariableMixin.long_name`, or
:attr:`~iris.common.mixin.CFVariableMixin.var_name` which is compared
against the :meth:`~iris.common.mixin.CFVariableMixin.name`.
* a coordinate or metadata instance equal to that of
the desired objects e.g., :class:`~iris.coords.DimCoord`
or :class:`CoordMetadata`.
* standard_name:
The CF standard name of the desired object. If ``None``, does not
check for ``standard_name``.
* long_name:
An unconstrained description of the object. If ``None``, does not
check for ``long_name``.
* var_name:
The NetCDF variable name of the desired object. If ``None``, does
not check for ``var_name``.
* attributes:
A dictionary of attributes desired on the object. If ``None``,
does not check for ``attributes``.
* axis:
The desired object's axis, see :func:`~iris.util.guess_coord_axis`.
If ``None``, does not check for ``axis``. Accepts the values ``X``,
``Y``, ``Z`` and ``T`` (case-insensitive).
Returns:
A list of the objects supplied in the ``instances`` argument, limited
to only those that matched the given criteria.
"""
name = None
obj = None

if isinstance(item, str):
name = item
else:
obj = item

# apply de morgan's law for one less logical operation
if not (isinstance(instances, str) or isinstance(instances, Iterable)):
instances = [instances]

result = instances

if name is not None:
result = [instance for instance in result if instance.name() == name]

if standard_name is not None:
result = [
instance
for instance in result
if instance.standard_name == standard_name
]

if long_name is not None:
result = [
instance for instance in result if instance.long_name == long_name
]

if var_name is not None:
result = [
instance for instance in result if instance.var_name == var_name
]

if attributes is not None:
if not isinstance(attributes, Mapping):
msg = (
"The attributes keyword was expecting a dictionary "
"type, but got a %s instead." % type(attributes)
)
raise ValueError(msg)

def attr_filter(instance):
return all(
k in instance.attributes
and hexdigest(instance.attributes[k]) == hexdigest(v)
for k, v in attributes.items()
)

result = [instance for instance in result if attr_filter(instance)]

if axis is not None:
axis = axis.upper()

def get_axis(instance):
if hasattr(instance, "axis"):
axis = instance.axis.upper()
else:
axis = guess_coord_axis(instance)
return axis

result = [
instance for instance in result if get_axis(instance) == axis
]

if obj is not None:
if hasattr(obj, "__class__") and issubclass(
obj.__class__, BaseMetadata
):
target_metadata = obj
else:
target_metadata = obj.metadata

result = [
instance
for instance in result
if instance.metadata == target_metadata
]

return result
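
# A minimal usage sketch of metadata_filter, assuming ``cube`` is an
# iris.cube.Cube carrying latitude/longitude coordinates (all names here are
# illustrative):
#
#     from iris.common.metadata import metadata_filter
#
#     # Keep only coordinates whose (guessed) axis is "Y".
#     y_coords = metadata_filter(cube.coords(), axis="Y")
#
#     # Keep only coordinates whose standard_name is "latitude".
#     lat_coords = metadata_filter(cube.coords(), standard_name="latitude")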


@lru_cache(maxsize=None)
def _factory_cache(cls):
def __init__(self, cls, **kwargs):
@@ -1484,29 +1641,31 @@ def metadata_manager_factory(cls, **kwargs):


#: Convenience collection of lenient metadata combine services.
SERVICES_COMBINE = (
# TODO: change lists back to tuples once CellMeasureMetadata is re-integrated
# here (currently in experimental.ugrid).
SERVICES_COMBINE = [
AncillaryVariableMetadata.combine,
BaseMetadata.combine,
CellMeasureMetadata.combine,
CoordMetadata.combine,
CubeMetadata.combine,
DimCoordMetadata.combine,
)
]


#: Convenience collection of lenient metadata difference services.
SERVICES_DIFFERENCE = (
SERVICES_DIFFERENCE = [
AncillaryVariableMetadata.difference,
BaseMetadata.difference,
CellMeasureMetadata.difference,
CoordMetadata.difference,
CubeMetadata.difference,
DimCoordMetadata.difference,
)
]


#: Convenience collection of lenient metadata equality services.
SERVICES_EQUAL = (
SERVICES_EQUAL = [
AncillaryVariableMetadata.__eq__,
AncillaryVariableMetadata.equal,
BaseMetadata.__eq__,
@@ -1519,7 +1678,7 @@ def metadata_manager_factory(cls, **kwargs):
CubeMetadata.equal,
DimCoordMetadata.__eq__,
DimCoordMetadata.equal,
)
]


#: Convenience collection of lenient metadata services.
12 changes: 7 additions & 5 deletions lib/iris/coords.py
@@ -18,6 +18,7 @@
import zlib

import cftime
import dask.array as da
import numpy as np
import numpy.ma as ma

@@ -579,8 +580,7 @@ def xml_element(self, doc):
Returns:
The :class:`xml.dom.minidom.Element` that will describe this
:class:`_DimensionalMetadata`, and the dictionary of attributes
that require to be added to this element.
:class:`_DimensionalMetadata`.
"""
# Create the XML element as the camelCaseEquivalent of the
@@ -627,6 +627,10 @@ def xml_element(self, doc):
# otherwise.
if isinstance(self, Coord):
values_term = "points"
# TODO: replace with isinstance(self, Connectivity) once Connectivity
# is re-integrated here (currently in experimental.ugrid).
elif hasattr(self, "indices"):
values_term = "indices"
else:
values_term = "data"
element.setAttribute(values_term, self._xml_array_repr(self._values))
@@ -1936,7 +1940,6 @@ def collapsed(self, dims_to_collapse=None):
Replaces the points & bounds with a simple bounded region.
"""
import dask.array as da

# Ensure dims_to_collapse is a tuple to be able to pass
# through to numpy
@@ -2262,8 +2265,7 @@ def xml_element(self, doc):
Returns:
The :class:`xml.dom.minidom.Element` that will describe this
:class:`DimCoord`, and the dictionary of attributes that require
to be added to this element.
:class:`DimCoord`.
"""
# Create the XML element as the camelCaseEquivalent of the