
# Copyright Iris contributors
#
# This file is part of Iris and is released under the BSD license.
# See LICENSE in the root of the repository for full licensing details.
"""A package for handling multi-dimensional data and associated metadata.

.. note::

    The Iris documentation has further usage information, including
    a :ref:`user guide <user_guide_index>` which should be the first port of
    call for new users.

The functions in this module provide the main way to load and/or save
your data.

The :func:`load` function provides a simple way to explore data from
the interactive Python prompt. It will convert the source data into
:class:`Cubes <iris.cube.Cube>`, and combine those cubes into
higher-dimensional cubes where possible.

The :func:`load_cube` and :func:`load_cubes` functions are similar to
:func:`load`, but they raise an exception if the number of cubes is not
what was expected. They are more useful in scripts, where they can
provide an early sanity check on incoming data.
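
For example, a script can check up-front that a file provides exactly one
cube of the expected phenomenon (the filename here is purely illustrative)::

    # Raises an error unless exactly one matching cube is found.
    cube = load_cube('surface_data.nc', 'air_temperature')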

The :func:`load_raw` function is provided for those occasions where the
automatic combination of cubes into higher-dimensional cubes is
undesirable. However, it is intended as a tool of last resort! If you
experience a problem with the automatic combination process then please
raise an issue with the Iris developers.

To persist a cube to the file-system, use the :func:`save` function.
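
For example, a simple load-modify-save round trip (with purely illustrative
filenames) looks like this::

    cube = load_cube('input.pp', 'air_temperature')
    save(cube, 'output.nc')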

All the load functions share very similar arguments:

* uris:
    Either a single filename/URI expressed as a string or
    :class:`pathlib.PurePath`, or an iterable of filenames/URIs.

    Filenames can contain `~` or `~user` abbreviations, and/or
    Unix shell-style wildcards (e.g. `*` and `?`). See the
    standard library function :func:`os.path.expanduser` and
    module :mod:`fnmatch` for more details.

    .. warning::

        If supplying a URL, only OPeNDAP Data Sources are supported.
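
    For example, `~` expansion and wildcards can be combined to load every
    PP file in a (purely illustrative) directory with a single call::

        cubes = load('~/model_output/*.pp')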

* constraints:
    Either a single constraint, or an iterable of constraints.
    Each constraint can be either a string, an instance of
    :class:`iris.Constraint`, or an instance of
    :class:`iris.AttributeConstraint`.  If the constraint is a string
    it will be used to match against cube.name().

    .. _constraint_egs:

    For example::

        # Load air temperature data.
        load_cube(uri, 'air_temperature')

        # Load data with a specific model level number.
        load_cube(uri, iris.Constraint(model_level_number=1))

        # Load data with a specific STASH code.
        load_cube(uri, iris.AttributeConstraint(STASH='m01s00i004'))

* callback:
    A function to add metadata from the originating field and/or URI which
    obeys the following rules:

    1. Function signature must be: ``(cube, field, filename)``.
    2. Modifies the given cube inplace, unless a new cube is
       returned by the function.
    3. If the cube is to be rejected the callback must raise
       an :class:`iris.exceptions.IgnoreCubeException`.

    For example::

        def callback(cube, field, filename):
            # Extract ID from filenames given as: <prefix>__<exp_id>
            experiment_id = filename.split('__')[1]
            experiment_coord = iris.coords.AuxCoord(
                experiment_id, long_name='experiment_id')
            cube.add_aux_coord(experiment_coord)

"""

from collections.abc import Iterable
import contextlib
import glob
import importlib
import itertools
import os.path
import threading
from typing import Callable, Literal

import iris._constraints
import iris.config
import iris.io

from ._deprecation import IrisDeprecation, warn_deprecated

try:
    from ._version import version as __version__  # noqa: F401
except ModuleNotFoundError:
    __version__ = "unknown"


try:
    import iris_sample_data
except ImportError:
    iris_sample_data = None


# Restrict the names imported when using "from iris import *"
__all__ = [
    "AttributeConstraint",
    "Constraint",
    "FUTURE",
    "Future",
    "IrisDeprecation",
    "NameConstraint",
    "load",
    "load_cube",
    "load_cubes",
    "load_raw",
    "sample_data_path",
    "save",
    "site_configuration",
    "use_plugin",
]


Constraint = iris._constraints.Constraint
AttributeConstraint = iris._constraints.AttributeConstraint
NameConstraint = iris._constraints.NameConstraint


class Future(threading.local):
    """Run-time configuration controller."""

    def __init__(self, datum_support=False, pandas_ndim=False, save_split_attrs=False):
        """Container for run-time options controls.

        To adjust the values simply update the relevant attribute from
        within your code. For example::

            # example_future_flag is a fictional example.
            iris.FUTURE.example_future_flag = False

        If Iris code is executed with multiple threads, note the values of
        these options are thread-specific.

        Parameters
        ----------
        datum_support : bool, default=False
            Opts in to loading coordinate system datum information from NetCDF
            files into :class:`~iris.coord_systems.CoordSystem`, wherever this
            information is present.
        pandas_ndim : bool, default=False
            See :func:`iris.pandas.as_data_frame` for details - opts in to the
            newer n-dimensional behaviour.
        save_split_attrs : bool, default=False
            Save "global" and "local" cube attributes to netcdf in appropriately
            different ways : "global" ones are saved as dataset attributes, where
            possible, while "local" ones are saved as data-variable attributes.
            See :func:`iris.fileformats.netcdf.saver.save`.

        """
        # The flag 'example_future_flag' is provided as a reference for the
        # structure of this class.
        #
        # Note that self.__dict__ is used explicitly due to the manner in which
        # __setattr__ is overridden.
        #
        # self.__dict__['example_future_flag'] = example_future_flag
        self.__dict__["datum_support"] = datum_support
        self.__dict__["pandas_ndim"] = pandas_ndim
        self.__dict__["save_split_attrs"] = save_split_attrs

        # TODO: next major release: set IrisDeprecation to subclass
        # DeprecationWarning instead of UserWarning.

    def __repr__(self):
        # msg = ('Future(example_future_flag={})')
        # return msg.format(self.example_future_flag)
        msg = "Future(datum_support={}, pandas_ndim={}, save_split_attrs={})"
        return msg.format(self.datum_support, self.pandas_ndim, self.save_split_attrs)

    # deprecated_options = {'example_future_flag': 'warning',}
    deprecated_options: dict[str, Literal["error", "warning"]] = {}

    def __setattr__(self, name, value):
        if name in self.deprecated_options:
            level = self.deprecated_options[name]
            if level == "error" and not value:
                emsg = (
                    "setting the 'Future' property {prop!r} has been "
                    "deprecated to be removed in a future release, and "
                    "deprecated {prop!r} behaviour has been removed. "
                    "Please remove code that sets this property."
                )
                raise AttributeError(emsg.format(prop=name))
            else:
                msg = (
                    "setting the 'Future' property {!r} is deprecated "
                    "and will be removed in a future release. "
                    "Please remove code that sets this property."
                )
                warn_deprecated(msg.format(name))
        if name not in self.__dict__:
            msg = "'Future' object has no attribute {!r}".format(name)
            raise AttributeError(msg)
        self.__dict__[name] = value

    @contextlib.contextmanager
    def context(self, **kwargs):
        """Return context manager for temp modification of option values for the active thread.

        On entry to the `with` statement, all keyword arguments are applied to
        the Future object. On exit from the `with` statement, the previous state
        is restored.

        For example::

            # example_future_flag is a fictional example.
            with iris.FUTURE.context(example_future_flag=False):
                # ... code that expects some past behaviour

        """
        # Save the current context
        current_state = self.__dict__.copy()
        # Update the state
        for name, value in kwargs.items():
            setattr(self, name, value)
        try:
            yield
        finally:
            # Return the state
            self.__dict__.clear()
            self.__dict__.update(current_state)
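

# A minimal usage sketch (documentation only, not part of the Iris API): one of
# the real Future options is switched on temporarily around a load; the previous
# setting is restored automatically on exit. The filename is hypothetical, and
# FUTURE / load_cube are defined later in this module (resolved when the sketch
# is called, not when it is defined).
def _example_future_context():  # illustrative sketch only
    with FUTURE.context(datum_support=True):
        # Within this block, NetCDF datum information is loaded into the
        # resulting cube's coordinate system(s).
        return load_cube("hypothetical_file.nc")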


#: Object containing all the Iris run-time options.
FUTURE = Future()


# Initialise the site configuration dictionary.
#: Iris site configuration dictionary.
site_configuration: dict[
    Literal["cf_profile", "cf_patch", "cf_patch_conventions"],
    Callable | Literal[False] | None,
] = {}

try:
    from iris.site_config import update as _update
except ImportError:
    pass
else:
    _update(site_configuration)


def _generate_cubes(uris, callback, constraints):
    """Return a generator of cubes given the URIs and a callback."""
    if isinstance(uris, str) or not isinstance(uris, Iterable):
        # Make a string, or other single item, into an iterable.
        uris = [uris]

    # Group collections of uris by their iris handler
    # Create list of tuples relating schemes to part names
    uri_tuples = sorted(iris.io.decode_uri(uri) for uri in uris)

    for scheme, groups in itertools.groupby(uri_tuples, key=lambda x: x[0]):
        # Call each scheme handler with the appropriate URIs
        if scheme == "file":
            part_names = [x[1] for x in groups]
            for cube in iris.io.load_files(part_names, callback, constraints):
                yield cube
        elif scheme in ["http", "https"]:
            urls = [":".join(x) for x in groups]
            for cube in iris.io.load_http(urls, callback):
                yield cube
        elif scheme == "data":
            data_objects = [x[1] for x in groups]
            for cube in iris.io.load_data_objects(data_objects, callback):
                yield cube
        else:
            raise ValueError("Iris cannot handle the URI scheme: %s" % scheme)


def _load_collection(uris, constraints=None, callback=None):
    from iris.cube import _CubeFilterCollection

    try:
        cubes = _generate_cubes(uris, callback, constraints)
        result = _CubeFilterCollection.from_cubes(cubes, constraints)
    except EOFError as e:
        raise iris.exceptions.TranslationError(
            "The file appears empty or incomplete: {!r}".format(str(e))
        )
    return result


def load(uris, constraints=None, callback=None):
    """Load any number of Cubes for each constraint.

    For a full description of the arguments, please see the module
    documentation for :mod:`iris`.

    Parameters
    ----------
    uris : str or :class:`pathlib.PurePath`
        One or more filenames/URIs, as a string or :class:`pathlib.PurePath`.
        If supplying a URL, only OPeNDAP Data Sources are supported.
    constraints : optional
        One or more constraints.
    callback : optional
        A modifier/filter function.

    Returns
    -------
    :class:`iris.cube.CubeList`
        An :class:`iris.cube.CubeList`. Note that there is no inherent order
        to this :class:`iris.cube.CubeList` and it should be treated as if it
        were random.

    """
    return _load_collection(uris, constraints, callback).merged().cubes()
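

# A minimal usage sketch (documentation only, not part of the Iris API): loading
# with a mixture of constraint styles and printing short summaries. The path and
# phenomenon names here are hypothetical.
def _example_load_usage():  # illustrative sketch only
    cubes = load(
        "~/model_output/*.pp",
        ["air_temperature", Constraint(model_level_number=1)],
    )
    for cube in cubes:
        print(cube.summary(shorten=True))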


def load_cube(uris, constraint=None, callback=None):
    """Load a single cube.

    For a full description of the arguments, please see the module
    documentation for :mod:`iris`.

    Parameters
    ----------
    uris :
        One or more filenames/URIs, as a string or :class:`pathlib.PurePath`.
        If supplying a URL, only OPeNDAP Data Sources are supported.
    constraint : optional
        A constraint.
    callback : optional
        A modifier/filter function.

    Returns
    -------
    :class:`iris.cube.Cube`

    """
    constraints = iris._constraints.list_of_constraints(constraint)
    if len(constraints) != 1:
        raise ValueError("only a single constraint is allowed")

    cubes = _load_collection(uris, constraints, callback).cubes()

    try:
        cube = cubes.merge_cube()
    except iris.exceptions.MergeError as e:
        raise iris.exceptions.ConstraintMismatchError(str(e))
    except ValueError:
        raise iris.exceptions.ConstraintMismatchError("no cubes found")

    return cube


def load_cubes(uris, constraints=None, callback=None):
    """Load exactly one Cube for each constraint.

    For a full description of the arguments, please see the module
    documentation for :mod:`iris`.

    Parameters
    ----------
    uris :
        One or more filenames/URIs, as a string or :class:`pathlib.PurePath`.
        If supplying a URL, only OPeNDAP Data Sources are supported.
    constraints : optional
        One or more constraints.
    callback : optional
        A modifier/filter function.

    Returns
    -------
    :class:`iris.cube.CubeList`
        An :class:`iris.cube.CubeList`. Note that there is no inherent order
        to this :class:`iris.cube.CubeList` and it should be treated as if it
        were random.

    """
    # Merge the incoming cubes
    collection = _load_collection(uris, constraints, callback).merged()
    # Make sure we have exactly one merged cube per constraint
    bad_pairs = [pair for pair in collection.pairs if len(pair) != 1]
    if bad_pairs:
        fmt = "   {} -> {} cubes"
        bits = [fmt.format(pair.constraint, len(pair)) for pair in bad_pairs]
        msg = "\n" + "\n".join(bits)
        raise iris.exceptions.ConstraintMismatchError(msg)
    return collection.cubes()
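

# A minimal usage sketch (documentation only, not part of the Iris API): asking
# for exactly one cube per constraint, which raises ConstraintMismatchError if
# any constraint matches zero or several cubes. The path and phenomenon names
# are hypothetical, and the file is assumed to provide both phenomena.
def _example_load_cubes_usage():  # illustrative sketch only
    air_temp, air_press = load_cubes(
        "~/model_output/run1.nc",
        ["air_temperature", "air_pressure"],
    )
    return air_temp, air_press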


def load_raw(uris, constraints=None, callback=None):
    """Load non-merged cubes.

    This function is provided for those occasions where the automatic
    combination of cubes into higher-dimensional cubes is undesirable.
    However, it is intended as a tool of last resort! If you experience
    a problem with the automatic combination process then please raise
    an issue with the Iris developers.

    For a full description of the arguments, please see the module
    documentation for :mod:`iris`.

    Parameters
    ----------
    uris :
        One or more filenames/URIs, as a string or :class:`pathlib.PurePath`.
        If supplying a URL, only OPeNDAP Data Sources are supported.
    constraints : optional
        One or more constraints.
    callback : optional
        A modifier/filter function.

    Returns
    -------
    :class:`iris.cube.CubeList`

    """
    from iris.fileformats.um._fast_load import _raw_structured_loading

    with _raw_structured_loading():
        return _load_collection(uris, constraints, callback).cubes()


save = iris.io.save


def sample_data_path(*path_to_join):
    """Given the sample data resource, returns the full path to the file.

    .. note::

        This function is only for locating files in the iris sample data
        collection (installed separately from iris). It is not needed or
        appropriate for general file access.

    """
    target = os.path.join(*path_to_join)
    if os.path.isabs(target):
        raise ValueError(
            "Absolute paths, such as {!r}, are not supported.\n"
            "NB. This function is only for locating files in the "
            "iris sample data collection. It is not needed or "
            "appropriate for general file access.".format(target)
        )
    if iris_sample_data is not None:
        target = os.path.join(iris_sample_data.path, target)
    else:
        raise ImportError(
            "Please install the 'iris-sample-data' package to access sample data."
        )
    if not glob.glob(target):
        raise ValueError(
            "Sample data file(s) at {!r} not found.\n"
            "NB. This function is only for locating files in the "
            "iris sample data collection. It is not needed or "
            "appropriate for general file access.".format(target)
        )
    return target
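

# A minimal usage sketch (documentation only, not part of the Iris API): locating
# a file from the separately installed 'iris-sample-data' package and loading it.
# The filename 'air_temp.pp' is assumed to exist in that collection.
def _example_sample_data_usage():  # illustrative sketch only
    fname = sample_data_path("air_temp.pp")
    return load_cube(fname)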


def use_plugin(plugin_name):
    """Import a plugin.

    Parameters
    ----------
    plugin_name : str
        Name of plugin.

    Examples
    --------
    The following::

        use_plugin("my_plugin")

    is equivalent to::

        import iris.plugins.my_plugin

    This is useful for plugins that are not used directly, but instead do all
    their setup on import.  In this case, style checkers would not know the
    significance of the import statement and warn that it is an unused import.

    """
    importlib.import_module(f"iris.plugins.{plugin_name}")