Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open mfdataset enhancement #9955

Open
wants to merge 7 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from 5 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions doc/whats-new.rst
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,8 @@ New Features
~~~~~~~~~~~~
- Relax nanosecond datetime restriction in CF time decoding (:issue:`7493`, :pull:`9618`).
By `Kai Mühlbauer <https://github.com/kmuehlbauer>`_ and `Spencer Clark <https://github.com/spencerkclark>`_.
- Add new ``errors`` arg to :py:func:`open_mfdataset` to better handle invalid files.
(:issue:`6736`, :pull:`9955`). By `Pratiman Patel <https://github.com/pratiman-91>`_.

Breaking changes
~~~~~~~~~~~~~~~~
Expand Down
33 changes: 31 additions & 2 deletions xarray/backends/api.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
from __future__ import annotations

import os
import warnings
from collections.abc import (
Callable,
Hashable,
Expand Down Expand Up @@ -1393,6 +1394,7 @@ def open_mfdataset(
join: JoinOptions = "outer",
attrs_file: str | os.PathLike | None = None,
combine_attrs: CombineAttrsOptions = "override",
errors: ErrorOptionsWithWarn = "raise",
**kwargs,
) -> Dataset:
"""Open multiple files as a single dataset.
Expand Down Expand Up @@ -1519,7 +1521,12 @@ def open_mfdataset(

If a callable, it must expect a sequence of ``attrs`` dicts and a context object
as its only parameters.
**kwargs : optional
errors : {'raise', 'warn', 'ignore'}, default 'raise'
- If 'raise', then an invalid dataset will raise an exception.
- If 'warn', then a warning will be issued for each invalid dataset.
- If 'ignore', then invalid datasets will be silently skipped.

**kwargs : optional
Additional arguments passed on to :py:func:`xarray.open_dataset`. For an
overview of some of the possible options, see the documentation of
:py:func:`xarray.open_dataset`
Expand Down Expand Up @@ -1611,7 +1618,28 @@ def open_mfdataset(
open_ = open_dataset
getattr_ = getattr

datasets = [open_(p, **open_kwargs) for p in paths1d]
if errors not in ("raise", "warn", "ignore"):
raise ValueError(f"'errors' must be 'raise', 'warn' or 'ignore', got '{errors}'")

datasets = []
invalid_ids = set() # to remove invalid ids for 'combine'
for i, p in enumerate(paths1d):
try:
ds = open_(p, **open_kwargs)
datasets.append(ds)
except Exception:
if errors == "raise":
raise
elif errors == "warn":
warnings.warn(
f"Could not open {p}. Ignoring.", UserWarning, stacklevel=2
)
invalid_ids.add(i)
continue
else:
invalid_ids.add(i)
continue

closers = [getattr_(ds, "_close") for ds in datasets]
if preprocess is not None:
datasets = [preprocess(ds) for ds in datasets]
Expand All @@ -1626,6 +1654,7 @@ def open_mfdataset(
if combine == "nested":
# Combined nested list by successive concat and merge operations
# along each dimension, using structure given by "ids"
ids = [id_ for i, id_ in enumerate(ids) if i not in invalid_ids]
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Does this work with nested datasets? E.g. a 2x2 set where one file is missing

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Thanks for pointing this out. I did not know about 2x2 functionality. Now, I have modified the logic to remove the invalid ids and paths and added tests for the same.

combined = _nested_combine(
datasets,
concat_dims=concat_dim,
Expand Down
31 changes: 31 additions & 0 deletions xarray/tests/test_backends.py
Original file line number Diff line number Diff line change
Expand Up @@ -4831,6 +4831,37 @@ def test_open_mfdataset_2(self) -> None:
) as actual:
assert_identical(original, actual)

def test_open_mfdataset_with_ignore(self) -> None:
    """``errors="ignore"`` silently drops paths that cannot be opened."""
    full = Dataset({"foo": ("x", np.random.randn(10))})
    with create_tmp_files(2) as (path_a, path_b):
        # Split the dataset across two files along ``x``.
        full.isel(x=slice(5)).to_netcdf(path_a)
        full.isel(x=slice(5, 10)).to_netcdf(path_b)
        sources = [path_a, path_b, "non-existent-file.nc"]
        with open_mfdataset(
            sources,
            concat_dim="x",
            combine="nested",
            errors="ignore",
        ) as actual:
            # The unreadable path is skipped; the readable files
            # reassemble the original dataset.
            assert_identical(full, actual)

def test_open_mfdataset_with_warn(self) -> None:
    """``errors="warn"`` emits a UserWarning per unreadable path while
    still returning the combined dataset built from the readable ones."""
    original = Dataset({"foo": ("x", np.random.randn(10))})
    with create_tmp_files(2) as (tmp1, tmp2):
        original.isel(x=slice(5)).to_netcdf(tmp1)
        original.isel(x=slice(5, 10)).to_netcdf(tmp2)
        # Scope pytest.warns to the open_mfdataset call only, so an
        # incidental warning from to_netcdf cannot satisfy the check;
        # escape the dot so the regex matches the literal message.
        with pytest.warns(UserWarning, match=r"Ignoring\."):
            with open_mfdataset(
                [tmp1, tmp2, "non-existent-file.nc"],
                concat_dim="x",
                combine="nested",
                errors="warn",
            ) as actual:
                assert_identical(original, actual)

def test_attrs_mfdataset(self) -> None:
original = Dataset({"foo": ("x", np.random.randn(10))})
with create_tmp_file() as tmp1:
Expand Down