
Open mfdataset enhancement #9955

Open. Wants to merge 39 commits into base: main.
Changes from 5 commits
Commits (39)
3ec575d
GH6736
pratiman-91 Jan 16, 2025
5b95c21
Updated whats-new.rst
pratiman-91 Jan 16, 2025
9249bf3
Update xarray/backends/api.py
pratiman-91 Jan 17, 2025
1eb6422
Updated logic
pratiman-91 Jan 17, 2025
8005e33
Added tests and modified the logic to get correct ids for concat
pratiman-91 Jan 19, 2025
3bfaaee
Added new tests and logic to handle 2x2 open_mfdataset with ignore an…
pratiman-91 Jan 19, 2025
f621030
pre-commit run
pratiman-91 Jan 19, 2025
b9f04c8
new logic to add nested paths
pratiman-91 Feb 16, 2025
0657014
made remove_path a private function and updated whats-new.rst
pratiman-91 Apr 4, 2025
4dd6da4
Merge branch 'main' into open_mfdataset_enchancement
pratiman-91 Apr 4, 2025
232ab45
Merge branch 'main' into open_mfdataset_enchancement
pratiman-91 Apr 7, 2025
1110a28
Merge branch 'main' into open_mfdataset_enchancement
pratiman-91 Apr 14, 2025
ffc3c53
Updated whats-new.rst
pratiman-91 Apr 15, 2025
efe1642
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Apr 15, 2025
279e82b
Merge branch 'main' into open_mfdataset_enchancement
pratiman-91 Apr 16, 2025
fc286d5
removed entry to whats-new.rst
pratiman-91 Apr 25, 2025
ae0aa48
Remove conflict
pratiman-91 Apr 25, 2025
c27ac70
Whats-new conflicts
pratiman-91 Apr 25, 2025
c2180d1
Merge branch 'main' into open_mfdataset_enchancement
pratiman-91 Apr 25, 2025
4a00d26
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Apr 25, 2025
c0c7ad4
Merge branch 'main' into open_mfdataset_enchancement
pratiman-91 Apr 29, 2025
f7bf6c9
Merge branch 'pydata:main' into open_mfdataset_enchancement
pratiman-91 May 9, 2025
8e970b4
Merge branch 'main' into open_mfdataset_enchancement
pratiman-91 May 30, 2025
860be1e
modify docs
pratiman-91 May 30, 2025
0451d13
modify doc-strings
pratiman-91 May 30, 2025
e650c22
Merge branch 'main' into open_mfdataset_enchancement
pratiman-91 May 30, 2025
05cb2f0
Update xarray/backends/api.py
pratiman-91 May 30, 2025
607b6f0
Update xarray/backends/api.py
pratiman-91 May 30, 2025
0b67aa1
catch exception for warn
pratiman-91 May 30, 2025
3e269ea
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] May 30, 2025
7c98670
Update xarray/backends/api.py
pratiman-91 May 30, 2025
2567598
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] May 30, 2025
e0fa3ba
import emit_user_level_warning
pratiman-91 May 30, 2025
c24826c
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] May 30, 2025
f55644b
retry importing emit_user_level_warning
pratiman-91 May 30, 2025
a726c4a
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] May 30, 2025
16819bd
emit_user_level_warning
pratiman-91 May 30, 2025
6105c0d
adding import
pratiman-91 May 30, 2025
75468a1
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] May 30, 2025
2 changes: 2 additions & 0 deletions doc/whats-new.rst
@@ -52,6 +52,8 @@ New Features
~~~~~~~~~~~~
- Relax nanosecond datetime restriction in CF time decoding (:issue:`7493`, :pull:`9618`).
  By `Kai Mühlbauer <https://github.com/kmuehlbauer>`_ and `Spencer Clark <https://github.com/spencerkclark>`_.
- Add new ``errors`` arg to :py:func:`open_mfdataset` to better handle invalid files.
  (:issue:`6736`, :pull:`9955`). By `Pratiman Patel <https://github.com/pratiman-91>`_.

Breaking changes
~~~~~~~~~~~~~~~~
33 changes: 31 additions & 2 deletions xarray/backends/api.py
@@ -1,6 +1,7 @@
from __future__ import annotations

import os
import warnings
from collections.abc import (
    Callable,
    Hashable,
@@ -1393,6 +1394,7 @@ def open_mfdataset(
    join: JoinOptions = "outer",
    attrs_file: str | os.PathLike | None = None,
    combine_attrs: CombineAttrsOptions = "override",
    errors: ErrorOptionsWithWarn = "raise",
    **kwargs,
) -> Dataset:
    """Open multiple files as a single dataset.
@@ -1519,7 +1521,12 @@

        If a callable, it must expect a sequence of ``attrs`` dicts and a context object
        as its only parameters.
    **kwargs : optional
    errors : {'raise', 'warn', 'ignore'}, default 'raise'
        - If 'raise', an exception is raised for any invalid dataset.
        - If 'warn', a warning is issued for each invalid dataset.
        - If 'ignore', invalid datasets are ignored.

    **kwargs : optional
        Additional arguments passed on to :py:func:`xarray.open_dataset`. For an
        overview of some of the possible options, see the documentation of
        :py:func:`xarray.open_dataset`
@@ -1611,7 +1618,28 @@
        open_ = open_dataset
        getattr_ = getattr

    datasets = [open_(p, **open_kwargs) for p in paths1d]
    if errors not in ("raise", "warn", "ignore"):
        raise ValueError(f"'errors' must be 'raise', 'warn' or 'ignore', got '{errors}'")

    datasets = []
    invalid_ids = set()  # to remove invalid ids for 'combine'
    for i, p in enumerate(paths1d):
        try:
            ds = open_(p, **open_kwargs)
            datasets.append(ds)
        except Exception:
            if errors == "raise":
                raise
            elif errors == "warn":
                warnings.warn(
                    f"Could not open {p}. Ignoring.", UserWarning, stacklevel=2
                )
                invalid_ids.add(i)
                continue
            else:
                invalid_ids.add(i)
                continue

    closers = [getattr_(ds, "_close") for ds in datasets]
    if preprocess is not None:
        datasets = [preprocess(ds) for ds in datasets]
@@ -1626,6 +1654,7 @@
        if combine == "nested":
            # Combined nested list by successive concat and merge operations
            # along each dimension, using structure given by "ids"
            ids = [id_ for i, id_ in enumerate(ids) if i not in invalid_ids]
            combined = _nested_combine(
                datasets,
                concat_dims=concat_dim,
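To illustrate the behaviour the change above adds, here is a minimal usage sketch. It is not part of the pull request; the file names are hypothetical, a netCDF backend is assumed to be installed, and the third path is deliberately missing so that the new ``errors`` handling is exercised.

import numpy as np
import xarray as xr

# Two small, valid files plus one path that does not exist.
xr.Dataset({"foo": ("x", np.arange(5))}).to_netcdf("part1.nc")
xr.Dataset({"foo": ("x", np.arange(5, 10))}).to_netcdf("part2.nc")

# With errors="warn", the unreadable path is reported and skipped instead of
# aborting the whole call; errors="ignore" skips it silently, and the default
# errors="raise" keeps the old behaviour of propagating the exception.
ds = xr.open_mfdataset(
    ["part1.nc", "part2.nc", "missing.nc"],
    combine="nested",
    concat_dim="x",
    errors="warn",
)
print(ds.sizes)  # x has length 10: only the two valid files were combined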
31 changes: 31 additions & 0 deletions xarray/tests/test_backends.py
@@ -4831,6 +4831,37 @@ def test_open_mfdataset_2(self) -> None:
                ) as actual:
                    assert_identical(original, actual)

    def test_open_mfdataset_with_ignore(self) -> None:
        original = Dataset({"foo": ("x", np.random.randn(10))})
        with create_tmp_files(2) as (tmp1, tmp2):
            ds1 = original.isel(x=slice(5))
            ds2 = original.isel(x=slice(5, 10))
            ds1.to_netcdf(tmp1)
            ds2.to_netcdf(tmp2)
            with open_mfdataset(
                [tmp1, tmp2, "non-existent-file.nc"],
                concat_dim="x",
                combine="nested",
                errors="ignore",
            ) as actual:
                assert_identical(original, actual)

    def test_open_mfdataset_with_warn(self) -> None:
        original = Dataset({"foo": ("x", np.random.randn(10))})
        with pytest.warns(UserWarning, match="Ignoring."):
            with create_tmp_files(2) as (tmp1, tmp2):
                ds1 = original.isel(x=slice(5))
                ds2 = original.isel(x=slice(5, 10))
                ds1.to_netcdf(tmp1)
                ds2.to_netcdf(tmp2)
                with open_mfdataset(
                    [tmp1, tmp2, "non-existent-file.nc"],
                    concat_dim="x",
                    combine="nested",
                    errors="warn",
                ) as actual:
                    assert_identical(original, actual)

    def test_attrs_mfdataset(self) -> None:
        original = Dataset({"foo": ("x", np.random.randn(10))})
        with create_tmp_file() as tmp1:
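For completeness, a hypothetical companion test, not part of this pull request, sketching how the default ``errors='raise'`` path could be exercised; it assumes the same helpers used by the tests above (``Dataset``, ``np``, ``create_tmp_files``, ``open_mfdataset``, ``pytest``) and the method name is made up.

    def test_open_mfdataset_with_raise_sketch(self) -> None:
        # Hypothetical test (not part of this diff): the default
        # errors="raise" should propagate the failure from the unreadable path.
        original = Dataset({"foo": ("x", np.random.randn(10))})
        with create_tmp_files(2) as (tmp1, tmp2):
            original.isel(x=slice(5)).to_netcdf(tmp1)
            original.isel(x=slice(5, 10)).to_netcdf(tmp2)
            # The concrete exception type depends on the backend, so the
            # sketch only asserts that something is raised.
            with pytest.raises(Exception):
                open_mfdataset(
                    [tmp1, tmp2, "non-existent-file.nc"],
                    concat_dim="x",
                    combine="nested",
                )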