Skip to content
Draft
Show file tree
Hide file tree
Changes from 11 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
54 changes: 52 additions & 2 deletions .github/workflows/test_and_deploy.yml
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ jobs:
fail-fast: true
matrix:
os: ["windows-latest", "ubuntu-latest", "macos-latest"]
python-version: ["3.9", "3.10", "3.11", "3.12"]
python-version: ["3.10", "3.11", "3.12", "3.13", "3.14"]

steps:
- name: Checkout source
Expand All @@ -34,7 +34,7 @@ jobs:
shell: bash -l {0}
run: |
conda activate dask-image-testenv
python -m pip install -e .
python -m pip install -e .[dataframe]
conda list

- name: Run tests
Expand All @@ -49,6 +49,56 @@ jobs:
parallel: true
path-to-lcov: coverage.lcov

test-minimal:
# Verify dask-image works without the optional `dataframe` extras
# (i.e. without pandas and dask[dataframe]).
runs-on: ${{ matrix.os }}
strategy:
Comment on lines +52 to +56
Copy link

Copilot AI Apr 27, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

deploy (and potentially coveralls) still only depends on test. With the new test-minimal and test-latest-all-extras jobs, a tagged release could publish even if those new test jobs fail, since they are not included in deploy.needs. Update the workflow so deploy.needs (and any other gating jobs) includes the new test jobs.

Copilot uses AI. Check for mistakes.
fail-fast: true
matrix:
os: ["windows-latest", "ubuntu-latest", "macos-latest"]
steps:
- name: Checkout source
uses: actions/checkout@v5

- name: Set up Python
uses: actions/setup-python@v6
with:
python-version: "3.12"

- name: Install dask-image without dataframe extras
run: |
python -m pip install --upgrade pip
python -m pip install -e .[test]

- name: Run tests (find_objects tests are skipped automatically)
run: pytest -v

test-latest-all-extras:
# Verify dask-image works with the latest unfixed dependencies, including
# optional `dataframe` extras so find_objects is also tested.
runs-on: ${{ matrix.os }}
strategy:
fail-fast: true
matrix:
os: ["windows-latest", "ubuntu-latest", "macos-latest"]
steps:
- name: Checkout source
uses: actions/checkout@v5

- name: Set up Python
uses: actions/setup-python@v6
with:
python-version: "3.13"

- name: Install dask-image with all extras
run: |
python -m pip install --upgrade pip
python -m pip install -e .[dataframe,test]

- name: Run tests
run: pytest -v

coveralls:
needs: test
runs-on: ubuntu-latest
Expand Down
24 changes: 24 additions & 0 deletions continuous_integration/environment-3.13.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
# Conda test environment for dask-image on CPython 3.13.
# Versions are pinned exactly (except pytest-timeout) so CI runs are
# reproducible; bump the pins deliberately when refreshing the test matrix.
name: dask-image-testenv

channels:
  # Single channel keeps the conda solve deterministic.
  - conda-forge

dependencies:
  - python=3.13.*
  - pip==24.3.1
  # Test / lint tooling.
  - coverage==7.6.1
  - flake8==7.1.1
  - pytest==8.3.4
  - pytest-cov==6.0.0
  - pytest-flake8==1.3.0
  - pytest-timeout >=2.3.1
  # Runtime dependencies of dask-image.
  - dask==2025.1.0
  - numpy==2.2.0
  - scipy==1.15.0
  - scikit-image==0.25.0
  - pims==0.6.1
  - slicerator==1.1.0
  - pandas==2.2.3
  # Release tooling.
  - twine==6.0.1
  - pip:
      # `build` is pip-only; used to build the sdist/wheel in CI.
      - build==1.2.2
24 changes: 24 additions & 0 deletions continuous_integration/environment-3.14.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
# Conda test environment for dask-image on CPython 3.14.
#
# NOTE(review): this file originally pinned several compiled dependencies to
# releases that predate CPython 3.14 (numpy==2.2.0, scipy==1.15.0,
# scikit-image==0.25.0, pandas==2.2.3, coverage==7.6.1).  None of those have
# Python 3.14 builds on conda-forge, so the environment could never solve.
# Those pins are relaxed below to ">=" minimums at the first release series
# advertising Python 3.14 support -- confirm the exact versions against
# conda-forge before merging, then re-pin exactly for reproducibility.
name: dask-image-testenv

channels:
  # Single channel keeps the conda solve deterministic.
  - conda-forge

dependencies:
  - python=3.14.*
  - pip==25.0.1
  # Test / lint tooling.
  - coverage >=7.10        # coverage 7.6.x has no Python 3.14 builds
  - flake8==7.1.1
  - pytest==8.3.4
  - pytest-cov==6.0.0
  - pytest-flake8==1.3.0
  - pytest-timeout >=2.3.1
  # Runtime dependencies of dask-image.
  - dask==2025.1.0         # pure-Python (noarch); presumed installable on 3.14 -- verify
  - numpy >=2.3.2          # first numpy release with CPython 3.14 support
  - scipy >=1.16.1         # scipy 1.15.x predates CPython 3.14
  - scikit-image >=0.26    # scikit-image 0.25.x predates CPython 3.14 -- confirm
  - pims==0.6.1
  - slicerator==1.1.0
  - pandas >=2.3           # pandas 2.2.x predates CPython 3.14
  # Release tooling.
  - twine==6.0.1
  - pip:
      # `build` is pip-only; used to build the sdist/wheel in CI.
      - build==1.2.2
24 changes: 0 additions & 24 deletions continuous_integration/environment-3.9.yml

This file was deleted.

17 changes: 14 additions & 3 deletions dask_image/ndmeasure/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,6 @@

import dask.array as da
import dask.bag as db
import dask.dataframe as dd
import numpy as np

from . import _utils
Expand Down Expand Up @@ -228,9 +227,21 @@ def find_objects(label_image):

Notes
-----
You must have the optional dependency ``dask[dataframe]`` installed
to use the ``find_objects`` function.
You must have the optional dependencies ``dask[dataframe]`` and
``pandas`` installed to use the ``find_objects`` function. They can
be installed together via the ``dataframe`` extras group:
``pip install dask-image[dataframe]``.
"""
try:
import pandas # noqa: F401 # used by the private helpers below
import dask.dataframe as dd
except ImportError as e:
raise ImportError(
"dask_image.ndmeasure.find_objects requires the optional "
"dependencies `dask[dataframe]` and `pandas`. Install them "
"with `pip install dask-image[dataframe]`."
) from e

if label_image.dtype.char not in np.typecodes['AllInteger']:
raise ValueError("find_objects only accepts integer dtype arrays")

Expand Down
9 changes: 7 additions & 2 deletions dask_image/ndmeasure/_utils/_find_objects.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,5 @@
import numpy as np
import pandas as pd
from dask.delayed import Delayed
import dask.dataframe as dd
import dask.config as dask_config


Expand All @@ -24,6 +22,8 @@ def _find_bounding_boxes(x, array_location):
This alternative function returns a pandas dataframe,
with one row per object found in the image chunk.
"""
import pandas as pd

unique_vals = np.unique(x)
unique_vals = unique_vals[unique_vals != 0]
result = {}
Expand Down Expand Up @@ -53,6 +53,8 @@ def _combine_slices(slices):

def _merge_bounding_boxes(x, ndim):
"Merge the bounding boxes describing objects over multiple image chunks."
import pandas as pd

x = x.dropna()
data = {}
# For each dimension in the array,
Expand All @@ -72,6 +74,9 @@ def _merge_bounding_boxes(x, ndim):

def _find_objects(ndim, df1, df2):
"""Main utility function for find_objects."""
import pandas as pd
import dask.dataframe as dd

meta = dd.utils.make_meta([(i, object) for i in range(ndim)])
if isinstance(df1, Delayed):
with dask_config.set({'dataframe.convert-string': False}):
Expand Down
15 changes: 11 additions & 4 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -10,29 +10,33 @@ keywords = ["dask-image", "dask", "image"]
readme = "README.rst"
license = { text = "BSD-3-Clause" }
dynamic = ["version"]
requires-python = ">=3.9"
requires-python = ">=3.10"
classifiers = [
Comment thread
m-albert marked this conversation as resolved.
"Development Status :: 2 - Pre-Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Programming Language :: Python :: 3.13",
"Programming Language :: Python :: 3.14",
]
dependencies = [
"dask[array,dataframe] >=2024.4.1",
"dask[array] >=2024.4.1",
"numpy >=1.18",
"scipy >=1.7.0",
"pandas >=2.0.0",
"pims >=0.4.1",
"tifffile >=2020.10.1",
]

[project.optional-dependencies]
dataframe = [
"dask[dataframe] >=2024.4.1",
"pandas >=2.0.0",
]
test = [
"build >=1.2.1",
"coverage >=7.2.1",
Expand Down Expand Up @@ -72,3 +76,6 @@ include = [
[tool.pytest.ini_options]
addopts = "--flake8"
markers = "cupy"

[tool.flake8]
exclude = ["dask_image/_version.py"]
8 changes: 4 additions & 4 deletions tests/test_dask_image/test_ndfilters/test__generic.py
Original file line number Diff line number Diff line change
Expand Up @@ -70,9 +70,9 @@ def test_generic_filter_shape_type(da_func):
@pytest.mark.parametrize(
"function, size, footprint",
[
(lambda x: x, 1, None),
(lambda x: x, (1, 1), None),
(lambda x: x, None, np.ones((1, 1))),
(lambda x: x[0], 1, None),
(lambda x: x[0], (1, 1), None),
(lambda x: x[0], None, np.ones((1, 1))),
],
)
def test_generic_filter_identity(sp_func, da_func, function, size, footprint):
Expand All @@ -94,7 +94,7 @@ def test_generic_filter_identity(sp_func, da_func, function, size, footprint):
],
)
def test_generic_filter_comprehensions(da_func):
da_wfunc = lambda arr: da_func(arr, lambda x: x, 1) # noqa: E731
da_wfunc = lambda arr: da_func(arr, lambda x: x[0], 1) # noqa: E731

np.random.seed(0)

Expand Down
12 changes: 7 additions & 5 deletions tests/test_dask_image/test_ndmeasure/test_find_objects.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,12 @@
import dask.array as da
import dask.dataframe as dd
import numpy as np
import pandas as pd
import pytest

import dask_image.ndmeasure
pd = pytest.importorskip("pandas")
dd = pytest.importorskip("dask.dataframe")

import dask.array as da # noqa: E402
import numpy as np # noqa: E402

import dask_image.ndmeasure # noqa: E402


@pytest.fixture
Expand Down
Loading