Skip to content

Commit 3bfbf22

Browse files
authored
Merge pull request #8 from Genentech/remove-package
remove crick package
2 parents 9022b9e + 82bf4c5 commit 3bfbf22

4 files changed

Lines changed: 1 addition & 85 deletions

File tree

pyproject.toml

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -38,7 +38,6 @@ dependencies = [
3838
"bioio-tifffile",
3939
"centrosome",
4040
"cp-measure",
41-
"crick",
4241
"dask-image",
4342
"dask",
4443
"decorator",

requirements.txt

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,6 @@ bioio-base==1.0.7
66
bioio==1.6.1
77
centrosome==1.3.3
88
cp-measure==0.1.13
9-
crick==0.0.8
109
cython==3.2.4
1110
dask-image==2025.11.0
1211
dask==2025.11.0

scallops/tests/test_xr.py

Lines changed: 1 addition & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,7 @@
1-
import numpy as np
21
import pytest
32
import xarray as xr
43

5-
from scallops.xr import apply_data_array, dask_grouped_quantiles
4+
from scallops.xr import apply_data_array
65

76

87
@pytest.fixture
@@ -12,17 +11,6 @@ def image(array_A1_102_aln):
1211
) # ops swaps z and t in saved tif
1312

1413

15-
@pytest.mark.io
16-
def test_dask_grouped_quantiles(image):
17-
dask_image = image.squeeze() # ops swaps z and t in saved tif
18-
dask_image = dask_image.chunk(dict(t=1, y=256, x=256))
19-
q = [0.5, 0.75]
20-
dask_results = dask_grouped_quantiles(dask_image, dims=["t", "c"], q=q).compute()
21-
results = image.squeeze().quantile(dim=["y", "x"], q=q)
22-
assert np.abs(dask_results.isel(quantile=0) - results.isel(quantile=0)).max() < 0.8
23-
assert np.abs(dask_results.isel(quantile=1) - results.isel(quantile=1)).max() < 1.75
24-
25-
2614
@pytest.mark.io
2715
def test_data_array(image):
2816
def add_data_array(x: xr.DataArray, y: float):

scallops/xr.py

Lines changed: 0 additions & 70 deletions
Original file line numberDiff line numberDiff line change
@@ -8,13 +8,10 @@
88
- The SCALLOPS development team
99
"""
1010

11-
import importlib
1211
import itertools
1312
from collections.abc import Callable, Sequence
1413
from typing import Any, Literal, Union
1514

16-
import dask
17-
import dask.array as da
1815
import numpy as np
1916
import xarray as xr
2017

@@ -92,73 +89,6 @@ def _get_dims(
9289
return _dims
9390

9491

95-
def dask_grouped_quantiles(
96-
array: xr.DataArray, dims: list[str], q: list[float]
97-
) -> xr.DataArray:
98-
"""Compute quantiles for grouped data using Dask.
99-
100-
This function calculates the specified quantiles for the given dimensions in a
101-
Dask-backed Xarray DataArray. It uses Dask's percentile computation to handle
102-
large datasets efficiently.
103-
104-
:param array: The input DataArray containing the data.
105-
:param dims: List of dimensions over which to compute the quantiles.
106-
:param q: List of quantiles to compute, each value should be between 0 and 1.
107-
:return: A DataArray containing the computed quantiles for the specified dimensions.
108-
109-
:raises AssertionError:
110-
If no quantiles are provided in the `q` list.
111-
:raises ValueError:
112-
If a specified dimension is not found in the DataArray and `missing_dims`
113-
is set to "error".
114-
115-
:example:
116-
117-
.. code-block:: python
118-
119-
import xarray as xr
120-
import numpy as np
121-
import dask.array as da
122-
123-
data = da.random.random((10, 20, 30), chunks=(5, 10, 15))
124-
array = xr.DataArray(data, dims=["x", "y", "z"])
125-
126-
# Compute quantiles for dimensions 'x' and 'y'
127-
quantiles = dask_grouped_quantiles(array, ["x", "y"], [0.25, 0.5, 0.75])
128-
print(quantiles)
129-
"""
130-
assert len(q) > 0, "No quantiles provided"
131-
dims = _get_dims(array, dims)
132-
133-
coords = {d: array.coords[d] for d in dims}
134-
coords["quantile"] = q
135-
136-
quantiles = [_q * 100 for _q in q]
137-
results = xr.DataArray(
138-
da.zeros((len(q),) + tuple([array.sizes[d] for d in dims])),
139-
dims=["quantile"] + dims,
140-
coords=coords,
141-
)
142-
143-
dim_vals = [array[d].values for d in dims]
144-
internal_method = "tdigest"
145-
146-
try:
147-
importlib.import_module("crick")
148-
except ModuleNotFoundError:
149-
internal_method = "default"
150-
with dask.config.set(**{"array.slicing.split_large_chunks": True}):
151-
for dim_val in itertools.product(*dim_vals):
152-
sel = dict(zip(dims, dim_val))
153-
values = da.percentile(
154-
array.sel(sel).data.reshape(-1),
155-
quantiles,
156-
internal_method=internal_method,
157-
)
158-
results.loc[sel] = values
159-
return results
160-
161-
16292
def apply_data_array(
16393
array: xr.DataArray,
16494
dims: list[str],

0 commit comments

Comments (0)