Skip to content

Commit 769db63

Browse files
authored
typing fixes. (#292)
* typing fixes. * update typing ci * more fixes
1 parent f77bf79 commit 769db63

File tree

3 files changed

+16
-12
lines changed

3 files changed

+16
-12
lines changed

.github/workflows/ci-additional.yaml

+2-2
Original file line number | Diff line number | Diff line change
@@ -59,7 +59,7 @@ jobs:
5959
environment-name: flox-tests
6060
init-shell: bash
6161
cache-environment: true
62-
cache-env-key: "${{runner.os}}-${{runner.arch}}-py${{env.PYTHON_VERSION}}-${{env.TODAY}}-${{hashFiles(env.CONDA_ENV_FILE)}}"
62+
cache-environment-key: "${{runner.os}}-${{runner.arch}}-py${{env.PYTHON_VERSION}}-${{env.TODAY}}-${{hashFiles(env.CONDA_ENV_FILE)}}"
6363
create-args: |
6464
python=${{ env.PYTHON_VERSION }}
6565
@@ -112,7 +112,7 @@ jobs:
112112
environment-name: flox-tests
113113
init-shell: bash
114114
cache-environment: true
115-
cache-env-key: "${{runner.os}}-${{runner.arch}}-py${{env.PYTHON_VERSION}}-${{env.TODAY}}-${{hashFiles(env.CONDA_ENV_FILE)}}"
115+
cache-environment-key: "${{runner.os}}-${{runner.arch}}-py${{env.PYTHON_VERSION}}-${{env.TODAY}}-${{hashFiles(env.CONDA_ENV_FILE)}}"
116116
create-args: |
117117
python=${{ env.PYTHON_VERSION }}
118118
- name: Install flox

flox/core.py

+4-4
Original file line number | Diff line number | Diff line change
@@ -54,7 +54,7 @@
5454
T_DuckArray = Union[np.ndarray, DaskArray] # Any ?
5555
T_By = T_DuckArray
5656
T_Bys = tuple[T_By, ...]
57-
T_ExpectIndex = Union[pd.Index]
57+
T_ExpectIndex = pd.Index
5858
T_ExpectIndexTuple = tuple[T_ExpectIndex, ...]
5959
T_ExpectIndexOpt = Union[T_ExpectIndex, None]
6060
T_ExpectIndexOptTuple = tuple[T_ExpectIndexOpt, ...]
@@ -314,7 +314,7 @@ def invert(x) -> tuple[np.ndarray, ...]:
314314
items = tuple((k, set(k), v) for k, v in sorted_chunks_cohorts.items() if k)
315315

316316
merged_cohorts = {}
317-
merged_keys = set()
317+
merged_keys: set[tuple] = set()
318318

319319
# Now we iterate starting with the longest number of chunks,
320320
# and then merge in cohorts that are present in a subset of those chunks
@@ -1895,7 +1895,7 @@ def groupby_reduce(
18951895
engine: T_EngineOpt = None,
18961896
reindex: bool | None = None,
18971897
finalize_kwargs: dict[Any, Any] | None = None,
1898-
) -> tuple[DaskArray, Unpack[tuple[np.ndarray | DaskArray, ...]]]: # type: ignore[misc] # Unpack not in mypy yet
1898+
) -> tuple[DaskArray, Unpack[tuple[np.ndarray | DaskArray, ...]]]:
18991899
"""
19001900
GroupBy reductions using tree reductions for dask.array
19011901
@@ -2223,4 +2223,4 @@ def groupby_reduce(
22232223

22242224
if is_bool_array and (_is_minmax_reduction(func) or _is_first_last_reduction(func)):
22252225
result = result.astype(bool)
2226-
return (result, *groups) # type: ignore[return-value] # Unpack not in mypy yet
2226+
return (result, *groups)

tests/test_core.py

+10-6
Original file line number | Diff line number | Diff line change
@@ -179,14 +179,15 @@ def test_groupby_reduce(
179179
elif func == "count":
180180
expected_result = np.array(expected, dtype=np.intp)
181181

182-
(result, groups) = groupby_reduce(
182+
(result, *groups) = groupby_reduce(
183183
array,
184184
by,
185185
func=func,
186186
expected_groups=expected_groups,
187187
fill_value=123,
188188
engine=engine,
189189
)
190+
(groups_array,) = groups
190191
# we use pd.Index(expected_groups).to_numpy() which is always int64
191192
# for the values in this test
192193
if expected_groups is None:
@@ -196,7 +197,7 @@ def test_groupby_reduce(
196197
else:
197198
g_dtype = np.int64
198199

199-
assert_equal(groups, np.array([0, 1, 2], g_dtype))
200+
assert_equal(groups_array, np.array([0, 1, 2], g_dtype))
200201
assert_equal(expected_result, result)
201202

202203

@@ -795,11 +796,14 @@ def test_groupby_bins(chunk_labels, kwargs, chunks, engine, method) -> None:
795796
labels = dask.array.from_array(labels, chunks=chunks)
796797

797798
with raise_if_dask_computes():
798-
actual, groups = groupby_reduce(
799+
actual, *groups = groupby_reduce(
799800
array, labels, func="count", fill_value=0, engine=engine, method=method, **kwargs
800801
)
802+
(groups_array,) = groups
801803
expected = np.array([3, 1, 0], dtype=np.intp)
802-
for left, right in zip(groups, pd.IntervalIndex.from_arrays([1, 2, 4], [2, 4, 5]).to_numpy()):
804+
for left, right in zip(
805+
groups_array, pd.IntervalIndex.from_arrays([1, 2, 4], [2, 4, 5]).to_numpy()
806+
):
803807
assert left == right
804808
assert_equal(actual, expected)
805809

@@ -1034,13 +1038,13 @@ def test_bool_reductions(func, engine):
10341038
def test_map_reduce_blockwise_mixed() -> None:
10351039
t = pd.date_range("2000-01-01", "2000-12-31", freq="D").to_series()
10361040
data = t.dt.dayofyear
1037-
actual, _ = groupby_reduce(
1041+
actual, *_ = groupby_reduce(
10381042
dask.array.from_array(data.values, chunks=365),
10391043
t.dt.month,
10401044
func="mean",
10411045
method="map-reduce",
10421046
)
1043-
expected, _ = groupby_reduce(data, t.dt.month, func="mean")
1047+
expected, *_ = groupby_reduce(data, t.dt.month, func="mean")
10441048
assert_equal(expected, actual)
10451049

10461050

0 commit comments

Comments (0)