Skip to content

Commit 491ff58

Browse files
authored
Use math.prod instead of np.prod (#157)
1 parent c62ad44 commit 491ff58

File tree

1 file changed

+9
-8
lines changed

1 file changed

+9
-8
lines changed

flox/core.py

+9-8
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22

33
import copy
44
import itertools
5+
import math
56
import operator
67
from collections import namedtuple
78
from functools import partial, reduce
@@ -85,7 +86,7 @@ def _move_reduce_dims_to_end(arr: np.ndarray, axis: Sequence) -> np.ndarray:
8586

8687
def _collapse_axis(arr: np.ndarray, naxis: int) -> np.ndarray:
8788
"""Reshape so that the last `naxis` axes are collapsed to one axis."""
88-
newshape = arr.shape[:-naxis] + (np.prod(arr.shape[-naxis:]),)
89+
newshape = arr.shape[:-naxis] + (math.prod(arr.shape[-naxis:]),)
8990
return arr.reshape(newshape)
9091

9192

@@ -165,7 +166,7 @@ def find_group_cohorts(labels, chunks, merge=True, method="cohorts"):
165166

166167
# Iterate over each block and create a new block of same shape with "chunk number"
167168
shape = tuple(array.blocks.shape[ax] for ax in axis)
168-
blocks = np.empty(np.prod(shape), dtype=object)
169+
blocks = np.empty(math.prod(shape), dtype=object)
169170
for idx, block in enumerate(array.blocks.ravel()):
170171
blocks[idx] = np.full(tuple(block.shape[ax] for ax in axis), idx)
171172
which_chunk = np.block(blocks.reshape(shape).tolist()).reshape(-1)
@@ -382,11 +383,11 @@ def offset_labels(labels: np.ndarray, ngroups: int) -> tuple[np.ndarray, int]:
382383
"""
383384
assert labels.ndim > 1
384385
offset: np.ndarray = (
385-
labels + np.arange(np.prod(labels.shape[:-1])).reshape((*labels.shape[:-1], -1)) * ngroups
386+
labels + np.arange(math.prod(labels.shape[:-1])).reshape((*labels.shape[:-1], -1)) * ngroups
386387
)
387388
# -1 indicates NaNs. preserve these otherwise we aggregate in the wrong groups!
388389
offset[labels == -1] = -1
389-
size: int = np.prod(labels.shape[:-1]) * ngroups # type: ignore
390+
size: int = math.prod(labels.shape[:-1]) * ngroups # type: ignore
390391
return offset, size
391392

392393

@@ -455,7 +456,7 @@ def factorize_(
455456
factorized.append(idx)
456457

457458
grp_shape = tuple(len(grp) for grp in found_groups)
458-
ngroups = np.prod(grp_shape)
459+
ngroups = math.prod(grp_shape)
459460
if len(by) > 1:
460461
group_idx = np.ravel_multi_index(factorized, grp_shape, mode="wrap")
461462
# NaNs; as well as values outside the bins are coded by -1
@@ -630,7 +631,7 @@ def chunk_reduce(
630631
groups = groups[0]
631632

632633
# always reshape to 1D along group dimensions
633-
newshape = array.shape[: array.ndim - by.ndim] + (np.prod(array.shape[-by.ndim :]),)
634+
newshape = array.shape[: array.ndim - by.ndim] + (math.prod(array.shape[-by.ndim :]),)
634635
array = array.reshape(newshape)
635636

636637
assert group_idx.ndim == 1
@@ -1506,7 +1507,7 @@ def groupby_reduce(
15061507
by, final_groups, grp_shape = _factorize_multiple(
15071508
by, expected_groups, by_is_dask=by_is_dask, reindex=reindex
15081509
)
1509-
expected_groups = (pd.RangeIndex(np.prod(grp_shape)),)
1510+
expected_groups = (pd.RangeIndex(math.prod(grp_shape)),)
15101511

15111512
assert len(by) == 1
15121513
by = by[0]
@@ -1601,7 +1602,7 @@ def groupby_reduce(
16011602
array_subset = array
16021603
for ax, idxr in zip(range(-by.ndim, 0), indexer):
16031604
array_subset = np.take(array_subset, idxr, axis=ax)
1604-
numblocks = np.prod([len(array_subset.chunks[ax]) for ax in axis])
1605+
numblocks = math.prod([len(array_subset.chunks[ax]) for ax in axis])
16051606

16061607
# get final result for these groups
16071608
r, *g = partial_agg(

0 commit comments

Comments (0)