
Commit 33136a8

Migrate to pyproject.toml (#214)
* Migrate to pyproject.toml
* Some linting
* Fix build
* fix version
* fix lint
* [pre-commit.ci] auto fixes from pre-commit.com hooks
  for more information, see https://pre-commit.ci
* autofix notebooks
* lint more
* Proper docs env install
* Update
* Fix linting
* fix
* Fix
* Fix typing
* Update tests/test_xarray.py
* Cleanup
* Readd setup.py for benchmarks

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
1 parent daebc86 · commit 33136a8

21 files changed (+218 -152 lines)

.git_archival.txt (+4)

@@ -0,0 +1,4 @@
+node: $Format:%H$
+node-date: $Format:%cI$
+describe-name: $Format:%(describe:tags=true)$
+ref-names: $Format:%D$
+89 (new GitHub Actions workflow)

@@ -0,0 +1,89 @@
+name: Build and Upload to TestPyPI
+
+on:
+  push:
+    branches:
+      - "main"
+  pull_request:
+    types: [opened, reopened, synchronize, labeled]
+    branches:
+      - "*"
+  workflow_dispatch:
+
+# no need for concurrency limits
+
+jobs:
+  build-artifacts:
+    if: ${{ contains( github.event.pull_request.labels.*.name, 'test-build') && github.event_name == 'pull_request' || github.event_name == 'workflow_dispatch' }}
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          fetch-depth: 0
+
+      - uses: actions/setup-python@v4
+        name: Install Python
+        with:
+          python-version: "3.10"
+
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          python -m pip install build twine
+          python -m pip install tomli tomli_w
+
+      # - name: Disable local versions
+      #   run: |
+      #     python .github/workflows/configure-testpypi-version.py pyproject.toml
+      #     git update-index --assume-unchanged pyproject.toml
+      #     cat pyproject.toml
+
+      - name: Build tarball and wheels
+        run: |
+          git clean -xdf
+          python -m build
+
+      - name: Check built artifacts
+        run: |
+          python -m twine check --strict dist/*
+          if [ -f dist/flox-999.tar.gz ]; then
+            echo "❌ INVALID VERSION NUMBER"
+            exit 1
+          else
+            echo "✅ Looks good"
+          fi
+
+      - uses: actions/upload-artifact@v3
+        with:
+          name: releases
+          path: dist
+
+  test-built-dist:
+    needs: build-artifacts
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/setup-python@v4
+        name: Install Python
+        with:
+          python-version: "3.10"
+      - uses: actions/download-artifact@v3
+        with:
+          name: releases
+          path: dist
+      - name: List contents of built dist
+        run: |
+          ls -ltrh
+          ls -ltrh dist
+
+      - name: Verify the built dist/wheel is valid
+        run: |
+          python -m pip install --upgrade pip
+          python -m pip install dist/flox*.whl
+          # python -m cf_xarray.scripts.print_versions
+
+      # - name: Publish package to TestPyPI
+      #   uses: pypa/[email protected]
+      #   with:
+      #     password: ${{ secrets.TESTPYPI_TOKEN }}
+      #     repository_url: https://test.pypi.org/legacy/
+      #     verbose: true
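The commented-out "Disable local versions" step references a configure-testpypi-version.py helper whose contents are not part of this diff. As a rough, hypothetical sketch of what such a step typically does — TestPyPI rejects PEP 440 local version segments (the "+gHASH" suffix setuptools_scm adds), so the local scheme is switched off before building — using the tomli/tomli_w packages installed in the "Install dependencies" step:

# hypothetical_disable_local_versions.py -- a sketch only; the real
# .github/workflows/configure-testpypi-version.py is not shown in this commit.
import sys

import tomli
import tomli_w

path = sys.argv[1]  # e.g. "pyproject.toml"

with open(path, "rb") as f:
    config = tomli.load(f)

# Ask setuptools_scm not to append a local version segment (+gHASH[.dirty]),
# which TestPyPI would reject on upload.
config.setdefault("tool", {}).setdefault("setuptools_scm", {})["local_scheme"] = "no-local-version"

with open(path, "wb") as f:
    tomli_w.dump(config, f)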

.pre-commit-config.yaml (+13 -19)

@@ -2,6 +2,13 @@ ci:
   autoupdate_schedule: quarterly

 repos:
+  - repo: https://github.com/charliermarsh/ruff-pre-commit
+    # Ruff version.
+    rev: 'v0.0.246'
+    hooks:
+      - id: ruff
+        args: ["--fix"]
+
   - repo: https://github.com/pre-commit/pre-commit-hooks
     rev: v4.4.0
     hooks:
@@ -11,20 +18,10 @@ repos:
       - id: check-docstring-first

   - repo: https://github.com/psf/black
-    rev: 22.12.0
+    rev: 23.1.0
     hooks:
       - id: black

-  - repo: https://github.com/PyCQA/flake8
-    rev: 6.0.0
-    hooks:
-      - id: flake8
-
-  - repo: https://github.com/PyCQA/isort
-    rev: 5.11.4
-    hooks:
-      - id: isort
-
   - repo: https://github.com/executablebooks/mdformat
     rev: 0.7.16
     hooks:
@@ -37,9 +34,8 @@ repos:
     rev: 1.6.1
     hooks:
       - id: nbqa-black
-      - id: nbqa-pyupgrade
-        args: [--py37-plus]
-      - id: nbqa-isort
+      - id: nbqa-ruff
+        args: [--fix]

   - repo: https://github.com/kynan/nbstripout
     rev: 0.6.1
@@ -54,9 +50,7 @@ repos:
         additional_dependencies:
           - tomli

-  - repo: https://github.com/asottile/pyupgrade
-    rev: v3.3.1
+  - repo: https://github.com/abravalheri/validate-pyproject
+    rev: v0.12.1
     hooks:
-      - id: pyupgrade
-        args:
-          - "--py38-plus"
+      - id: validate-pyproject

MANIFEST.in (-6)

This file was deleted.

ci/docs.yml (+1 -1)

@@ -18,4 +18,4 @@ dependencies:
   - jupyter
   - sphinx-codeautolink
   - pip:
-      - git+https://github.com/xarray-contrib/flox
+      - -e ..

docs/source/user-stories/climatology-hourly.ipynb (-2)

@@ -55,8 +55,6 @@
    }
   ],
   "source": [
-   "import time\n",
-   "\n",
    "import dask.array\n",
    "import numpy as np\n",
    "import pandas as pd\n",

docs/source/user-stories/custom-aggregations.ipynb (-2)

@@ -26,7 +26,6 @@
   "metadata": {},
   "outputs": [],
   "source": [
-   "import matplotlib.pyplot as plt\n",
    "import numpy as np\n",
    "import numpy_groupies as npg\n",
    "import xarray as xr\n",
@@ -203,7 +202,6 @@
   "outputs": [],
   "source": [
    "def grouped_median(group_idx, array, *, axis=-1, size=None, fill_value=None, dtype=None):\n",
-   "\n",
    "    return npg.aggregate_numpy.aggregate(\n",
    "        group_idx,\n",
    "        array,\n",

flox/__init__.py (+9 -10)

@@ -5,15 +5,14 @@
 from .aggregations import Aggregation  # noqa
 from .core import groupby_reduce, rechunk_for_blockwise, rechunk_for_cohorts  # noqa

-try:
-    from importlib.metadata import version as _version
-except ImportError:
-    # if the fallback library is missing, we are doomed.
-    from importlib_metadata import version as _version  # type: ignore[no-redef]

-try:
-    __version__ = _version("flox")
-except Exception:
-    # Local copy or not installed with setuptools.
-    # Disable minimum version checks on downstream libraries.
+def _get_version():
     __version__ = "999"
+    try:
+        from ._version import __version__
+    except ImportError:
+        pass
+    return __version__
+
+
+__version__ = _get_version()
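A quick way to see the effect of this change (a sketch, not part of the commit): a built/installed flox ships a generated flox/_version.py, so the fallback never fires, while a bare source checkout returns the sentinel "999" — exactly the value the TestPyPI workflow's dist/flox-999.tar.gz check refuses to publish.

# Sanity check (sketch): compare the runtime fallback with the installed metadata.
from importlib.metadata import PackageNotFoundError, version

import flox

print("flox.__version__:", flox.__version__)   # "999" only for an unbuilt source tree
try:
    print("dist metadata   :", version("flox"))  # what pip/importlib.metadata report
except PackageNotFoundError:
    print("flox is not installed as a distribution (e.g. imported from a bare checkout)")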

flox/aggregate_flox.py (-1)

@@ -77,7 +77,6 @@ def _nan_grouped_op(group_idx, array, func, fillna, *args, **kwargs):


 def sum_of_squares(group_idx, array, *, axis=-1, size=None, fill_value=None, dtype=None):
-
     return sum(
         group_idx,
         array**2,

flox/aggregate_npg.py (-2)

@@ -18,7 +18,6 @@ def sum_of_squares(
     fill_value=None,
     dtype=None,
 ):
-
     return _get_aggregate(engine).aggregate(
         group_idx,
         array,
@@ -40,7 +39,6 @@ def nansum_of_squares(
     fill_value=None,
     dtype=None,
 ):
-
     return _get_aggregate(engine).aggregate(
         group_idx,
         array,

flox/aggregations.py (+2 -1)

@@ -9,7 +9,8 @@
 import numpy_groupies as npg
 from numpy.typing import DTypeLike

-from . import aggregate_flox, aggregate_npg, xrdtypes as dtypes, xrutils
+from . import aggregate_flox, aggregate_npg, xrutils
+from . import xrdtypes as dtypes

 if TYPE_CHECKING:
     FuncTuple = tuple[Callable | str, ...]

flox/core.py (+3 -6)

@@ -122,7 +122,7 @@ def _get_optimal_chunks_for_groups(chunks, labels):
     firstidx = first_indexes[labels_at_chunk_bounds]

     newchunkidx = [0]
-    for c, f, l in zip(chunkidx, firstidx, lastidx):
+    for c, f, l in zip(chunkidx, firstidx, lastidx):  # noqa
         Δf = abs(c - f)
         Δl = abs(c - l)
         if c == 0 or newchunkidx[-1] > l:
@@ -362,7 +362,6 @@ def rechunk_for_blockwise(array: DaskArray, axis: T_Axis, labels: np.ndarray):
 def reindex_(
     array: np.ndarray, from_, to, fill_value=None, axis: T_Axis = -1, promote: bool = False
 ) -> np.ndarray:
-
     if not isinstance(to, pd.Index):
         if promote:
             to = pd.Index(to)
@@ -389,7 +388,7 @@ def reindex_(
         )
     idx = from_.get_indexer(to)
     indexer = [slice(None, None)] * array.ndim
-    indexer[axis] = idx  # type: ignore
+    indexer[axis] = idx
     reindexed = array[tuple(indexer)]
     if any(idx == -1):
         if fill_value is None:
@@ -417,7 +416,7 @@ def offset_labels(labels: np.ndarray, ngroups: int) -> tuple[np.ndarray, int]:
         )
     # -1 indicates NaNs. preserve these otherwise we aggregate in the wrong groups!
     offset[labels == -1] = -1
-    size: int = math.prod(labels.shape[:-1]) * ngroups  # type: ignore
+    size: int = math.prod(labels.shape[:-1]) * ngroups
     return offset, size


@@ -1213,7 +1212,6 @@ def dask_groupby_agg(
     sort: bool = True,
     chunks_cohorts=None,
 ) -> tuple[DaskArray, tuple[np.ndarray | DaskArray]]:
-
     import dask.array
     from dask.array.core import slices_from_chunks

@@ -1556,7 +1554,6 @@ def _factorize_multiple(by, expected_groups, any_by_dask, reindex):


 def _validate_expected_groups(nby: int, expected_groups: T_ExpectedGroupsOpt) -> T_ExpectTuple:
-
     if expected_groups is None:
         return (None,) * nby
flox/visualize.py (-3)

@@ -24,7 +24,6 @@ def draw_mesh(
     y0=0,
     append=False,
 ):
-
     dx = 2
     xpts = x0 + np.arange(0, (ncol + nspaces) * dx, dx)
     ypts = y0 + np.arange(0, nrow * dx, dx)
@@ -118,7 +117,6 @@ def visualize_groups_1d(array, labels, axis=-1, colors=None, cmap=None, append=T


 def get_colormap(N):
-
     cmap = mpl.cm.get_cmap("tab20_r").copy()
     ncolors = len(cmap.colors)
     q = N // ncolors
@@ -129,7 +127,6 @@ def get_colormap(N):


 def factorize_cohorts(by, cohorts):
-
     factorized = np.full(by.shape, -1)
     for idx, cohort in enumerate(cohorts):
         factorized[np.isin(by, cohort)] = idx

flox/xarray.py (+2 -4)

@@ -15,9 +15,9 @@
     _get_expected_groups,
     _validate_expected_groups,
     groupby_reduce,
-    rechunk_for_blockwise as rechunk_array_for_blockwise,
-    rechunk_for_cohorts as rechunk_array_for_cohorts,
 )
+from .core import rechunk_for_blockwise as rechunk_array_for_blockwise
+from .core import rechunk_for_cohorts as rechunk_array_for_cohorts
 from .xrutils import _contains_cftime_datetimes, _to_pytimedelta, datetime_to_numeric

 if TYPE_CHECKING:
@@ -343,7 +343,6 @@ def xarray_reduce(
         raise ValueError("expect_index cannot be None")

     def wrapper(array, *by, func, skipna, core_dims, **kwargs):
-
         array, *by = _broadcast_size_one_dims(array, *by, core_dims=core_dims)

         # Handle skipna here because I need to know dtype to make a good default choice.
@@ -585,7 +584,6 @@ def resample_reduce(
     keep_attrs: bool = True,
     **kwargs,
 ):
-
     warnings.warn(
         "flox.xarray.resample_reduce is now deprecated. Please use Xarray's resample method directly.",
         DeprecationWarning,

flox/xrutils.py (-1)

@@ -181,7 +181,6 @@ def datetime_to_numeric(array, offset=None, datetime_unit=None, dtype=float):

     # Convert np.NaT to np.nan
     elif array.dtype.kind in "mM":
-
         # Convert to specified timedelta units.
         if datetime_unit:
             array = array / np.timedelta64(1, datetime_unit)
