Skip to content

Add some more type signatures #227

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 2 commits into from
Apr 28, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 4 additions & 2 deletions odc/geo/_blocks.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
"""
Working with 2d+ chunks.
"""
from typing import Any, Iterator, Mapping, Optional, Tuple
from typing import Any, Iterator, Literal, Mapping, Optional, Tuple

import numpy as np

Expand Down Expand Up @@ -123,7 +123,9 @@ def extract(
*,
dtype=None,
roi=None,
casting="same_kind",
casting: (
Literal["no", "equiv", "safe", "same_kind", "unsafe"] | None
) = "same_kind",
) -> np.ndarray:
"""
Paste all blocks together into one array possibly with type coercion.
Expand Down
4 changes: 2 additions & 2 deletions odc/geo/_compress.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ def _verify_can_compress(xx: xr.DataArray):
)


def _compress_image(im: np.ndarray, driver="PNG", **opts) -> bytes:
def _compress_image(im: np.ndarray, driver: str = "PNG", **opts) -> bytes:
if im.ndim > 2:
im = np.squeeze(im)

Expand Down Expand Up @@ -67,7 +67,7 @@ def compress(
xx,
/,
*args,
as_data_url=False,
as_data_url: bool = False,
transparent: Optional[Tuple[int, int, int]] = None,
**kw,
) -> Union[str, bytes]:
Expand Down
6 changes: 3 additions & 3 deletions odc/geo/_dask.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
from functools import partial
from typing import Any, Optional, Sequence, Tuple, Union, Mapping
from typing import Any, Literal, Optional, Sequence, Tuple, Union, Mapping
from uuid import uuid4

import dask.array as da
Expand All @@ -22,7 +22,7 @@ def _do_chunked_reproject(
*blocks: np.ndarray,
axis: int = 0,
dtype=None,
casting="same_kind",
casting: Literal["no", "equiv", "safe", "same_kind", "unsafe"] | None = "same_kind",
resampling: Resampling = "nearest",
src_nodata: Nodata = None,
dst_nodata: Nodata = None,
Expand Down Expand Up @@ -118,7 +118,7 @@ def with_yx(a, yx):
)
src_block_keys = src.__dask_keys__()

def _src(idx):
def _src(idx: dict):
a = src_block_keys
for i in idx:
a = a[i]
Expand Down
2 changes: 1 addition & 1 deletion odc/geo/_interop.py
Original file line number Diff line number Diff line change
Expand Up @@ -79,7 +79,7 @@ def __dir__():
return [*__all__, "is_dask_collection"]


def __getattr__(name):
def __getattr__(name: str):
if name == "is_dask_collection":
if have.dask:
import dask
Expand Down
4 changes: 2 additions & 2 deletions odc/geo/_map.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@


# pylint: disable=import-outside-toplevel, redefined-builtin, too-many-locals
def _add_to_folium(url, bounds, map, name=None, index=None, **kw):
def _add_to_folium(url, bounds, map, name: Optional[str] = None, index=None, **kw):
assert have.folium

from folium.raster_layers import ImageOverlay
Expand All @@ -21,7 +21,7 @@ def _add_to_folium(url, bounds, map, name=None, index=None, **kw):
return img_overlay


def _add_to_ipyleaflet(url, bounds, map, name=None, index=None, **kw):
def _add_to_ipyleaflet(url, bounds, map, name: Optional[str] = None, index=None, **kw):
assert have.ipyleaflet

from ipyleaflet import ImageOverlay, Map
Expand Down
4 changes: 2 additions & 2 deletions odc/geo/_rgba.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
# pylint: disable=import-outside-toplevel


def is_rgb(x: xr.DataArray):
def is_rgb(x: xr.DataArray) -> bool:
"""
Check if array is RGB(A).
"""
Expand Down Expand Up @@ -177,7 +177,7 @@ def _matplotlib_colorize(
vmin=None,
vmax=None,
nodata: Nodata = None,
robust=False,
robust: bool = False,
):
from matplotlib import colormaps
from matplotlib.colors import Normalize
Expand Down
18 changes: 9 additions & 9 deletions odc/geo/_xr_interop.py
Original file line number Diff line number Diff line change
Expand Up @@ -117,7 +117,7 @@ def _get_crs_from_attrs(obj: XarrayObject, sdims: Tuple[str, str]) -> Optional[C
"""
crs_set: Set[CRS] = set()

def _add_candidate(crs):
def _add_candidate(crs) -> None:
if crs is None:
return
if isinstance(crs, str):
Expand All @@ -132,11 +132,11 @@ def _add_candidate(crs):
else:
warnings.warn(f"Ignoring crs attribute of type: {type(crs)}")

def process_attrs(attrs):
def process_attrs(attrs) -> None:
    # Both "crs" and "crs_wkt" are conventional attribute names for CRS
    # metadata; feed each (in that order) to the candidate collector.
    for key in ("crs", "crs_wkt"):
        _add_candidate(attrs.get(key, None))

def process_datavar(x):
def process_datavar(x) -> None:
process_attrs(x.attrs)
for dim in sdims:
if dim in x.coords:
Expand Down Expand Up @@ -1030,7 +1030,7 @@ class ODCExtension:
Common accessors for both Array/Dataset.
"""

def __init__(self, state: GeoState):
def __init__(self, state: GeoState) -> None:
    """
    Store resolved geo-registration state used by the accessor properties.
    """
    self._state = state

@property
Expand Down Expand Up @@ -1119,7 +1119,7 @@ class ODCExtensionDa(ODCExtension):
ODC extension for :py:class:`xarray.DataArray`.
"""

def __init__(self, xx: xarray.DataArray):
def __init__(self, xx: xarray.DataArray) -> None:
    """
    Attach extension state to a :py:class:`xarray.DataArray`.
    """
    # Resolve geo info from the array first, then keep the array itself.
    ODCExtension.__init__(self, _locate_geo_info(xx))
    self._xx = xx

Expand Down Expand Up @@ -1174,7 +1174,7 @@ def nodata(self) -> Nodata:
return None

@nodata.setter
def nodata(self, value: Nodata):
def nodata(self, value: Nodata) -> None:
nodata = resolve_nodata(value, self._xx.dtype)

if nodata is None:
Expand All @@ -1201,7 +1201,7 @@ class ODCExtensionDs(ODCExtension):
ODC extension for :py:class:`xarray.Dataset`.
"""

def __init__(self, ds: xarray.Dataset):
def __init__(self, ds: xarray.Dataset) -> None:
    """
    Attach extension state to a :py:class:`xarray.Dataset`.
    """
    # Resolve geo info from the dataset first, then keep the dataset itself.
    ODCExtension.__init__(self, _locate_geo_info(ds))
    self._xx = ds

Expand Down Expand Up @@ -1242,7 +1242,7 @@ def _xarray_geobox(xx: XarrayObject) -> Optional[GeoBox]:
return None


def register_geobox():
def register_geobox() -> None:
"""
Backwards compatibility layer for datacube ``.geobox`` property.
"""
Expand Down Expand Up @@ -1350,7 +1350,7 @@ def _postfix_dims(n):

def xr_zeros(
geobox: SomeGeoBox,
dtype="float64",
dtype: str = "float64",
*,
chunks: Optional[Union[Tuple[int, int], Tuple[int, int, int]]] = None,
time=None,
Expand Down
6 changes: 3 additions & 3 deletions odc/geo/cog/_az.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ def __init__(
blob: str,
credential: Any = None,
client: Any = None,
):
) -> None:
"""
Initialise Azure multipart upload.

Expand Down Expand Up @@ -102,7 +102,7 @@ def finalise(self, parts: list[dict[str, Any]]) -> str:
self.blob_client.commit_block_list(block_list)
return self.blob_client.get_blob_properties().etag

def cancel(self, other: str = ""):
def cancel(self, other: str = "") -> None:
"""
Cancel the upload by clearing the block list.
"""
Expand Down Expand Up @@ -143,7 +143,7 @@ class DelayedAzureWriter(AzureLimits):
Dask-compatible writer for Azure Blob Storage multipart uploads.
"""

def __init__(self, mpu: AzMultiPartUpload, kw: dict[str, Any]):
def __init__(self, mpu: AzMultiPartUpload, kw: dict[str, Any]) -> None:
"""
Initialise the Azure writer.

Expand Down
6 changes: 3 additions & 3 deletions odc/geo/cog/_mpu.py
Original file line number Diff line number Diff line change
Expand Up @@ -111,7 +111,7 @@ def __repr__(self) -> str:
s = f"{s} final"
return s

def append(self, data: SomeData, chunk_id: Any = None):
def append(self, data: SomeData, chunk_id: Any = None) -> None:
sz = len(data)
self.observed.append((sz, chunk_id))
self.data += data
Expand Down Expand Up @@ -368,7 +368,7 @@ def mpu_write(
user_kw: dict[str, Any] | None = None,
writes_per_chunk: int = 1,
spill_sz: int = 20 * (1 << 20),
dask_name_prefix="mpufinalise",
dask_name_prefix: str = "mpufinalise",
) -> "Delayed":
# pylint: disable=import-outside-toplevel,too-many-locals,too-many-arguments
from dask.base import tokenize
Expand Down Expand Up @@ -502,7 +502,7 @@ def get_mpu_kwargs(
mk_header=None,
mk_footer=None,
user_kw=None,
writes_per_chunk=1,
writes_per_chunk: int = 1,
spill_sz=20 * (1 << 20),
client=None,
) -> dict:
Expand Down
2 changes: 1 addition & 1 deletion odc/geo/cog/_rio.py
Original file line number Diff line number Diff line change
Expand Up @@ -194,7 +194,7 @@ def _write_cog(

rio_opts.update(extra_rio_opts)

def _write(pix, band, dst):
def _write(pix, band, dst) -> None:
if not use_windowed_writes:
dst.write(pix, band)
else:
Expand Down
10 changes: 5 additions & 5 deletions odc/geo/cog/_s3.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@
_state: dict[str, Any] = {}


def _mpu_local_lock(k="mpu_lock") -> Lock:
def _mpu_local_lock(k: str = "mpu_lock") -> Lock:
lck = _state.get(k, None)
if lck is not None:
return lck
Expand Down Expand Up @@ -84,7 +84,7 @@ def __init__(
profile: Optional[str] = None,
endpoint_url: Optional[str] = None,
creds: Optional["ReadOnlyCredentials"] = None,
):
) -> None:
self.bucket = bucket
self.key = key
self.uploadId = uploadId
Expand Down Expand Up @@ -153,7 +153,7 @@ def started(self) -> bool:
"""Check if the multipart upload has been initiated."""
return len(self.uploadId) > 0

def cancel(self, other: str = ""):
def cancel(self, other: str = "") -> None:
"""Cancel the multipart upload."""
uploadId = other if other else self.uploadId
if not uploadId:
Expand Down Expand Up @@ -202,7 +202,7 @@ def dask_name_prefix(self) -> str:
return "s3finalise"


def _safe_get(v, timeout=0.1):
def _safe_get(v, timeout: float = 0.1):
try:
return v.get(timeout)
except Exception: # pylint: disable=broad-except
Expand All @@ -216,7 +216,7 @@ class DelayedS3Writer(S3Limits):

# pylint: disable=import-outside-toplevel,import-error

def __init__(self, mpu: S3MultiPartUpload, kw: dict[str, Any]):
def __init__(self, mpu: S3MultiPartUpload, kw: dict[str, Any]) -> None:
self.mpu = mpu
self.kw = kw # mostly ContentType= kinda thing
self._shared_var: Optional["distributed.Variable"] = None
Expand Down
2 changes: 1 addition & 1 deletion odc/geo/cog/_tifffile.py
Original file line number Diff line number Diff line change
Expand Up @@ -351,7 +351,7 @@ def _mk_tile_compressor(
)


def _compress_cog_tile(encoder, block, idx):
def _compress_cog_tile(encoder, block, idx: int) -> list[tuple]:
return [(encoder(block), idx)]


Expand Down
6 changes: 3 additions & 3 deletions odc/geo/crs.py
Original file line number Diff line number Diff line change
Expand Up @@ -95,7 +95,7 @@ class CRS:

__slots__ = ("_crs", "_epsg", "_str")

def __init__(self, crs_spec: Any):
def __init__(self, crs_spec: Any) -> None:
"""
Construct CRS object from *something*.

Expand All @@ -122,8 +122,8 @@ def __init__(self, crs_spec: Any):
def __getstate__(self):
return {"crs_str": self._str}

def __setstate__(self, state):
self.__init__(state["crs_str"])
def __setstate__(self, state) -> None:
    """Rebuild CRS internals from the pickled CRS string."""
    crs, crs_str, epsg = _make_crs(state["crs_str"])
    self._crs = crs
    self._str = crs_str
    self._epsg = epsg

def to_wkt(self, pretty: bool = False, version: Optional[WktVersion] = None) -> str:
"""
Expand Down
2 changes: 1 addition & 1 deletion odc/geo/data/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@ class _CachedGeoDataFrame:
_lock = threading.Lock()
_data_url = ""

def __init__(self):
def __init__(self) -> None:
# Thread safe class-cached dataload
if self._instance is None:
with self._lock:
Expand Down
8 changes: 4 additions & 4 deletions odc/geo/gcp.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ def __init__(
pix: SomePointSet,
wld: SomePointSet,
crs: MaybeCRS = None,
):
) -> None:
pix, _ = _points_to_array(pix)
wld, _crs = _points_to_array(wld)

Expand Down Expand Up @@ -143,7 +143,7 @@ class GCPGeoBox(GeoBoxBase):

def __init__(
self, shape: SomeShape, mapping: GCPMapping, affine: Optional[Affine] = None
):
) -> None:
if affine is None:
affine = Affine.identity()
GeoBoxBase.__init__(self, shape, affine, mapping.crs)
Expand Down Expand Up @@ -182,7 +182,7 @@ def linear(self) -> bool:
return False

@property
def axis_aligned(self):
def axis_aligned(self) -> bool:
    """Always ``False`` for :py:class:`GCPGeoBox`."""
    return False

@property
Expand Down Expand Up @@ -279,7 +279,7 @@ def zoom_to(
_shape, _affine = self.compute_zoom_to(shape, resolution=resolution)
return GCPGeoBox(_shape, self._mapping, _affine)

def __str__(self):
def __str__(self) -> str:
    # str() and repr() render identically for this class.
    return self.__repr__()

def __repr__(self) -> str:
Expand Down
Loading
Loading