Skip to content
11 changes: 6 additions & 5 deletions yt/_typing.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
from typing import Any, Optional, TypeAlias

import numpy as np
import numpy.typing as npt
import unyt as un

FieldDescT = tuple[str, tuple[str, list[str], str | None]]
Expand All @@ -12,12 +13,12 @@
FieldKey = tuple[FieldType, FieldName]
ImplicitFieldKey = FieldName
AnyFieldKey = FieldKey | ImplicitFieldKey
DomainDimensions = tuple[int, ...] | list[int] | np.ndarray
DomainDimensions = tuple[int, ...] | list[int] | npt.NDArray

ParticleCoordinateTuple = tuple[
str, # particle type
tuple[np.ndarray, np.ndarray, np.ndarray], # xyz
float | np.ndarray, # hsml
tuple[npt.NDArray, npt.NDArray, npt.NDArray], # xyz
float | npt.NDArray, # hsml
]

# Geometry specific types
Expand All @@ -33,5 +34,5 @@
# np.ndarray[...] syntax is runtime-valid from numpy 1.22, we quote it until our minimal
# runtime requirement is bumped to, or beyond this version

MaskT = Optional["np.ndarray[Any, np.dtype[np.bool_]]"]
AlphaT = Optional["np.ndarray[Any, np.dtype[np.float64]]"]
MaskT = Optional["npt.NDArray[Any]"]
AlphaT = Optional["npt.NDArray[Any]"]
Comment thread
maverickcodex18 marked this conversation as resolved.
Outdated
7 changes: 4 additions & 3 deletions yt/frontends/artio/data_structures.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
from collections import defaultdict

import numpy as np
import numpy.typing as nptype

from yt.data_objects.field_data import YTFieldData
from yt.data_objects.index_subobjects.octree_subset import OctreeSubset
Expand Down Expand Up @@ -339,10 +340,10 @@ def _read_fluid_fields(self, fields, dobj, chunk=None):

def _icoords_to_fcoords(
self,
icoords: np.ndarray,
ires: np.ndarray,
icoords: nptype.NDArray,
ires: nptype.NDArray,
axes: tuple[int, ...] | None = None,
) -> tuple[np.ndarray, np.ndarray]:
) -> tuple[nptype.NDArray, nptype.NDArray]:
"""
Accepts icoords and ires and returns appropriate fcoords and fwidth.
Mostly useful for cases where we have irregularly spaced or structured
Expand Down
13 changes: 7 additions & 6 deletions yt/frontends/ramses/hilbert.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
from typing import Any, Optional

import numpy as np
import numpy.typing as nptype
Comment thread
chrishavlin marked this conversation as resolved.
Outdated

from yt.data_objects.selection_objects.region import YTRegion
from yt.geometry.selection_routines import (
Expand Down Expand Up @@ -49,8 +50,8 @@


def hilbert3d(
ijk: "np.ndarray[Any, np.dtype[np.int64]]", bit_length: int
) -> "np.ndarray[Any, np.dtype[np.float64]]":
ijk: "nptype.NDArray[np.int64]", bit_length: int
) -> "nptype.NDArray[np.float64]":
Comment thread
chrishavlin marked this conversation as resolved.
Outdated
"""Compute the order using Hilbert indexing.

Arguments
Expand All @@ -70,11 +71,11 @@ def hilbert3d(
def get_intersecting_cpus(
ds,
region: YTRegion,
LE: Optional["np.ndarray[Any, np.dtype[np.float64]]"] = None,
LE: Optional["nptype.NDArray[np.float64]"] = None,
dx: float = 1.0,
dx_cond: float | None = None,
factor: float = 4.0,
bound_keys: Optional["np.ndarray[Any, np.dtype[np.float64]]"] = None,
bound_keys: Optional["nptype.NDArray[np.float64]"] = None,
) -> set[int]:
"""
Find the subset of CPUs that intersect the bbox in a recursive fashion.
Expand Down Expand Up @@ -119,8 +120,8 @@ def get_intersecting_cpus(

def get_cpu_list_cuboid(
ds,
X: "np.ndarray[Any, np.dtype[np.float64]]",
bound_keys: "np.ndarray[Any, np.dtype[np.float64]]",
X: "nptype.NDArray[np.float64]",
bound_keys: "nptype.NDArray[np.float64]",
) -> set[int]:
"""
Return the list of the CPU intersecting with the cuboid containing the positions.
Expand Down
11 changes: 6 additions & 5 deletions yt/frontends/ramses/io.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
from typing import TYPE_CHECKING, Union

import numpy as np
import numpy.typing as nptype
Comment thread
chrishavlin marked this conversation as resolved.
Outdated
from unyt import unyt_array

from yt._maintenance.deprecation import issue_deprecation_warning
Expand Down Expand Up @@ -37,7 +38,7 @@ def convert_ramses_ages(ds, conformal_ages):


def convert_ramses_conformal_time_to_physical_time(
ds, conformal_time: np.ndarray
ds, conformal_time: nptype.NDArray
) -> unyt_array:
"""
Convert conformal times (as defined in RAMSES) to physical times.
Expand Down Expand Up @@ -82,7 +83,7 @@ def _ramses_particle_binary_file_handler(
subset: "RAMSESDomainSubset",
fields: list[FieldKey],
count: int,
) -> dict[FieldKey, np.ndarray]:
) -> dict[FieldKey, nptype.NDArray]:
"""General file handler for binary file, called by _read_particle_subset

Parameters
Expand All @@ -96,7 +97,7 @@ def _ramses_particle_binary_file_handler(
count: integer
The number of elements to count
"""
tr = {}
tr: dict[FieldKey, nptype.NDArray] = {}
ds = subset.domain.ds
foffsets = particle_handler.field_offsets
fname = particle_handler.fname
Expand Down Expand Up @@ -130,7 +131,7 @@ def _ramses_particle_csv_file_handler(
subset: "RAMSESDomainSubset",
fields: list[FieldKey],
count: int,
) -> dict[FieldKey, np.ndarray]:
) -> dict[FieldKey, nptype.NDArray]:
"""General file handler for csv file, called by _read_particle_subset

Parameters
Expand All @@ -146,7 +147,7 @@ def _ramses_particle_csv_file_handler(
"""
from yt.utilities.on_demand_imports import _pandas as pd

tr = {}
tr: dict[FieldKey, nptype.NDArray] = {}
ds = subset.domain.ds
foffsets = particle_handler.field_offsets
fname = particle_handler.fname
Expand Down
11 changes: 6 additions & 5 deletions yt/frontends/ramses/particle_handlers.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
from typing import TYPE_CHECKING, Any

import numpy as np
import numpy.typing as nptype
Comment thread
chrishavlin marked this conversation as resolved.
Outdated

from yt._typing import FieldKey
from yt.config import ytcfg
Expand Down Expand Up @@ -71,7 +72,7 @@ class ParticleFileHandler(abc.ABC, HandlerMixin):
# assumed to be `self`).
reader: Callable[
["RAMSESDomainSubset", list[FieldKey], int],
dict[FieldKey, np.ndarray],
dict[FieldKey, nptype.NDArray],
]

# Name of the config section (if any)
Expand Down Expand Up @@ -162,7 +163,7 @@ def header(self) -> dict[str, Any]:
self.read_header()
return self._header

def handle_field(self, field: FieldKey, data_dict: dict[FieldKey, np.ndarray]):
def handle_field(self, field: FieldKey, data_dict: dict[FieldKey, nptype.NDArray]):
"""
This function allows custom code to be called to handle special cases,
such as the particle birth time.
Expand All @@ -173,7 +174,7 @@ def handle_field(self, field: FieldKey, data_dict: dict[FieldKey, np.ndarray]):
----------
field : FieldKey
The field name.
data_dict : dict[FieldKey, np.ndarray]
data_dict : dict[FieldKey, nptype.NDArray]
A dictionary containing the data.

By default, this function does nothing.
Expand Down Expand Up @@ -346,7 +347,7 @@ def birth_file_fname(self):
def has_birth_file(self):
return os.path.exists(self.birth_file_fname)

def handle_field(self, field: FieldKey, data_dict: dict[FieldKey, np.ndarray]):
def handle_field(self, field: FieldKey, data_dict: dict[FieldKey, nptype.NDArray]):
_ptype, fname = field
if not (fname == "particle_birth_time" and self.ds.cosmological_simulation):
return
Expand Down Expand Up @@ -492,7 +493,7 @@ def read_header(self):
self._field_offsets = field_offsets
self._field_types = _pfields

def handle_field(self, field: FieldKey, data_dict: dict[FieldKey, np.ndarray]):
def handle_field(self, field: FieldKey, data_dict: dict[FieldKey, nptype.NDArray]):
_ptype, fname = field
if not (fname == "particle_birth_time" and self.ds.cosmological_simulation):
return
Expand Down
5 changes: 3 additions & 2 deletions yt/frontends/rockstar/data_structures.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
from typing import Any, Optional

import numpy as np
import numpy.typing as nptype
Comment thread
chrishavlin marked this conversation as resolved.
Outdated

from yt.data_objects.static_output import ParticleDataset
from yt.frontends.halo_catalog.data_structures import HaloCatalogFile
Expand All @@ -21,7 +22,7 @@ class RockstarBinaryFile(HaloCatalogFile):
header: dict
_position_offset: int
_member_offset: int
_Npart: "np.ndarray[Any, np.dtype[np.int64]]"
_Npart: "nptype.NDArray[np.int64]"
_ids_halos: list[int]
_file_size: int

Expand Down Expand Up @@ -49,7 +50,7 @@ def __init__(self, ds, io, filename, file_id, range):

def _read_member(
self, ihalo: int
) -> Optional["np.ndarray[Any, np.dtype[np.int64]]"]:
) -> Optional["nptype.NDArray[np.int64]"]:
if ihalo not in self._ids_halos:
return None

Expand Down
5 changes: 3 additions & 2 deletions yt/frontends/stream/misc.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,13 @@
import numpy as np
import numpy.typing as nptype
Comment thread
chrishavlin marked this conversation as resolved.
Outdated

from yt._typing import DomainDimensions


def _validate_cell_widths(
cell_widths: list[np.ndarray],
cell_widths: list[nptype.NDArray],
domain_dimensions: DomainDimensions,
) -> list[np.ndarray]:
) -> list[nptype.NDArray]:
# check dimensionality
if (nwids := len(cell_widths)) != (ndims := len(domain_dimensions)):
raise ValueError(
Expand Down
5 changes: 3 additions & 2 deletions yt/geometry/coordinates/coordinate_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
from yt.funcs import fix_unitary, is_sequence, parse_center_array, validate_width_tuple
from yt.units.yt_array import YTArray, YTQuantity
from yt.utilities.exceptions import YTCoordinateNotImplemented, YTInvalidWidthError
import numpy.typing as npt


def _unknown_coord(data):
Expand Down Expand Up @@ -158,7 +159,7 @@ def pixelize(
periodic=True,
*,
return_mask: Literal[False],
) -> "np.ndarray[Any, np.dtype[np.float64]]": ...
) -> "npt.NDArray[np.float64]": ...

@overload
def pixelize(
Expand All @@ -173,7 +174,7 @@ def pixelize(
*,
return_mask: Literal[True],
) -> tuple[
"np.ndarray[Any, np.dtype[np.float64]]", "np.ndarray[Any, np.dtype[np.bool_]]"
"npt.NDArray[np.float64]", "npt.NDArray[np.bool_]"
]: ...

@abc.abstractmethod
Expand Down
7 changes: 4 additions & 3 deletions yt/geometry/geometry_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
import weakref

import numpy as np
import numpy.typing as npt

from yt._maintenance.deprecation import issue_deprecation_warning
from yt.config import ytcfg
Expand Down Expand Up @@ -51,10 +52,10 @@ def _detect_output_fields(self):

def _icoords_to_fcoords(
self,
icoords: np.ndarray,
ires: np.ndarray,
icoords: npt.NDArray,
ires: npt.NDArray,
axes: tuple[int, ...] | None = None,
) -> tuple[np.ndarray, np.ndarray]:
) -> tuple[npt.NDArray, npt.NDArray]:
# What's the use of raising NotImplementedError for this, when it's an
# abstract base class? Well, only *some* of the subclasses have it --
# and for those that *don't*, we should not be calling it -- and since
Expand Down
7 changes: 4 additions & 3 deletions yt/geometry/grid_geometry_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
from collections import defaultdict

import numpy as np
import numpy.typing as npt

from yt.arraytypes import blankRecordArray
from yt.config import ytcfg
Expand Down Expand Up @@ -447,10 +448,10 @@ def _chunk_io(

def _icoords_to_fcoords(
self,
icoords: np.ndarray,
ires: np.ndarray,
icoords: npt.NDArray,
ires: npt.NDArray,
axes: tuple[int, ...] | None = None,
) -> tuple[np.ndarray, np.ndarray]:
) -> tuple[npt.NDArray, npt.NDArray]:
"""
Accepts icoords and ires and returns appropriate fcoords and fwidth.
Mostly useful for cases where we have irregularly spaced or structured
Expand Down
7 changes: 4 additions & 3 deletions yt/geometry/oct_geometry_handler.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import numpy as np
import numpy.typing as npt

from yt.fields.field_detector import FieldDetector
from yt.geometry.geometry_handler import Index
Expand Down Expand Up @@ -119,10 +120,10 @@ def _mesh_sampling_particle_field(data):

def _icoords_to_fcoords(
self,
icoords: np.ndarray,
ires: np.ndarray,
icoords: npt.NDArray,
ires: npt.NDArray,
axes: tuple[int, ...] | None = None,
) -> tuple[np.ndarray, np.ndarray]:
) -> tuple[npt.NDArray, npt.NDArray]:
"""
Accepts icoords and ires and returns appropriate fcoords and fwidth.
Mostly useful for cases where we have irregularly spaced or structured
Expand Down
7 changes: 4 additions & 3 deletions yt/loaders.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
from urllib.parse import urlsplit

import numpy as np
import numpy.typing as nptype
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

let's use import numpy.typing as npt everywhere. You've got a mix of imports across files, please just stick to import numpy.typing as npt.

Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

npt was already in use as an alias in some files, so I thought nptype would be a better choice to avoid any conflicts.

Also, could you help me add a label to the PR? I don't know how to add one.

Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

can you point out a spot where it's being used as an alias? I don't see it in this file or the other couple I just checked. npt is preferred. i'll add the label when I get to doing a full review.

Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The alias nptype was chosen to avoid conflicts with numpy.testing, which is often imported as npt.

It's true that we use npt for both namespaces; however, I don't think they are ever imported in the same module (we don't have type hints in tests, and we shouldn't be using numpy.typing anywhere else). So, even if the meaning of npt is context-dependent, I don't think it's actually ambiguous.

Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Thanks @neutrinoceros
In testing.py, we are importing both numpy.typing and numpy.testing; otherwise, everything looks good for using npt as the alias.
Should I stick to the npt alias for numpy.typing and revert the type hint changes in testing.py?

Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

ah, you mean this section?

yt/yt/testing.py

Lines 1880 to 1897 in 2973f4d

def _deprecated_numpy_testing_reexport(func):
import numpy.testing as npt
npt_func = getattr(npt, func.__name__)
@wraps(npt_func)
def retf(*args, **kwargs):
__tracebackhide__ = True # Hide traceback for pytest
issue_deprecation_warning(
f"yt.testing.{func.__name__} is a pure re-export of "
f"numpy.testing.{func.__name__}, it will stop working in the future. "
"Please import this function directly from numpy instead.",
since="4.2",
stacklevel=3,
)
return npt_func(*args, **kwargs)
return retf

ya, that's an unfortunate re-definition of npt in that function... I'd say (1) keep the import numpy.typing as npt at the top of the file and revert your changes and (2) adjust that testing import for clarity:

    import numpy.testing as nptesting

    npt_func = getattr(nptesting, func.__name__)

Looks like there are a couple other minor spots where we do import numpy.testing as npt, but not in any of the files you're touching, so I'll submit a separate PR to fix those locations so we reserve npt for numpy.typing

Thanks! and FYI I'm hoping to take a detailed look this afternoon or tomorrow.

Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

still need to update this import

Suggested change
import numpy.typing as nptype
import numpy.typing as npt

from more_itertools import always_iterable

from yt._maintenance.deprecation import (
Expand Down Expand Up @@ -687,7 +688,7 @@ def load_amr_grids(


def load_particles(
data: Mapping[AnyFieldKey, np.ndarray | tuple[np.ndarray, str]],
data: Mapping[AnyFieldKey, nptype.NDArray | tuple[nptype.NDArray, str]],
length_unit=None,
bbox=None,
sim_time=None,
Expand Down Expand Up @@ -826,7 +827,7 @@ def parse_unit(unit, dimension):
field_units, data, _ = process_data(data)
sfh = StreamDictFieldHandler()

pdata: dict[AnyFieldKey, np.ndarray | tuple[np.ndarray, str]] = {}
pdata: dict[AnyFieldKey, nptype.NDArray | tuple[nptype.NDArray, str]] = {}
for key in data.keys():
field: FieldKey
if not isinstance(key, tuple):
Expand Down Expand Up @@ -1816,7 +1817,7 @@ def load_hdf5_file(
fn: Union[str, "os.PathLike[str]"],
root_node: str | None = "/",
fields: list[str] | None = None,
bbox: np.ndarray | None = None,
bbox: nptype.NDArray | None = None,
nchunks: int = 0,
dataset_arguments: dict | None = None,
):
Expand Down
Loading
Loading