Skip to content
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -109,3 +109,4 @@ doc/source/quickstart/.ipynb_checkpoints/
dist
.python-version
answer_nosetests.xml
.venv/
13 changes: 7 additions & 6 deletions yt/_typing.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
from typing import Any, Optional, TypeAlias
from typing import Optional, TypeAlias

import numpy as np
import numpy.typing as npt
import unyt as un

FieldDescT = tuple[str, tuple[str, list[str], str | None]]
Expand All @@ -12,12 +13,12 @@
FieldKey = tuple[FieldType, FieldName]
ImplicitFieldKey = FieldName
AnyFieldKey = FieldKey | ImplicitFieldKey
DomainDimensions = tuple[int, ...] | list[int] | np.ndarray
DomainDimensions = tuple[int, ...] | list[int] | npt.NDArray

ParticleCoordinateTuple = tuple[
str, # particle type
tuple[np.ndarray, np.ndarray, np.ndarray], # xyz
float | np.ndarray, # hsml
tuple[npt.NDArray, npt.NDArray, npt.NDArray], # xyz
float | npt.NDArray, # hsml
]

# Geometry specific types
Expand All @@ -33,5 +34,5 @@
# np.ndarray[...] syntax is runtime-valid from numpy 1.22, we quote it until our minimal
# runtime requirement is bumped to, or beyond this version

MaskT = Optional["np.ndarray[Any, np.dtype[np.bool_]]"]
AlphaT = Optional["np.ndarray[Any, np.dtype[np.float64]]"]
MaskT = Optional["npt.NDArray[np.bool_]"]
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We don't need Optional at all now that we dropped Python 3.9 (over a year ago). I don't understand why this wasn't auto-refactored already by ruff, but I want to find out.

Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

gotcha: ruff is backing off because of the stringified annotation within. Remove the quotes and we actually get Python 3.10 code. I think the original reason we used quotes there was backward compatibility with older versions of numpy, but we require 1.21+ now, so numpy.typing.NDArray can always be assumed to be available.

AlphaT = Optional["npt.NDArray[np.float64]"]
7 changes: 4 additions & 3 deletions yt/frontends/artio/data_structures.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
from collections import defaultdict

import numpy as np
import numpy.typing as npt

from yt.data_objects.field_data import YTFieldData
from yt.data_objects.index_subobjects.octree_subset import OctreeSubset
Expand Down Expand Up @@ -339,10 +340,10 @@ def _read_fluid_fields(self, fields, dobj, chunk=None):

def _icoords_to_fcoords(
self,
icoords: np.ndarray,
ires: np.ndarray,
icoords: npt.NDArray,
ires: npt.NDArray,
axes: tuple[int, ...] | None = None,
) -> tuple[np.ndarray, np.ndarray]:
) -> tuple[npt.NDArray, npt.NDArray]:
"""
Accepts icoords and ires and returns appropriate fcoords and fwidth.
Mostly useful for cases where we have irregularly spaced or structured
Expand Down
15 changes: 7 additions & 8 deletions yt/frontends/ramses/hilbert.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
from typing import Any, Optional
from typing import Optional

import numpy as np
import numpy.typing as npt

from yt.data_objects.selection_objects.region import YTRegion
from yt.geometry.selection_routines import (
Expand Down Expand Up @@ -48,9 +49,7 @@
)


def hilbert3d(
ijk: "np.ndarray[Any, np.dtype[np.int64]]", bit_length: int
) -> "np.ndarray[Any, np.dtype[np.float64]]":
def hilbert3d(ijk: "npt.NDArray[np.int64]", bit_length: int) -> "npt.NDArray[np.int64]":
"""Compute the order using Hilbert indexing.

Arguments
Expand All @@ -70,11 +69,11 @@ def hilbert3d(
def get_intersecting_cpus(
ds,
region: YTRegion,
LE: Optional["np.ndarray[Any, np.dtype[np.float64]]"] = None,
LE: Optional["npt.NDArray[np.float64]"] = None,
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

oh! nothing to change here, but I just remembered: the Any in the original np.ndarray[Any, np.dtype[np.float64]] is a reference to the array shape. So the update here is equivalent, since npt.NDArray[dtype] is a type alias for np.ndarray[tuple[Any, ...], dtype].

Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

dx: float = 1.0,
dx_cond: float | None = None,
factor: float = 4.0,
bound_keys: Optional["np.ndarray[Any, np.dtype[np.float64]]"] = None,
bound_keys: Optional["npt.NDArray[np.float64]"] = None,
) -> set[int]:
"""
Find the subset of CPUs that intersect the bbox in a recursive fashion.
Expand Down Expand Up @@ -119,8 +118,8 @@ def get_intersecting_cpus(

def get_cpu_list_cuboid(
ds,
X: "np.ndarray[Any, np.dtype[np.float64]]",
bound_keys: "np.ndarray[Any, np.dtype[np.float64]]",
X: "npt.NDArray[np.float64]",
bound_keys: "npt.NDArray[np.float64]",
) -> set[int]:
"""
Return the list of the CPU intersecting with the cuboid containing the positions.
Expand Down
11 changes: 6 additions & 5 deletions yt/frontends/ramses/io.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
from typing import TYPE_CHECKING, Union

import numpy as np
import numpy.typing as npt
from unyt import unyt_array

from yt._maintenance.deprecation import issue_deprecation_warning
Expand Down Expand Up @@ -37,7 +38,7 @@ def convert_ramses_ages(ds, conformal_ages):


def convert_ramses_conformal_time_to_physical_time(
ds, conformal_time: np.ndarray
ds, conformal_time: npt.NDArray
) -> unyt_array:
"""
Convert conformal times (as defined in RAMSES) to physical times.
Expand Down Expand Up @@ -82,7 +83,7 @@ def _ramses_particle_binary_file_handler(
subset: "RAMSESDomainSubset",
fields: list[FieldKey],
count: int,
) -> dict[FieldKey, np.ndarray]:
) -> dict[FieldKey, npt.NDArray]:
"""General file handler for binary file, called by _read_particle_subset

Parameters
Expand All @@ -96,7 +97,7 @@ def _ramses_particle_binary_file_handler(
count: integer
The number of elements to count
"""
tr = {}
tr: dict[FieldKey, npt.NDArray] = {}
ds = subset.domain.ds
foffsets = particle_handler.field_offsets
fname = particle_handler.fname
Expand Down Expand Up @@ -130,7 +131,7 @@ def _ramses_particle_csv_file_handler(
subset: "RAMSESDomainSubset",
fields: list[FieldKey],
count: int,
) -> dict[FieldKey, np.ndarray]:
) -> dict[FieldKey, npt.NDArray]:
"""General file handler for csv file, called by _read_particle_subset

Parameters
Expand All @@ -146,7 +147,7 @@ def _ramses_particle_csv_file_handler(
"""
from yt.utilities.on_demand_imports import _pandas as pd

tr = {}
tr: dict[FieldKey, npt.NDArray] = {}
ds = subset.domain.ds
foffsets = particle_handler.field_offsets
fname = particle_handler.fname
Expand Down
11 changes: 6 additions & 5 deletions yt/frontends/ramses/particle_handlers.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
from typing import TYPE_CHECKING, Any

import numpy as np
import numpy.typing as npt

from yt._typing import FieldKey
from yt.config import ytcfg
Expand Down Expand Up @@ -71,7 +72,7 @@ class ParticleFileHandler(abc.ABC, HandlerMixin):
# assumed to be `self`).
reader: Callable[
["RAMSESDomainSubset", list[FieldKey], int],
dict[FieldKey, np.ndarray],
dict[FieldKey, npt.NDArray],
]

# Name of the config section (if any)
Expand Down Expand Up @@ -162,7 +163,7 @@ def header(self) -> dict[str, Any]:
self.read_header()
return self._header

def handle_field(self, field: FieldKey, data_dict: dict[FieldKey, np.ndarray]):
def handle_field(self, field: FieldKey, data_dict: dict[FieldKey, npt.NDArray]):
"""
This function allows custom code to be called to handle special cases,
such as the particle birth time.
Expand All @@ -173,7 +174,7 @@ def handle_field(self, field: FieldKey, data_dict: dict[FieldKey, np.ndarray]):
----------
field : FieldKey
The field name.
data_dict : dict[FieldKey, np.ndarray]
data_dict : dict[FieldKey, npt.NDArray]
A dictionary containing the data.

By default, this function does nothing.
Expand Down Expand Up @@ -346,7 +347,7 @@ def birth_file_fname(self):
def has_birth_file(self):
return os.path.exists(self.birth_file_fname)

def handle_field(self, field: FieldKey, data_dict: dict[FieldKey, np.ndarray]):
def handle_field(self, field: FieldKey, data_dict: dict[FieldKey, npt.NDArray]):
_ptype, fname = field
if not (fname == "particle_birth_time" and self.ds.cosmological_simulation):
return
Expand Down Expand Up @@ -492,7 +493,7 @@ def read_header(self):
self._field_offsets = field_offsets
self._field_types = _pfields

def handle_field(self, field: FieldKey, data_dict: dict[FieldKey, np.ndarray]):
def handle_field(self, field: FieldKey, data_dict: dict[FieldKey, npt.NDArray]):
_ptype, fname = field
if not (fname == "particle_birth_time" and self.ds.cosmological_simulation):
return
Expand Down
9 changes: 4 additions & 5 deletions yt/frontends/rockstar/data_structures.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,10 @@
import glob
import os
from functools import cached_property
from typing import Any, Optional
from typing import Optional

import numpy as np
import numpy.typing as npt

from yt.data_objects.static_output import ParticleDataset
from yt.frontends.halo_catalog.data_structures import HaloCatalogFile
Expand All @@ -21,7 +22,7 @@ class RockstarBinaryFile(HaloCatalogFile):
header: dict
_position_offset: int
_member_offset: int
_Npart: "np.ndarray[Any, np.dtype[np.int64]]"
_Npart: "npt.NDArray[np.int64]"
_ids_halos: list[int]
_file_size: int

Expand All @@ -47,9 +48,7 @@ def __init__(self, ds, io, filename, file_id, range):

super().__init__(ds, io, filename, file_id, range)

def _read_member(
self, ihalo: int
) -> Optional["np.ndarray[Any, np.dtype[np.int64]]"]:
def _read_member(self, ihalo: int) -> Optional["npt.NDArray[np.int64]"]:
if ihalo not in self._ids_halos:
return None

Expand Down
5 changes: 3 additions & 2 deletions yt/frontends/stream/misc.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,13 @@
import numpy as np
import numpy.typing as npt

from yt._typing import DomainDimensions


def _validate_cell_widths(
cell_widths: list[np.ndarray],
cell_widths: list[npt.NDArray],
domain_dimensions: DomainDimensions,
) -> list[np.ndarray]:
) -> list[npt.NDArray]:
# check dimensionality
if (nwids := len(cell_widths)) != (ndims := len(domain_dimensions)):
raise ValueError(
Expand Down
9 changes: 4 additions & 5 deletions yt/geometry/coordinates/coordinate_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,10 @@
import weakref
from functools import cached_property
from numbers import Number
from typing import Any, Literal, overload
from typing import Literal, overload

import numpy as np
import numpy.typing as npt

from yt._typing import AxisOrder
from yt.funcs import fix_unitary, is_sequence, parse_center_array, validate_width_tuple
Expand Down Expand Up @@ -158,7 +159,7 @@ def pixelize(
periodic=True,
*,
return_mask: Literal[False],
) -> "np.ndarray[Any, np.dtype[np.float64]]": ...
) -> "npt.NDArray[np.float64]": ...

@overload
def pixelize(
Expand All @@ -172,9 +173,7 @@ def pixelize(
periodic=True,
*,
return_mask: Literal[True],
) -> tuple[
"np.ndarray[Any, np.dtype[np.float64]]", "np.ndarray[Any, np.dtype[np.bool_]]"
]: ...
) -> tuple["npt.NDArray[np.float64]", "npt.NDArray[np.bool_]"]: ...

@abc.abstractmethod
def pixelize(
Expand Down
7 changes: 4 additions & 3 deletions yt/geometry/geometry_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
import weakref

import numpy as np
import numpy.typing as npt

from yt._maintenance.deprecation import issue_deprecation_warning
from yt.config import ytcfg
Expand Down Expand Up @@ -51,10 +52,10 @@ def _detect_output_fields(self):

def _icoords_to_fcoords(
self,
icoords: np.ndarray,
ires: np.ndarray,
icoords: npt.NDArray,
ires: npt.NDArray,
axes: tuple[int, ...] | None = None,
) -> tuple[np.ndarray, np.ndarray]:
) -> tuple[npt.NDArray, npt.NDArray]:
# What's the use of raising NotImplementedError for this, when it's an
# abstract base class? Well, only *some* of the subclasses have it --
# and for those that *don't*, we should not be calling it -- and since
Expand Down
7 changes: 4 additions & 3 deletions yt/geometry/grid_geometry_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
from collections import defaultdict

import numpy as np
import numpy.typing as npt

from yt.arraytypes import blankRecordArray
from yt.config import ytcfg
Expand Down Expand Up @@ -447,10 +448,10 @@ def _chunk_io(

def _icoords_to_fcoords(
self,
icoords: np.ndarray,
ires: np.ndarray,
icoords: npt.NDArray,
ires: npt.NDArray,
axes: tuple[int, ...] | None = None,
) -> tuple[np.ndarray, np.ndarray]:
) -> tuple[npt.NDArray, npt.NDArray]:
"""
Accepts icoords and ires and returns appropriate fcoords and fwidth.
Mostly useful for cases where we have irregularly spaced or structured
Expand Down
7 changes: 4 additions & 3 deletions yt/geometry/oct_geometry_handler.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import numpy as np
import numpy.typing as npt

from yt.fields.field_detector import FieldDetector
from yt.geometry.geometry_handler import Index
Expand Down Expand Up @@ -119,10 +120,10 @@ def _mesh_sampling_particle_field(data):

def _icoords_to_fcoords(
self,
icoords: np.ndarray,
ires: np.ndarray,
icoords: npt.NDArray,
ires: npt.NDArray,
axes: tuple[int, ...] | None = None,
) -> tuple[np.ndarray, np.ndarray]:
) -> tuple[npt.NDArray, npt.NDArray]:
"""
Accepts icoords and ires and returns appropriate fcoords and fwidth.
Mostly useful for cases where we have irregularly spaced or structured
Expand Down
7 changes: 4 additions & 3 deletions yt/loaders.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
from urllib.parse import urlsplit

import numpy as np
import numpy.typing as npt
from more_itertools import always_iterable

from yt._maintenance.deprecation import (
Expand Down Expand Up @@ -687,7 +688,7 @@ def load_amr_grids(


def load_particles(
data: Mapping[AnyFieldKey, np.ndarray | tuple[np.ndarray, str]],
data: Mapping[AnyFieldKey, npt.NDArray | tuple[npt.NDArray, str]],
length_unit=None,
bbox=None,
sim_time=None,
Expand Down Expand Up @@ -826,7 +827,7 @@ def parse_unit(unit, dimension):
field_units, data, _ = process_data(data)
sfh = StreamDictFieldHandler()

pdata: dict[AnyFieldKey, np.ndarray | tuple[np.ndarray, str]] = {}
pdata: dict[AnyFieldKey, npt.NDArray | tuple[npt.NDArray, str]] = {}
for key in data.keys():
field: FieldKey
if not isinstance(key, tuple):
Expand Down Expand Up @@ -1818,7 +1819,7 @@ def load_hdf5_file(
fn: Union[str, "os.PathLike[str]"],
root_node: str | None = "/",
fields: list[str] | None = None,
bbox: np.ndarray | None = None,
bbox: npt.NDArray | None = None,
nchunks: int = 0,
dataset_arguments: dict | None = None,
):
Expand Down
Loading