diff --git a/.github/workflows/examples.yml b/.github/workflows/examples.yml
index 6193564b7..6abb296e8 100644
--- a/.github/workflows/examples.yml
+++ b/.github/workflows/examples.yml
@@ -21,7 +21,7 @@ jobs:
           submodules: true
       - uses: actions/setup-python@v6
         with:
-          python-version: "3.9"
+          python-version: "3.10"
       - uses: astral-sh/setup-uv@v7
       - name: Build and install kernel
         run: uv run --group examples -m ipykernel install --user --name boost-hist
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index e06ae6423..ce2ab2f9a 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -59,7 +59,7 @@ jobs:
       fail-fast: false
       matrix:
         include:
-          - python-version: "3.9"
+          - python-version: "3.10"
             cmake-extras: "-DCMAKE_CXX_STANDARD=17"
           - python-version: "3.11"
           - python-version: "3.13t"
@@ -111,7 +111,7 @@ jobs:
           - os: ubuntu-24.04-arm
             only: cp313-manylinux_aarch64
           - os: windows-latest
-            only: cp39-win32
+            only: cp310-win32
           - os: windows-latest
             only: cp313-win_amd64
           - os: macos-13
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index c7035182b..47cbae997 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -47,7 +47,7 @@ repos:
     hooks:
       - id: mypy
         files: ^src
-        additional_dependencies: [numpy~=2.3.0, pytest, uhi]
+        additional_dependencies: [numpy~=2.2.0, pytest, uhi]

  - repo: https://github.com/codespell-project/codespell
    rev: v2.4.1
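
Note: the CI matrix above and the metadata below both move the minimum to CPython 3.10. As a quick sanity check (not part of this diff, and assuming the `packaging` library is available), the new floor can be compared against interpreter versions:

```python
# Illustrative only: which interpreters satisfy the new requires-python floor.
from packaging.specifiers import SpecifierSet

floor = SpecifierSet(">=3.10")  # mirrors requires-python in pyproject.toml below

print("3.9.18" in floor)   # False - pip on 3.9 resolves to an older release instead
print("3.10.0" in floor)   # True
print("3.14.0" in floor)   # True
```
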
diff --git a/README.md b/README.md
index 8a4de3fa8..910a50cb8 100644
--- a/README.md
+++ b/README.md
@@ -179,11 +179,12 @@ python3 -m pip install boost-histogram
 ```

 All the normal best-practices for Python apply; Pip should not be very old (Pip
-9 is very old), you should be in a virtual environment, etc. Python 3.9+ is
+9 is very old), you should be in a virtual environment, etc. Python 3.10+ is
 required; for older versions of Python (3.5 and 2.7), `0.13` will be installed
 instead, which is API equivalent to 1.0, but will not be gaining new features.
 1.3.x was the last series to support Python 3.6. 1.4.x was the last series to
-support Python 3.7. 1.5.x was the last series to support Python 3.8.
+support Python 3.7. 1.5.x was the last series to support Python 3.8. 1.6.x was
+the last to support Python 3.9.

 #### Binaries available:

@@ -192,17 +193,18 @@ when you run the above command on a supported platform. Wheels are produced usin
 [cibuildwheel](https://cibuildwheel.readthedocs.io/en/stable/); all common
 platforms have wheels provided in boost-histogram:

-| System        | Arch   | Python versions                    | PyPy versions |
-| ------------- | ------ | ---------------------------------- | ------------- |
-| manylinux2014 | 64-bit | 3.9, 3.10, 3.11, 3.12, 3.13, 3.13t | 3.9, 3.10     |
-| manylinux2014 | ARM64  | 3.9, 3.10, 3.11, 3.12, 3.13, 3.13t | 3.9, 3.10     |
-| musllinux_1_1 | 64-bit | 3.9, 3.10, 3.11, 3.12, 3.13, 3.13t |               |
-| macOS         | 64-bit | 3.9, 3.10, 3.11, 3.12, 3.13, 3.13t | 3.9, 3.10     |
-| macOS         | Arm64  | 3.9, 3.10, 3.11, 3.12, 3.13, 3.13t | 3.9, 3.10     |
-| Windows       | 32-bit | 3.9, 3.10, 3.11, 3.12, 3.13, 3.13t |               |
-| Windows       | 64-bit | 3.9, 3.10, 3.11, 3.12, 3.13, 3.13t | 3.9, 3.10     |
-
-PowerPC or IBM-Z wheels are not provided but are available on request.
+| System    | Arch   | Python versions                            | PyPy versions |
+| --------- | ------ | ------------------------------------------ | ------------- |
+| manylinux | 64-bit | 3.10, 3.11, 3.12, 3.13, 3.13t, 3.14, 3.14t | 3.11          |
+| manylinux | ARM64  | 3.10, 3.11, 3.12, 3.13, 3.13t, 3.14, 3.14t | 3.11          |
+| musllinux | 64-bit | 3.10, 3.11, 3.12, 3.13, 3.13t, 3.14, 3.14t |               |
+| macOS     | 64-bit | 3.10, 3.11, 3.12, 3.13, 3.13t, 3.14, 3.14t | 3.11          |
+| macOS     | Arm64  | 3.10, 3.11, 3.12, 3.13, 3.13t, 3.14, 3.14t | 3.11          |
+| Windows   | 32-bit | 3.10, 3.11, 3.12, 3.13, 3.13t, 3.14, 3.14t |               |
+| Windows   | 64-bit | 3.10, 3.11, 3.12, 3.13, 3.13t, 3.14, 3.14t | 3.11          |
+| Windows   | ARM64  | 3.11, 3.12, 3.13, 3.13t, 3.14, 3.14t       |               |
+
+PowerPC, IBM-Z, and RISC-V wheels are not provided but are available on request.

 If you are on a Linux system that is not part of the "many" in manylinux or musl in musllinux, such as ClearLinux, building from source is usually fine, since the compilers on those systems are often quite new. It will just take longer to install when it is using the sdist instead of a wheel. All dependencies are header-only and included.
diff --git a/examples/README.md b/examples/README.md
index 2459d6634..dde1e30c6 100644
--- a/examples/README.md
+++ b/examples/README.md
@@ -1,6 +1,6 @@
 # Boost Histogram examples

-The examples require Python 3.9. It is left as an exercise for the reader to
+The examples require Python 3.10. It is left as an exercise for the reader to
 convert back to older versions if they so desire.

 ### Setup
diff --git a/notebooks/xarray.ipynb b/notebooks/xarray.ipynb
index 4e01da306..e99734438 100644
--- a/notebooks/xarray.ipynb
+++ b/notebooks/xarray.ipynb
@@ -178,7 +178,7 @@
     "        name=\"_\".join(a.name for a in args) + \"_histogram\",\n",
     "        coords=[\n",
     "            (f\"{a.name}_bin\", arr.flatten(), a.attrs)\n",
-    "            for a, arr in zip(args, h.axes.centers)\n",
+    "            for a, arr in zip(args, h.axes.centers, strict=False)\n",
     "        ],\n",
     "    )"
    ]
@@ -270,7 +270,7 @@
     "        name=\"_\".join(a.name for a in args) + \"_histogram\",\n",
     "        coords=[\n",
     "            (f\"{a.name}_bin\", arr.flatten(), a.attrs)\n",
-    "            for a, arr in zip(args, h.axes.centers)\n",
+    "            for a, arr in zip(args, h.axes.centers, strict=False)\n",
     "        ],\n",
     "    )"
    ]
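
Note: the notebook cells above gain `strict=False`, which keeps classic truncating `zip` behaviour while making the choice explicit now that 3.10+ offers the keyword. A minimal sketch of the difference, with illustrative values that are not from the notebook:

```python
args = ["x", "y"]
centers = [1.5, 2.5, 3.5]  # hypothetical extra entry

# strict=False (the choice above) silently stops at the shorter input:
print(list(zip(args, centers, strict=False)))  # [('x', 1.5), ('y', 2.5)]

# strict=True raises instead of truncating:
try:
    list(zip(args, centers, strict=True))
except ValueError as err:
    print(err)  # zip() argument 2 is longer than argument 1
```
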
diff --git a/pyproject.toml b/pyproject.toml
index 913721ba2..7f5645415 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -7,7 +7,7 @@ name = "boost-histogram"
 dynamic = ["version"]
 description = "The Boost::Histogram Python wrapper."
 readme = "README.md"
-requires-python = ">=3.9"
+requires-python = ">=3.10"
 authors = [
   { name = "Hans Dembinski", email = "hans.dembinski@gmail.com" },
   { name = "Henry Schreiner", email = "hschrein@cern.ch" },
@@ -37,7 +37,6 @@ classifiers = [
   "Programming Language :: Python :: 3.12",
   "Programming Language :: Python :: 3.13",
   "Programming Language :: Python :: 3.14",
-  "Programming Language :: Python :: 3.9",
   "Programming Language :: Python :: Free Threading :: 3 - Stable",
   "Programming Language :: Python :: Free Threading",
   "Programming Language :: Python :: Implementation :: CPython",
@@ -158,7 +157,7 @@ required_plugins = ["pytest-benchmark"]
 log_cli_level = "DEBUG"

 [tool.mypy]
-python_version = "3.9"
+python_version = "3.10"
 files = ["src"]
 strict = true
 enable_error_code = ["ignore-without-code", "redundant-expr", "truthy-bool"]
@@ -181,8 +180,8 @@ test-command = "python -m pytest -n auto --benchmark-disable tests"
 test-sources = ["pyproject.toml", "tests"]
 test-environment.CI = "1" # Hypothosis needs this on GraalPy
 test-skip = [
-  "cp3{9,10}-win_arm64",
-  "cp3{9,10}-musllinux_*",
+  "cp310-win_arm64",
+  "cp310-musllinux_*",
   "pp310-manylinux_aarch64",
   "pp310-macosx_arm64",
   "cp31*-musllinux_*", # Threading test crashes
@@ -228,7 +227,7 @@ environment.UV_INDEX_STRATEGY = "unsafe-best-match"
 test-command = "pytest --benchmark-disable tests"

 [tool.pylint]
-py-version = "3.9"
+py-version = "3.10"
 ignore-patterns = ['.*\.pyi']
 ignore = "version.py"
 extension-pkg-allow-list = ["boost_histogram._core"]
diff --git a/src/boost_histogram/_core/axis/transform.pyi b/src/boost_histogram/_core/axis/transform.pyi
index 9cfb372df..4e514affa 100644
--- a/src/boost_histogram/_core/axis/transform.pyi
+++ b/src/boost_histogram/_core/axis/transform.pyi
@@ -1,4 +1,5 @@
-from typing import Any, Callable
+from collections.abc import Callable
+from typing import Any

 from typing_extensions import Self

diff --git a/src/boost_histogram/_utils.py b/src/boost_histogram/_utils.py
index 57124996e..f535eb25d 100644
--- a/src/boost_histogram/_utils.py
+++ b/src/boost_histogram/_utils.py
@@ -1,10 +1,8 @@
 from __future__ import annotations

-import itertools
-import sys
 import typing
-from collections.abc import Iterator
-from typing import Any, Callable, ClassVar, Protocol, TypeVar
+from collections.abc import Callable, Iterator
+from typing import ClassVar, Protocol, TypeVar

 import boost_histogram

@@ -165,16 +163,3 @@ def _walk_subclasses(cls: type[object]) -> Iterator[type[object]]:
             # user subclasses to work
             yield from _walk_subclasses(base)
             yield base
-
-
-def zip_strict(*args: Any) -> Iterator[tuple[Any, ...]]:
-    if sys.version_info >= (3, 10):
-        yield from zip(*args, strict=True)
-        return
-
-    marker = object()
-    for each in itertools.zip_longest(*args, fillvalue=marker):
-        for val in each:
-            if val is marker:
-                raise ValueError("zip() arguments are not the same length")
-        yield each
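
Note: here and in the modules that follow, `Callable` moves from `typing` to `collections.abc`, its non-deprecated home on modern Python, and the `zip_strict` backport above is dropped in favour of the built-in `strict=` keyword. A short sketch of the import pattern, with a made-up function name rather than anything from the package:

```python
from collections.abc import Callable

# Hypothetical helper; the real signatures live in the modules below.
def apply_twice(func: Callable[[int], int], value: int) -> int:
    return func(func(value))

print(apply_twice(lambda x: x + 1, 0))  # 2
```
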
diff --git a/src/boost_histogram/axis/__init__.py b/src/boost_histogram/axis/__init__.py
index e944975a1..a49c32e0b 100644
--- a/src/boost_histogram/axis/__init__.py
+++ b/src/boost_histogram/axis/__init__.py
@@ -1,17 +1,15 @@
 from __future__ import annotations

 import copy
-from collections.abc import Iterable, Iterator
+from collections.abc import Callable, Iterable, Iterator
 from dataclasses import dataclass
 from functools import partial
 from typing import (
     Any,
-    Callable,
     ClassVar,
     Literal,
     TypedDict,
     TypeVar,
-    Union,
 )

 import numpy as np  # pylint: disable=unused-import
@@ -20,7 +18,7 @@
 from .._compat.typing import Self
 from .._core import axis as ca
-from .._utils import cast, register, zip_strict
+from .._utils import cast, register
 from . import transform
 from .transform import AxisTransform

@@ -59,7 +57,7 @@ def _opts(**kwargs: bool) -> set[str]:
     return {k for k, v in kwargs.items() if v}


-AxCallOrInt = Union[int, Callable[["Axis"], int]]
+AxCallOrInt = int | Callable[["Axis"], int]


 @dataclass(order=True, frozen=True)
@@ -287,21 +285,21 @@ def __getitem__(self, i: AxCallOrInt) -> int | str | tuple[float, float]:

     @property
     def edges(self) -> np.typing.NDArray[Any]:
-        return self._ax.edges
+        return self._ax.edges  # type: ignore[no-any-return]

     @property
     def centers(self) -> np.typing.NDArray[Any]:
         """
         An array of bin centers.
         """
-        return self._ax.centers
+        return self._ax.centers  # type: ignore[no-any-return]

     @property
     def widths(self) -> np.typing.NDArray[Any]:
         """
         An array of bin widths.
         """
-        return self._ax.widths
+        return self._ax.widths  # type: ignore[no-any-return]


 # Contains all common methods and properties for Regular axes
@@ -910,7 +908,7 @@ def __setattr__(self, attr: str, values: Any) -> None:
         try:
             super().__setattr__(attr, values)
         except AttributeError:
-            for s, v in zip_strict(self, values):
+            for s, v in zip(self, values, strict=True):
                 s.__setattr__(attr, v)

     value.__doc__ = Axis.value.__doc__
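
Note: `AxCallOrInt` above now uses the `X | Y` union syntax, which is evaluated at runtime (not just inside annotations) starting with Python 3.10, so it can sit in a plain module-level assignment. A minimal, self-contained illustration with a stand-in class:

```python
from collections.abc import Callable

class Axis:  # stand-in for boost_histogram.axis.Axis
    pass

# On 3.10+ this expression builds a runtime union object, so it works
# as a module-level alias exactly as in the hunk above.
AxCallOrInt = int | Callable[[Axis], int]
print(AxCallOrInt)  # prints the union of int and Callable[[Axis], int]
```
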
diff --git a/src/boost_histogram/histogram.py b/src/boost_histogram/histogram.py
index 97c9b731a..8f8156f05 100644
--- a/src/boost_histogram/histogram.py
+++ b/src/boost_histogram/histogram.py
@@ -2,23 +2,23 @@

 import collections.abc
 import copy
+import enum
 import logging
 import sys
 import threading
 import typing
 import warnings
-from collections.abc import Iterable, Mapping
-from enum import Enum
+from collections.abc import Callable, Iterable, Mapping
 from os import cpu_count
+from types import EllipsisType
 from typing import (
     TYPE_CHECKING,
     Any,
-    Callable,
     ClassVar,
     NewType,
     SupportsIndex,
+    TypeAlias,
     TypeVar,
-    Union,
 )

 import numpy as np
@@ -35,7 +35,7 @@
 from .view import MeanView, WeightedMeanView, WeightedSumView, _to_view

 if TYPE_CHECKING:
-    from builtins import ellipsis
+    pass

 try:
@@ -55,8 +55,8 @@
     raise new_exception from err


-# This is a StrEnum as defined in Python 3.10
-class Kind(str, Enum):
+# This is a StrEnum as defined in Python 3.11
+class Kind(str, enum.Enum):
     COUNT = "COUNT"
     MEAN = "MEAN"

@@ -102,11 +102,13 @@ def __dir__() -> list[str]:

 CppAxis = NewType("CppAxis", object)

-SimpleIndexing = Union[SupportsIndex, slice, RebinProtocol]
-InnerIndexing = Union[SimpleIndexing, Callable[[Axis], int]]
-FullInnerIndexing = Union[InnerIndexing, list[InnerIndexing]]
-IndexingWithMapping = Union[FullInnerIndexing, Mapping[int, FullInnerIndexing]]
-IndexingExpr = Union[IndexingWithMapping, tuple[IndexingWithMapping, ...], "ellipsis"]
+SimpleIndexing: TypeAlias = SupportsIndex | slice | RebinProtocol
+InnerIndexing: TypeAlias = SimpleIndexing | Callable[[Axis], int]
+FullInnerIndexing: TypeAlias = InnerIndexing | list[InnerIndexing]
+IndexingWithMapping: TypeAlias = FullInnerIndexing | Mapping[int, FullInnerIndexing]
+IndexingExpr: TypeAlias = (
+    IndexingWithMapping | tuple[IndexingWithMapping, ...] | EllipsisType
+)

 T = TypeVar("T")

@@ -122,7 +124,7 @@ def _fill_cast(
         return value

     if not inner and isinstance(value, (tuple, list)):
-        return tuple(_fill_cast(a, inner=True) for a in value)
+        return tuple(_fill_cast(a, inner=True) for a in value)  # type: ignore[misc]

     if hasattr(value, "__iter__") or hasattr(value, "__array__"):
         return np.asarray(value)
@@ -185,9 +187,9 @@ def _combine_group_contents(
         pos = [slice(None)] * (i)
         if new_view.dtype.names:
             for field in new_view.dtype.names:
-                new_view[(*pos, jj, ...)][field] += reduced_view[(*pos, j, ...)][field]
+                new_view[(*pos, jj, ...)][field] += reduced_view[(*pos, j, ...)][field]  # type: ignore[arg-type]
         else:
-            new_view[(*pos, jj, ...)] += reduced_view[(*pos, j, ...)]
+            new_view[(*pos, jj, ...)] += reduced_view[(*pos, j, ...)]  # type: ignore[arg-type]


 H = TypeVar("H", bound="Histogram")
@@ -497,7 +499,7 @@ def __array__(
         kwargs = {}
         if copy is not None:
             kwargs["copy"] = copy
-        return np.asarray(self.view(False), dtype=dtype, **kwargs)  # type: ignore[call-overload]
+        return np.asarray(self.view(False), dtype=dtype, **kwargs)  # type: ignore[call-overload, no-any-return]

     __hash__ = None  # type: ignore[assignment]

@@ -577,10 +579,12 @@ def _compute_inplace_op(
             msg = f"Number of dimensions {len(other.shape)} must match histogram {self.ndim}"
             raise ValueError(msg)

-        if all(a in {b, 1} for a, b in zip(other.shape, self.shape)):
+        if all(a in {b, 1} for a, b in zip(other.shape, self.shape, strict=False)):
             view = self.view(flow=False)
             getattr(view, name)(other)
-        elif all(a in {b, 1} for a, b in zip(other.shape, self.axes.extent)):
+        elif all(
+            a in {b, 1} for a, b in zip(other.shape, self.axes.extent, strict=False)
+        ):
             view = self.view(flow=True)
             getattr(view, name)(other)
         else:
@@ -641,7 +645,7 @@ def fill(
             threads = cpu_count()

         if threads is None or threads == 1:
-            self._hist.fill(*args_ars, weight=weight_ars, sample=sample_ars)
+            self._hist.fill(*args_ars, weight=weight_ars, sample=sample_ars)  # type: ignore[arg-type]
             return self

         if self._hist._storage_type in {
@@ -651,7 +655,7 @@
             raise RuntimeError("Mean histograms do not support threaded filling")

         data: list[list[np.typing.NDArray[Any]] | list[str]] = [
-            np.array_split(a, threads) if not isinstance(a, str) else [a] * threads
+            np.array_split(a, threads) if not isinstance(a, str) else [a] * threads  # type: ignore[arg-type, list-item]
             for a in args_ars
         ]

@@ -660,14 +664,14 @@
             assert threads is not None
             weights = [weight_ars] * threads
         else:
-            weights = np.array_split(weight_ars, threads)
+            weights = np.array_split(weight_ars, threads)  # type: ignore[arg-type]

         samples: list[Any]
         if sample_ars is None or np.isscalar(sample_ars):
             assert threads is not None
             samples = [sample_ars] * threads
         else:
-            samples = np.array_split(sample_ars, threads)
+            samples = np.array_split(sample_ars, threads)  # type: ignore[arg-type]

         if self._hist._storage_type is _core.storage.atomic_int64:

@@ -694,7 +698,7 @@ def fun(

         thread_list = [
             threading.Thread(target=fun, args=arrays)
-            for arrays in zip(weights, samples, *data)
+            for arrays in zip(weights, samples, *data, strict=False)
         ]

         for thread in thread_list:
@@ -902,7 +906,7 @@ def to_numpy(
         hist, *edges = self._hist.to_numpy(flow)
         hist = self.view(flow=flow) if view else self.values(flow=flow)

-        return (hist, edges) if dd else (hist, *edges)
+        return (hist, edges) if dd else (hist, *edges)  # type: ignore[return-value]

     def copy(self, *, deep: bool = True) -> Self:
         """
@@ -1185,7 +1189,7 @@ def __setitem__(self, index: IndexingExpr, value: ArrayLike | Accumulator) -> No
         if (
             in_array.ndim > 0
             and len(view.dtype) > 0
-            and len(in_array.dtype) == 0
+            and len(in_array.dtype) == 0  # type: ignore[arg-type]
             and len(view.dtype) == in_array.shape[-1]
         ):
             value_shape = in_array.shape[:-1]
@@ -1318,8 +1322,8 @@ def values(self, flow: bool = False) -> np.typing.NDArray[Any]:
         view: Any = self.view(flow)
         # TODO: Might be a NumPy typing bug
         if len(view.dtype) == 0:
-            return view
-        return view.value
+            return view  # type: ignore[no-any-return]
+        return view.value  # type: ignore[no-any-return]

     def variances(self, flow: bool = False) -> np.typing.NDArray[Any] | None:
         """
@@ -1351,7 +1355,7 @@ def variances(self, flow: bool = False) -> np.typing.NDArray[Any] | None:

         if hasattr(view, "sum_of_weights"):
             valid = view.sum_of_weights**2 > view.sum_of_weights_squared
-            return np.divide(
+            return np.divide(  # type: ignore[no-any-return]
                 view.variance,
                 view.sum_of_weights,
                 out=np.full(view.sum_of_weights.shape, np.nan),
@@ -1359,14 +1363,14 @@
             )

         if hasattr(view, "count"):
-            return np.divide(
+            return np.divide(  # type: ignore[no-any-return]
                 view.variance,
                 view.count,
                 out=np.full(view.count.shape, np.nan),
                 where=view.count > 1,
             )

-        return view.variance
+        return view.variance  # type: ignore[no-any-return]

     def counts(self, flow: bool = False) -> np.typing.NDArray[Any]:
         """
@@ -1394,10 +1398,10 @@ def counts(self, flow: bool = False) -> np.typing.NDArray[Any]:
         view: Any = self.view(flow)

         if len(view.dtype) == 0:
-            return view
+            return view  # type: ignore[no-any-return]

         if hasattr(view, "sum_of_weights"):
-            return np.divide(
+            return np.divide(  # type: ignore[no-any-return]
                 view.sum_of_weights**2,
                 view.sum_of_weights_squared,
                 out=np.zeros_like(view.sum_of_weights, dtype=np.float64),
@@ -1405,9 +1409,9 @@
             )

         if hasattr(view, "count"):
-            return view.count
+            return view.count  # type: ignore[no-any-return]

-        return view.value
+        return view.value  # type: ignore[no-any-return]


 if TYPE_CHECKING:
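
Note: the comment on `Kind` above now points at Python 3.11, where `enum.StrEnum` is the built-in spelling of the `str, enum.Enum` mix-in. A hedged sketch of the relationship (the project keeps the mix-in form because it still supports 3.10; note that `str()` of a member differs slightly between the two forms):

```python
import enum

# The pattern used above - works on every supported version:
class KindMixin(str, enum.Enum):
    COUNT = "COUNT"
    MEAN = "MEAN"

print(KindMixin.COUNT == "COUNT")  # True - members compare equal to plain strings

# On 3.11+ a close equivalent is the built-in StrEnum:
if hasattr(enum, "StrEnum"):
    class KindStr(enum.StrEnum):
        COUNT = "COUNT"
        MEAN = "MEAN"

    print(KindStr.COUNT == "COUNT")  # True
```
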
diff --git a/src/boost_histogram/numpy.py b/src/boost_histogram/numpy.py
index 1a47516c1..78073f8b3 100644
--- a/src/boost_histogram/numpy.py
+++ b/src/boost_histogram/numpy.py
@@ -67,7 +67,7 @@ def histogramdd(
         range = (None,) * rank

     axs: list[_axis.Axis] = []
-    for n, (b, r) in enumerate(zip(bins, range)):
+    for n, (b, r) in enumerate(zip(bins, range, strict=False)):
         if np.issubdtype(type(b), np.integer):
             if r is None:
                 # Nextafter may affect bin edges slightly
@@ -160,7 +160,7 @@ def histogram(
     # I think it's safe and the union is in the wrong place
     result = histogramdd(
         (a,),
-        (bins,),
+        (bins,),  # type: ignore[arg-type]
         (range,),
         normed,
         weights,
@@ -181,6 +181,7 @@ def histogram(
 for f, np_f in zip(
     (histogram, histogram2d, histogramdd),
     (np.histogram, np.histogram2d, np.histogramdd),
+    strict=False,
 ):
     H = """\
 Return a boost-histogram object using the same arguments as numpy's {}.
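
Note: most of the new `# type: ignore[...]` comments in this diff carry explicit error codes, which pairs with the `ignore-without-code` setting already enabled under `[tool.mypy]`. A small illustrative function (hypothetical, not from the codebase) showing the difference:

```python
from typing import Any

def first_value(data: dict[str, Any]) -> float:
    # A bare "# type: ignore" is rejected when ignore-without-code is enabled.
    # A scoped ignore documents exactly which check is being silenced:
    return data["x"]  # type: ignore[no-any-return]
```
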
diff --git a/src/boost_histogram/serialization/_storage.py b/src/boost_histogram/serialization/_storage.py
index 99c464061..f484184ca 100644
--- a/src/boost_histogram/serialization/_storage.py
+++ b/src/boost_histogram/serialization/_storage.py
@@ -106,7 +106,7 @@ def _data_from_dict(data: dict[str, Any], /) -> np.typing.NDArray[Any]:
     storage_type = data["type"]

     if storage_type in {"int", "double"}:
-        return data["values"]
+        return data["values"]  # type: ignore[no-any-return]
     if storage_type == "weighted":
         return np.stack([data["values"], data["variances"]], axis=-1)
     if storage_type == "mean":
diff --git a/src/boost_histogram/tag.py b/src/boost_histogram/tag.py
index 2704b15e4..61eb1a716 100644
--- a/src/boost_histogram/tag.py
+++ b/src/boost_histogram/tag.py
@@ -190,7 +190,7 @@ def group_mapping(self, axis: PlottableAxis) -> Sequence[int]:
         matched_ixes = [np.abs(axis.edges - edge).argmin() for edge in newedges]
         missing_edges = [
             edge
-            for ix, edge in zip(matched_ixes, newedges)
+            for ix, edge in zip(matched_ixes, newedges, strict=False)
             if not np.isclose(axis.edges[ix], edge)
         ]
         if missing_edges:
diff --git a/src/boost_histogram/typing.py b/src/boost_histogram/typing.py
index cbde5849a..e7f065fa5 100644
--- a/src/boost_histogram/typing.py
+++ b/src/boost_histogram/typing.py
@@ -1,9 +1,9 @@
 from __future__ import annotations

-from typing import TYPE_CHECKING, Any, Protocol, Union
+from types import EllipsisType
+from typing import TYPE_CHECKING, Any, Protocol, TypeAlias

 if TYPE_CHECKING:
-    from builtins import ellipsis
     from collections.abc import Sequence

     from numpy import ufunc as Ufunc
@@ -13,7 +13,7 @@
     from boost_histogram._core.accumulators import Mean, WeightedMean, WeightedSum
     from boost_histogram._core.hist import _BaseHistogram as CppHistogram

-    Accumulator = Union[WeightedSum, Mean, WeightedMean]
+    Accumulator = WeightedSum | Mean | WeightedMean
 else:
     ArrayLike = Any
     Ufunc = Any
@@ -38,10 +38,12 @@ def index(self, value: float | str) -> int: ...
     def __len__(self) -> int: ...


-StdIndex = Union[int, slice, "ellipsis", tuple[Union[slice, int, "ellipsis"], ...]]
-StrIndex = Union[
-    int, slice, str, "ellipsis", tuple[Union[slice, int, str, "ellipsis"], ...]
-]
+StdIndex: TypeAlias = (
+    int | slice | EllipsisType | tuple[slice | int | EllipsisType, ...]
+)
+StrIndex: TypeAlias = (
+    int | slice | str | EllipsisType | tuple[slice | int | str | EllipsisType, ...]
+)


 class RebinProtocol(Protocol):
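
Note: `typing.py` above replaces the old `"ellipsis"` string forward reference with `types.EllipsisType`, which is importable on 3.10+. A minimal sketch of indexing code typed this way (hypothetical function, not from the package):

```python
from types import EllipsisType

def describe_index(ind: int | slice | EllipsisType) -> str:
    if isinstance(ind, EllipsisType):
        return "ellipsis"
    if isinstance(ind, slice):
        return f"slice({ind.start}, {ind.stop})"
    return f"int({ind})"

print(describe_index(...))          # ellipsis
print(describe_index(slice(1, 5)))  # slice(1, 5)
```
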
diff --git a/src/boost_histogram/view.py b/src/boost_histogram/view.py
index 6bb25d514..18bf40b54 100644
--- a/src/boost_histogram/view.py
+++ b/src/boost_histogram/view.py
@@ -1,7 +1,7 @@
 from __future__ import annotations

-from collections.abc import Mapping, MutableMapping
-from typing import Any, Callable, ClassVar, Literal
+from collections.abc import Callable, Mapping, MutableMapping
+from typing import Any, ClassVar, Literal

 import numpy as np

@@ -11,17 +11,17 @@
 UFMethod = Literal["__call__", "reduce", "reduceat", "accumulate", "outer", "at"]


-class View(np.ndarray):
+class View(np.ndarray[Any, Any]):
     __slots__ = ()
     _FIELDS: ClassVar[tuple[str, ...]]
     _PARENT: type[WeightedSum | WeightedMean | Mean]

     def __getitem__(self, ind: StrIndex) -> np.typing.NDArray[Any]:  # type: ignore[override]
-        sliced = super().__getitem__(ind)
+        sliced = super().__getitem__(ind)  # type: ignore[index]

         # If the shape is empty, return the parent type
         if not sliced.shape:
-            return self._PARENT._make(*sliced)
+            return self._PARENT._make(*sliced)  # type: ignore[return-value]

         # If the dtype has changed, return a normal array (no longer a record)
         if sliced.dtype != self.dtype:
@@ -45,18 +45,18 @@ def __setitem__(self, ind: StrIndex, value: ArrayLike) -> None:  # type: ignore[
             super().__setitem__(ind, value)
             return

-        current_ndim = super().__getitem__(ind).ndim
+        current_ndim = super().__getitem__(ind).ndim  # type: ignore[index]
         array: np.typing.NDArray[Any] = np.asarray(value)

         msg = "Needs matching ndarray or n+1 dim array"
         if array.ndim == current_ndim + 1:
             if len(self._FIELDS) == array.shape[-1]:
-                self.__setitem__(ind, self._PARENT._array(*np.moveaxis(array, -1, 0)))
+                self.__setitem__(ind, self._PARENT._array(*np.moveaxis(array, -1, 0)))  # type: ignore[assignment]
                 return
             msg += f", final dimension should be {len(self._FIELDS)} for this storage, got {array.shape[-1]} instead"
             raise ValueError(msg)

         if self.dtype == array.dtype:
-            super().__setitem__(ind, array)
+            super().__setitem__(ind, array)  # type: ignore[index]
             return

         msg += f", {current_ndim}D {self.dtype} or {current_ndim + 1}D required, got {array.ndim}D {array.dtype}"
@@ -132,7 +132,7 @@ def __array_ufunc__(
                 ufunc(raw_inputs[0]["value"], out=result["value"], **kwargs)
                 result["variance"] = raw_inputs[0]["variance"]
-                return result.view(self.__class__)
+                return result.view(self.__class__)  # type: ignore[no-any-return]

             if method == "__call__" and len(raw_inputs) == 2:
                 (result,) = (
@@ -156,11 +156,11 @@ def __array_ufunc__(
                         out=result["variance"],
                         **kwargs,
                     )
-                    return result.view(self.__class__)
+                    return result.view(self.__class__)  # type: ignore[no-any-return]

                 # If unsupported, just pass through (will return not implemented)
                 # pylint: disable-next=no-member
-                return super().__array_ufunc__(ufunc, method, *raw_inputs, **kwargs)
+                return super().__array_ufunc__(ufunc, method, *raw_inputs, **kwargs)  # type: ignore[no-any-return]

             # View with normal value or array
             if ufunc in {np.add, np.subtract}:
@@ -190,7 +190,7 @@ def __array_ufunc__(
                         out=result["variance"],
                         **kwargs,
                     )
-                return result.view(self.__class__)
+                return result.view(self.__class__)  # type: ignore[no-any-return]

             if ufunc in {np.multiply, np.divide, np.true_divide, np.floor_divide}:
                 if self.dtype == raw_inputs[0].dtype:
@@ -220,12 +220,12 @@ def __array_ufunc__(
                     **kwargs,
                 )

-                return result.view(self.__class__)
+                return result.view(self.__class__)  # type: ignore[no-any-return]

         # ufuncs that are allowed to reduce
         if ufunc in {np.add} and method == "reduce" and len(raw_inputs) == 1:
             results = (ufunc.reduce(self[field], **kwargs) for field in self._FIELDS)
-            return self._PARENT._make(*results)
+            return self._PARENT._make(*results)  # type: ignore[return-value]

         # ufuncs that are allowed to accumulate
         if ufunc in {np.add} and method == "accumulate" and len(raw_inputs) == 1:
@@ -236,11 +236,11 @@ def __array_ufunc__(
             )
             for field in self._FIELDS:
                 ufunc.accumulate(self[field], out=result[field], **kwargs)
-            return result.view(self.__class__)
+            return result.view(self.__class__)  # type: ignore[no-any-return]

         # If unsupported, just pass through (will return NotImplemented or things like == will work but not return subclasses)
         # pylint: disable-next=no-member
-        return super().__array_ufunc__(ufunc, method, *raw_inputs, **kwargs)
+        return super().__array_ufunc__(ufunc, method, *raw_inputs, **kwargs)  # type: ignore[no-any-return]


 @fields(
@@ -261,7 +261,7 @@ class WeightedMeanView(View):
     @property
     def variance(self) -> np.typing.NDArray[Any]:
         with np.errstate(divide="ignore", invalid="ignore"):
-            return self["_sum_of_weighted_deltas_squared"] / (
+            return self["_sum_of_weighted_deltas_squared"] / (  # type: ignore[no-any-return]
                 self["sum_of_weights"]
                 - self["sum_of_weights_squared"] / self["sum_of_weights"]
             )
@@ -280,7 +280,7 @@ class MeanView(View):
     @property
     def variance(self) -> np.typing.NDArray[Any]:
         with np.errstate(divide="ignore", invalid="ignore"):
-            return self["_sum_of_deltas_squared"] / (self["count"] - 1)
+            return self["_sum_of_deltas_squared"] / (self["count"] - 1)  # type: ignore[no-any-return]


 def _to_view(
@@ -290,6 +290,6 @@ def _to_view(
     if item.dtype.names == cls._FIELDS:
         ret = item.view(cls)
         if value and ret.shape:
-            return ret.value
+            return ret.value  # type: ignore[no-any-return, attr-defined]
         return ret
     return item
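
Note: `View` above now inherits from the subscripted `np.ndarray[Any, Any]`, which works at runtime because NumPy's `ndarray` supports class subscription; the parameters only matter to the type checker. A stripped-down sketch of the same pattern (a toy subclass, not the real `View` fields):

```python
from typing import Any

import numpy as np

class NamedArray(np.ndarray[Any, Any]):
    """Toy subclass; subscripting ndarray here only affects static typing."""
    __slots__ = ()

arr = np.zeros(3).view(NamedArray)
print(type(arr).__name__, arr.shape)  # NamedArray (3,)
```
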
diff --git a/tests/test_accumulators.py b/tests/test_accumulators.py
index 2dfcf1ee4..1c3958bcb 100644
--- a/tests/test_accumulators.py
+++ b/tests/test_accumulators.py
@@ -28,7 +28,7 @@ def test_weighted_sum():
     vari = [4, 5, 6]

     a = bh.accumulators.WeightedSum()
-    for val, var in zip(vals, vari):
+    for val, var in zip(vals, vari, strict=False):
         a += bh.accumulators.WeightedSum(val, variance=var)

     assert a.value == 6
@@ -60,7 +60,7 @@ def test_weighted_mean():
     vals = [4, 1]
     weights = [1, 2]
     a = bh.accumulators.WeightedMean()
-    for v, w in zip(vals, weights):
+    for v, w in zip(vals, weights, strict=False):
         a(v, weight=w)

     assert a.sum_of_weights == 3.0
diff --git a/tests/test_minihist_title.py b/tests/test_minihist_title.py
index a89c90f0e..58811e7cd 100644
--- a/tests/test_minihist_title.py
+++ b/tests/test_minihist_title.py
@@ -45,7 +45,7 @@ def name(self):

     @name.setter
     def name(self, values):
-        for ax, val in zip(self, values):
+        for ax, val in zip(self, values, strict=False):
            ax._ax.raw_metadata["name"] = f"test: {val}"
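
Note: for context on the `reduce` branch touched in `view.py` above, summing a weighted view reduces each field and hands the results to the parent accumulator type. A small usage sketch, with illustrative fill values; the printed numbers follow from that branch rather than from anything in this diff:

```python
import boost_histogram as bh

h = bh.Histogram(bh.axis.Regular(4, 0, 4), storage=bh.storage.Weight())
h.fill([0.5, 1.5, 1.5], weight=[1.0, 2.0, 3.0])

# np.add.reduce dispatches through View.__array_ufunc__, so the sum of a
# WeightedSumView comes back as a single WeightedSum accumulator.
total = h.view().sum()
print(total)  # expected: value 6.0, variance 14.0 (sum of weights and weights**2)
```
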