diff --git a/src/pyhf/cli/infer.py b/src/pyhf/cli/infer.py index 9722c52d16..19808b0fee 100644 --- a/src/pyhf/cli/infer.py +++ b/src/pyhf/cli/infer.py @@ -9,7 +9,8 @@ from pyhf.infer import hypotest from pyhf.infer import mle from pyhf.workspace import Workspace -from pyhf import get_backend, set_backend, optimize +from pyhf.tensor.manager import get_backend, set_backend +from pyhf import optimize log = logging.getLogger(__name__) diff --git a/src/pyhf/compat.py b/src/pyhf/compat.py index 95d57ba144..0b13246992 100644 --- a/src/pyhf/compat.py +++ b/src/pyhf/compat.py @@ -19,7 +19,7 @@ def paramset_to_rootnames(paramset): paramset (:obj:`pyhf.paramsets.paramset`): The parameter set. Returns: - :obj:`List[str]` or :obj:`str`: The generated parameter names + :obj:`list[str]` or :obj:`str`: The generated parameter names (for the non-scalar/scalar case) respectively. Example: diff --git a/src/pyhf/infer/__init__.py b/src/pyhf/infer/__init__.py index 213a481b01..b8f4a3dda0 100644 --- a/src/pyhf/infer/__init__.py +++ b/src/pyhf/infer/__init__.py @@ -1,7 +1,7 @@ """Inference for Statistical Models.""" from pyhf.infer import utils -from pyhf import get_backend +from pyhf.tensor.manager import get_backend from pyhf import exceptions diff --git a/src/pyhf/infer/calculators.py b/src/pyhf/infer/calculators.py index 6fc83b00af..39de048738 100644 --- a/src/pyhf/infer/calculators.py +++ b/src/pyhf/infer/calculators.py @@ -9,7 +9,7 @@ """ from pyhf.infer.mle import fixed_poi_fit -from pyhf import get_backend +from pyhf.tensor.manager import get_backend from pyhf.infer import utils import tqdm diff --git a/src/pyhf/infer/intervals/upper_limits.py b/src/pyhf/infer/intervals/upper_limits.py index 599a33c9de..ea31221903 100644 --- a/src/pyhf/infer/intervals/upper_limits.py +++ b/src/pyhf/infer/intervals/upper_limits.py @@ -3,7 +3,7 @@ import numpy as np from scipy.optimize import toms748 -from pyhf import get_backend +from pyhf.tensor.manager import get_backend from pyhf.infer import 
hypotest __all__ = ["linear_grid_scan", "toms748_scan", "upper_limit"] diff --git a/src/pyhf/infer/mle.py b/src/pyhf/infer/mle.py index 5e6357ae2b..b1b0138d29 100644 --- a/src/pyhf/infer/mle.py +++ b/src/pyhf/infer/mle.py @@ -1,6 +1,6 @@ """Module for Maximum Likelihood Estimation.""" -from pyhf import get_backend +from pyhf.tensor.manager import get_backend from pyhf.exceptions import UnspecifiedPOI __all__ = ["fit", "fixed_poi_fit", "twice_nll"] diff --git a/src/pyhf/infer/test_statistics.py b/src/pyhf/infer/test_statistics.py index 33537f017a..136e6bcde1 100644 --- a/src/pyhf/infer/test_statistics.py +++ b/src/pyhf/infer/test_statistics.py @@ -1,4 +1,4 @@ -from pyhf import get_backend +from pyhf.tensor.manager import get_backend from pyhf.infer.mle import fixed_poi_fit, fit from pyhf.exceptions import UnspecifiedPOI diff --git a/src/pyhf/modifiers/lumi.py b/src/pyhf/modifiers/lumi.py index f8697b7bee..a340a832f0 100644 --- a/src/pyhf/modifiers/lumi.py +++ b/src/pyhf/modifiers/lumi.py @@ -1,6 +1,7 @@ import logging -from pyhf import get_backend, events +from pyhf.tensor.manager import get_backend +from pyhf import events from pyhf.parameters import ParamViewer log = logging.getLogger(__name__) diff --git a/src/pyhf/modifiers/normfactor.py b/src/pyhf/modifiers/normfactor.py index fb723664f0..b04dac8b38 100644 --- a/src/pyhf/modifiers/normfactor.py +++ b/src/pyhf/modifiers/normfactor.py @@ -1,6 +1,7 @@ import logging -from pyhf import get_backend, events +from pyhf.tensor.manager import get_backend +from pyhf import events from pyhf.parameters import ParamViewer log = logging.getLogger(__name__) diff --git a/src/pyhf/modifiers/normsys.py b/src/pyhf/modifiers/normsys.py index e536bfe7a4..457d3fdb47 100644 --- a/src/pyhf/modifiers/normsys.py +++ b/src/pyhf/modifiers/normsys.py @@ -1,6 +1,7 @@ import logging -from pyhf import get_backend, events +from pyhf.tensor.manager import get_backend +from pyhf import events from pyhf import interpolators from pyhf.parameters 
import ParamViewer diff --git a/src/pyhf/modifiers/staterror.py b/src/pyhf/modifiers/staterror.py index a6d6d499c5..18c9a03488 100644 --- a/src/pyhf/modifiers/staterror.py +++ b/src/pyhf/modifiers/staterror.py @@ -1,5 +1,6 @@ +from __future__ import annotations + import logging -from typing import List import pyhf from pyhf import events @@ -10,7 +11,7 @@ log = logging.getLogger(__name__) -def required_parset(sigmas, fixed: List[bool]): +def required_parset(sigmas, fixed: list[bool]): n_parameters = len(sigmas) return { 'paramset_type': 'constrained_by_normal', diff --git a/src/pyhf/optimize/opt_jax.py b/src/pyhf/optimize/opt_jax.py index 5567678844..be14632123 100644 --- a/src/pyhf/optimize/opt_jax.py +++ b/src/pyhf/optimize/opt_jax.py @@ -1,6 +1,6 @@ """JAX Backend Function Shim.""" -from pyhf import get_backend +from pyhf.tensor.manager import get_backend from pyhf.tensor.common import _TensorViewer import jax import logging diff --git a/src/pyhf/optimize/opt_numpy.py b/src/pyhf/optimize/opt_numpy.py index 8f3d4178cf..7032c9e748 100644 --- a/src/pyhf/optimize/opt_numpy.py +++ b/src/pyhf/optimize/opt_numpy.py @@ -1,6 +1,6 @@ """Numpy Backend Function Shim.""" -from pyhf import get_backend +from pyhf.tensor.manager import get_backend from pyhf import exceptions diff --git a/src/pyhf/parameters/paramsets.py b/src/pyhf/parameters/paramsets.py index 2562c89305..3a59d4a1e8 100644 --- a/src/pyhf/parameters/paramsets.py +++ b/src/pyhf/parameters/paramsets.py @@ -1,5 +1,3 @@ -from typing import List - import pyhf __all__ = [ @@ -29,7 +27,7 @@ def __init__(self, **kwargs): ) @property - def suggested_fixed(self) -> List[bool]: + def suggested_fixed(self) -> list[bool]: if isinstance(self._suggested_fixed, bool): return [self._suggested_fixed] * self.n_parameters return self._suggested_fixed diff --git a/src/pyhf/pdf.py b/src/pyhf/pdf.py index ca051d1652..b8cfe612bc 100644 --- a/src/pyhf/pdf.py +++ b/src/pyhf/pdf.py @@ -2,7 +2,7 @@ import copy import logging -from typing 
import List, Union +from typing import Union import pyhf.parameters import pyhf @@ -406,7 +406,7 @@ def param_set(self, name): """ return self.par_map[name]['paramset'] - def suggested_fixed(self) -> List[bool]: + def suggested_fixed(self) -> list[bool]: """ Identify the fixed parameters in the model. diff --git a/src/pyhf/probability.py b/src/pyhf/probability.py index 0a37d55cf8..8505757840 100644 --- a/src/pyhf/probability.py +++ b/src/pyhf/probability.py @@ -1,6 +1,6 @@ """The probability density function module.""" -from pyhf import get_backend +from pyhf.tensor.manager import get_backend __all__ = ["Independent", "Normal", "Poisson", "Simultaneous"] diff --git a/src/pyhf/readxml.py b/src/pyhf/readxml.py index a694dab292..0898343df5 100644 --- a/src/pyhf/readxml.py +++ b/src/pyhf/readxml.py @@ -5,7 +5,6 @@ IO, Callable, Iterable, - List, MutableMapping, MutableSequence, Sequence, @@ -99,7 +98,7 @@ def extract_error(hist: uproot.behaviors.TH1.TH1) -> list[float]: """ variance = hist.variances() if hist.weighted else hist.to_numpy()[0] - return cast(List[float], np.sqrt(variance).tolist()) + return cast(list[float], np.sqrt(variance).tolist()) def import_root_histogram( @@ -222,7 +221,7 @@ def process_sample( modtag.attrib.get('HistoPath', ''), modtag.attrib['HistoName'], ) - staterr = np.multiply(extstat, data).tolist() + staterr = cast(list[float], np.multiply(extstat, data).tolist()) if not staterr: raise RuntimeError('cannot determine stat error.') modifier_staterror: StatError = { diff --git a/src/pyhf/tensor/numpy_backend.py b/src/pyhf/tensor/numpy_backend.py index e843330bb3..d9ad1d05b8 100644 --- a/src/pyhf/tensor/numpy_backend.py +++ b/src/pyhf/tensor/numpy_backend.py @@ -3,7 +3,17 @@ from __future__ import annotations import logging -from typing import TYPE_CHECKING, Callable, Generic, Mapping, Sequence, TypeVar, Union +from typing import ( + Any, + TYPE_CHECKING, + Callable, + Generic, + Mapping, + Sequence, + TypeVar, + Union, + cast, +) import numpy as np @@ 
-205,9 +215,12 @@ def conditional( """ return true_callable() if predicate else false_callable() - def tolist(self, tensor_in: Tensor[T] | list[T]) -> list[T]: + def tolist( + self, tensor_in: Tensor[T] | list[T] + ) -> int | float | complex | list[T] | list[Any]: try: - return tensor_in.tolist() # type: ignore[union-attr,no-any-return] + result = tensor_in.tolist() # type: ignore[union-attr] + return cast(Union[int, float, complex, list[T], list[Any]], result) except AttributeError: if isinstance(tensor_in, list): return tensor_in @@ -654,4 +667,5 @@ def transpose(self, tensor_in: Tensor[T]) -> ArrayLike: .. versionadded:: 0.7.0 """ - return tensor_in.transpose() + result = tensor_in.transpose() + return cast(ArrayLike, result)