Skip to content

Commit 00ca4be

Browse files
committed
fix up errors
1 parent da7d673 commit 00ca4be

File tree

15 files changed

+32
-115
lines changed

15 files changed

+32
-115
lines changed

docs/examples/notebooks/ImpactPlot.ipynb

Lines changed: 2 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -143,24 +143,10 @@
143143
},
144144
{
145145
"cell_type": "code",
146-
"execution_count": 5,
146+
"execution_count": null,
147147
"metadata": {},
148148
"outputs": [],
149-
"source": [
150-
"def calc_impact(idx, b, e, i, width, poi_index):\n",
151-
" _, _, bb, ee = fitresults([(idx, b + e)])\n",
152-
" poi_up_post = bb[poi_index]\n",
153-
"\n",
154-
" _, _, bb, ee = fitresults([(idx, b - e)])\n",
155-
" poi_dn_post = bb[poi_index]\n",
156-
"\n",
157-
" _, _, bb, ee = fitresults([(idx, b + width)])\n",
158-
" poi_up_pre = bb[poi_index]\n",
159-
"\n",
160-
" _, _, bb, ee = fitresults([(idx, b - width)])\n",
161-
" poi_dn_pre = bb[poi_index]\n",
162-
" return np.asarray([poi_dn_post, poi_up_post, poi_dn_pre, poi_up_pre])"
163-
]
149+
"source": "def calc_impact(idx, b, e, i, width, poi_index):\n _, _, bb, _ = fitresults([(idx, b + e)])\n poi_up_post = bb[poi_index]\n\n _, _, bb, _ = fitresults([(idx, b - e)])\n poi_dn_post = bb[poi_index]\n\n _, _, bb, _ = fitresults([(idx, b + width)])\n poi_up_pre = bb[poi_index]\n\n _, _, bb, _ = fitresults([(idx, b - width)])\n poi_dn_pre = bb[poi_index]\n return np.asarray([poi_dn_post, poi_up_post, poi_dn_pre, poi_up_pre])"
164150
},
165151
{
166152
"cell_type": "code",

docs/examples/notebooks/binderexample/StatisticalAnalysis.ipynb

Lines changed: 2 additions & 74 deletions
Original file line numberDiff line numberDiff line change
@@ -171,82 +171,10 @@
171171
},
172172
{
173173
"cell_type": "code",
174-
"execution_count": 7,
174+
"execution_count": null,
175175
"metadata": {},
176176
"outputs": [],
177-
"source": [
178-
"def get_mc_counts(pars):\n",
179-
" deltas, factors = pdf.modifications(pars)\n",
180-
" allsum = pyhf.tensorlib.concatenate(\n",
181-
" deltas + [pyhf.tensorlib.astensor(pdf.nominal_rates)]\n",
182-
" )\n",
183-
" nom_plus_delta = pyhf.tensorlib.sum(allsum, axis=0)\n",
184-
" nom_plus_delta = pyhf.tensorlib.reshape(\n",
185-
" nom_plus_delta, (1,) + pyhf.tensorlib.shape(nom_plus_delta)\n",
186-
" )\n",
187-
" allfac = pyhf.tensorlib.concatenate(factors + [nom_plus_delta])\n",
188-
" return pyhf.tensorlib.product(allfac, axis=0)\n",
189-
"\n",
190-
"\n",
191-
"animate_plot_pieces = None\n",
192-
"\n",
193-
"\n",
194-
"def init_plot(fig, ax, par_settings):\n",
195-
" global animate_plot_pieces\n",
196-
"\n",
197-
" nbins = sum(list(pdf.config.channel_nbins.values()))\n",
198-
" x = np.arange(nbins)\n",
199-
" data = np.zeros(nbins)\n",
200-
" items = []\n",
201-
" for i in [3, 2, 1, 0]:\n",
202-
" items.append(ax.bar(x, data, 1, alpha=1.0))\n",
203-
" animate_plot_pieces = (\n",
204-
" items,\n",
205-
" ax.scatter(\n",
206-
" x, workspace.data(pdf, include_auxdata=False), c=\"k\", alpha=1.0, zorder=99\n",
207-
" ),\n",
208-
" )\n",
209-
"\n",
210-
"\n",
211-
"def animate(ax=None, fig=None, **par_settings):\n",
212-
" global animate_plot_pieces\n",
213-
" items, obs = animate_plot_pieces\n",
214-
" pars = pyhf.tensorlib.astensor(pdf.config.suggested_init())\n",
215-
" for k, v in par_settings.items():\n",
216-
" pars[par_name_dict[k]] = v\n",
217-
"\n",
218-
" mc_counts = get_mc_counts(pars)\n",
219-
" rectangle_collection = zip(*map(lambda x: x.patches, items))\n",
220-
"\n",
221-
" for rectangles, binvalues in zip(rectangle_collection, mc_counts[:, 0].T):\n",
222-
" offset = 0\n",
223-
" for sample_index in [3, 2, 1, 0]:\n",
224-
" rect = rectangles[sample_index]\n",
225-
" binvalue = binvalues[sample_index]\n",
226-
" rect.set_y(offset)\n",
227-
" rect.set_height(binvalue)\n",
228-
" offset += rect.get_height()\n",
229-
"\n",
230-
" fig.canvas.draw()\n",
231-
"\n",
232-
"\n",
233-
"def plot(ax=None, order=[3, 2, 1, 0], **par_settings):\n",
234-
" pars = pyhf.tensorlib.astensor(pdf.config.suggested_init())\n",
235-
" for k, v in par_settings.items():\n",
236-
" pars[par_name_dict[k]] = v\n",
237-
"\n",
238-
" mc_counts = get_mc_counts(pars)\n",
239-
" bottom = None\n",
240-
" # nb: bar_data[0] because evaluating only one parset\n",
241-
" for i, sample_index in enumerate(order):\n",
242-
" data = mc_counts[sample_index][0]\n",
243-
" x = np.arange(len(data))\n",
244-
" ax.bar(x, data, 1, bottom=bottom, alpha=1.0)\n",
245-
" bottom = data if i == 0 else bottom + data\n",
246-
" ax.scatter(\n",
247-
" x, workspace.data(pdf, include_auxdata=False), c=\"k\", alpha=1.0, zorder=99\n",
248-
" )"
249-
]
177+
"source": "def get_mc_counts(pars):\n deltas, factors = pdf.modifications(pars)\n allsum = pyhf.tensorlib.concatenate(\n deltas + [pyhf.tensorlib.astensor(pdf.nominal_rates)]\n )\n nom_plus_delta = pyhf.tensorlib.sum(allsum, axis=0)\n nom_plus_delta = pyhf.tensorlib.reshape(\n nom_plus_delta, (1,) + pyhf.tensorlib.shape(nom_plus_delta)\n )\n allfac = pyhf.tensorlib.concatenate(factors + [nom_plus_delta])\n return pyhf.tensorlib.product(allfac, axis=0)\n\n\nanimate_plot_pieces = None\n\n\ndef init_plot(fig, ax, par_settings):\n global animate_plot_pieces\n\n nbins = sum(list(pdf.config.channel_nbins.values()))\n x = np.arange(nbins)\n data = np.zeros(nbins)\n items = []\n for i in [3, 2, 1, 0]:\n items.append(ax.bar(x, data, 1, alpha=1.0))\n animate_plot_pieces = (\n items,\n ax.scatter(\n x, workspace.data(pdf, include_auxdata=False), c=\"k\", alpha=1.0, zorder=99\n ),\n )\n\n\ndef animate(ax=None, fig=None, **par_settings):\n global animate_plot_pieces\n items, _ = animate_plot_pieces\n pars = pyhf.tensorlib.astensor(pdf.config.suggested_init())\n for k, v in par_settings.items():\n pars[par_name_dict[k]] = v\n\n mc_counts = get_mc_counts(pars)\n rectangle_collection = zip(*map(lambda x: x.patches, items))\n\n for rectangles, binvalues in zip(rectangle_collection, mc_counts[:, 0].T):\n offset = 0\n for sample_index in [3, 2, 1, 0]:\n rect = rectangles[sample_index]\n binvalue = binvalues[sample_index]\n rect.set_y(offset)\n rect.set_height(binvalue)\n offset += rect.get_height()\n\n fig.canvas.draw()\n\n\ndef plot(ax=None, order=[3, 2, 1, 0], **par_settings):\n pars = pyhf.tensorlib.astensor(pdf.config.suggested_init())\n for k, v in par_settings.items():\n pars[par_name_dict[k]] = v\n\n mc_counts = get_mc_counts(pars)\n bottom = None\n # nb: bar_data[0] because evaluating only one parset\n for i, sample_index in enumerate(order):\n data = mc_counts[sample_index][0]\n x = np.arange(len(data))\n ax.bar(x, data, 1, bottom=bottom, alpha=1.0)\n bottom = data if i == 0 
else bottom + data\n ax.scatter(\n x, workspace.data(pdf, include_auxdata=False), c=\"k\", alpha=1.0, zorder=99\n )"
250178
},
251179
{
252180
"cell_type": "markdown",

src/pyhf/infer/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -177,7 +177,7 @@ def hypotest(
177177
teststat, sig_plus_bkg_distribution, bkg_only_distribution
178178
)
179179
)
180-
CLsb_exp, CLb_exp, CLs_exp = calc.expected_pvalues(
180+
CLsb_exp, _, CLs_exp = calc.expected_pvalues(
181181
sig_plus_bkg_distribution, bkg_only_distribution
182182
)
183183

src/pyhf/infer/calculators.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -524,7 +524,7 @@ def expected_pvalues(self, sig_plus_bkg_distribution, bkg_only_distribution):
524524
:math:`\mathrm{CL}_{b}`, and :math:`\mathrm{CL}_{s}`.
525525
"""
526526
# Calling pvalues is easier then repeating the CLs calculation here
527-
tb, _ = get_backend()
527+
_, _ = get_backend()
528528
return list(
529529
map(
530530
list,

src/pyhf/infer/test_statistics.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@ def _qmu_like(
2323
If the lower bound of the POI is 0 this automatically implements
2424
qmu_tilde. Otherwise this is qmu (no tilde).
2525
"""
26-
tensorlib, optimizer = get_backend()
26+
tensorlib, _ = get_backend()
2727
tmu_like_stat, (mubhathat, muhatbhat) = _tmu_like(
2828
mu, data, pdf, init_pars, par_bounds, fixed_params, return_fitted_pars=True
2929
)
@@ -44,7 +44,7 @@ def _tmu_like(
4444
If the lower bound of the POI is 0 this automatically implements
4545
tmu_tilde. Otherwise this is tmu (no tilde).
4646
"""
47-
tensorlib, optimizer = get_backend()
47+
tensorlib, _ = get_backend()
4848
mubhathat, fixed_poi_fit_lhood_val = fixed_poi_fit(
4949
mu, data, pdf, init_pars, par_bounds, fixed_params, return_fitted_val=True
5050
)
@@ -515,7 +515,7 @@ def q0(mu, data, pdf, init_pars, par_bounds, fixed_params, return_fitted_pars=Fa
515515
)
516516
mu = 0.0
517517

518-
tensorlib, optimizer = get_backend()
518+
tensorlib, _ = get_backend()
519519

520520
tmu_like_stat, (mubhathat, muhatbhat) = _tmu_like(
521521
mu, data, pdf, init_pars, par_bounds, fixed_params, return_fitted_pars=True

src/pyhf/optimize/opt_jax.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -45,7 +45,7 @@ def wrap_objective(objective, data, pdf, stitch_pars, do_grad=False, jit_pieces=
4545
Returns:
4646
objective_and_grad (:obj:`func`): tensor backend wrapped objective,gradient pair
4747
"""
48-
tensorlib, _ = get_backend()
48+
_, _ = get_backend()
4949
# NB: tuple arguments that need to be hashable (static_argnums)
5050
if do_grad:
5151

src/pyhf/readxml.py

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -283,9 +283,12 @@ def process_data(
283283

284284

285285
def process_channel(
286-
channelxml: ET.ElementTree, resolver: ResolverType, track_progress: bool = False
286+
channelxml: ET.ElementTree[ET.Element[str]],
287+
resolver: ResolverType,
288+
track_progress: bool = False,
287289
) -> tuple[str, list[float], list[Sample], list[Parameter]]:
288290
channel = channelxml.getroot()
291+
assert channel is not None
289292

290293
inputfile = channel.attrib.get('InputFile', '')
291294
histopath = channel.attrib.get('HistoPath', '')
@@ -316,7 +319,7 @@ def process_channel(
316319

317320

318321
def process_measurements(
319-
toplvl: ET.ElementTree,
322+
toplvl: ET.ElementTree[ET.Element[str]],
320323
other_parameter_configs: Sequence[Parameter] | None = None,
321324
) -> list[Measurement]:
322325
"""

src/pyhf/tensor/manager.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -142,11 +142,11 @@ def set_backend(
142142

143143
try:
144144
new_optimizer = getattr(
145-
OptimizerRetriever, f"{custom_optimizer.lower()}_optimizer"
145+
OptimizerRetriever, f"{custom_optimizer.lower()!r}_optimizer"
146146
)()
147147
except TypeError:
148148
raise exceptions.InvalidOptimizer(
149-
f"The optimizer provided is not supported: {custom_optimizer}. Select from one of the supported optimizers: scipy, minuit"
149+
f"The optimizer provided is not supported: {custom_optimizer!r}. Select from one of the supported optimizers: scipy, minuit"
150150
)
151151
else:
152152
new_optimizer = custom_optimizer

tests/test_import.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -486,7 +486,7 @@ def test_import_noChannelData(mocker, datadir):
486486

487487
basedir = datadir.joinpath("xmlimport_noChannelData")
488488
with pytest.raises(
489-
RuntimeError, match="Channel channel1 is missing data. See issue #1911"
489+
RuntimeError, match=r"Channel channel1 is missing data. See issue #1911"
490490
):
491491
pyhf.readxml.parse(basedir.joinpath("config/example.xml"), basedir)
492492

@@ -499,7 +499,7 @@ def test_import_noChannelDataPaths(mocker, datadir):
499499
basedir = datadir.joinpath("xmlimport_noChannelDataPaths")
500500
with pytest.raises(
501501
NotImplementedError,
502-
match="Conversion of workspaces without data is currently not supported.\nSee https://github.com/scikit-hep/pyhf/issues/566",
502+
match=r"Conversion of workspaces without data is currently not supported.\nSee https://github.com/scikit-hep/pyhf/issues/566",
503503
):
504504
pyhf.readxml.parse(basedir.joinpath("config/example.xml"), basedir)
505505

tests/test_infer.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -313,7 +313,7 @@ def test_hypotest_return_calculator(
313313
Check that the return structure of pyhf.infer.hypotest with the
314314
addition of the return_calculator keyword arg is as expected
315315
"""
316-
*_, model = hypotest_args
316+
*_, _model = hypotest_args
317317

318318
# only those return flags where the toggled return value
319319
# is placed in front of the calculator in the returned tuple
@@ -393,7 +393,7 @@ def _make_main_pdf(self, pars):
393393
return pyhf.probability.Poisson(expected_main)
394394

395395
def _make_constraint_pdf(self, pars):
396-
mu, gamma = pars
396+
_, gamma = pars
397397
return pyhf.probability.Poisson(gamma * self.factor)
398398

399399
def expected_data(self, pars, include_auxdata=True):
@@ -499,7 +499,7 @@ def test_emperical_distribution(tmp_path, hypotest_args):
499499
tb = pyhf.tensorlib
500500
np.random.seed(0)
501501

502-
mu_test, data, model = hypotest_args
502+
mu_test, _, model = hypotest_args
503503
init_pars = model.config.suggested_init()
504504
par_bounds = model.config.suggested_bounds()
505505
fixed_params = model.config.suggested_fixed()

0 commit comments

Comments (0)