Commit

enh: fix ruff comments
jonas-eschle committed Apr 16, 2024
1 parent 95a3ac5 commit 3679b65
Showing 38 changed files with 348 additions and 400 deletions.
4 changes: 1 addition & 3 deletions .pre-commit-config.yaml
@@ -58,11 +58,9 @@ repos:
- repo: https://github.com/nbQA-dev/nbQA
rev: 1.8.5
hooks:
- id: nbqa-isort
additional_dependencies: [ isort==5.6.4 ]

- id: nbqa-pyupgrade
additional_dependencies: [ pyupgrade==2.7.4 ]
additional_dependencies: [ pyupgrade ]
args: [ --py38-plus ]


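For context, the nbqa-pyupgrade hook kept by this change rewrites notebook cells to Python 3.8+ idioms. The snippet below is a minimal, hypothetical sketch (the Sample class and its methods are invented, not taken from this repository) of the kind of spelling pyupgrade modernises; the older forms it would rewrite are kept in the comments.

from __future__ import annotations

# Hypothetical pre-3.8 spellings that pyupgrade --py38-plus would rewrite:
#   class Sample(object): ...           ->  class Sample: ...
#   "mean = {}".format(mean)            ->  f"mean = {mean}"
#   values: List[float] (from typing)   ->  values: list[float]
#       (annotation-only rewrite, relying on the __future__ import above)


class Sample:
    def __init__(self, values: list[float]) -> None:
        self.values = values

    def describe(self) -> str:
        mean = sum(self.values) / len(self.values)
        return f"mean = {mean:.3f}"


print(Sample([1.0, 2.0, 4.0]).describe())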
2 changes: 2 additions & 0 deletions docs/conf.py
@@ -5,9 +5,11 @@
# https://www.sphinx-doc.org/en/master/usage/configuration.html

# -- Path setup --------------------------------------------------------------
from __future__ import annotations

import sys
from pathlib import Path

from hepstats import __version__ as version

project_dir = Path(__file__).parents[1]
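The from __future__ import annotations line added to docs/conf.py (and to every notebook below) enables postponed evaluation of annotations (PEP 563): annotations are stored as strings instead of being evaluated at definition time, so newer annotation syntax still parses on Python 3.8. A small illustrative sketch, not taken from this repository:

from __future__ import annotations


def find_docs(paths: list[str], suffix: str | None = ".rst") -> dict[str, bool]:
    # With postponed evaluation, list[str] and str | None are never evaluated
    # at definition time, so this runs on Python 3.8 even though builtin
    # generics and the | union syntax only became evaluable in 3.9/3.10.
    return {p: (suffix is None or p.endswith(suffix)) for p in paths}


print(find_docs(["conf.py", "index.rst"]))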
15 changes: 7 additions & 8 deletions notebooks/hypotests/FC_interval_asy.ipynb
@@ -23,19 +23,19 @@
},
"outputs": [],
"source": [
"from __future__ import annotations\n",
"\n",
"import matplotlib.pyplot as plt\n",
"import numpy as np\n",
"import zfit\n",
"from zfit.loss import UnbinnedNLL\n",
"from zfit.minimize import Minuit\n",
"\n",
"zfit.settings.set_seed(10)\n",
"\n",
"\n",
"from hepstats.hypotests import ConfidenceInterval\n",
"from hepstats.hypotests.calculators import AsymptoticCalculator\n",
"from hepstats.hypotests.exceptions import POIRangeError\n",
"from hepstats.hypotests.parameters import POIarray"
"from hepstats.hypotests.parameters import POIarray\n",
"from zfit.loss import UnbinnedNLL\n",
"from zfit.minimize import Minuit\n",
"\n",
"zfit.settings.set_seed(10)"
]
},
{
@@ -102,7 +102,6 @@
"# minimisation of the loss function\n",
"minimum = minimizer.minimize(loss=nll)\n",
"minimum.hesse()\n",
"print(minimum)\n",
"\n",
"x_err = minimum.params[mean][\"hesse\"][\"error\"]"
]
21 changes: 10 additions & 11 deletions notebooks/hypotests/FC_interval_freq.ipynb
@@ -21,21 +21,21 @@
},
"outputs": [],
"source": [
"import os\n",
"from __future__ import annotations\n",
"\n",
"from pathlib import Path\n",
"\n",
"import matplotlib.pyplot as plt\n",
"import numpy as np\n",
"import zfit\n",
"from zfit.loss import UnbinnedNLL\n",
"from zfit.minimize import Minuit\n",
"\n",
"zfit.settings.set_seed(10)\n",
"\n",
"\n",
"from hepstats.hypotests import ConfidenceInterval\n",
"from hepstats.hypotests.calculators import FrequentistCalculator\n",
"from hepstats.hypotests.exceptions import POIRangeError\n",
"from hepstats.hypotests.parameters import POIarray"
"from hepstats.hypotests.parameters import POIarray\n",
"from zfit.loss import UnbinnedNLL\n",
"from zfit.minimize import Minuit\n",
"\n",
"zfit.settings.set_seed(10)"
]
},
{
@@ -102,7 +102,6 @@
"# minimisation of the loss function\n",
"minimum = minimizer.minimize(loss=nll)\n",
"minimum.hesse()\n",
"print(minimum)\n",
"\n",
"x_err = minimum.params[mean][\"hesse\"][\"error\"]"
]
@@ -171,7 +170,7 @@
"\n",
" zfit.settings.set_seed() # randomized\n",
"\n",
" if not os.path.isfile(toys_fname):\n",
" if not Path(toys_fname).is_file():\n",
" calculator = FrequentistCalculator(minimum, minimizer)\n",
"\n",
" x_min = results_n[\"x\"] - results_n[\"x_err\"] * 3\n",
@@ -193,7 +192,7 @@
" else:\n",
" calculator = FrequentistCalculator.from_yaml(toys_fname, minimum, minimizer)\n",
"\n",
" keys = np.unique([k[0].value for k in calculator.keys()])\n",
" keys = np.unique([k[0].value for k in calculator])\n",
" keys.sort()\n",
" poinull = POIarray(mean, keys)\n",
"\n",
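The toys-cache check in this notebook now goes through pathlib instead of os.path. Below is a minimal sketch of the same pattern; the cache file name is invented here, the notebook builds its own toys_fname.

from pathlib import Path

# Hypothetical cache location; the notebook derives its own toys_fname.
toys_fname = Path("toys") / "FC_toys_example.yml"

if toys_fname.is_file():
    print(f"reusing cached toys from {toys_fname}")
else:
    toys_fname.parent.mkdir(parents=True, exist_ok=True)
    print(f"no cache yet; toys would be generated and saved to {toys_fname}")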
69 changes: 20 additions & 49 deletions notebooks/hypotests/Simultaneous_fit_discovery_splot.ipynb
@@ -45,6 +45,8 @@
},
"outputs": [],
"source": [
"from __future__ import annotations\n",
"\n",
"import hepunits as u\n",
"import matplotlib.pyplot as plt\n",
"import mplhep\n",
@@ -93,9 +95,7 @@
"# create some data\n",
"signal_np = np.random.normal(loc=mu_true, scale=sigma_true, size=n_sig_rare)\n",
"bkg_np_raw = np.random.exponential(size=20000, scale=700)\n",
"bkg_np = (\n",
" bkg_np_raw[bkg_np_raw < 1000][:n_bkg_rare] + 5000\n",
") # just cutting right, but zfit could also cut"
"bkg_np = bkg_np_raw[bkg_np_raw < 1000][:n_bkg_rare] + 5000 # just cutting right, but zfit could also cut"
]
},
{
@@ -150,14 +150,10 @@
"sigma = zfit.Parameter(\"sigma\", 20, 1, 200)\n",
"signal = zfit.pdf.Gauss(mu=mu, sigma=sigma, obs=obs)\n",
"\n",
"lam = zfit.Parameter(\n",
" \"lambda\", -0.002, -0.1, -0.00001, step_size=0.001\n",
") # floating, also without limits\n",
"lam = zfit.Parameter(\"lambda\", -0.002, -0.1, -0.00001, step_size=0.001) # floating, also without limits\n",
"comb_bkg = zfit.pdf.Exponential(lam, obs=obs)\n",
"\n",
"sig_yield = zfit.Parameter(\n",
" \"sig_yield\", n_sig_rare + 30, step_size=3\n",
") # step size: default is small, use appropriate\n",
"sig_yield = zfit.Parameter(\"sig_yield\", n_sig_rare + 30, step_size=3) # step size: default is small, use appropriate\n",
"bkg_yield = zfit.Parameter(\"bkg_yield\", n_bkg_rare - 40, step_size=1)\n",
"# Create extended PDFs\n",
"extended_sig = signal.create_extended(sig_yield)\n",
@@ -180,9 +176,7 @@
},
"outputs": [],
"source": [
"constraint = zfit.constraint.GaussianConstraint(\n",
" mu, observation=5275 * u.MeV, sigma=15 * u.MeV\n",
")"
"constraint = zfit.constraint.GaussianConstraint(mu, observation=5275 * u.MeV, sigma=15 * u.MeV)"
]
},
{
@@ -216,9 +210,7 @@
}
},
"outputs": [],
"source": [
"print(result.params)"
]
"source": []
},
{
"cell_type": "markdown",
@@ -282,9 +274,7 @@
"\n",
"# load data into zfit\n",
"obs_reso = zfit.Space(\"Bmass_reso\", (5000, 6000))\n",
"data_reso = zfit.Data.from_numpy(\n",
" obs=obs_reso, array=np.concatenate([signal_np_reso, bkg_np_reso], axis=0)\n",
")"
"data_reso = zfit.Data.from_numpy(obs=obs_reso, array=np.concatenate([signal_np_reso, bkg_np_reso], axis=0))"
]
},
{
@@ -465,9 +455,7 @@
}
},
"outputs": [],
"source": [
"print(result_simultaneous.params)"
]
"source": []
},
{
"cell_type": "markdown",
@@ -524,11 +512,9 @@
" # Line plots of the total pdf and the sub-pdfs.\n",
" y = model.ext_pdf(x) * binwidth\n",
" ax.plot(x, y, label=\"total\", color=\"royalblue\")\n",
" for m, l, c in zip(\n",
" model.get_models(), [\"background\", \"signal\"], [\"forestgreen\", \"crimson\"]\n",
" ):\n",
" ym = m.ext_pdf(x) * binwidth\n",
" ax.plot(x, ym, label=l, color=c)\n",
" for mod, label, color in zip(model.get_models(), [\"background\", \"signal\"], [\"forestgreen\", \"crimson\"]):\n",
" ym = mod.ext_pdf(x) * binwidth\n",
" ax.plot(x, ym, label=label, color=color)\n",
"\n",
" ax.set_title(data.data_range.obs[0])\n",
" ax.set_xlim(lower, upper)\n",
@@ -539,9 +525,7 @@
"\n",
"fig, axs = plt.subplots(1, 2, figsize=(16, 6))\n",
"\n",
"for mod, dat, ax, nb in zip(\n",
" nll_simultaneous.model, nll_simultaneous.data, axs, [30, 60]\n",
"):\n",
"for mod, dat, ax, nb in zip(nll_simultaneous.model, nll_simultaneous.data, axs, [30, 60]):\n",
" plot_fit_projection(mod, dat, nbins=nb, ax=ax)"
]
},
@@ -580,9 +564,7 @@
}
},
"outputs": [],
"source": [
"print(result_simultaneous.params[sig_yield])"
]
"source": []
},
{
"cell_type": "markdown",
@@ -804,13 +786,10 @@
},
"outputs": [],
"source": [
"calculator_low_sig = AsymptoticCalculator(\n",
" input=nll_simultaneous_low_sig, minimizer=minimizer, asimov_bins=100\n",
")\n",
"calculator_low_sig = AsymptoticCalculator(input=nll_simultaneous_low_sig, minimizer=minimizer, asimov_bins=100)\n",
"\n",
"discovery_low_sig = Discovery(calculator=calculator_low_sig, poinull=sig_yield_poi)\n",
"discovery_low_sig.result()\n",
"print(f\"\\n {calculator_low_sig.bestfit.params} \\n\")"
"discovery_low_sig.result()"
]
},
{
Expand Down Expand Up @@ -990,8 +969,7 @@
"sigma_scaling.floating = False\n",
"\n",
"# Minimizes the loss.\n",
"result_sw = minimizer.minimize(nll_sw)\n",
"print(result_sw.params)"
"result_sw = minimizer.minimize(nll_sw)"
]
},
{
Expand Down Expand Up @@ -1053,9 +1031,7 @@
"source": [
"from hepstats.splot import compute_sweights\n",
"\n",
"weights = compute_sweights(model_reso, data_sw)\n",
"\n",
"print(weights)"
"weights = compute_sweights(model_reso, data_sw)"
]
},
{
Expand All @@ -1070,10 +1046,7 @@
}
},
"outputs": [],
"source": [
"print(\"Sum of signal sWeights: \", np.sum(weights[reso_sig_yield]))\n",
"print(\"Sum of background sWeights: \", np.sum(weights[reso_bkg_yield]))"
]
"source": []
},
{
"cell_type": "markdown",
Expand Down Expand Up @@ -1159,9 +1132,7 @@
}
},
"outputs": [],
"source": [
"print(f\"Correlation between m and t: {np.corrcoef(np_m_sw, np_t_sw)[0, 1]}\")"
]
"source": []
},
{
"cell_type": "markdown",
14 changes: 7 additions & 7 deletions notebooks/hypotests/confidenceinterval_asy_zfit.ipynb
@@ -21,16 +21,17 @@
},
"outputs": [],
"source": [
"from __future__ import annotations\n",
"\n",
"import matplotlib.pyplot as plt\n",
"import numpy as np\n",
"import zfit\n",
"from utils import one_minus_cl_plot, plotfitresult, pltdist\n",
"from zfit.loss import ExtendedUnbinnedNLL\n",
"from zfit.minimize import Minuit\n",
"\n",
"from hepstats.hypotests import ConfidenceInterval\n",
"from hepstats.hypotests.calculators import AsymptoticCalculator\n",
"from hepstats.hypotests.parameters import POIarray"
"from hepstats.hypotests.parameters import POIarray\n",
"from utils import one_minus_cl_plot, plotfitresult, pltdist\n",
"from zfit.loss import ExtendedUnbinnedNLL\n",
"from zfit.minimize import Minuit"
]
},
{
@@ -169,8 +170,7 @@
"source": [
"# minimisation of the loss function\n",
"minimum = minimizer.minimize(loss=nll)\n",
"minimum.hesse()\n",
"print(minimum)"
"minimum.hesse()"
]
},
{
18 changes: 8 additions & 10 deletions notebooks/hypotests/confidenceinterval_freq_zfit.ipynb
@@ -21,16 +21,17 @@
},
"outputs": [],
"source": [
"from __future__ import annotations\n",
"\n",
"import matplotlib.pyplot as plt\n",
"import numpy as np\n",
"import zfit\n",
"from utils import one_minus_cl_plot, plotfitresult, pltdist\n",
"from zfit.loss import ExtendedUnbinnedNLL\n",
"from zfit.minimize import Minuit\n",
"\n",
"from hepstats.hypotests import ConfidenceInterval\n",
"from hepstats.hypotests.calculators import FrequentistCalculator\n",
"from hepstats.hypotests.parameters import POIarray"
"from hepstats.hypotests.parameters import POIarray\n",
"from utils import one_minus_cl_plot, plotfitresult, pltdist\n",
"from zfit.loss import ExtendedUnbinnedNLL\n",
"from zfit.minimize import Minuit"
]
},
{
@@ -154,8 +155,7 @@
"outputs": [],
"source": [
"# minimisation of the loss function\n",
"minimum = minimizer.minimize(loss=nll)\n",
"print(minimum)"
"minimum = minimizer.minimize(loss=nll)"
]
},
{
@@ -200,9 +200,7 @@
"source": [
"# instantiation of the calculator\n",
"# calculator = FrequentistCalculator(nll, minimizer, ntoysnull=100)\n",
"calculator = FrequentistCalculator.from_yaml(\n",
"    \"toys/ci_freq_zfit_toys.yml\", nll, minimizer, ntoysnull=2000\n",
")\n",
"calculator = FrequentistCalculator.from_yaml(\"toys/ci_freq_zfit_toys.yml\", nll, minimizer, ntoysnull=2000)\n",
"calculator.bestfit = minimum # optional"
]
},