feat: Add Cramér-Rao uncertainties + covariance using autodiff to non-minuit fits by default #2269

Open · wants to merge 17 commits into base: main
Changes from 1 commit
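
For context on the feature in the PR title: the Cramér-Rao approach estimates the parameter covariance as the inverse of the observed Fisher information, i.e. the inverse Hessian of the negative log-likelihood at the best-fit point. A minimal sketch of that computation in JAX — the names `nll` and `best_fit` are illustrative, not pyhf's actual `fisher_cov` implementation:

import jax
import jax.numpy as jnp

def fisher_cov_sketch(nll, best_fit):
    # Observed Fisher information: Hessian of the NLL at the minimum.
    info = jax.hessian(nll)(best_fit)
    # Cramer-Rao: covariance is the inverse Fisher information.
    cov = jnp.linalg.inv(info)
    # Per-parameter uncertainties are the square roots of the diagonal.
    return cov, jnp.sqrt(jnp.diag(cov))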
fix torch by casting res to tensor
phinate committed Oct 20, 2023
commit 123f2a0dbd3d186e324869b9b98a3cbcb574e1c3
35 changes: 21 additions & 14 deletions src/pyhf/optimize/mixins.py
@@ -50,16 +50,6 @@ def _internal_minimize(
             par_names=par_names,
         )

-        # so we can check if valid for uncertainty calc later
-        try:
-            minimizer_name = minimizer.name
-            if minimizer_name == "minuit":
-                self.using_minuit = True
-            else:
-                self.using_minuit = False
-        except AttributeError:
-            self.using_minuit = False
-
         result = self._minimize(
             minimizer,
             func,
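
This removal is one half of a refactor visible across the commit: instead of caching a `self.using_minuit` flag on the optimizer instance as a side effect of `_internal_minimize`, the flag becomes a local computed in `minimize` and threaded through `_internal_postprocess` as an argument (see the later hunks). A schematic of the before/after shape, with hypothetical trimmed-down method names:

# before: mutable state set on the instance while minimizing
class OptimizerMixinBefore:
    def _internal_minimize(self, minimizer):
        self.using_minuit = getattr(minimizer, "name", None) == "minuit"

# after: a plain value computed once and passed explicitly
class OptimizerMixinAfter:
    def minimize(self, minimizer):
        using_minuit = getattr(minimizer, "name", None) == "minuit"
        return self._internal_postprocess(using_minuit)

    def _internal_postprocess(self, using_minuit):
        return using_minuit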
@@ -81,6 +71,7 @@ def _internal_postprocess(
         self,
         fitresult,
         stitch_pars,
+        using_minuit,
         return_uncertainties=False,
         uncertainties=None,
         hess_inv=None,
@@ -106,7 +97,7 @@
         # https://github.com/scikit-hep/iminuit/issues/762
         # https://github.com/scikit-hep/pyhf/issues/1918
         # https://github.com/scikit-hep/cabinetry/pull/346
-        if self.using_minuit:
+        if using_minuit:
            uncertainties = np.where(fitresult.minuit.fixed, 0.0, uncertainties)

         # stitch in zero-uncertainty for fixed values
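
The np.where call in this hunk is the standard NumPy masking idiom for zeroing the uncertainty of fixed parameters; a small self-contained illustration with made-up values:

import numpy as np

fixed = np.array([False, True, False])        # stand-in for fitresult.minuit.fixed
uncertainties = np.array([0.12, 0.05, 0.30])
np.where(fixed, 0.0, uncertainties)           # -> array([0.12, 0.  , 0.3 ])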
@@ -189,6 +180,22 @@ def minimize(
             - minimum (:obj:`float`): if ``return_fitted_val`` flagged, return minimized objective value
             - result (:class:`scipy.optimize.OptimizeResult`): if ``return_result_obj`` flagged
         """
+        # literally just for the minimizer name to check if we're using minuit
+        _minimizer = self._get_minimizer(
+            lambda x: x,
+            [0],
+            [0, 1],
+        )
+
+        # so we can check if valid for uncertainty calc later
+        if hasattr(_minimizer, "name"):
+            if _minimizer.name == "minuit":
+                using_minuit = True
+            else:
+                using_minuit = False
+        else:
+            using_minuit = False
+
         # Configure do_grad based on backend "automagically" if not set by user
         tensorlib, _ = get_backend()
         do_grad = tensorlib.default_do_grad if do_grad is None else do_grad
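
The `_get_minimizer(lambda x: x, [0], [0, 1])` call above builds a throwaway minimizer (identity objective, dummy initial value and bounds) purely to read its `name` attribute. The nested if/else that follows is also equivalent to a one-line getattr check; an illustrative condensation, not part of the PR:

class _Probe:
    # stand-in for the object returned by _get_minimizer
    name = "minuit"

# False whenever the minimizer has no `name` attribute
# or the name is anything other than "minuit"
using_minuit = getattr(_Probe(), "name", None) == "minuit"
assert using_minuit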
@@ -221,16 +228,16 @@
         )

         # compute uncertainties with automatic differentiation
-        if not self.using_minuit and tensorlib.name in ['jax', 'pytorch']:
-            hess_inv = tensorlib.fisher_cov(pdf, result.x, data)
+        if not using_minuit and tensorlib.name in ['jax', 'pytorch']:
+            hess_inv = tensorlib.fisher_cov(pdf, tensorlib.astensor(result.x), data)
             uncertainties = tensorlib.sqrt(tensorlib.diagonal(hess_inv))
         else:
             hess_inv = None
             uncertainties = None
         result = self._internal_postprocess(
             result,
             stitch_pars,
-            pdf,
+            using_minuit,
             return_uncertainties=return_uncertainties,
             uncertainties=uncertainties,
             hess_inv=hess_inv,
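
The `astensor` cast that gives this commit its name addresses the fact that SciPy-based minimization returns `result.x` as a NumPy array, while torch's autodiff machinery (used by the pytorch backend's `fisher_cov`) requires `torch.Tensor` inputs. A minimal reproduction of the pattern, independent of pyhf:

import numpy as np
import torch

x_np = np.array([1.0, 2.0])      # what scipy.optimize puts in result.x
x_t = torch.as_tensor(x_np)      # the equivalent of tensorlib.astensor(result.x)

# torch.autograd.functional.hessian differentiates through tensor inputs;
# the raw NumPy array is not accepted here.
hess = torch.autograd.functional.hessian(lambda p: (p**2).sum(), x_t)
print(hess)                      # 2 * identity

If this reading is right, the cast makes the pytorch branch of the `tensorlib.name in ['jax', 'pytorch']` check behave the same way the jax branch already did.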