From a03f9d7e41b0a71b9a7b4bb3c7e44f607d02919a Mon Sep 17 00:00:00 2001
From: APMDSLHC
Date: Mon, 15 Apr 2024 17:33:09 +0200
Subject: [PATCH] Do not break on UL results; create dummy jsonFile names if
 none are provided

---
 smodels/matching/modelTester.py      | 13 ++++++++-----
 smodels/matching/theoryPrediction.py | 13 +++++++------
 smodels/statistics/pyhfInterface.py  |  4 +++-
 unittests/timeout.ini                |  2 ++
 4 files changed, 20 insertions(+), 12 deletions(-)

diff --git a/smodels/matching/modelTester.py b/smodels/matching/modelTester.py
index daa290684..7588c3760 100644
--- a/smodels/matching/modelTester.py
+++ b/smodels/matching/modelTester.py
@@ -60,6 +60,9 @@ def testPoint(inputFile, outputDir, parser, database):
     """ Set BSM model, if necessary """
     if parser.has_option("particles","model"):
         runtime.modelFile = parser.get( "particles", "model" )
+    else:
+        logger.debug('No model file has been defined; using input file %s to read the quantum numbers.' % inputFile)
+        runtime.modelFile = inputFile
 
     """Get run parameters and options from the parser"""
     sigmacut = parser.getfloat("parameters", "sigmacut") * fb
@@ -122,7 +125,7 @@ def testPoint(inputFile, outputDir, parser, database):
                               stableWidth=stableWidth,
                               ignorePromptQNumbers=ignorePromptQNumbers)
     except SModelSError as e:
-        print("Exception %s %s" % (e, type(e)))
+        logger.error("Exception %s %s" % (e, type(e)))
         """ Update status to fail, print error message and exit """
         outputStatus.updateStatus(-1)
         return {os.path.basename(inputFile): masterPrinter}
@@ -410,10 +413,10 @@ def testPoints(fileList, inDir, outputDir, parser, database,
             outputDict.update(p.get())
 
     # Collect output to build global summary:
-    summaryFile = os.path.join(outputDir, 'summary.txt')
-    logger.info("A summary of the results can be found in %s" %
-                summaryFile)
-    printScanSummary(outputDict, summaryFile)
+    scanSummaryFile = os.path.join(outputDir, 'summary.txt')
+    logger.info("A summary of the scan results can be found in %s" %
+                scanSummaryFile)
+    printScanSummary(outputDict, scanSummaryFile)
     # Remove summary log from logger
     logger.removeHandler(fileLog)
     fileLog.close()
diff --git a/smodels/matching/theoryPrediction.py b/smodels/matching/theoryPrediction.py
index 563e243f4..4f3d8c0c5 100644
--- a/smodels/matching/theoryPrediction.py
+++ b/smodels/matching/theoryPrediction.py
@@ -368,11 +368,12 @@ def computeStatistics(self, expected=False):
         # Compute likelihoods and related parameters:
         llhdDict = self.statsComputer.get_five_values(expected = expected,
                       return_nll = True )
-        self.cachedObjs[expected]["nll"] = llhdDict["lbsm"]
-        self.cachedObjs[expected]["nll_sm"] = llhdDict["lsm"]
-        self.cachedObjs[expected]["nllmax"] = llhdDict["lmax"]
-        self.cachedObjs[expected]["muhat"] = llhdDict["muhat"]
-        self.cachedObjs[expected]["sigma_mu"] = llhdDict["sigma_mu"]
+        if llhdDict not in [ None, {} ]:
+            self.cachedObjs[expected]["nll"] = llhdDict["lbsm"]
+            self.cachedObjs[expected]["nll_sm"] = llhdDict["lsm"]
+            self.cachedObjs[expected]["nllmax"] = llhdDict["lmax"]
+            self.cachedObjs[expected]["muhat"] = llhdDict["muhat"]
+            self.cachedObjs[expected]["sigma_mu"] = llhdDict["sigma_mu"]
 
 
 class TheoryPredictionsCombiner(TheoryPrediction):
@@ -691,7 +692,7 @@ def theoryPredictionsFor(database : Database, smsTopDict : Dict,
         for theoPred in expResults:
             theoPred.expResult = expResult
             theoPred.deltas_rel = deltas_rel
-            if not isinstance(theoPred.dataset,CombinedDataSet) and "CR" in theoPred.dataset.dataInfo.dataId: # Individual CRs shouldn't give results
+            if not isinstance(theoPred.dataset,CombinedDataSet) and theoPred.dataset.dataInfo.dataId is not None and "CR" in theoPred.dataset.dataInfo.dataId: # Individual CRs shouldn't give results
                 theoPred.upperLimit = None
             else:
                 theoPred.upperLimit = theoPred.getUpperLimit()
diff --git a/smodels/statistics/pyhfInterface.py b/smodels/statistics/pyhfInterface.py
index cadc0de8c..395fa45c9 100755
--- a/smodels/statistics/pyhfInterface.py
+++ b/smodels/statistics/pyhfInterface.py
@@ -96,6 +96,8 @@ def __init__(self, nsignals, inputJsons, jsonFiles=None, includeCRs=False, signa
         self.cached_likelihoods = {} ## cache of likelihoods (actually twice_nlls)
         self.cached_lmaxes = {} # cache of lmaxes (actually twice_nlls)
         self.cachedULs = {False: {}, True: {}, "posteriori": {}}
+        if jsonFiles is None: # if no names were provided for the json file(s) and their channels, use dummy ones
+            jsonFiles = { "dummy%d" % i: "" for i in range(len(inputJsons)) }
         self.jsonFiles = jsonFiles
         self.includeCRs = includeCRs
         self.signalUncertainty = signalUncertainty
@@ -148,7 +150,7 @@ def getWSInfo(self):
                 if "CR" in ch["name"]:
                     nbCRinWS += 1
             if nbCRwithEM and nbCRwithEM != nbCRinWS:
-                logger.warning(f"Number of CRs in workspace: {nbCRwithEM} but number of CRs with EM: {nbCRwithEM}. Signal in CRs will not be patched.")
+                logger.warning(f"Number of CRs in workspace: {nbCRinWS} but number of CRs with EM: {nbCRwithEM}. Signal in CRs will not be patched.")
             if nbCRwithEM != 0 and not self.includeCRs:
                 logger.warning("EM in CRs but includeCRs == False. Signal in CRs will not be patched.")
             for i_ch, ch in enumerate(ws["channels"]):
diff --git a/unittests/timeout.ini b/unittests/timeout.ini
index c31285447..a969a24dc 100644
--- a/unittests/timeout.ini
+++ b/unittests/timeout.ini
@@ -9,6 +9,8 @@ sigmacut = 0.03
 minmassgap = 5.0
 maxcond = 0.2
 ncpus = 1
+[particles]
+model = share.models.mssm
 [database]
 path = official
 analyses = all
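
Reviewer note: the reordered condition in theoryPredictionsFor relies on Python's short-circuit evaluation. For upper-limit (UL) results the dataset's dataId is None, and "CR" in None raises a TypeError, which is the breakage the subject line refers to. A minimal standalone sketch of the guard, with made-up dataId values for illustration:

```python
def is_individual_cr(dataId):
    # Short-circuit: the membership test only runs when dataId is not None,
    # so UL-type results (dataId is None) no longer raise a TypeError.
    return dataId is not None and "CR" in dataId

print(is_individual_cr(None))         # False (UL result; previously crashed)
print(is_individual_cr("CR_lowMET"))  # True  (hypothetical control-region id)
print(is_individual_cr("SR1"))        # False (hypothetical signal-region id)
```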
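
Likewise, a minimal sketch (not part of the patch) of what the jsonFiles fallback added to pyhfInterface.py produces; the two-element inputJsons list here is only a placeholder for real pyhf workspace payloads:

```python
# Stand-ins for actual pyhf JSON workspaces, shape assumed for illustration.
inputJsons = [{"channels": []}, {"channels": []}]

jsonFiles = None
if jsonFiles is None:  # same guard as in the patch
    # One dummy name per input workspace, with an empty channel entry.
    jsonFiles = {"dummy%d" % i: "" for i in range(len(inputJsons))}

print(jsonFiles)  # -> {'dummy0': '', 'dummy1': ''}
```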