# Set up the path to SModelS installation folder
import sys; sys.path.append("."); import smodels_paths
from smodels.tools.simplifiedLikelihoods import LikelihoodComputer, Data
# If the number of observed events, the number of expected background events,
# its error and the number of signal events and its error are known, the likelihood
# for the signal (assuming a truncated gaussian distribution for the background and signal uncertainties)
# can be computed as:
# Build a Data object describing a single counting experiment:
# 5 observed events, 4.2 expected background events with an absolute
# uncertainty of 0.71 (passed as the 1x1 covariance 0.71**2), and a
# reference signal of 0.1 events. third_moment=None selects a symmetric
# (truncated Gaussian) uncertainty model.
m = Data(observed=5, backgrounds=4.2, covariance=0.71**2,
         third_moment=None, nsignal=.1)
# The LikelihoodComputer evaluates the likelihood and chi2 for a given
# number of signal events (here nsig = 10).
comp = LikelihoodComputer(m)
print('likelihood=', comp.likelihood(nsig=10.))
print('chi2=', comp.chi2(nsig=10.))
# Output: likelihood= 0.002043162995765441  chi2= 7.753011557803882
# In most cases one wants to compute the likelihood and chi2 for a given theory prediction computed by SModelS.
# Below we generate theory predictions and compute the likelihood and chi2 values for them
# First we import those parts of smodels that are needed for this exercise
#(We will assume the input is a SLHA file. For LHE files, use the lheDecomposer instead)
from smodels.share.models.mssm import BSMList
from smodels.share.models.SMparticles import SMList
from smodels.theory.model import Model
from smodels.theory import decomposer
from smodels.installation import installDirectory
from smodels.tools.physicsUnits import fb, GeV
from smodels.theory.theoryPrediction import theoryPredictionsFor
from smodels.experiment.databaseObj import Database
# Define the SLHA input file name
# Define the SLHA input file name and build the particle model from it.
filename = "inputFiles/slha/gluino_squarks.slha"
model = Model(BSMparticles=BSMList, SMparticles=SMList)
model.updateParticles(inputFile=filename)

# Load the database, do the decomposition and compute theory predictions:
# (Look at the theory predictions HowTo to learn how to compute theory predictions)
database = Database("official")
expResults = database.getExpResults(analysisIDs=["ATLAS-SUSY-2018-31"])
# Decompose the model into simplified-model topologies; drop contributions
# below 0.03 fb and compress mass gaps below 5 GeV.
topList = decomposer.decompose(model, sigmacut=0.03 * fb,
                               doCompress=True, doInvisible=True,
                               minmassgap=5 * GeV)
# One entry per experimental result, each holding that result's predictions.
allThPredictions = [theoryPredictionsFor(exp, topList) for exp in expResults]
# For each theory prediction, compute the corresponding likelihood and chi2 values
# (This is only possible for efficiency map-type results):
# For each theory prediction, compute the corresponding likelihood values.
# (This is only possible for efficiency map-type results.)
for i, thPreds in enumerate(allThPredictions):
    if not thPreds:
        continue  # skip results with no predictions
    expID = expResults[i].globalInfo.id
    dataType = expResults[i].getValuesFor('dataType')[0]
    for theoryPred in thPreds:
        print("\nExperimental Result: %s (%s-type)" % (expID, dataType))  # Result ID and type
        print("Theory prediction xsec = ", theoryPred.xsection.value)  # Signal xsection*efficiency*BR
        if dataType == 'efficiencyMap':
            # Compute the likelihoods once, only where they are used.
            # (The original called computeStatistics() twice per prediction:
            # once unconditionally and again here — the first call was
            # redundant, and useless for upper-limit results.)
            theoryPred.computeStatistics()
            print('L_BSM, L_SM, L_max =', theoryPred.likelihood(),
                  theoryPred.lsm(), theoryPred.lmax())
        else:
            print("(likelihood not available)")
# Output:
# Experimental Result: ATLAS-SUSY-2018-31 (upperLimit-type)
# Theory prediction xsec =  2.87E-04 [pb]
# (likelihood not available)
#
# Experimental Result: ATLAS-SUSY-2018-31 (efficiencyMap-type)
# Theory prediction xsec =  4.14E-06 [pb]
# L_BSM, L_SM, L_max = 0.01984154527455616 0.023221236676880603 0.023221236677540332