# Set up the path to SModelS installation folder
import sys; sys.path.append("."); import smodels_paths
from smodels.tools import runtime
#Define your model (list of rEven and rOdd particles)
runtime.modelFile = 'smodels.share.models.mssm'
from smodels import particlesLoader
from smodels.theory import decomposer
from smodels.tools.physicsUnits import fb, GeV, TeV
from smodels.theory.theoryPrediction import theoryPredictionsFor
from smodels.experiment.databaseObj import Database
from smodels.tools import coverage
from smodels.tools.smodelsLogging import setLogLevel
from smodels.particlesLoader import BSMList
from smodels.share.models.SMparticles import SMList
from smodels.theory.model import Model
setLogLevel("info")
# Build the full model from the BSM and SM particle lists defined above,
# then populate masses, widths and branching ratios from the SLHA file.
model = Model(BSMparticles=BSMList, SMparticles=SMList)
slhafile = 'inputFiles/slha/lightEWinos.slha'
model.updateParticles ( inputFile = slhafile )
# Expected output:
#   INFO in model.updateParticles() in 385: Loaded 62 BSM particles
# Decomposition settings: drop contributions below the cross-section cut and
# treat states closer in mass than the minimum gap as degenerate.
sigma_cut = 0.01 * fb
min_mass_gap = 5.0 * GeV
# Decompose the model into simplified-model topologies
# (use slhaDecomposer for SLHA input or lheDecomposer for LHE input).
toplist = decomposer.decompose(model, sigma_cut,
                               doCompress=True, doInvisible=True,
                               minmassgap=min_mass_gap)
# Access basic information from decomposition, using the topology list and topology objects:
print( "\n Decomposition Results: " )
print( "\t Total number of topologies: %i " %len(toplist) )
# Generator expression instead of a list comprehension inside sum():
# no need to materialize the intermediate list of lengths.
nel = sum(len(top.elementList) for top in toplist)
print( "\t Total number of elements = %i " %nel )
# Expected output:
#   Decomposition Results:
#     Total number of topologies: 51
#     Total number of elements = 14985
# Load the official SModelS database (fetched and cached as a binary .pcl file).
database = Database("official")
# Select the experimental results to be used.
# Here only a single analysis is loaded; omit the analysisIDs argument
# (or pass ['all']) to employ every result in the database.
listOfExpRes = database.getExpResults( analysisIDs = [ "ATLAS-SUSY-2015-06" ])
# Print basic information about the results loaded.
# Count the number of loaded UL and EM experimental results:
nUL, nEM = 0, 0
for exp in listOfExpRes:
    # All datasets of a result share the same dataType, so the first suffices.
    expType = exp.datasets[0].dataInfo.dataType
    if expType == 'upperLimit':
        nUL += 1
    elif expType == 'efficiencyMap':
        nEM += 1
# Report the counts (previously computed but never printed — dead code).
print("\n Loaded Database with %i UL results and %i EM results " %(nUL,nEM))
# Expected output:
#   INFO in databaseObj.loadBinaryFile() in 496: loading binary db file /home/lessa/.cache/smodels/official220.pcl format version 214
#   INFO in databaseObj.loadBinaryFile() in 503: Loaded database from /home/lessa/.cache/smodels/official220.pcl in 1.4 secs.
# Compute the theory predictions for each experimental result and print them:
print("\n Theory Predictions and Constraints:")
rmax = 0.
bestResult = None
for expResult in listOfExpRes:
    # Match the decomposed elements against this result's constraints.
    predictions = theoryPredictionsFor(expResult, toplist)
    if not predictions: continue # Skip if there are no constraints from this result
    print('\n %s (%i TeV)' %(expResult.globalInfo.id,expResult.globalInfo.sqrts.asNumber(TeV)))
    for theoryPrediction in predictions:
        # Basic attributes of the prediction (dataset, masses, topologies, PIDs):
        dataset = theoryPrediction.dataset
        datasetID = theoryPrediction.dataId()
        mass = theoryPrediction.mass
        txnames = [str(txname) for txname in theoryPrediction.txnames]
        PIDs = theoryPrediction.PIDs
        print( "------------------------" )
        print( "TxNames = ",txnames )
        print( "Theory Prediction = ",theoryPrediction.xsection.value ) #Signal cross section
        # Get the corresponding upper limit:
        print( "UL for theory prediction = ",theoryPrediction.upperLimit )
        # Compute the r-value (predicted signal over experimental upper limit):
        r = theoryPrediction.getRValue()
        print( "r = ",r )
        # Compute likelihoods for EM-type (efficiency map) results:
        if theoryPrediction.dataType() == 'efficiencyMap':
            theoryPrediction.computeStatistics()
            print('L_BSM, L_SM, L_max =', theoryPrediction.likelihood(), theoryPrediction.lsm(), theoryPrediction.lmax() )
        # Track the most constraining (highest-r) result seen so far.
        if r > rmax:
            rmax = r
            bestResult = expResult.globalInfo.id
# Summarize the strongest constraint found in the loop above.
print( "\nThe largest r-value (theory/upper limit ratio) is ",rmax )
verdict = ("(The input model is likely excluded by %s)" %bestResult
           if rmax > 1.
           else "(The input model is not excluded by the simplified model results)")
print(verdict)
# Expected output:
#   Theory Predictions and Constraints:
#   ATLAS-SUSY-2015-06 (13 TeV)
#   ------------------------
#   TxNames = ['T1', 'T2']
#   Theory Prediction = 5.28E-06 [pb]
#   UL for theory prediction = 1.79E+00 [fb]
#   r = 0.0029506296753791764
#   L_BSM, L_SM, L_max = 0.007168383173308842 0.007214985691269061 0.007214985691269061
#   The largest r-value (theory/upper limit ratio) is 0.0029506296753791764
#   (The input model is not excluded by the simplified model results)
# Find out missing topologies for sqrts=8*TeV:
uncovered = coverage.Uncovered(toplist,sqrts=8.*TeV)
# Sort the coverage groups by label for a deterministic printout.
# sorted() already returns a new list, so the defensive copy ([:]) of
# uncovered.groups was redundant and has been removed.
groups = sorted(uncovered.groups, key=lambda g: g.label)
# Print the total uncovered cross-section of each coverage group:
for group in groups:
    print("\nTotal cross-section for %s (fb): %10.3E\n" %(group.description,group.getTotalXSec()))
# Report up to three missing topologies with prompt decays, if any were found.
missingTopos = uncovered.getGroup('missing (prompt)')
promptElements = missingTopos.generalElements
if not promptElements:
    print("No missing topologies found\n")
else:
    print('Missing topologies (up to 3):' )
    for element in promptElements[:3]:
        print('Element:', element)
        print('\tcross-section (fb):', element.missingX)
# Report up to two missing topologies with displaced vertices, if any.
missingDisplaced = uncovered.getGroup('missing (displaced)')
displacedElements = missingDisplaced.generalElements
if not displacedElements:
    print("\nNo displaced decays")
else:
    print('\nElements with displaced vertices (up to 2):' )
    for element in displacedElements[:2]:
        print('Element:', element)
        print('\tcross-section (fb):', element.missingX)
# Expected output:
#   Total cross-section for missing topologies (fb):  3.852E+03
#   Total cross-section for missing topologies with displaced decays (fb):  0.000E+00
#   Total cross-section for missing topologies with prompt decays (fb):  3.852E+03
#   Total cross-section for topologies outside the grid (fb):  1.530E+03
#   Missing topologies (up to 3):
#   Element: [[[jet,jet]],[[l,nu]]] (MET,MET)   cross-section (fb): 644.0092445884675
#   Element: [[[jet,jet]],[[nu,ta]]] (MET,MET)  cross-section (fb): 321.1539651599444
#   Element: [[[jet,jet]],[[b,b]]] (MET,MET)    cross-section (fb): 274.43218534400495
#   No displaced decays