Source code for decomposition.theorySMS

"""
.. module:: theorySMS
   :synopsis: This is a class for describing Simplified Model Topologies
              used for decomposing BSM models.

.. moduleauthor:: Andre Lessa <lessa.a.p@gmail.com>
"""


from smodels.decomposition.exceptions import SModelSDecompositionError as SModelSError
from smodels.base.genericSMS import GenericSMS
from smodels.base.particle import Particle

class TheorySMS(GenericSMS):
    """
    A class for describing Simplified Model Topologies generated by
    the decomposition of full BSM models.
    """

    def __init__(self):
        """
        Initialize basic attributes.
        """
        GenericSMS.__init__(self)

        # Production cross-section:
        self.prodXSec = None
        # Total branching ratio (product of BRs)
        self.decayBRs = 1.0
        # Maximum weight:
        self.maxWeight = None
        # List of SMS topologies which could have generated self
        # (such as during compression)
        self.ancestors = [self]
        self.smsID = 0  # SMS identifier
        # Type of analyses which have SMS matching self:
        self.coveredBy = set()
        # Type of analyses which have SMS matching self and for
        # which the physical parameters are covered
        self.testedBy = set()

    def __cmp__(self, other):
        """
        Compare self to other based on the compareTo method.

        :parameter other: TheorySMS object
        :return: 0, if objects are equal, -1 if self < other, 1 if self > other
        """
        return self.compareTo(other)

    def __lt__(self, other):
        return self.__cmp__(other) == -1

    def __gt__(self, other):
        return self.__cmp__(other) == 1

    def __eq__(self, other):
        return self.__cmp__(other) == 0

    def __ne__(self, other):
        return self.__cmp__(other) != 0

    def __hash__(self):
        # Identity-based hash: equal (==) SMS objects may hash differently.
        # This is deliberate, so distinct objects can coexist in sets/dicts.
        return object.__hash__(self)

    def __add__(self, other):
        """
        Combines the equivalent nodes (add particles) and the weightLists.
        Can only be done if self and other have the same topology and ordering.

        :param other: TheorySMS object
        :return: new SMS with the combined particles (TheorySMS object)
        """
        if not isinstance(other, TheorySMS):
            raise SModelSError("Can not compare TheorySMS and %s" % str(type(other)))
        elif self.canonName != other.canonName:
            raise SModelSError("Can not add elements with distinct topologies")

        # Make a copy
        newSMS = self.copy()
        # Add nodes from other
        newSMS.addNodesFrom(other)
        # Add other attributes
        newSMS.prodXSec = self.prodXSec + other.prodXSec
        newSMS.ancestors = self.ancestors[:] + other.ancestors[:]
        newSMS.weightList = self.weightList + other.weightList
        newSMS.maxWeight = self.maxWeight + other.maxWeight
        # Decay BRs can no longer be properly defined:
        newSMS.decayBRs = None

        return newSMS
[docs] def setGlobalProperties(self,sort=True,canonName=True,weight=True): """ Compute and set global properties for the SMS (sort, renumber nodes, compute the canonical name and its total weight). Should only be called once the SMS will no longer be modified. :parameter canonName: If True, compute its canonical name :parameter sort: If True, sort the SMS :parameter weight: If True, compute its total weight """ if canonName: self._canonName = self.computeCanonName() if sort: self.sort(force=True) if weight: self.weightList = self.computeWeightList()
[docs] def compareTo(self, other): """ Compare self to other. If the SMS are not sorted, sort them and then do a direct comparison of each node with the same nodeIndex. :param other: SMS object to be compared against self :return: 0, if objects are equal, -1 if self < other, 1 if other > sekf """ if not isinstance(other,TheorySMS): raise SModelSError("Can not compare TheorySMS and %s" %str(type(other))) # Make sure the SMS are sorted # (if it has already been sorted, it does nothing) self.sort() other.sort() cmp = self.compareSubTrees(other,self.rootIndex,other.rootIndex) return cmp
[docs] def copy(self, emptyNodes=False): """ Returns a shallow copy of self. :param emptyNodes: If True, does not copy any of the nodes from self. :return: TheorySMS object """ newSMS = TheorySMS() newSMS.maxWeight = self.maxWeight newSMS.prodXSec = self.prodXSec newSMS.decayBRs = self.decayBRs if hasattr(self,'weightList'): newSMS.weightList = self.weightList.copy() newSMS._sorted = self._sorted newSMS.ancestors = self.ancestors[:] newSMS.smsID = self.smsID newSMS.coveredBy = set(list(self.coveredBy)[:]) newSMS.testedBy = set(list(self.testedBy)[:]) # Set nodes: if not emptyNodes: nodesObjDict = {n : node for n,node in zip(self.nodeIndices,self.nodes)} newSMS.copyTreeFrom(self, nodesObjDict) return newSMS
[docs] def addNodesFrom(self, other): """ Combines the nodes (add particles) in equivalent nodes in each trees. Can only be done if the trees have the same topology and ordering. :param other: TheorySMS object """ if other.canonName != self.canonName: raise SModelSError("Can not add trees with distinct topologies") newMapping = {} for n in self.nodeIndices: newMapping[n] = self.indexToNode(n) + other.indexToNode(n) # Just change the nodeIndex -> node mapping: self.updateNodeObjects(nodeObjectDict=newMapping)
[docs] def attachDecay(self, motherIndex, decayNodes, br=1.0, copy=True): """ Attaches a decay to self. If copy = True, returns a copy of self with the decay attached. :param motherIndex: Node index for the mother to which the decay should be added. :param decayNodes: Particle nodes for the daughters. :br: Branching ratio value for the decay :param copy: if True, return a copy of self, with the decay attached. :return: new tree with the other composed with self. """ if self.out_degree(motherIndex) != 0: raise SModelSError("Can not attach decay to an intermediate node.") motherNode = decayNodes[0] daughterNodes = decayNodes[1] if copy: newSMS = self.copy() else: newSMS = self # Update maximum weight newSMS.maxWeight = self.maxWeight*br # Update BRs: newSMS.decayBRs = self.decayBRs*br # Update mother node: newSMS.updateNodeObjects({motherIndex : motherNode}) # Add daughter nodes and edges: for d in daughterNodes: dIndex = newSMS.add_node(d) newSMS.add_edge(motherIndex, dIndex) # The tree is no longer sorted newSMS._sorted = False return newSMS
[docs] def computeWeightList(self): """ Computes the SMS weight (production cross-section*BRs) using maxWeight and the production cross-section. :return: CrossSectionList object """ prodXSec = self.prodXSec brs = self.decayBRs return prodXSec*brs
def _getAncestorsDict(self, igen=0): """ Returns a dictionary with all the ancestors of the SMS. The dictionary keys are integers labeling the generation (number of generations away from self) and the values are a list of SMS objects (ancestors) for that generation. igen is used as the counter for the initial generation. The output is also stored in self._ancestorsDict for future use. :param igen: Auxiliary integer indicating to which generation self belongs. :return: Dictionary with generation index as key and ancestors as values (e.g. {igen+1 : [mother1, mother2], igen+2 : [grandmother1,..],...}) """ ancestorsDict = {igen + 1: []} for ancestor in self.ancestors: if ancestor is self: continue ancestorsDict[igen + 1].append(ancestor) for jgen, smsList in ancestor._getAncestorsDict(igen + 1).items(): if jgen not in ancestorsDict: ancestorsDict[jgen] = [] ancestorsDict[jgen] += smsList return ancestorsDict
[docs] def getAncestors(self): """ Get a list of all the ancestors of self. The list is ordered so the mothers appear first, then the grandmother, then the grandgrandmothers,... :return: A list of SMS objects containing all the ancestors sorted by generation. """ ancestorsDict = self._getAncestorsDict() allAncestors = [] for igen in sorted(ancestorsDict.keys()): allAncestors += ancestorsDict[igen] return allAncestors
[docs] def isRelatedTo(self, other): """ Checks if self has other as an ancestor or they share ancestors. Returns True if self and other have at least one ancestor in common, otherwise returns False. :return: True/False """ if self is other: return True # Get ids for all ancestors of self, including self ancestorsA = set([id(self)]+[id(sms) for sms in self.getAncestors()]) # Get ids for all ancestors of other, including other ancestorsB = set([id(other)]+[id(sms) for sms in other.getAncestors()]) # Check if any ids intersect if ancestorsA.intersection(ancestorsB): return True else: return False
[docs] def setTestedBy(self, resultType): """ Tag the sms as tested by the result type. It also recursively tags all of its ancestors. :param resultType: String describing the type of result (e.g. 'prompt', 'displaced') """ self.testedBy.add(resultType) for ancestor in self.getAncestors(): ancestor.testedBy.add(resultType)
[docs] def setCoveredBy(self, resultType): """ Tag the sms as covered by the result type (it is tested AND its parameters are within the result grid). It also recursively tags all of its ancestors. :param resultType: String describing the type of result (e.g. 'prompt', 'displaced') """ self.coveredBy.add(resultType) for ancestor in self.getAncestors(): ancestor.coveredBy.add(resultType)
[docs] def compress(self, doCompress, doInvisible, minmassgap): """ Keep compressing the original SMS and the derived ones till they can be compressed no more. :parameter doCompress: if True, perform mass compression :parameter doInvisible: if True, perform invisible compression :parameter minmassgap: value (in GeV) of the maximum mass difference for compression (if mass difference < minmassgap, perform mass compression) :returns: list with the compressed SMS (TheorySMS objects) """ if not doCompress and not doInvisible: return [] added = True newList = [self] # Keep compressing the new topologies generated so far until no new # compressions can happen: while added: added = False # Check for mass compressed topologies if doCompress: for sms in newList: newSMS = sms.massCompress(minmassgap) # Avoids double counting # (elements sharing the same parent are removed during clustering) if (newSMS is not None) and (newSMS not in newList): newList.append(newSMS) added = True # Check for invisible compressed topologies (look for effective # LSP, such as LSP + neutrino = LSP') if doInvisible: for sms in newList: newSMS = sms.invisibleCompress() # Avoids double counting # (elements sharing the same parent are removed during clustering) if (newSMS is not None) and (newSMS not in newList): newList.append(newSMS) added = True newList.pop(0) # Remove original element return newList
    def massCompress(self, minmassgap):
        """
        Perform mass compression.
        It is only done if there is a decay of the type
        BSM_i -> BSM_j + (any number of SM), where
        mass(BSM_i) - mass(BSM_j) < minmassgap AND BSM_i has a prompt decay.

        :parameter minmassgap: value (in GeV) of the maximum
                               mass difference for compression
                               (if mass difference < minmassgap -> perform mass compression)
        :returns: compressed copy of self, if two masses in the SMS can be
                  considered degenerate; None, if compression is not possible;
        """
        # Start with self and make a copy later if needed.
        # A one-element list is used so the (possibly copied) SMS survives
        # across loop iterations.
        newSMSList = [self]  # Dummy list to store newSMS

        # Loop over nodes from root to leaves (iterating over the ORIGINAL
        # tree; removed nodes are skipped via the membership check below):
        for momIndex, daughterIndices in self.genIndexIterator():
            newSMS = newSMSList[0]
            if momIndex is newSMS.rootIndex:  # Skip primary vertex
                continue
            if momIndex not in newSMS.nodeIndices:
                # In case the mother has been removed by compression
                continue
            # Convert node index to node object
            mom = newSMS.indexToNode(momIndex)
            if mom.isSM:  # Only compress BSM states
                continue
            bsmDaughter = []
            smDaughters = []
            for dIndex in daughterIndices:
                # Convert to node object
                d = newSMS.indexToNode(dIndex)
                # Split daughters into final states SM and others (BSM)
                if hasattr(d, 'isSM') and d.isSM and newSMS.out_degree(dIndex) == 0:
                    smDaughters.append(dIndex)
                else:
                    # Compute mass difference between BSM daughter and mom
                    # (only well-defined when there is a single BSM daughter,
                    # which is enforced just below)
                    massDiff = mom.mass - d.mass
                    bsmDaughter.append(dIndex)

            # Skip decays to multiple BSM particles or to SM particles only
            if len(bsmDaughter) != 1:
                continue
            bsmDaughter = bsmDaughter[0]

            # Skip if mass difference is above minimum or if the parent is long-lived
            if massDiff >= minmassgap or not mom.particle.isPrompt():
                continue

            # If making first compression, copy self:
            if newSMS is self:
                newSMS = self.copy()
                newSMS.ancestors = [self]
                newSMSList[0] = newSMS

            # Get grandmother:
            gMomIndex = newSMS.parentIndex(momIndex)
            # Remove mother and all SM daughters:
            removeIndices = smDaughters + [momIndex]
            newSMS.remove_nodes_from(removeIndices)
            # Attach BSM daughter to grandmother:
            newSMS.add_edge(gMomIndex, bsmDaughter)

        newSMS = newSMSList[0]
        # If no compression was made, return None
        if newSMS is self:
            return None

        # Set the compressed topology weight as the original weight
        # (it can not longer be computed from its nodes)
        newSMS.weightList = self.weightList
        # Recompute the global properties (except for the weightList)
        # and sort the new SMS
        newSMS.setGlobalProperties(weight=False)

        return newSMS
    def invisibleCompress(self):
        """
        Perform invisible compression.
        It is done if there is a decay of the type BSM > xxx
        if all daughters are leaves and can be considered MET and the mother
        can be considered MET OR decays promptly.

        :returns: compressed copy of the SMS, if element ends with invisible
                  particles; None, if compression is not possible
        """
        # Start with self and make a copy later if needed.
        # A one-element list is used so the (possibly copied) SMS survives
        # across loop iterations.
        newSMSList = [self]  # Dummy list to store newSMS
        # Outer fixpoint loop: each pass performs at most one compression
        # (the inner loop breaks after modifying the tree) and we stop once
        # a pass removes no nodes.
        while True:
            newSMS = newSMSList[0]
            # Loop over nodes:
            nNodes = newSMS.number_of_nodes()
            for momIndex, daughterIndices in newSMS.genIndexIterator():
                if momIndex == newSMS.rootIndex:  # Skip primary vertex
                    continue
                # Skip node if its daughters are not stable
                if any(newSMS.out_degree(d) != 0 for d in daughterIndices):
                    continue
                # Convert indices to node objects:
                mom = newSMS.indexToNode(momIndex)
                daughters = newSMS.indexToNode(daughterIndices)

                # Check if all daughters can be considered MET
                neutralDaughters = all(d.particle.isMET() for d in daughters)
                # Check if the mother is MET or prompt:
                neutralMom = (mom.isMET() or mom.isPrompt())

                # If mother and daughters are neutral, remove daughters
                if (not neutralDaughters) or (not neutralMom):
                    continue

                # If making first compression, copy self:
                if newSMS is self:
                    newSMS = self.copy()
                    newSMS.ancestors = [self]
                    newSMSList[0] = newSMS

                newSMS.remove_nodes_from(daughterIndices)
                # Replace mother particle by invisible (generic) particle
                # with its width equal to the maximum width amongst the daughters
                maxWidth = max([d.totalwidth for d in daughters])
                invParticle = Particle(label='inv', mass=mom.mass,
                                       eCharge=0, colordim=1,
                                       _isInvisible=True,
                                       totalwidth=maxWidth,
                                       pdg=mom.pdg, isSM=mom.isSM)

                newmom = mom.copy()
                newmom.particle = invParticle
                newSMS.updateNodeObjects({momIndex: newmom})

                # For safety break loop since tree structure changed
                break

            if newSMS.number_of_nodes() == nNodes:
                break  # No compressions could be made, stop

        newSMS = newSMSList[0]
        if newSMS is self:
            return None

        # Recompute the global properties (except for the weightList)
        # and sort the new SMS
        newSMS.setGlobalProperties(weight=False)

        return newSMS