1
################################################################################
3
# Copyright (c) 2009 The MadGraph Development team and Contributors
5
# This file is a part of the MadGraph 5 project, an application which
6
# automatically generates Feynman diagrams and matrix elements for arbitrary
7
# high-energy processes in the Standard Model and beyond.
9
# It is subject to the MadGraph license which should accompany this
12
# For more information, please visit: http://madgraph.phys.ucl.ac.be
14
################################################################################
15
"""Definition for the objects used in the decay module.
16
DecayParticle: this object contains all the decay related properties
17
including all the decay vertices and decay channels.
18
This object also has the 'is_stable' label to denote
19
whether this particle is stable.
20
DecayParticleList: this helper class will help to turn Particle
21
in base_objects into DecayParticle.
22
DecayModel: This model contains DecayParticle. Particle in base_objects
23
will be automatically converted into DecayParticle either
24
during the initialization or when a ParticleList is set as
25
the particles of DecayModel through set function.
26
This model can search all the decay_vertexlist for all
27
its particles at one time. The function 'find_stable_particles'
28
and 'find_decay_groups_general' will find stable particles
29
automatically from interaction and mass without any input.
30
Channel: A diagram object specialized for decay process. This includes
31
several helper functions for channel generations and the functions
32
to calculate the approximate decay width.
33
ChannelList: A list of channels.
35
Users can run DecayParticle.find_channels(final_state_number, model)
36
to get the channels with final state number they request. Or they
37
may run DecayModel.find_all_channels(final_state_number) to search
38
the channels for all particles in the given model."""
51
import madgraph.core.base_objects as base_objects
52
import madgraph.core.diagram_generation as diagram_generation
53
import madgraph.core.color_amp as color_amp
54
import madgraph.core.color_algebra as color
55
import madgraph.core.helas_objects as helas_objects
56
import models.import_ufo as import_ufo
57
from madgraph import MadGraph5Error, MG5DIR
60
#===============================================================================
61
# Logger for decay_module
62
#===============================================================================
64
logger = logging.getLogger('decay.decay_objects')
67
#===============================================================================
69
#===============================================================================
70
class DecayParticle(base_objects.Particle):
71
"""DecayParticle includes the decay vertices and channels for this
72
particle. The label 'is_stable' specifies whether the particle
73
is stable. The function find_channels will launch all necessary
74
functions it needs. If users try to find channels with more final
75
state particles, they can run find_channels_nextlevel to get the
76
channels in next level.
78
sorted_keys = ['name', 'antiname', 'spin', 'color',
79
'charge', 'mass', 'width', 'pdg_code',
80
'texname', 'antitexname', 'line', 'propagating',
81
'is_part', 'self_antipart', 'is_stable',
82
'decay_vertexlist', 'decay_channels', 'apx_decaywidth',
83
'apx_decaywidth_err', 'decay_amplitudes', '2body_massdiff'
# NOTE(review): this capture interleaves the original file's line numbers
# with the code and drops some original lines: the docstring's closing
# quotes (original line 77) and the closing ']' of sorted_keys (original
# lines 84-86) are absent here -- restore from the pristine source.
87
def __init__(self, init_dict={}, force=False):
88
"""Creates a new particle object. If a dictionary is given, tries to
89
use it to give values to properties.
90
A repeated assignment is to avoid error of inconsistent pdg_code and
91
initial particle id of vertex"""
96
assert isinstance(init_dict, dict), \
97
"Argument %s is not a dictionary" % repr(init_dict)
99
#To avoid the pdg_code remain 0 and then induce the error when
102
pid = init_dict['pdg_code']
103
self.set('pdg_code', pid)
107
for item in init_dict.keys():
109
self.set(item, init_dict[item], force)
# NOTE(review): original lines 92-95, 98, 100-101, 104-106 and 108 are
# missing from this capture; they presumably held the base-class
# initialization and a guard (try/except) around the 'pdg_code' lookup,
# since init_dict may legitimately lack that key -- confirm upstream.
# NOTE(review): the mutable default init_dict={} is shared across calls;
# it is only read here, but replacing it with None would be safer.
114
def default_setup(self):
    """Default values for all properties"""

    super(DecayParticle, self).default_setup()

    # Decay-specific stability flag.
    self['is_stable'] = False
    # Bookkeeping for find_vertexlist: whether the search already ran,
    # and the largest vertex order found so far.
    self['vertexlist_found'] = False
    self['max_vertexorder'] = 0

    # decay_vertexlist: {(final particle number, onshell): VertexList}.
    # decay_channels:   same keys, one diagram per channel.
    # decay_amplitudes: diagrams grouped by identical initial and final
    #                   states, keyed by final particle number.
    self['decay_vertexlist'] = {}
    self['decay_channels'] = {}
    self['decay_amplitudes'] = {}

    # Approximate width, its estimated error, and the maximal two-body
    # mass difference; all start from zero.
    self['apx_decaywidth'] = 0.
    self['apx_decaywidth_err'] = 0.
    self['2body_massdiff'] = 0.
138
# NOTE(review): the 'def get(self, name):' header of this accessor override
# (original line ~136-137) is missing from this capture, as are original
# lines 139, 141, 144 and 147-148. The body lazily (re)computes the
# width-related attributes on demand before delegating to the base class.
""" Evaluate some special properties first if the user request. """
140
if name == 'apx_decaywidth' \
142
and not self['is_stable']:
143
self.update_decay_attributes(True, False, True)
145
elif name == 'apx_decaywidth_err' and not self[name]:
146
self.estimate_width_error()
149
# call the mother routine
150
return DecayParticle.__bases__[0].get(self, name)
153
def check_vertex_condition(self, partnum, onshell,
154
value = base_objects.VertexList(), model = {}):
155
"""Check the validity of decay condition, including,
156
partnum: final state particle number,
157
onshell: on-shell condition,
158
value : the assign vertexlist
159
model : the specific model"""
161
#Check if partnum is an integer.
162
if not isinstance(partnum, int):
163
raise self.PhysicsObjectError, \
164
"Final particle number %s must be an integer." % str(partnum)
166
#Check if onshell condition is Boolean number.
167
if not isinstance(onshell, bool):
168
raise self.PhysicsObjectError, \
169
"%s must be a Boolean number" % str(onshell)
171
#Check if the value is a Vertexlist(in base_objects) or a list of vertex
172
if not isinstance(value, base_objects.VertexList):
173
raise self.PhysicsObjectError, \
174
"%s must be VertexList type." % str(value)
176
#Check if the model is a valid object.
177
if not (isinstance(model, base_objects.Model) or model == {}):
178
raise self.PhysicsObjectError, \
179
"%s must be a Model" % str(model)
181
#Check if the mother particle is in the 'model'
182
if not (self.get_pdg_code() in model.get('particle_dict').keys()):
183
raise self.PhysicsObjectError, \
184
"The model, %s, does not contain particle %s." \
185
%(model.get('name'), self.get_name())
188
def check_vertexlist(self, partnum, onshell, value, model = {}):
189
"""Check if the all the vertex in the vertexlist satisfy the following
190
conditions. If so, return true; if not, raise error messages.
192
1. There is an appropriate leg for initial particle.
193
2. The number of final particles equals to partnum.
194
3. If model is not None, check the onshell condition and
195
the initial particle id is the same as calling particle.
197
#Check the validity of arguments first
198
self.check_vertex_condition(partnum, onshell, value, model)
200
#Determine the number of final particles.
201
#Find all the possible initial particle(s).
202
#Check onshell condition if the model is given.
204
if (abs(eval(self.get('mass')) == 0.)) and (len(value) != 0):
# NOTE(review): abs() wraps the comparison above; it still behaves as a
# massless test because abs(True) == 1 and abs(False) == 0, but the
# intended form is likely (abs(eval(self.get('mass'))) == 0.).
205
raise self.PhysicsObjectError, \
206
"Massless particle %s cannot decay." % self['name']
209
# Reset the number of initial/final particles,
210
# initial particle id, and total and initial mass
216
# Calculate the total mass
217
total_mass = sum([abs(eval(model.get_particle(l['id']).get('mass'))) for l in vert['legs']])
218
ini_mass = abs(eval(self.get('mass')))
220
# Check the onshell condition
221
if (ini_mass.real > (total_mass.real - ini_mass.real))!=onshell:
222
raise self.PhysicsObjectError, \
223
"The on-shell condition is not satisfied."
225
for leg in vert.get('legs'):
226
# Check if all legs are label by true
227
if not leg.get('state'):
228
raise self.PhysicsObjectError, \
229
"The state of leg should all be true"
231
# Identify the initial particle
232
if leg.get('id') == self.get_pdg_code():
233
# Double anti particle is also radiation
237
elif leg.get('id') == self.get_anti_pdg_code() and \
238
not self.get('self_antipart'):
241
# Calculate the final particle number
242
num_final = len(vert.get('legs'))-num_ini
244
# Check the number of final particles is the same as partnum
245
if num_final != partnum:
246
raise self.PhysicsObjectError, \
247
"The vertex is a %s -body decay, not a %s -body one."\
248
% (str(num_final), str(partnum))
250
# Check if there is any appropriate leg as initial particle.
252
raise self.PhysicsObjectError, \
253
"There is no leg satisfied the mother particle %s"\
254
% str(self.get_pdg_code())
256
# Check if the vertex is radiation
258
raise self.PhysicsObjectError, \
259
"The vertex is radiactive for mother particle %s"\
260
% str(self.get_pdg_code())
# NOTE(review): many original lines are absent from this capture (e.g.
# 196, 199, 203, 207-215, 219, 224, 230, 234-236, 239-240, 243, 249,
# 251, 255, 257 and 261-263): the 'for vert in value:' loop header, the
# num_ini / radiation counters, and the guards that precede the last two
# raises are all referenced here but never defined -- code left untouched.
264
def check_channels(self, partnum, onshell, value = [], model = {}):
265
"""Check the validity of decay channel condition, including,
266
partnum: final state particle number,
267
onshell: on-shell condition,
268
value : the assign channel list, all channels in it must
269
be consistent to the given partnum and onshell.
270
model : the specific model."""
272
# Check if partnum is an integer.
273
if not isinstance(partnum, int):
274
raise self.PhysicsObjectError, \
275
"Final particle number %s must be an integer." % str(partnum)
277
# Check if onshell condition is Boolean number.
278
if not isinstance(onshell, bool):
279
raise self.PhysicsObjectError, \
280
"%s must be a Boolean number" % str(onshell)
282
# Check if the value is a ChannelList
283
if (not isinstance(value, ChannelList) and value):
284
raise self.PhysicsObjectError, \
285
"%s must be ChannelList type." % str(value)
288
# Check if the partnum is correct for all channels in value
289
if any(ch for ch in value if \
290
len(ch.get_final_legs()) != partnum):
291
raise self.PhysicsObjectError, \
292
"The final particle number of channel should be %d."\
# NOTE(review): the continuation of the message above ('% partnum',
# original lines 293-294) is missing from this capture.
295
# Check if the initial particle in all channels are as self.
296
if any(ch for ch in value if \
297
abs(ch.get_initial_id()) != abs(self.get('pdg_code'))):
298
raise self.PhysicsObjectError, \
299
"The initial particle is not %d or its antipart." \
300
% self.get('pdg_code')
302
# Check if the onshell condition is right
303
if not (isinstance(model, base_objects.Model) or model == {}):
304
raise self.PhysicsObjectError, \
305
"%s must be a Model" % str(model)
307
# Check if the mother particle is in the 'model'
308
if not (self.get_pdg_code() in model.get('particle_dict').keys()):
309
raise self.PhysicsObjectError, \
310
"The model, %s, does not contain particle %s." \
311
%(model.get('name'), self.get_name())
312
if any([ch for ch in value if onshell != ch.get_onshell(model)]):
313
raise self.PhysicsObjectError, \
314
"The onshell condition is not consistent with the model."
# NOTE(review): original lines 271, 276, 281, 286-287, 301, 306 and
# 315-317 are missing; the model-dependent checks were presumably guarded
# by an 'if model:' block in the pristine source -- confirm upstream.
318
def check_amplitudes(self, partnum, value):
319
"""Check the validity of DecayAmplitudes condition, including,
320
partnum: final state particle number,
321
value : the assign amplitudelist, all amplitudes in it must
322
be consistent to the processes they hold.
324
# Check if partnum is an integer.
325
if not isinstance(partnum, int):
326
raise self.PhysicsObjectError, \
327
"Final particle number %s must be an integer." % str(partnum)
329
# Check if the value is a DecayAmplitudeList
330
if (not isinstance(value, DecayAmplitudeList) and value):
331
raise self.PhysicsObjectError, \
332
"%s must be DecayAmplitudeList type." % str(value)
# NOTE(review): original line 323 (most likely the docstring's closing
# quotes) and lines 328, 333-335 are missing from this capture.
336
def filter(self, name, value):
337
"""Filter for valid DecayParticle vertexlist."""
339
if name == 'decay_vertexlist' or name == 'decay_channels':
340
#Value must be a dictionary.
341
if not isinstance(value, dict):
342
raise self.PhysicsObjectError, \
343
"Decay_vertexlist or decay_channels %s must be a dictionary." % str(value)
345
# key must be two element tuple
346
for key, item in value.items():
347
if not isinstance(key, tuple):
348
raise self.PhysicsObjectError,\
349
"Key %s must be a tuple." % str(key)
# NOTE(review): the guard for the next raise ('if len(key) != 2:',
# original lines 350-351) is missing from this capture.
352
raise self.PhysicsObjectError,\
353
"Key %s must have two elements." % str(key)
355
if name == 'decay_vertexlist':
356
self.check_vertexlist(key[0], key[1], item)
357
if name == 'decay_channels':
358
self.check_channels(key[0], key[1], item)
360
if name == 'decay_amplitudes':
362
#Value must be a dictionary.
363
if not isinstance(value, dict):
364
raise self.PhysicsObjectError, \
365
"Decay_amplitudes %s must be a dictionary." % str(value)
367
# For each key and item, check them with check_amplitudes
368
for key, item in value.items():
369
self.check_amplitudes(key, item)
371
if name == 'vertexlist_found' or name == 'is_stable':
372
if not isinstance(value, bool):
373
raise self.PhysicsObjectError, \
374
"Propery %s should be Boolean type." % name
# NOTE(review): 'Propery' is a typo inside a runtime error string; left
# untouched here since this edit must not change program behavior.
376
if name == 'max_vertexorder':
377
if not isinstance(value, int):
378
raise self.PhysicsObjectError, \
379
"Property %s should be int type." % name
381
# Check apx_decaywidth and apx_decaywidth_err
382
if name == 'apx_decaywidth' or name == 'apx_decaywidth_err' \
383
or name == '2body_massdiff':
384
if not isinstance(value, float) and not isinstance(value, int):
385
raise self.PhysicsObjectError, \
386
"Property %s must be float type." % str(value)
388
super(DecayParticle, self).filter(name, value)
# NOTE(review): original lines 387, 389-391 are missing; they presumably
# held a 'return' wrapping the super().filter call -- confirm upstream.
392
def reset_decay_attributes(self, reset_width, reset_err, reset_br):
393
""" Depend on the given arguments,
394
reset the apx_decaywidth, apx_decaywidth_err, and
395
branching ratio of the amplitudes in this particle.
396
It is necessary when the channels are changed,
397
e.g. find next level."""
401
self['apx_decaywidth'] = 0.
405
self['apx_decaywidth_err'] = 0.
407
# Reset the branching ratio inside amplitudes
409
for n, amplist in self['decay_amplitudes'].items():
# NOTE(review): original lines 398-400, 402-404, 406, 408 and 410-412 are
# missing from this capture; they presumably held the 'if reset_width:',
# 'if reset_err:' and 'if reset_br:' guards implied by the parameters,
# plus the loop body resetting each amplitude's branching ratio.
413
def update_decay_attributes(self, reset_width, reset_err, reset_br, model=None):
414
""" This function will update the attributes related to decay width,
415
including total width, branching ratios (of each amplitudes),
417
The arguments specify which attributes needs to be updated.
418
Note that the width of amplitudes will not be changed.
419
If the apx_decaywidth_err needs to be updated, the model
420
must be provided!. """
422
# Reset the related properties
423
self.reset_decay_attributes(reset_width, reset_err, reset_br)
425
# Update the total width first.
426
# (So the decaywidth_err and branching ratios can be calculated with
429
for n, amplist in self['decay_amplitudes'].items():
431
# Do not calculate the br in this moment
432
self['apx_decaywidth'] += amp.get('apx_decaywidth')
434
# Update the apx_decaywidth_err
436
self.estimate_width_error(model)
438
# Update the branching ratio in the end with the updated total width
440
for n, amplist in self['decay_amplitudes'].items():
442
# Reset the br first, so the get function will recalculate
# NOTE(review): original lines 416, 421, 424, 427-428, 430, 433, 435, 437,
# 439, 441 and 443-446 are missing from this capture; 'amp' below the
# first loop implies an inner 'for amp in amplist:' on the absent line
# 430, and both loops were presumably guarded by reset_width / reset_err /
# reset_br checks -- confirm against the pristine source.
447
def estimate_width_error(self, model=None):
448
""" This function will estimate the width error from the highest order
450
If model is provided, the apx_decaywidth_err of each channel will
451
be (re)calculated. """
453
if self['apx_decaywidth']:
454
final_level = max([k[0] for k, i in self['decay_channels'].items()])
456
# Do not recalculate the apx_decaywidth_nextlevel here
457
err = sum([c.get('apx_decaywidth_nextlevel', model) \
458
for c in self.get_channels(final_level, False)])/ \
459
self['apx_decaywidth']
461
elif self.get('is_stable'):
466
self['apx_decaywidth_err'] = err
# NOTE(review): original lines 449, 452, 455, 460, 462-465 and 467-469
# are missing from this capture; they presumably assigned 'err' in the
# elif branch (stable particle) and a trailing else branch -- as shown,
# 'err' would be unbound on those paths. Confirm against pristine source.
470
def decaytable_string(self, format='normal'):
471
""" Output the string for the decay table.
472
If format is 'full', all the channels in the process will be
475
seperator = str('#'*80)
476
output = '\n'+seperator
477
output += str('\n#\n#\tPDG\t\tWIDTH\t\tERROR\n')
478
output += str('DECAY\t%8d\t%.5e %.3e #%s decay\n') \
479
%(self.get('pdg_code'),
480
self.get('apx_decaywidth'),
481
self.get('apx_decaywidth_err'),
484
# Write the decay table from 2-body decay.
487
if n in self.get('decay_amplitudes').keys():
488
# Do not print empty amplitudes
489
if len(self.get_amplitudes(n)):
491
output += '\n#\tBR\tNDA '
492
# ID (ID1, ID2, ID3...)
493
output += ' '.join(['ID%d' %(i+1) for i in range(n)])
498
% self.get_amplitudes(n).decaytable_string(format)
500
# Increase n to nextlevel
505
return output+'\n#\n#'
# NOTE(review): original lines 473-474 (docstring close), 482-483 (the
# final argument of the DECAY header, presumably the particle name),
# 485-486 (the loop header initializing and iterating 'n'), 490, 494-497,
# 499 and 501-504 are missing from this capture -- code left untouched.
508
def get_vertexlist(self, partnum ,onshell):
509
"""Return the n-body decay vertexlist.
511
If onshell=false, return the off-shell list and vice versa.
513
#check the validity of arguments
514
self.check_vertex_condition(partnum, onshell)
516
return self.get('decay_vertexlist')[(partnum, onshell)]
# NOTE(review): original lines 510, 512 and 515 are missing from this
# capture; line 512 most likely held the docstring's closing quotes.
518
def set_vertexlist(self, partnum ,onshell, value, model = {}):
519
"""Set the n_body_decay_vertexlist,
521
onshell: True for on-shell decay, and False for off-shell
522
value: the decay_vertexlist that is tried to assign.
523
model: the underlying model for vertexlist
524
Use to check the correctness of on-shell condition.
526
#Check the vertexlist by check_vertexlist
527
#Error is raised (by check_vertexlist) if value is not valid
528
if self.check_vertexlist(partnum, onshell, value, model):
529
self['decay_vertexlist'][(partnum, onshell)] = value
# NOTE(review): original lines 520, 525 and 530 are missing from this
# capture; line 525 most likely held the docstring's closing quotes.
531
def get_max_vertexorder(self):
532
""" Get the max vertex order of this particle"""
533
# Do not include keys without vertexlist in it
534
# Both onshell and offshell are consider
535
if not self['vertexlist_found']:
536
logger.warning("The vertexlist of this particle has not been searched."+"Try find_vertexlist first.")
539
vertnum_list = [k[0] for k in self['decay_vertexlist'].keys() \
540
if self['decay_vertexlist'][k]]
542
self['max_vertexorder'] = max(vertnum_list)
544
self['max_vertexorder'] = 0
546
return self['max_vertexorder']
# NOTE(review): original lines 537-538, 541, 543 and 545 are missing from
# this capture; they presumably held an early 'return' after the warning
# and the 'if vertnum_list: ... else:' pair that selects between max()
# and 0 (max() on an empty list would raise) -- confirm upstream.
548
# OBSOLETE function. It is recommended to run the find_vertexlist in
550
def find_vertexlist(self, model, option=False):
551
"""Find the possible decay channel to decay,
552
for both on-shell and off-shell.
553
If option=False (default),
554
do not rewrite the VertexList if it exists.
555
If option=True, rewrite the VertexList anyway.
558
#Raise error if self is not in model.
559
if not (self.get_pdg_code() in model.get('particle_dict').keys()):
560
raise self.PhysicsObjectError, \
561
"The parent particle %s is not in the model %s." \
562
% (self.get('name'), model.get('name'))
564
#Raise error if option is not Boolean value
565
if not isinstance(option, bool):
566
raise self.PhysicsObjectError, \
567
"The option %s must be True or False." % str(option)
569
#If 'vertexlist_found' is true and option is false,
570
#no action is proceed.
571
if self['vertexlist_found'] and not option:
572
return 'The vertexlist has been setup.', \
573
'No action proceeds because of False option.'
575
# Reset the decay vertex before finding
576
self['decay_vertexlist'] = {(2, False) : base_objects.VertexList(),
577
(2, True) : base_objects.VertexList(),
578
(3, False) : base_objects.VertexList(),
579
(3, True) : base_objects.VertexList()}
581
#Set the vertexlist_found at the end
582
self['vertexlist_found'] = True
584
# Do not include the massless and stable particle
585
model.get('stable_particles')
586
if self.get('is_stable'):
589
#Go through each interaction...
590
for temp_int in model.get('interactions'):
591
#Save the particle dictionary (pdg_code & anti_pdg_code to particle)
592
partlist = temp_int.get('particles')
594
#The final particle number = total particle -1
595
partnum = len(partlist)-1
596
#Allow only 2 and 3 body decay
600
#Check if the interaction contains mother particle
601
if model.get_particle(self.get_anti_pdg_code()) in partlist:
603
part_id_list = [p.get('pdg_code') for p in partlist]
604
if (part_id_list.count(self.get('pdg_code'))) > 1:
608
ini_mass = abs(eval(self.get('mass')))
609
vert = base_objects.Vertex()
610
legs = base_objects.LegList()
612
# Setup all the legs and find final_mass
613
for part in partlist:
614
legs.append(base_objects.Leg({'id': part.get_pdg_code()}))
615
total_mass += abs(eval(part.get('mass')))
616
#Initial particle has not been found: ini_found = True
617
if (part == model.get_particle(self.get_anti_pdg_code())):
619
ini_leg.set('id', self.get_pdg_code())
621
#Sort the outgoing leglist for comparison sake (removable!)
623
# Append the initial leg
626
vert.set('id', temp_int.get('id'))
627
vert.set('legs', legs)
628
temp_vertlist = base_objects.VertexList([vert])
630
#Check validity of vertex (removable)
631
"""self.check_vertexlist(partnum,
632
ini_mass > final_mass,
633
temp_vertlist, model)"""
635
#Append current vert to vertexlist
637
self['decay_vertexlist'][(partnum, \
638
ini_mass > (total_mass-ini_mass))].\
641
self['decay_vertexlist'][(partnum, \
642
ini_mass > (total_mass-ini_mass))] \
643
= base_objects.VertexList([vert])
# NOTE(review): heavily corrupted capture. Missing original lines include
# 587-588 (return for stable particles), 597-599 (the 2/3-body restriction
# guard), 605-607 (skip radiation-like interactions), 611 (total_mass
# initialization), 618, 620, 622, 624-625 (ini_leg handling and leg
# sorting), 636 and 639-640 (the if/else distinguishing append-to-existing
# VertexList from creating a new one). 'total_mass', 'ini_leg' and the
# dangling if bodies are referenced but undefined as shown -- restore
# this routine wholesale from the pristine source before use.
647
def get_channels(self, partnum ,onshell):
648
"""Return the n-body decay channels.
650
If onshell=false, return the off-shell list and vice versa.
652
#check the validity of arguments
653
self.check_channels(partnum, onshell)
654
return self.get('decay_channels')[(partnum, onshell)]
# NOTE(review): original lines 649, 651 and 655 are missing from this
# capture; line 651 most likely held the docstring's closing quotes.
656
def set_channels(self, partnum ,onshell, value, model = {}):
657
"""Set the n_body_decay_vertexlist, value is overloading to both
658
ChannelList and list of Channels (auto-transformation will proceed)
660
onshell: True for on-shell decay, and False for off-shell
661
value: the decay_vertexlist that is tried to assign.
662
model: the underlying model for vertexlist
663
Use to check the correctness of on-shell condition.
665
#Check the vertexlist by check_vertexlist
666
#Error is raised (by check_vertexlist) if value is not valid
667
if isinstance(value, ChannelList):
668
if self.check_channels(partnum, onshell, value, model):
669
self['decay_channels'][(partnum, onshell)] = value
670
elif isinstance(value, list) and \
671
all([isinstance(c, Channel) for c in value]):
672
value_transform = ChannelList(value)
673
if self.check_channels(partnum, onshell, value_transform, model):
674
self['decay_channels'][(partnum, onshell)] = value_transform
676
raise self.PhysicsObjectError, \
677
"The input must be a list of diagrams."
# NOTE(review): original lines 659, 664 (docstring close) and 675 are
# missing from this capture; line 675 presumably held the 'else:' that
# makes the final raise the fall-through branch -- confirm upstream.
679
def get_max_level(self):
680
""" Get the max channel level that the particle have so far. """
681
# Turn off the logger in get_amplitude temporarily
685
# Look at the amplitudes or channels to find the max_level
686
while self.get_amplitudes(n) or ((n,False) in self['decay_channels'].keys()):
689
# n is the failed value, return n-1.
# NOTE(review): original lines 682-684, 687-688 and 690-691 are missing
# from this capture; they presumably initialized 'n' (and silenced the
# logger), incremented n in the while body, and returned n-1 -- as shown,
# 'n' is undefined. Restore from the pristine source.
692
def get_amplitude(self, final_ids):
693
"""Return the amplitude with the given final pids.
694
If no suitable amplitude is found, return None.
696
if not isinstance(final_ids, list):
697
raise self.PhysicsObjectError,\
698
"The final particle ids %s must be a list of integer." \
701
if any([not isinstance(i, int) for i in final_ids]):
702
raise self.PhysicsObjectError,\
703
"The final particle ids %s must be a list of integer." \
706
# Sort the given id list first
708
# Search in the amplitude list of given final particle number
709
if self.get_amplitudes(len(final_ids)):
710
for amp in self.get_amplitudes(len(final_ids)):
711
ref_list = [l['id'] \
712
for l in amp.get('process').get_final_legs()]
714
if ref_list == final_ids:
# NOTE(review): original lines 695 (docstring close), 699-700 and 704-705
# (the '% str(final_ids)' message continuations), 707 (the sort of
# final_ids and of ref_list), 713 and 715-718 (returning the matching
# amplitude, and None otherwise) are missing from this capture.
719
def get_amplitudes(self, partnum):
720
"""Return the n-body decay amplitudes.
722
If the amplitudes do not exist, return none.
724
#check the validity of arguments
725
if not isinstance(partnum, int):
726
raise self.PhysicsObjectError, \
727
"The particle number %s must be an integer." %str(partnum)
730
return self.get('decay_amplitudes')[partnum]
732
logger.info('The amplitudes of %d for final particle number % d do not exist' % (self['pdg_code'],partnum))
# NOTE(review): original lines 721, 723 (docstring close), 728-729, 731
# and 733-734 are missing from this capture; they presumably wrapped the
# dict lookup in try/except KeyError, with the logger.info call and a
# 'return None' in the except branch -- confirm against pristine source.
735
def set_amplitudes(self, partnum, value):
736
"""Set the n_body decay_amplitudes.
738
value: the decay_amplitudes that is tried to assign.
741
#Check the value by check_amplitudes
742
if isinstance(value, DecayAmplitudeList):
743
if self.check_amplitudes(partnum, value):
744
self['decay_amplitudes'][partnum] = value
745
elif isinstance(value, list) and \
746
all([isinstance(a, DecayAmplitude) for a in value]):
747
value_transform = DecayAmplitudeList(value)
748
if self.check_amplitudes(partnum, value_transform):
749
self['decay_amplitudes'][partnum] = value_transform
751
raise self.PhysicsObjectError, \
752
"The input must be a list of decay amplitudes."
# NOTE(review): original lines 737, 739-740 (docstring close) and 750 are
# missing from this capture; line 750 presumably held the 'else:' that
# makes the final raise the fall-through branch -- confirm upstream.
755
def find_channels(self, partnum, model):
756
""" Function for finding decay channels up to the final particle
757
number given by max_partnum.
758
The branching ratios and err are recalculated in the end.
760
1. Any channel must be a. only one vertex, b. an existing channel
762
2. Given the maxima channel order, the program start looking for
763
2-body decay channels until the channels with the given order.
764
3. For each channel order,
765
a. First looking for any single vertex with such order and
766
construct the channel. Setup the has_idpart property.
767
b. Then finding the channel from off-shell sub-level channels.
768
Note that any further decay of on-shell sub-level channel
769
is not a new channel. For each sub-level channel, go through
770
the final legs and connect vertex with the right order
771
(with the aid of connect_channel_vertex function).
772
c. If the new channel does not exist in 'decay_channels',
773
then if the new channel has no identical particle, append it.
774
If the new channel has identical particle, check if it is not
775
equivalent with the existing channels. Then append it.
778
# Check validity of argument
779
if not isinstance(partnum, int):
780
raise self.PhysicsObjectError, \
781
"Max final particle number %s should be integer." % str(partnum)
782
if not isinstance(model, DecayModel):
783
raise self.PhysicsObjectError, \
784
"The second argument %s should be a DecayModel object." \
786
if not self in model['particles']:
787
raise self.PhysicsObjectError, \
788
"The model %s does not contain particle %s" \
789
% (model.get('name'), self.get('name'))
791
# If vertexlist has not been found before, run model.find_vertexlist
792
if not model['vertexlist_found']:
793
logger.info("Vertexlist of this model has not been searched."+ \
794
"Automatically run the model.find_vertexlist()")
795
model.find_vertexlist()
797
# Find stable particles of this model
798
model.get('stable_particles')
800
# If the channel list exist, return.
801
if (partnum, True) in self['decay_channels'].keys() or \
802
(partnum, False) in self['decay_channels'].keys():
803
logger.info("Particle %s has found channels in %d-body level. " \
804
% (self['name'], partnum) +\
805
"No channel search will not proceed." )
808
# Set empty decay_channels for stable particles
809
if self.get('is_stable'):
810
for num in range(2, partnum+1):
811
self['decay_channels'][(num, True)] = ChannelList()
812
self['decay_channels'][(num, False)] = ChannelList()
813
logger.info("Particle %s is stable. " % self['name'] +
814
"No channel search will not proceed." )
817
# Running the coupling constants
818
model.running_externals(abs(eval(self.get('mass'))))
819
model.running_internals()
821
self['apx_decaywidth'] = 0.
822
# Find channels from 2-body decay to partnum-body decay channels.
823
for clevel in range(2, partnum+1):
824
self.find_channels_nextlevel(model)
826
# Update decay width err and branching ratios, but not the total width
827
self.update_decay_attributes(False, True, True)
# NOTE(review): original lines 776-777 (docstring close), 785 (the
# '% str(model)' message continuation), 790, 796, 799, 806-807 and
# 815-816 (the 'return' statements that must follow the two logger.info
# calls), 820 and 825 are missing from this capture. The log message
# "No channel search will not proceed." is a double negative in a runtime
# string; it is left untouched here because this edit must not change
# program behavior.
829
def find_channels_nextlevel(self, model):
830
""" Find channels from all the existing lower level channels to
831
the channels with one more final particle. It is implemented when
832
the channels lower than clevel are all found. The user could
833
first setup channels by find_channels and then use this function
834
if they want to have high level decay.
835
NOTE that the width of channels are added successively into mother
836
particle during the search.
837
NO calculation of branching ratios and width err are performed!
838
For the search of two-body decay channels, the 2body_massdiff
839
is recorded in the end."""
841
# Find the next channel level
843
clevel = max([key[0] for key in self['decay_channels'].keys()])+1
847
# Initialize the item in dictionary
848
self['decay_channels'][(clevel, True)] = ChannelList()
849
self['decay_channels'][(clevel, False)] = ChannelList()
851
# Return if this particle is stable
852
if self.get('is_stable'):
853
logger.info("Particle %s is stable." %self['name'] +\
854
"No channel search will not proceed.")
857
# The initial vertex (identical vertex).
858
ini_vert = base_objects.Vertex({'id': 0, 'legs': base_objects.LegList([\
859
base_objects.Leg({'id':self.get_anti_pdg_code(),
860
'number':1, 'state': False}),
861
base_objects.Leg({'id':self.get_pdg_code(), 'number':2})])})
863
connect_channel_vertex = self.connect_channel_vertex
864
check_repeat = self.check_repeat
866
# If there is a vertex in clevel, construct it
867
if (clevel, True) in self['decay_vertexlist'].keys() or \
868
(clevel, False) in self['decay_vertexlist'].keys():
869
for vert in (self.get_vertexlist(clevel, True) + \
870
self.get_vertexlist(clevel, False)):
871
temp_channel = Channel()
872
temp_vert = copy.deepcopy(vert)
873
# Set the leg number (starting from 2)
874
[l.set('number', i+2) \
875
for i, l in enumerate(temp_vert.get('legs'))]
876
# The final one has 'number' as 2
877
temp_vert.get('legs')[-1]['number'] = 2
878
# Append the true vertex and then the ini_vert
879
temp_channel['vertices'].append(temp_vert)
880
temp_channel['vertices'].append(ini_vert)
881
# Setup the 'has_idpart' property
882
if Channel.check_idlegs(temp_vert):
883
temp_channel['has_idpart'] = True
885
# Add width to total width if onshell and
886
if temp_channel.get_onshell(model):
887
self['apx_decaywidth'] += temp_channel.get_apx_decaywidth(model)
888
# Calculate the order
889
temp_channel.calculate_orders(model)
890
self.get_channels(clevel, temp_channel.get_onshell(model)).\
893
# Go through sub-channels and try to add vertex to reach partnum
894
for sub_clevel in range(max((clevel - model.get_max_vertexorder()+1),2),
896
# The vertex level that should be combine with sub_clevel
897
vlevel = clevel - sub_clevel+1
898
# Go through each 'off-shell' channel in the given sub_clevel.
899
# Decay of on-shell channel is not a new channel.
900
for sub_c in self.get_channels(sub_clevel, False):
901
# Scan each leg to see if there is any appropriate vertex
902
for index, leg in enumerate(sub_c.get_final_legs()):
903
# Get the particle even for anti-particle leg.
904
inter_part = model.get_particle(abs(leg['id']))
905
# If this inter_part is stable, do not attach vertices to it
906
if inter_part.get('is_stable'):
908
# Get the vertexlist in vlevel
909
# Both on-shell and off-shell vertex
910
# should be considered.
912
vlist_a = inter_part.get_vertexlist(vlevel, True)
916
vlist_b = inter_part.get_vertexlist(vlevel, False)
920
# Find appropriate vertex
921
for vert in (vlist_a + vlist_b):
922
# Connect sub_channel to the vertex
923
# the connect_channel_vertex will
924
# inherit the 'has_idpart' from sub_c
925
temp_c = self.connect_channel_vertex(sub_c, index,
927
temp_c_o = temp_c.get_onshell(model)
928
# Append this channel if not exist yet
929
if not self.check_repeat(clevel,
931
temp_c.calculate_orders(model)
932
# Calculate the width if onshell
933
# Add to the apx_decaywidth of mother particle
935
self['apx_decaywidth'] += temp_c.\
936
get_apx_decaywidth(model)
937
self.get_channels(clevel, temp_c_o).append(temp_c)
939
# For two-body decay, record the maxima mass difference
941
for channel in self.get_channels(2, True):
942
mass_diff = abs(eval(self['mass'])) - sum(channel.get('final_mass_list'))
943
if mass_diff > self['2body_massdiff']:
944
self.set('2body_massdiff', mass_diff)
947
## Sort the channels by their width
948
#self.get_channels(clevel, False).sort(channelcmp_width)
950
# Group channels into amplitudes
951
self.group_channels_2_amplitudes(clevel, model)
# NOTE(review): heavily corrupted capture. Missing original lines include
# 840, 842, 844-846, 850, 855-856 (the return after the stable-particle
# log), 862, 865, 884, 891-892 (the append() completing the dangling
# expression at line 890), 895 (the end of the range() call), 907 (the
# 'continue' implied after the is_stable guard), 911, 913-915, 917-919
# (try/except around the vertexlist fetches), 926, 930 (the remaining
# arguments of connect_channel_vertex / check_repeat), 934, 938, 940
# (the 'if clevel == 2:' guard implied by the docstring), 945-946, 949
# and 952. Restore this routine wholesale from the pristine source.
953
# NOTE(review): this chunk is a corrupted extraction -- indentation is stripped,
# bare integers are leftover original line numbers, and several source lines
# were dropped. Text below is preserved verbatim; only comments are added.
def connect_channel_vertex(self, sub_channel, index, vertex, model):
954
""" Helper function to connect a vertex to one of the legs
955
in the channel. The argument 'index' specified the position of
956
the leg in sub_channel final_legs which will be connected with
957
vertex. If leg is for anti-particle, the vertex will be transform
958
into anti-part with minus vertex id."""
960
# Copy the vertex to prevent the change of leg number
961
new_vertex = copy.deepcopy(vertex)
963
# Setup the final leg number that is used in the channel.
964
leg_num = max([l['number'] for l in sub_channel.get_final_legs()])
966
# Add minus sign to the vertex id and leg id if the leg in sub_channel
967
# is for anti-particle
968
is_anti_leg = sub_channel.get_final_legs()[index]['id'] < 0
970
# NOTE(review): an "if is_anti_leg:" guard appears to have been lost here
# (source truncated); as written the negation would apply unconditionally.
new_vertex['id'] = -new_vertex['id']
971
for leg in new_vertex['legs']:
972
leg['id'] = model.get_particle(leg['id']).get_anti_pdg_code()
974
# Legs continue the number of the final one in sub_channel,
975
# except the first and the last one ( these two should be connected.)
976
for leg in new_vertex['legs'][1:len(new_vertex['legs'])-1]:
977
# For the first and final legs, this loop is useless
978
# only for reverse leg id in case of mother leg is anti-particle.
980
# NOTE(review): a "leg_num += 1" line is presumably missing here -- verify
# against the upstream source; otherwise every leg gets the same number.
leg['number'] = leg_num
982
# Assign correct number to each leg in the vertex.
983
# The first leg follows the mother leg.
984
new_vertex['legs'][-1]['number'] = \
985
sub_channel.get_final_legs()[index]['number']
986
new_vertex['legs'][0]['number'] = new_vertex['legs'][-1]['number']
988
new_channel = Channel()
989
# New vertex is first
990
new_channel['vertices'].append(new_vertex)
991
# Then extend the vertices of the old channel
992
new_channel['vertices'].extend(sub_channel['vertices'])
993
# Setup properties of new_channel (This slows the time)
994
new_channel.get_onshell(model)
995
new_channel.get_final_legs()
997
# The 'has_idpart' property of descendent of a channel
998
# must derive from the mother channel and the vertex
999
# (but 'id_part_list' will change and should not be inherited)
1000
new_channel['has_idpart'] = (sub_channel['has_idpart'] or \
1001
bool(Channel.check_idlegs(vertex)))
# NOTE(review): the trailing "return new_channel" statement appears to be
# missing (source truncated).
1006
# NOTE(review): corrupted extraction -- the closing parentheses of the any(...)
# expression are missing; text preserved verbatim, comments only added.
def check_repeat(self, clevel, onshell, channel):
1007
""" Check whether there is any equivalent channels with the given
1008
channel. Use the check_channels_equiv function."""
1010
# To optimize the check, check if the final_mass is the same first
1011
# this will significantly reduce the number of call to
1012
# check_channels_equiv
1015
# Compare only against candidates whose summed final-state mass matches
# to within 1e-5, then do the expensive structural equivalence check.
return any(Channel.check_channels_equiv(other_c, channel)\
1016
for other_c in self.get_channels(clevel, onshell) \
1017
if abs(sum(other_c['final_mass_list'])-\
1018
sum(channel['final_mass_list']))< 0.00001
1021
# NOTE(review): corrupted extraction -- the docstring below is never closed in
# this chunk, so everything after it parses as string content; lines preserved
# verbatim. Missing pieces include the amplitude-match condition after the
# sorted(...) comparison and the DecayAmplitude(...) argument list.
def group_channels_2_amplitudes(self, clevel, model):
1022
""" After the channels is found, combining channels with the same
1023
final states into amplitudes.
1024
NO CALCULATION of branching ratio at this stage!
1025
clevel: the number of final particles
1026
model: the model use in find_channels
1029
if not isinstance(clevel, int):
1030
raise self.PhysicsObjectError, \
1031
"The channel level %s must be an integer." % str(clevel)
1033
if not isinstance(model, DecayModel):
1034
raise self.PhysicsObjectError, \
1035
"The model must be an DecayModel object."
1037
# Reset the value of decay_amplitudes
1038
self.set_amplitudes(clevel, DecayAmplitudeList())
1039
# Sort the order of onshell channels according to their final mass list.
1040
self.get_channels(clevel, True).sort(channelcmp_final)
1042
for channel in self.get_channels(clevel, True):
1045
# Record the final particle id.
1046
final_pid = sorted([l.get('id') for l in channel.get_final_legs()])
1048
# Check if there is a amplitude for it. Since the channels with
1049
# the similar final states are put together. Use reversed order
1051
for amplt in reversed(self['decay_amplitudes'][clevel]):
1052
# Do not include the first leg (initial id)
1053
if sorted([l.get('id') for l in amplt['process']['legs'][1:]])\
1055
amplt['diagrams'].append(channel)
1059
# If no amplitude is satisfied, initiate a new one.
1061
self.get_amplitudes(clevel).append(DecayAmplitude(channel,
1064
# Calculate the apx_decaywidth and for every amplitude.
1065
# apr_br WILL NOT be calculated!
1066
for amp in self.get_amplitudes(clevel):
1067
amp.get('apx_decaywidth')
1068
self.get_amplitudes(clevel).sort(amplitudecmp_width)
1070
# This helper function is obselete in current algorithm...
1071
# NOTE(review): corrupted extraction -- the initialisations of `configlist`
# and `new_configlist` and the loop-exit (break/return) lines are missing;
# text preserved verbatim, comments only added.
def generate_configlist(self, channel, partnum, model):
1072
""" Helper function to generate all the configuration to add
1073
vertetices to channel to create a channel with final
1074
particle number as partnum"""
1076
current_num = len(channel.get_final_legs())
1078
# Per-leg cap: maximum vertex order of the particle sitting on each leg.
limit_list = [model.get_particle(abs(l.get('id'))).get_max_vertexorder() for l in channel.get_final_legs()]
1080
for leg_position in range(current_num):
1081
if limit_list[leg_position] >= 2:
1082
ini_config = [0]* current_num
1083
ini_config[leg_position] = 1
1084
configlist.append(ini_config)
1086
for i in range(partnum - current_num -1):
1088
# Add particle one by one to each config
1089
for config in configlist:
1090
# Add particle to each leg if it does not exceed limit_list
1091
for leg_position in range(current_num):
1092
# the current config + new particle*1 + mother
1093
# <= max_vertexorder
1094
if config[leg_position] + 2 <= limit_list[leg_position]:
1095
temp_config = copy.deepcopy(config)
1096
temp_config[leg_position] += 1
1097
if not temp_config in new_configlist:
1098
new_configlist.append(temp_config)
1099
if not new_configlist:
1102
configlist = new_configlist
1104
# Change the format consistent with max_vertexorder
1105
configlist = [[l+1 for l in config] for config in configlist]
1109
#===============================================================================
1111
#===============================================================================
1112
# NOTE(review): corrupted extraction -- an "else:" before the second
# list.append in append() and the "particle_dict = {}" initialisation (plus
# the docstring terminator) in generate_dict() are missing; text preserved
# verbatim, comments only added before the unterminated docstring.
class DecayParticleList(base_objects.ParticleList):
1113
"""A class to store list of DecayParticle, Particle is also a valid
1114
element, but will automatically convert to DecayParticle"""
1116
def __init__(self, init_list=None, force=False):
1117
"""Creates a new particle list object. If a list of physics
1118
object is given, add them."""
1122
if init_list is not None:
1123
for object in init_list:
1124
self.append(object, force)
1126
def append(self, object, force=False):
1127
"""Append DecayParticle, even if object is Particle"""
1130
assert self.is_valid_element(object), \
1131
"Object %s is not a valid object for the current list" %repr(object)
1133
if isinstance(object, DecayParticle):
1134
list.append(self, object)
1136
# Non-DecayParticle objects are wrapped into DecayParticle on insert.
list.append(self, DecayParticle(object, force))
1138
# Maps pdg_code -> particle; for non-self-conjugate particles a shallow
# copy with is_part=False is registered under the anti-particle code.
def generate_dict(self):
1139
"""Generate a dictionary from particle id to particle.
1140
Include antiparticles.
1145
for particle in self:
1146
particle_dict[particle.get('pdg_code')] = particle
1147
if not particle.get('self_antipart'):
1148
antipart = copy.copy(particle)
1149
antipart.set('is_part', False)
1150
particle_dict[antipart.get_pdg_code()] = antipart
1152
return particle_dict
1154
#===============================================================================
1155
# DecayModel: Model object that is used in this module
1156
#===============================================================================
1157
class DecayModel(base_objects.Model):
1158
"""DecayModel object is able construct the decay vertices for
1159
all its particles by find_vertexlist. When the user try to get stable
1160
particles, it will find all stable particles automatically according to
1161
the particle mass and interactions by find_stable_particles functions.
1162
The run of find_channels uses the function of DecayParticle.
1163
Note that Particle objects will be converted into DecayParticle
1164
either during the initialization or when the set function is used.
1166
sorted_keys = ['name', 'particles', 'parameters', 'interactions',
1167
'couplings', 'lorentz',
1168
'stable_particles', 'vertexlist_found',
1169
'reduced_interactions', 'decay_groups', 'max_vertexorder',
1171
'ab_model', 'abmodel_generated'
1174
# NOTE(review): mutable default argument `init_dict = {}` is a known Python
# pitfall (shared across calls); safe here only if never mutated -- verify
# before relying on it. Text preserved verbatim, comments only added.
def __init__(self, init_dict = {}, force=False):
1175
"""Reset the particle_dict so that items in it is
1176
of DecayParitcle type"""
1179
self.default_setup()
1181
assert isinstance(init_dict, dict), \
1182
"Argument %s is not a dictionary" % repr(init_dict)
1184
# Must set particles first so that the particle_dict
1185
# can point to DecayParticle
1186
# Futhermore, the set of interactions can have correct particle_dict
1187
if 'particles' in init_dict.keys():
1188
self.set('particles', init_dict['particles'], force)
1190
self['particle_dict'] = {}
1191
self.get('particle_dict')
1192
# Do not copy the particle_dict, it may be old version and point only
1193
# to Particle rather than DecayParticle.
1194
for item in init_dict.keys():
1195
if item != 'particles' and item != 'particle_dict':
1197
self.set(item, init_dict[item], force)
1202
def default_setup(self):
    """Initialise DecayModel-specific entries on top of Model's defaults.

    'particles' is stored as a DecayParticleList instead of a plain
    ParticleList; the remaining keys cache the results of the decay
    searches and are reset to empty defaults here.
    """
    super(DecayModel, self).default_setup()
    self['particles'] = DecayParticleList()
    # Bookkeeping flags for the decay-vertex search.
    self['vertexlist_found'] = False
    self['max_vertexorder'] = 0
    self['cp_conj_dict'] = {}
    # Caches for the decay-group / stable-particle analysis.
    for list_key in ('decay_groups', 'reduced_interactions',
                     'stable_particles'):
        self[list_key] = []
    # Width from the value of param_card.
    self['decaywidth_list'] = {}
    # Properties for abstract model
    self['ab_model'] = AbstractModel()
    self['abmodel_generated'] = False
1220
def get_sorted_keys(self):
    """Return the canonical ordering of this model's keys."""
    key_order = self.sorted_keys
    return key_order
1223
# NOTE(review): corrupted extraction -- the "return" line of the
# decay_groups/reduced_interactions branch appears to be missing; text
# preserved verbatim, comments only added.
def get(self, name):
1224
""" Evaluate some special properties first if the user request. """
1226
# Lazily compute stable particles on first access.
if name == 'stable_particles' and not self['stable_particles']:
1227
self.find_stable_particles()
1228
return self['stable_particles']
1229
# reduced_interactions might be empty, cannot judge the evaluation is
1230
# done or not by it.
1231
elif (name == 'decay_groups' or name == 'reduced_interactions') and \
1232
not self['decay_groups']:
1233
self.find_decay_groups_general()
1235
elif name == 'max_vertexorder' and self['max_vertexorder'] == 0:
1236
self.get_max_vertexorder()
1237
return self['max_vertexorder']
1239
# call the mother routine
1240
return DecayModel.__bases__[0].get(self, name)
1242
# NOTE(review): corrupted extraction -- the nested "for plist in value:" /
# "for p in plist:" loop headers around the last two isinstance checks are
# missing (plist/p are otherwise unbound), as is the usual "return True";
# text preserved verbatim, comments only added. Python 2 raise syntax.
def filter(self, name, value):
1243
if name == 'vertexlist_found':
1244
if not isinstance(value, bool):
1245
raise self.PhysicsObjectError, \
1246
"Property %s should be bool type." % name
1247
if name == 'max_vertexorder':
1248
if not isinstance(value, int):
1249
raise self.PhysicsObjectError,\
1250
"Property %s should be int type." % name
1251
if name == 'stable_particles' or name == 'decay_groups':
1252
if not isinstance(value, list):
1253
raise self.PhysicsObjectError,\
1254
"Property %s should be a list contains several particle list." % name
1256
if not isinstance(plist, list):
1257
raise self.PhysicsObjectError,\
1258
"Property %s should be a list contains several particle list." % name
1260
if not isinstance(p, DecayParticle):
1261
raise self.PhysicsObjectError,\
1262
"Property %s should be a list contains several particle list." % name
1264
super(DecayModel, self).filter(name, value)
1269
# NOTE(review): corrupted extraction -- an "if return_value:" guard and the
# final "return return_value" are presumably missing (return_value is set but
# never used/returned as written); text preserved verbatim, comments added.
def set(self, name, value, force=False):
1270
"""Change the Particle into DecayParticle"""
1271
#Record the validity of set by mother routine
1272
return_value = super(DecayModel, self).set(name, value, force)
1273
#Reset the dictionaries
1276
if name == 'particles':
1277
#Reset dictionaries and decay related properties.
1278
self['particle_dict'] = {}
1279
self['got_majoranas'] = None
1280
self['vertexlist_found'] = False
1281
self['max_vertexorder'] = 0
1282
self['decay_groups'] = []
1283
self['reduced_interactions'] = []
1284
self['stable_particles'] = []
1285
self['decaywidth_list'] = {}
1287
#Convert to DecayParticleList
1288
self['particles'] = DecayParticleList(value, force)
1289
#Generate new dictionaries with items are DecayParticle
1290
self.get('particle_dict')
1291
self.get('got_majoranas')
1292
if name == 'interactions':
1293
# Reset dictionaries and decay related properties.
1294
self['interaction_dict'] = {}
1295
self['ref_dict_to1'] = {}
1296
self['ref_dict_to0'] = {}
1297
self['vertexlist_found'] = False
1298
self['max_vertexorder'] = 0
1299
self['decay_groups'] = []
1300
self['reduced_interactions'] = []
1301
self['stable_particles'] = []
1302
self['decaywidth_list'] = {}
1304
# Generate interactions with particles are DecayParticleList
1305
# Get particle from particle_dict so that the particles
1306
# of interaction is the alias of particles of DecayModel
1307
for inter in self['interactions']:
1308
inter['particles']=DecayParticleList([self.get_particle(part.get_pdg_code()) for part in inter['particles']])
1309
# Generate new dictionaries
1310
self.get('interaction_dict')
1311
self.get('ref_dict_to1')
1312
self.get('ref_dict_to0')
1316
# NOTE(review): corrupted extraction -- the "return None" after the warning
# and the "for k in" clause of the nested comprehension are missing; text
# preserved verbatim, comments only added.
def get_max_vertexorder(self):
1317
"""find the maxima vertex order (i.e. decay particle number)"""
1318
if not self['vertexlist_found']:
1319
logger.warning("Use find_vertexlist before get_max_vertexorder!")
1322
# Do not include key without any vertexlist in it
1323
self['max_vertexorder'] = max(sum([[k[0] \
1325
p.get('decay_vertexlist').keys() \
1326
if p.get('decay_vertexlist')[k]] \
1327
for p in self.get('particles')], [])
1329
return self['max_vertexorder']
1331
# NOTE(review): corrupted extraction -- the setup_interactions(...) call is
# truncated (its final argument/closing paren is missing); text preserved
# verbatim, comments only added.
def generate_abstract_model(self, force=False):
1332
""" Generate the abstract particles in AbstractModel."""
1334
logger.info("Generating the abstract model...")
1336
# Triggers lazy evaluation of stable particles (see get()).
self.get('stable_particles')
1337
# If vertexlist has not been found before, run model.find_vertexlist
1338
if not self['vertexlist_found']:
1339
logger.info("Vertexlist of this model has not been searched."+ \
1340
"Automatically run the model.find_vertexlist()")
1341
self.find_vertexlist()
1343
# Add all particles, including stable ones
1344
# (could appear in final states)
1345
self['ab_model'].setup_particles(self.get('particles'), force)
1347
# Add interactions, except ones with all stable particles or ones
1348
# with radiation process.
1349
self['ab_model'].setup_interactions(self.get('interactions'),
1350
self['cp_conj_dict'],
1353
# NOTE(review): corrupted extraction -- a "return" ending the stable-particle
# branch appears to be missing; text preserved verbatim, comments only added.
def generate_abstract_amplitudes(self, part, clevel):
1354
""" Generate the abstract amplitudes in AbstractModel."""
1356
# Skip search if this particle is stable
1357
if part.get('is_stable'):
1358
logger.info("Particle %s is stable." %part['name'] +\
1359
"No abstract amplitude will be generated.")
1362
logger.info("Generating the abstract amplitudes of %s %d-body decays..." % (part['name'], clevel))
1364
self['ab_model'].generate_ab_amplitudes(part.get_amplitudes(clevel))
1367
def find_vertexlist(self):
1368
# NOTE(review): corrupted extraction -- the docstring below is never closed
# in this chunk, and several control-flow lines are missing (the early
# "return" after the info message, "ini_list = []", "total_mass = 0",
# "continue" statements, the else-branch around the CP-conjugate search and
# the sorting of the new vertexlist). Text preserved verbatim.
""" Check whether the interaction is able to decay from mother_part.
1369
Set the 'decay_vertexlist' of the corresponding particles.
1370
Utilize in finding all the decay table of the whole model
1373
# Return if self['vertexlist_found'] is True.\
1374
if self['vertexlist_found']:
1375
logger.info("Vertexlist has been searched before.")
1377
# Find the stable particles of this model and do not assign decay vertex
1379
self.get('stable_particles')
1381
# Valid initial particle list
1384
#Dict to store all the vertexlist (for conveniece only, removable!)
1385
#vertexlist_dict = {}
1387
for part in self.get('particles'):
1388
# Initialized the decay_vertexlist
1389
part['decay_vertexlist'] = {(2, False) : base_objects.VertexList(),
1390
(2, True) : base_objects.VertexList(),
1391
(3, False) : base_objects.VertexList(),
1392
(3, True) : base_objects.VertexList()}
1394
if not part.get('is_stable'):
1395
#All valid initial particles (mass != 0 and is_part == True)
1396
ini_list.append(part.get_pdg_code())
1397
#for partnum in [2, 3]:
1398
# for onshell in [False, True]:
1399
# vertexlist_dict[(part.get_pdg_code(), partnum,onshell)] = \
1400
# base_objects.VertexList()
1402
#Prepare the vertexlist
1403
for inter in self['interactions']:
1404
#Calculate the particle number
1405
partnum = len(inter['particles']) - 1
1407
temp_legs = base_objects.LegList()
1410
for num, part in enumerate(inter['particles']):
1411
#Check if the interaction contains valid initial particle
1412
if part.get_anti_pdg_code() in ini_list:
1415
#Create the original legs
1416
temp_legs.append(base_objects.Leg({'id':part.get_pdg_code()}))
1417
total_mass += abs(eval(part.get('mass')))
1419
#Exclude interaction without valid initial particle
1423
for num, part in enumerate(inter['particles']):
1424
#Get anti_pdg_code (pid for incoming particle)
1425
pid = part.get_anti_pdg_code()
1426
#Exclude invalid initial particle
1427
if not pid in ini_list:
1430
#Exclude initial particle appears in final particles
1431
#i.e. radiation is excluded.
1432
#count the number of abs(pdg_code)
1433
pid_list = [p.get('pdg_code') for p in inter.get('particles')]
1434
if pid_list.count(abs(pid)) > 1:
1437
ini_mass = abs(eval(part.get('mass')))
1438
onshell = ini_mass > (total_mass - ini_mass)
1440
#Create new legs for the sort later
1441
temp_legs_new = copy.deepcopy(temp_legs)
1442
temp_legs_new[num]['id'] = pid
1444
# Put initial leg in the last
1445
# and sort other legs for comparison
1446
inileg = temp_legs_new.pop(num)
1447
# The order of legs follows the order of pdg_codes in
1449
#temp_legs_new.sort(legcmp)
1450
temp_legs_new.append(inileg)
1451
temp_vertex = base_objects.Vertex({'id': inter.get('id'),
1452
'legs':temp_legs_new})
1454
#Record the vertex with key = (interaction_id, part_id)
1455
if temp_vertex not in \
1456
self.get_particle(pid).get_vertexlist(partnum, onshell):
1458
self.get_particle(pid)['decay_vertexlist'][(\
1459
partnum, onshell)].append(temp_vertex)
1461
self.get_particle(pid)['decay_vertexlist'][(\
1462
partnum, onshell)] = base_objects.VertexList(\
1465
# Set the property vertexlist_found as True and for all particles
1466
self['vertexlist_found'] = True
1467
for part in self['particles']:
1468
part['vertexlist_found'] = True
1470
# Setup the cp_conj_dict for interaction id to the id of
1472
interlist = [(i['id'], [p.get_anti_pdg_code() for p in i['particles']])\
1473
for i in self['interactions']]
1475
for inter in self['interactions']:
1476
pids = [p.get_pdg_code() for p in inter['particles']]
1478
for item in interlist:
1479
if sorted(item[1]) == sorted(pids) and \
1480
inter['id'] != item[0]:
1482
self['cp_conj_dict'][inter['id']] = item[0]
1486
logger.debug('No CP conjugate found for interaction #%d' %inter['id'])
1488
#fdata = open(os.path.join(MG5DIR, 'models', self['name'], 'vertexlist_dict.dat'), 'w')
1489
#fdata.write(str(vertexlist_dict))
1492
# NOTE(review): corrupted extraction -- "colorlist.sort()" and the
# "color_dict = {" opener (and its closing "}") are missing around the dict
# entries below; text preserved verbatim, comments only added.
def color_multiplicity_def(self, colorlist):
1493
""" Defines the two-body color multiplicity. It is applied in the
1494
get_color_multiplicity in channel object.
1495
colorlist is the a list of two color indices.
1496
This function will return a list of all possible
1497
color index of the "mother" particle and the corresponding
1498
color multiplicities. """
1500
# Raise error if the colorlist is not the right format.
1501
if not isinstance(colorlist, list):
1502
raise self.PhysicsObjectError,\
1503
"The argument must be a list."
1505
if any([not isinstance(i, int) for i in colorlist]):
1506
raise self.PhysicsObjectError,\
1507
"The argument must be a list of integer elements."
1509
# Sort the colorlist and
1511
color_tuple = tuple(colorlist)
1512
# The dictionary of color multiplicity
1513
# The key is the final_color structure and the value is
1514
# [(ini_color_1, multiplicity_1), (ini_color_2, multiplicity_2), ...]
1516
# The trivial key and value
1517
# These define the convention we used.
1520
(1, 8): [(8, 1./2)],
1522
(3, 3): [(1, 3), (8, 0.5), (3, 1), (6, 1)],
1523
# (3, 6) -> (3, 2 rather than 3),
1524
(3, 6): [(3, 2), (8, 3./4)],
1525
(3, 8): [(3, 0.5*(3-1./3)), (6, 1)],
1526
(6, 6): [(1, 6), (8, 4./3)],
1527
(6, 8): [(3, 2), (6, 2-1./3)],
1529
# 3-body decay color structure
1531
(3, 3, 8): [(1, 8), (8, 3-1./3)],
1532
(1, 3, 8): [(3, 3)],
1533
(1, 3, 3): [(8, 1)],
1534
(3, 8, 8): [(3, 64./9)]
1537
# Raises KeyError for color structures not listed above.
return color_dict[color_tuple]
1539
# NOTE(review): corrupted extraction -- many control lines are missing
# ("continue" after the comment filter, try/except KeyError around the dict
# lookups, "couplings = []" init, else-branches of the logger calls). Also
# note: exec/eval on param_card content executes file data as code -- only
# safe for trusted cards. Text preserved verbatim, comments only added.
def read_param_card(self, param_card):
1540
"""Read a param_card and set all parameters and couplings as
1541
members of this module"""
1543
if not os.path.isfile(param_card):
1544
raise MadGraph5Error, \
1545
"No such file %s" % param_card
1547
# Extract external parameters
1548
external_parameters = self['parameters'][('external',)]
1550
# Create a dictionary from LHA block name and code to parameter name
1552
for param in external_parameters:
1554
dict = parameter_dict[param.lhablock.lower()]
1557
parameter_dict[param.lhablock.lower()] = dict
1558
dict[tuple(param.lhacode)] = param.name
1559
# Now read parameters from the param_card
1561
# Read in param_card
1562
param_lines = open(param_card, 'r').read().split('\n')
1564
# Define regular expressions
1565
re_block = re.compile("^block\s+(?P<name>\w+)")
1566
re_decay = re.compile(\
1567
"^decay\s+(?P<pid>\d+)\s+(?P<value>-*\d+\.\d+e(\+|-)\d+)\s*")
1568
re_single_index = re.compile(\
1569
"^\s*(?P<i1>\d+)\s+(?P<value>-*\d+\.\d+e(\+|-)\d+)\s*")
1570
re_double_index = re.compile(\
1571
"^\s*(?P<i1>\d+)\s+(?P<i2>\d+)\s+(?P<value>-*\d+\.\d+e(\+|-)\d+)\s*")
1573
# Go through lines in param_card
1574
for line in param_lines:
1575
if not line.strip() or line[0] == '#':
1579
block_match = re_block.match(line)
1581
block = block_match.group('name')
1583
# Look for single indices
1584
single_index_match = re_single_index.match(line)
1585
double_index_match = re_double_index.match(line)
1586
decay_match = re_decay.match(line)
1587
if block and single_index_match:
1588
i1 = int(single_index_match.group('i1'))
1589
value = single_index_match.group('value')
1591
exec("globals()[\'%s\'] = %s" % (parameter_dict[block][(i1,)],
1593
logger.info("Set parameter %s = %f" % \
1594
(parameter_dict[block][(i1,)],\
1595
eval(parameter_dict[block][(i1,)])))
1597
logger.warning('No parameter found for block %s index %d' %\
1600
double_index_match = re_double_index.match(line)
1601
# Look for double indices
1602
if block and double_index_match:
1603
i1 = int(double_index_match.group('i1'))
1604
i2 = int(double_index_match.group('i2'))
1606
exec("globals()[\'%s\'] = %s" % (parameter_dict[block][(i1,i2)],
1607
double_index_match.group('value')))
1608
logger.info("Set parameter %s = %f" % \
1609
(parameter_dict[block][(i1,i2)],\
1610
eval(parameter_dict[block][(i1,i2)])))
1612
logger.warning('No parameter found for block %s index %d %d' %\
1616
decay_match = re_decay.match(line)
1619
pid = int(decay_match.group('pid'))
1620
value = decay_match.group('value')
1621
self['decaywidth_list'][(pid, True)] = float(value)
1623
exec("globals()[\'%s\'] = %s" % \
1624
(parameter_dict['decay'][(pid,)],
1626
logger.info("Set decay width %s = %f" % \
1627
(parameter_dict['decay'][(pid,)],\
1628
eval(parameter_dict['decay'][(pid,)])))
1630
logger.warning('No decay parameter found for %d' % pid)
1633
# Define all functions used
1634
for func in self['functions']:
1635
exec("def %s(%s):\n return %s" % (func.name,
1636
",".join(func.arguments),
1639
# Extract derived parameters
1640
# TO BE IMPLEMENTED allow running alpha_s coupling
1641
derived_parameters = []
1643
derived_parameters += self['parameters'][()]
1647
derived_parameters += self['parameters'][('aEWM1',)]
1651
derived_parameters += self['parameters'][('aS',)]
1655
derived_parameters += self['parameters'][('aS', 'aEWM1')]
1659
derived_parameters += self['parameters'][('aEWM1', 'aS')]
1664
# Now calculate derived parameters
1665
# TO BE IMPLEMENTED use running alpha_s for aS-dependent params
1666
for param in derived_parameters:
1667
exec("globals()[\'%s\'] = %s" % (param.name, param.expr))
1668
if not eval(param.name) and eval(param.name) != 0:
1669
logger.warning("%s has no expression: %s" % (param.name,
1672
logger.info("Calculated parameter %s = %f" % \
1673
(param.name, eval(param.name)))
1675
logger.info("Calculated parameter %s = (%f, %f)" % \
1677
eval(param.name).real, eval(param.name).imag))
1682
couplings += self['couplings'][()]
1686
couplings += self['couplings'][('aEWM1',)]
1690
couplings += self['couplings'][('aS',)]
1694
couplings += self['couplings'][('aS', 'aEWM1')]
1698
couplings += self['couplings'][('aEWM1', 'aS')]
1702
# Now calculate all couplings
1703
# TO BE IMPLEMENTED use running alpha_s for aS-dependent couplings
1704
for coup in couplings:
1705
exec("globals()[\'%s\'] = %s" % (coup.name, coup.expr))
1706
if not eval(coup.name) and eval(coup.name) != 0:
1707
logger.warning("%s has no expression: %s" % (coup.name,
1709
logger.info("Calculated coupling %s = (%f, %f)" % \
1711
eval(coup.name).real, eval(coup.name).imag))
1713
# Set alpha_s original value
1718
# NOTE(review): corrupted extraction -- the reference-scale definitions
# (MZ_ref, MB_ref, MC_ref) and the if/elif branching on q that selects which
# of the three running paths executes are missing, as is the final assignment
# of a_out back to aS. Text preserved verbatim, comments only added.
def running_externals(self, q, loopnum=2):
1719
""" Recalculate external parameters at the given scale. """
1721
# Raise error for wrong type of q
1722
if not isinstance(q, int) and not isinstance(q, long) and \
1723
not isinstance(q, float):
1724
raise self.PhysicsObjectError, \
1725
"The argument %s should be numerical type." %str(q)
1727
# Declare global value. amZ0 is the alpha_s at Z pole
1728
global aS, MT, MB, MZ, amZ0
1730
# Setup the alpha_s at different scale
1737
# MZ, MB are already read in from param_card
1745
# Calculate alpha_s at the scale q
1747
# Running the alpha_s from MZ_ref to MB_ref (fermion_num = 5)
1748
t = 2 * math.log(MB_ref/MZ_ref)
1749
amb = self.newton1(t, amZ0, loopnum, 5.)
1751
# Running the alpha_s from MB_ref to MC_ref (fermion_num = 4)
1752
t = 2.*math.log(MC_ref/MB_ref)
1753
amc = self.newton1(t, amb, loopnum, 4.)
1755
# Running the alpha_s from MC_ref to q
1756
t = 2.*math.log(q/MC_ref)
1757
a_out = self.newton1(t, amc, loopnum, 3.)
1761
# Running the alpha_s from MB_ref to q (fermion_num = 4)
1762
t = 2.*math.log(q/MB_ref)
1763
a_out = self.newton1(t, amb, loopnum, 4.)
1765
# Running the alpha_s from MZ_ref to MB_ref (fermion_num = 5)
1766
t = 2 * math.log(q/MZ_ref)
1767
a_out = self.newton1(t, amZ0, loopnum, 5.)
1769
# Save the result alpha_s
1773
# NOTE(review): corrupted extraction -- the initialisations of b0/c1/c2/delc
# (empty dicts), the "if loopnum == 1:" guard before the 1-loop return, the
# iteration's "while delta > accuracy:" header and loopnum branching, and the
# final "return a_out" are all missing. Text preserved verbatim.
def newton1(self, t, a_in, loopnum, nf):
1774
""" Calculate the running strong coupling constant from a_in
1775
using the t as the energy running factor,
1776
loop number given by loopnum, number of fermions by nf."""
1778
# Setup the accuracy.
1781
# Functions used in the beta function
1787
# Beta-function coefficients for nf = 3, 4, 5 active flavours.
for i in range(3,6):
1788
b0[i] = (11. - 2.*i/3.)/4./math.pi
1789
c1[i] = (102. - 38./3.*i)/4./math.pi/(11. - 2.*i/3.)
1790
c2[i] = (2857./2. - 5033.*i/18. + 325* i**2 /54.)\
1791
/16./math.pi**2/(11. - 2.*i/3.)
1792
delc[i] = math.sqrt(4*c2[i] - c1[i]**2)
1794
f2 = lambda x: (1./x + c1[nf] * math.log((c1[nf]*x)/(1. + c1[nf]*x)))
1795
f3 = lambda x: (1./x + 0.5*c1[nf] * \
1796
math.log((c2[nf]* x**2)\
1797
/(1. +c1[nf]*x +c2[nf]* x**2))\
1798
-(c1[nf]**2 - 2.*c2[nf])/delc[nf]* \
1799
math.atan((2.*c2[nf]*x + c1[nf])/delc[nf]))
1801
# Return the 1-loop alpha_s
1803
return a_in/(1. + a_in * b0[nf] * t)
1804
# For higher order correction, setup the initial value of a_out
1806
a_out = a_in/(1. + a_in * b0[nf] * t + \
1807
c1[nf] * a_in * math.log(1. + a_in * b0[nf] * t))
1811
# Start the iteration
1815
# Newton step: f is the implicit running equation, fp its derivative.
f = b0[nf]*t + f2(a_in) - f2(a_out)
1816
fp = 1./(a_out**2 * (1. + c1[nf]*a_out))
1818
f = b0[nf]*t + f3(a_in) - f3(a_out)
1819
fp = 1./(a_out**2 * (1. + c1[nf]*a_out + c2[nf]* a_out**2))
1821
a_out = a_out - f/fp
1822
delta = abs(f/fp/a_out)
1826
# NOTE(review): corrupted extraction -- "couplings = []" is missing before the
# coupling accumulation, and stray '"""' tokens below (remnants of
# commented-out logger blocks) unbalance the string parse state; text
# preserved verbatim, comments added only near the top.
def running_internals(self):
1827
""" Recalculate parameters and couplings which depend on
1828
running external parameters to the given energy scale.
1829
RUN running_externals before run this function."""
1831
# External parameters that must be recalculate for different energy
1833
run_ext_params = ['aS']
1835
# Extract derived parameters
1836
derived_parameters = []
1837
# Take only keys that depend on running external parameters
1838
ordered_keys = [key for key in self['parameters'].keys() if \
1839
key != ('external',) and \
1840
any([run_ext in key for run_ext in run_ext_params])]
1841
# Sort keys, keys depend on fewer number parameters should be
1843
ordered_keys.sort(key=len)
1844
for key in ordered_keys:
1845
derived_parameters += self['parameters'][key]
1847
# Now calculate derived parameters
1848
# TO BE IMPLEMENTED use running alpha_s for aS-dependent params
1849
for param in derived_parameters:
1850
exec("globals()[\'%s\'] = %s" % (param.name, param.expr))
1851
if not eval(param.name) and eval(param.name) != 0:
1852
logger.warning("%s has no expression: %s" % (param.name,
1855
logger.info("Recalculated parameter %s = %f" % \
1856
(param.name, eval(param.name)))
1858
logger.info("Recalculated parameter %s = (%f, %f)" % \
1860
eval(param.name).real, eval(param.name).imag))"""
1862
# Extract couplings from couplings that depend on fewer
1863
# number of external parameters.
1865
# Only take keys that contain running external parameters.
1866
ordered_keys = [key for key in self['couplings'].keys() if \
1867
key != ('external',) and \
1868
any([run_ext in key for run_ext in run_ext_params])]
1870
ordered_keys.sort(key=len)
1871
for key in ordered_keys:
1872
couplings += self['couplings'][key]
1874
# Now calculate all couplings
1875
# TO BE IMPLEMENTED use running alpha_s for aS-dependent couplings
1876
for coup in couplings:
1877
exec("globals()[\'%s\'] = %s" % (coup.name, coup.expr))
1878
if not eval(coup.name) and eval(coup.name) != 0:
1879
logger.warning("%s has no expression: %s" % (coup.name,
1881
"""logger.info("Recalculated coupling %s = (%f, %f)" % \
1883
eval(coup.name).real, eval(coup.name).imag))"""
1886
def find_decay_groups(self):
1887
# NOTE(review): corrupted extraction -- the docstring below is never closed
# in this chunk (numbered steps are also incomplete: step 4 is absent), so
# the code after it parses as string content. Text preserved verbatim.
""" Find groups of particles which can decay into each other,
1888
keeping Standard Model particles outside for now. This allows
1889
to find particles which are absolutely stable based on their
1894
1. Start with any non-SM particle. Look for all
1895
interactions which has this particle in them.
1897
2. Any particles with single-particle interactions with this
1898
particle and with any number of SM particles are in the same
1901
3. If any of these particles have decay to only SM
1902
particles, the complete decay group becomes "sm"
1904
5. Iterate through all particles, to cover all particles and
1908
self.sm_ids = [1,2,3,4,5,6,11,12,13,14,15,16,21,22,23,24]
1909
self['decay_groups'] = [[]]
1911
particles = [p for p in self.get('particles') if \
1912
p.get('pdg_code') not in self.sm_ids]
1914
for particle in particles:
1915
# Check if particles is already in a decay group
1916
if particle not in sum(self['decay_groups'], []):
1917
# Insert particle in new decay group
1918
self['decay_groups'].append([particle])
1919
self.find_decay_groups_for_particle(particle)
def find_decay_groups_for_particle(self, particle):
    """Recursive routine to find decay groups starting from a
    given particle.

    1. Pick out all interactions with this particle.
    2. For any interaction which is not a radiation (i.e., does not
       contain this particle twice):
       a. If there is a single non-sm particle in
          the decay, add it to this decay group.  Otherwise, add to
          the SM decay group or to a new decay group.
       b. If there is more than 1 non-sm particle: if all particles
          are already in decay groups, merge decay groups according
          to the different cases:
          2 non-sm particles: either both are in this group, which means
          this group is SM, or one is in this group, so the other has to
          be SM, or both are in the same other decay group, then this
          group is SM.
          3 non-sm particles: either 1 is in this group, then the other
          two must be in the same group, or 2 are in this group, then the
          third must also be in this group, or 2 are in the same other
          group, then the third must be in this group (not yet
          implemented).  No other cases can be dealt with.
          4 or more: Not implemented (not phenomenologically
          interesting)."""

    # Interactions with this particle which are not radiation.
    interactions = [i for i in self.get('interactions') if \
                    particle in i.get('particles') and \
                    i.get('particles').count(particle) == 1 and \
                    (particle.get('self_antipart') or
                     not self.get_particle(particle.get_anti_pdg_code())\
                     in i.get('particles'))]

    while interactions:
        interaction = interactions.pop(0)

        # Vertex members which are neither this particle, an SM
        # particle, nor already members of the SM-like group 0.
        non_sm_particles = [p for p in interaction.get('particles') \
                            if p != particle and \
                            not p.get('pdg_code') in self.sm_ids and \
                            not (p.get('is_part') and p in \
                                 self['decay_groups'][0] or \
                                 not p.get('is_part') and \
                                 self.get_particle(p.get('pdg_code')) in \
                                 self['decay_groups'][0])]
        group_index = [i for (i, g) in enumerate(self['decay_groups']) \
                       if particle in g][0]

        if len(non_sm_particles) == 0:
            # Decays to SM only: this particle's group is the SM group.
            group = self['decay_groups'].pop(group_index)
            self['decay_groups'][0].extend(group)

        elif len(non_sm_particles) == 1:
            # The other particle should be in this decay group.
            particle2 = non_sm_particles[0]
            if not particle2.get('is_part'):
                particle2 = self.get_particle(particle2.get_anti_pdg_code())

            if particle2 in self['decay_groups'][group_index]:
                # Already in this decay group and treated before.
                pass
            elif particle2 in sum(self['decay_groups'], []):
                # In a different decay group - merge the two groups.
                group_index2 = [i for (i, g) in \
                                enumerate(self['decay_groups']) \
                                if particle2 in g][0]
                group = self['decay_groups'].pop(max(group_index,
                                                     group_index2))
                self['decay_groups'][min(group_index,
                                         group_index2)].extend(group)
            else:
                # Not classified yet: add particle2 to this decay group.
                self['decay_groups'][group_index].append(particle2)

        elif len(non_sm_particles) > 1:
            # If any of the particles was never classified, let another
            # particle take care of this interaction later on.
            non_checked_particles = [p for p in non_sm_particles if \
                                     (p.get('is_part') and not p in \
                                      sum(self['decay_groups'], []) or \
                                      not p.get('is_part') and not \
                                      self.get_particle(\
                                          p.get_anti_pdg_code()) in \
                                      sum(self['decay_groups'], []))]

            if not non_checked_particles:
                # All particles have been checked.  Analyze interaction.
                if len(non_sm_particles) == 2:
                    # Are any of the particles in my decay group already?
                    this_group_particles = [p for p in non_sm_particles \
                                            if p in self['decay_groups'][\
                                                                group_index]]
                    if len(this_group_particles) == 2:
                        # No quantum number can be conserved:
                        # this group must be SM.
                        group = self['decay_groups'].pop(group_index)
                        self['decay_groups'][0].extend(group)
                    elif len(this_group_particles) == 1:
                        # One particle shares this group; the other one
                        # must then belong to the SM group.
                        particle2 = [p for p in non_sm_particles \
                                     if p != this_group_particles[0]][0]
                        if not particle2.get('is_part'):
                            particle2 = self.get_particle(\
                                particle2.get_anti_pdg_code())
                        group_index2 = [i for (i, g) in \
                                        enumerate(self['decay_groups']) \
                                        if particle2 in g][0]
                        group_2 = self['decay_groups'].pop(group_index2)
                        self['decay_groups'][0].extend(group_2)
                    else:
                        # Both belong to other groups.  If they share the
                        # same group, this particle must be SM.
                        particle1 = non_sm_particles[0]
                        if not particle1.get('is_part'):
                            particle1 = self.get_particle(\
                                particle1.get_anti_pdg_code())
                        group_index1 = [i for (i, g) in \
                                        enumerate(self['decay_groups']) \
                                        if particle1 in g][0]
                        if non_sm_particles[1] in \
                               self['decay_groups'][group_index1]:
                            group = self['decay_groups'].pop(group_index)
                            self['decay_groups'][0].extend(group)

                if len(non_sm_particles) == 3:
                    # Are any of the particles in my decay group already?
                    this_group_particles = [p for p in non_sm_particles \
                                            if p in self['decay_groups'][\
                                                                group_index]]
                    if len(this_group_particles) == 2:
                        # The 3rd particle has to be in this group too.
                        particle2 = [p for p in non_sm_particles if p not \
                                     in this_group_particles][0]
                        if not particle2.get('is_part'):
                            particle2 = self.get_particle(\
                                particle2.get_anti_pdg_code())
                        group_index2 = [i for (i, g) in \
                                        enumerate(self['decay_groups']) \
                                        if particle2 in g][0]
                        group = self['decay_groups'].pop(max(group_index,
                                                             group_index2))
                        self['decay_groups'][min(group_index,
                                                 group_index2)].extend(group)

                    if len(this_group_particles) == 1:
                        # The other two particles have to be in the
                        # same group.
                        other_group_particles = [p for p in \
                                                 non_sm_particles if p not \
                                                 in this_group_particles]
                        particle1 = other_group_particles[0]
                        if not particle1.get('is_part'):
                            particle1 = self.get_particle(\
                                particle1.get_anti_pdg_code())
                        group_index1 = [i for (i, g) in \
                                        enumerate(self['decay_groups']) \
                                        if particle1 in g][0]
                        # FIX: the transcribed code took index [0] here as
                        # well, so group_index1 == group_index2 always and
                        # the merge below was dead code.
                        particle2 = other_group_particles[1]
                        if not particle2.get('is_part'):
                            particle2 = self.get_particle(\
                                particle2.get_anti_pdg_code())
                        group_index2 = [i for (i, g) in \
                                        enumerate(self['decay_groups']) \
                                        if particle2 in g][0]

                        if group_index1 != group_index2:
                            # Merge the two groups.
                            group = self['decay_groups'].pop(\
                                max(group_index1, group_index2))
                            self['decay_groups'][min(group_index1,
                                                     group_index2)].\
                                                         extend(group)

                    # One more case could be treated: when two of the
                    # three particles are in the same group, the third
                    # particle has to be in the present particle's
                    # group.  Not implemented for now.

                # For cases with more than 3 non-sm particles there are
                # also possibilities to say something in particular
                # situations.  Don't implement this now.
def find_decay_groups_general(self):
    """Iteratively find decay groups, valid for vertices of all orders.
    An "SM" particle is defined as any MASSLESS particle.

    1. Establish the reduced_interactions
       a. Read non-sm particles only
          (not in sm_ids and not in decay_groups[0])
       b. If the particle appeared in this interaction before,
          not only stop reading it but also remove the existing one.
       c. If the interaction has only one particle,
          move this particle to the SM-like group and void this
          interaction.
       d. If the interaction has no particle in it, delete it.
    2. Iteratively reduce the interactions
       a. If there are two particles in an interaction,
          they must be in the same group and the interaction carries no
          further information: delete it.
       b. If there is only one particle in an interaction,
          this particle must be in the SM-like group: delete the
          interaction.
       c. If any two particles in an interaction already belong to the
          same group, remove the two particles.  Delete particles that
          become SM-like as well.  If the interaction becomes empty
          after these deletions, delete it.
       d. If an iteration does not change reduced_interactions at all,
          stop.  All remaining reduced interactions then contain at
          least three non-SM-like particles, each in a different group.
    3. Any particle that was never classified relates to no other
       particle and forms a decay group of its own.
    """
    # Group 0 is the SM-like group.
    self['decay_groups'] = [[]]
    # Each reduced interaction keeps 'id' (the interaction id),
    # 'particles' (the reduced particle content) and, at the end,
    # 'groupid_list' (the decay-group id of each remaining particle).
    self['reduced_interactions'] = []

    # The original 'SM' particles, i.e. massless ones.  A particle's
    # 'mass' is the name of a parameter that was exec'ed into globals
    # earlier, hence the eval.
    sm_ids = [p.get('pdg_code') for p in self.get('particles')\
                  if abs(eval(p.get('mass'))) == 0.]
    self['decay_groups'] = [[p for p in self.get('particles')\
                                 if abs(eval(p.get('mass'))) == 0.]]

    # Read the interaction information and set up reduced interactions.
    for inter in self.get('interactions'):
        temp_int = {'id': inter.get('id'), 'particles': []}
        for part in inter['particles']:
            # Work with particles rather than antiparticles.
            if not part.get('is_part'):
                part = self.get_particle(part.get_anti_pdg_code())

            # Read this particle only if it is not SM(-like).
            if not part.get('pdg_code') in sm_ids and \
                    not part in self['decay_groups'][0]:
                if not part in temp_int['particles']:
                    temp_int['particles'].append(part)
                else:
                    # A doubled particle is equivalent to none at all.
                    temp_int['particles'].remove(part)

        # A single remaining particle must be SM-like.
        if len(temp_int['particles']) == 1:
            part = temp_int['particles'].pop(0)
            self['decay_groups'][0].append(part)

        # Keep only interactions that still contain particles.
        if len(temp_int['particles']):
            self['reduced_interactions'].append(temp_int)

    # Iterative interaction reduction: repeat until one full pass makes
    # no change at all.
    change = True
    while change:
        change = False
        # Iterate over a copy: entries are removed inside the loop
        # (the transcribed code removed from the list being iterated).
        for inter in self['reduced_interactions'][:]:
            if len(inter['particles']) == 2:
                # The two particles must be in the same group;
                # merge groups if necessary.  The interaction itself
                # carries no further information.
                if inter['particles'][0] in sum(self['decay_groups'], []):
                    group_index_0 = [i for (i, g) in \
                                         enumerate(self['decay_groups'])\
                                         if inter['particles'][0] in g][0]
                    if inter['particles'][1] in \
                            sum(self['decay_groups'], []):
                        if not inter['particles'][1] in \
                                self['decay_groups'][group_index_0]:
                            group_index_1 = [i for (i, g) in \
                                enumerate(self['decay_groups'])\
                                if inter['particles'][1] in g][0]
                            # Merge the outer group into the inner one.
                            group_1 = self['decay_groups'].pop(max(\
                                group_index_0, group_index_1))
                            self['decay_groups'][min(group_index_0, \
                                group_index_1)].extend(group_1)
                    else:
                        # Only the first one is classified: add the
                        # second one to its group.
                        self['decay_groups'][group_index_0].append(
                            inter['particles'][1])
                else:
                    if inter['particles'][1] in \
                            sum(self['decay_groups'], []):
                        group_index_1 = [i for (i, g) in \
                            enumerate(self['decay_groups'])\
                            if inter['particles'][1] in g][0]
                        self['decay_groups'][group_index_1].append(
                            inter['particles'][0])
                    else:
                        # Neither is classified: they form a new group.
                        self['decay_groups'].append(inter['particles'])
                # Merged or not, the interaction is useless now.
                self['reduced_interactions'].remove(inter)
                change = True

            elif len(inter['particles']) == 1:
                # A single remaining particle must be SM-like.
                if inter['particles'][0] in sum(self['decay_groups'], []):
                    group_index_1 = [i for (i, g) in \
                        enumerate(self['decay_groups'])\
                        if inter['particles'][0] in g][0]
                    # If its group is not the SM one, merge it with SM.
                    if group_index_1 > 0:
                        self['decay_groups'][0].\
                            extend(self['decay_groups'].pop(group_index_1))
                else:
                    self['decay_groups'][0].extend(inter['particles'])
                # The interaction is useless now.  Kill it.
                self['reduced_interactions'].remove(inter)
                change = True

            elif len(inter['particles']) > 2:
                # Decay-group id (or None) for each particle.
                group_ids = []
                # ref_list[i] stays i while particle i is valid and is
                # set to None once it is scheduled for removal, so the
                # list is not mutated during the loop.
                ref_list = list(range(len(inter['particles'])))

                for i, part in enumerate(inter['particles']):
                    try:
                        group_ids.append([n for (n, g) in \
                                              enumerate(self['decay_groups']) \
                                              if part in g][0])
                    except IndexError:
                        # No group id if this particle is not in
                        # decay_groups yet.
                        group_ids.append(None)
                        continue

                    # An SM-like particle carries no information:
                    # remove it (particles may turn SM-like during the
                    # iteration, reducing the number left to classify).
                    if group_ids[i] == 0:
                        ref_list[i] = None
                        change = True
                        continue

                    # If any valid previous particle shares the same
                    # group, both are useless for determining the
                    # remaining groups.
                    for j in range(i):
                        if (group_ids[i] == group_ids[j] and \
                            group_ids[i] != None) and ref_list[j] != None:
                            ref_list[i] = None
                            ref_list[j] = None
                            change = True
                            break

                # Remove the particles labeled None in ref_list,
                # from the end to keep list indices valid.
                for i in range(len(inter['particles'])-1, -1, -1):
                    if ref_list[i] == None:
                        inter['particles'].pop(i)

                # Remove the interaction if no particle is left.
                if not len(inter['particles']):
                    self['reduced_interactions'].remove(inter)

    # Any particle that could not be classified is lonely, i.e. it
    # relates to no other particle: give it a group of its own.
    for part in self.get('particles'):
        if not part in sum(self['decay_groups'], []) and \
                not part.get('pdg_code') in sm_ids:
            self['decay_groups'].append([part])

    # For convenience, record the decay-group id of every particle of
    # the remaining reduced interactions.
    for inter in self['reduced_interactions']:
        inter['groupid_list'] = [[i for (i, g) in \
                                      enumerate(self['decay_groups']) \
                                      if p in g][0] \
                                     for p in inter['particles']]

    return self['decay_groups']
def find_stable_particles(self):
    """ Find stable particles that are protected by parity conservation
    (massless particles are not included).

    1. Find the lightest massive particle in each group.
    2. For each reduced interaction, the group whose lightest mass is
       greater than the sum of all the other particles is not (or may
       not be) stable.
    3. Replace the lightest mass of the unstable group by the mass of
       its decay products.
    4. Repeat 2. until no replacement can be made.
    """
    # If the decay groups are unknown, find them first.
    if not self['decay_groups']:
        self.find_decay_groups_general()

    # Lightest particle(s) of each group; slot 0 is reserved for the
    # SM-like group.
    stable_candidates = [[]]
    # Lightest mass of each group.
    lightestmass_list = [0.]

    # Massless particles are stable by definition.  A particle's
    # 'mass' is a parameter name exec'ed into globals, hence eval.
    self['stable_particles'] = [[]]
    for p in self.get('particles'):
        if abs(eval(p.get('mass'))) == 0. :
            p.set('is_stable', True)
            self['stable_particles'][-1].append(p)

    # Find the lightest particle in each group (SM-like excluded).
    for group in self['decay_groups'][1:]:
        # A sublist per group to take degeneracy into account.
        # Suppose the lightest particle is the first one.
        stable_candidates.append([group[0]])
        for part in group[1:]:
            # If the mass is smaller, replace the list.
            if abs(eval(part.get('mass'))) < \
                    abs(eval(stable_candidates[-1][0].get('mass'))) :
                stable_candidates[-1] = [part]
            # If degenerate, append the particle to the list.
            elif abs(eval(part.get('mass'))) == \
                    abs(eval(stable_candidates[-1][0].get('mass'))) :
                stable_candidates[-1].append(part)
        # Record the lightest mass of this group.
        lightestmass_list.append(\
            abs(eval(stable_candidates[-1][0].get('mass'))))

    # Use the reduced interactions to rule out unstable groups,
    # iterating until no further replacement happens.
    change = True
    while change:
        change = False
        for inter in self['reduced_interactions']:
            # Minimal mass of each group entering this vertex.
            masslist = [lightestmass_list[inter['groupid_list'][i]] \
                            for i in range(len(inter['particles']))]

            # Replace the minimal mass by the possible decay products.
            for i, m in enumerate(masslist):
                # Heavier than all the others together: may decay.
                if 2*m > sum(masslist):
                    gid = inter['groupid_list'][i]
                    new_mass = sum(masslist) - m
                    # Flag a change only on an actual update, so the
                    # iteration is guaranteed to terminate.
                    if lightestmass_list[gid] != new_mass:
                        # Clear the stable candidates of this group.
                        stable_candidates[gid] = []
                        # The lightest mass becomes the decay products'.
                        lightestmass_list[gid] = new_mass
                        change = True

    # Append the surviving stable candidates.
    for stable_particlelist in stable_candidates:
        if stable_particlelist:
            self['stable_particles'].append(stable_particlelist)

    # Set the is_stable label for the stable particles and their
    # antiparticles.
    for p in sum(self['stable_particles'], []):
        p.set('is_stable', True)
        self.get_particle(p.get_anti_pdg_code()).set('is_stable', True)

    # Run the advanced search to ensure all stable particles are found.
    self.find_stable_particles_advance()

    return self['stable_particles']
def find_stable_particles_advance(self):
    """ Find all stable particles.

    1. For each interaction, if one particle has a mass larger than
       the sum of the others, its mass is replaced by the sum of its
       decay products' masses and its 'is_stable' label is set False.
    2. Repeat 1. until no change is made during a whole pass.
    3. Particles that were never labeled unstable are stable.
    """
    # Current (effective) mass of each particle, and whether it is
    # still considered stable.  'mass' is a parameter name exec'ed
    # into globals earlier, hence the eval.
    mass = {}
    stable_list = {}
    for part in self.get('particles'):
        mass[part.get('pdg_code')] = abs(eval(part.get('mass')))
        stable_list[part.get('pdg_code')] = True

    # Minimal positive mass, used to avoid round-off error.
    m_min = min(min([m for i, m in mass.items() if m > 0.]), 10**(-6))

    # Iterate until a full pass over the interactions changes nothing.
    change = True
    while change:
        change = False
        for inter in self['interactions']:
            total_m = sum([mass[p.get('pdg_code')] \
                               for p in inter['particles']])

            # Skip interactions with (numerically) vanishing total
            # mass.  NOTE(review): this threshold was reconstructed
            # from the transcription -- confirm against upstream.
            if total_m < m_min:
                continue

            # Look for a possible decay of each particle.
            for part in inter['particles']:
                # Only particles not yet known to be stable.
                if not part.get('is_stable'):
                    # Strict inequality with a relative-epsilon guard
                    # to prevent round-off error.
                    if (2*mass[part.get('pdg_code')]-total_m) > \
                            10**3*mass[part.get('pdg_code')]*\
                            sys.float_info.epsilon:
                        mass[part.get('pdg_code')] = \
                            total_m - mass[part.get('pdg_code')]
                        part['is_stable'] = False
                        stable_list[part.get('pdg_code')] = False
                        change = True

    # Record the stable particles (and their antiparticles).
    for part in self.get('particles'):
        if stable_list[part.get('pdg_code')]:
            part.set('is_stable', True)
            self.get_particle(part.get_anti_pdg_code()).set('is_stable',
                                                            True)
            if not part in sum(self['stable_particles'], []):
                self['stable_particles'].append([part])
def find_channels(self, part, max_partnum):
    """Search the decay channels of a single particle.

    This simply delegates to DecayParticle.find_channels, handing the
    particle this model as the model argument."""
    # The particle object owns the actual channel search.
    part.find_channels(max_partnum, self)
def find_all_channels(self, max_partnum, generate_abstract=False):
    """ Find the decay channels of all particles in this model, up to
    max_partnum final-state particles, calling
    DecayParticle.find_channels_nextlevel level by level.
    It also allows a later comparison of the decay widths from the
    param_card with the estimate of this module."""

    # If the vertex lists were never built, build them now.
    if not self['vertexlist_found']:
        logger.info("Vertexlist of this model has not been searched."+ \
            "Automatically run the model.find_vertexlist()")
        self.find_vertexlist()

    # Same for the AbstractModel when abstract output is requested.
    if generate_abstract and not self['abmodel_generated']:
        logger.info("AbstractModel for this model has not been generated."\
                        +"Automatically run the model.generate_abstract_model()")
        self.generate_abstract_model()

    # Make sure the stable particles of this model are known.
    self.get('stable_particles')

    # Run the 2-body decays of all particles first, so that the 3-body
    # search can reuse the widths from the 2-body decays.
    for part in self.get('particles'):
        # Skip the search if this particle is stable.
        if part.get('is_stable'):
            logger.info("Particle %s is stable." %part['name'] +\
                            "No channel search will not proceed.")
            continue

        # Recalculate parameters and couplings at this mass scale.
        self.running_externals(abs(eval(part.get('mass'))))
        self.running_internals()
        logger.info("Find 2-body channels of %s" %part.get('name'))
        part.find_channels_nextlevel(self)
        if generate_abstract:
            self.generate_abstract_amplitudes(part, 2)

    # Now search the higher multiplicities, level by level.
    for part in self.get('particles'):
        # Skip the search if this particle is stable.
        if part.get('is_stable'):
            logger.info("Particle %s is stable." %part['name'] +\
                            "No channel search will not proceed.")
            continue

        # Recalculate parameters and couplings at this mass scale.
        self.running_externals(abs(eval(part.get('mass'))))
        self.running_internals()

        # After recalculating the parameters, find the channels up to
        # the requested level.
        for clevel in range(3, max_partnum+1):
            logger.info("Find %d-body channels of %s" %(clevel,
                                                        part.get('name')))
            part.find_channels_nextlevel(self)
            if generate_abstract:
                self.generate_abstract_amplitudes(part, clevel)

        # Update the decay attributes (branching ratios and
        # apx_decaywidth_err), so the errors are correct even when
        # max_partnum == 2.
        part.update_decay_attributes(False, True, True, self)
def find_all_channels_smart(self, precision, generate_abstract=False):
    """ Find the decay channels of all particles in this model.
    Channels with more than three final-state particles are searched
    only while the requested precision is not yet satisfied.
    If generate_abstract is True, the abstract amplitudes are
    generated as well."""

    # Raise error if precision is not a float.
    if not isinstance(precision, float):
        raise self.PhysicsObjectError(
            "The precision %s should be float type." % str(precision))

    # If the vertex lists were never built, build them now.
    if not self['vertexlist_found']:
        logger.info("Vertexlist of this model has not been searched."+ \
            "Automatically run the model.find_vertexlist()")
        self.find_vertexlist()

    # Same for the AbstractModel when abstract output is requested.
    if generate_abstract and not self['abmodel_generated']:
        logger.info("AbstractModel for this model has not been generated."\
                        +"Automatically run the model.generate_abstract_model()")
        self.generate_abstract_model()

    # Make sure the stable particles of this model are known.
    self.get('stable_particles')

    # Run the 2-body decays of all particles first, so that the 3-body
    # search can reuse the widths from the 2-body decays.
    for part in self.get('particles'):
        # Skip the search if this particle is stable.
        if part.get('is_stable'):
            logger.info("Particle %s is stable." %part['name'] +\
                            "No channel search will not proceed.")
            continue

        # Recalculate parameters and couplings at this mass scale.
        self.running_externals(abs(eval(part.get('mass'))))
        self.running_internals()
        logger.info("Find 2-body channels of %s" %part.get('name'))
        part.find_channels_nextlevel(self)
        if generate_abstract:
            self.generate_abstract_amplitudes(part, 2)

    # Search higher final-state multiplicities while the precision is
    # not satisfied.
    for part in self.get('particles'):
        # Skip the search if this particle is stable.
        if part.get('is_stable'):
            continue

        # Update the decaywidth_err first.
        part.update_decay_attributes(False, True, False, self)

        # If the error (ratio to apx_decaywidth) is larger than the
        # precision, run the coupling constants once before iterating.
        if part.get('apx_decaywidth_err') > precision:
            self.running_externals(abs(eval(part.get('mass'))))
            self.running_internals()

        while part.get('apx_decaywidth_err') > precision:
            # NOTE(review): the logger arguments were lost in
            # transcription; the level searched next is one above the
            # current maximal level -- confirm against upstream.
            logger.info("Find %d-body channels of %s" \
                            % (part.get_max_level()+1, part.get('name')))
            part.find_channels_nextlevel(self)
            if generate_abstract:
                # NOTE(review): level kept as transcribed (2).
                self.generate_abstract_amplitudes(part, 2)

            # The width itself is updated automatically inside
            # find_channels_nextlevel; refresh the error estimate.
            part.update_decay_attributes(False, True, False, self)

        # Finally, update the branching ratios.
        part.update_decay_attributes(False, False, True)
def write_summary_decay_table(self, name=''):
    """ Write a table that lists the total width of all the particles
    and compares it to the value in the param_card.

    If name is empty, the output goes to
    MODELNAME_decay_summary.dat in the 'mg5decay' directory of
    MG5DIR; otherwise name is used as the file name."""

    # Write the result into the 'mg5decay' directory.
    path = os.path.join(MG5DIR, 'mg5decay')
    if not name:
        fdata = open(os.path.join(path,
                                  (self['name']+'_decay_summary.dat')),
                     'w')
        logger.info("\nWrite decay width summary to %s \n" \
                        % str(os.path.join(path,
                              (self['name']+'_decay_summary.dat'))))
    elif isinstance(name, str):
        fdata = open(os.path.join(path, name),'w')
        logger.info("\nWrite decay width summary to %s \n" \
                        % str(os.path.join(path, name)))
    else:
        # FIX: the transcribed message had no %s placeholder, so the
        # % operator itself raised a TypeError instead of this error.
        raise PhysicsObjectError(
            "The file name of the decay table must be str: %s." % str(name))

    # Header of the comparison chart.
    summary_chart = (str('# DECAY WIDTH COMPARISON \n') +\
                     str('# model: %s \n' %self['name']) +\
                     str('#'*80 + '\n')+\
                     str('#Particle ID card value apprx. value ratio') +\
                     str(' level err \n'))

    for part in self.get('particles'):
        # For non-stable particles.
        if not part.get('is_stable'):
            try:
                # Width available in the param_card: print the ratio.
                summary_chart +=(str('#%11d %.4e %.4e %4.2f %3d %.2e\n'\
                                     %(part.get('pdg_code'),
                                       self['decaywidth_list']\
                                           [(part.get('pdg_code'), True)],
                                       part['apx_decaywidth'],
                                       part['apx_decaywidth']/self['decaywidth_list'][(part.get('pdg_code'), True)],
                                       part.get_max_level(),
                                       part['apx_decaywidth_err'])))
            except KeyError:
                # Width not available: do not calculate the ratio.
                # NOTE(review): fallback field values reconstructed.
                summary_chart += (str('#%11d %.4e %.4e %s\n'\
                                      %(part.get('pdg_code'),
                                        0.,
                                        part['apx_decaywidth'],
                                        'N/A')))
            except ZeroDivisionError:
                # Width in the param_card is zero.
                summary_chart += (str('#%11d %.4e %.4e %s\n'\
                                      %(part.get('pdg_code'),
                                        self['decaywidth_list']\
                                            [(part.get('pdg_code'), True)],
                                        part['apx_decaywidth'],
                                        'N/A')))
        # For stable particles.
        else:
            try:
                if abs(self['decaywidth_list'][(part.get('pdg_code'), True)]) == 0.:
                    # Card width is zero, consistent with a stable
                    # particle: print ratio 1.
                    summary_chart += (str('#%11d %.4e %s %4.2f\n'\
                                          %(part.get('pdg_code'),
                                            self['decaywidth_list']\
                                                [(part.get('pdg_code'), True)],
                                            'stable',
                                            1.)))
                else:
                    # Nonzero card width for a predicted-stable one:
                    # no meaningful ratio.
                    summary_chart += (str('#%11d %.4e %s %s\n'\
                                          %(part.get('pdg_code'),
                                            self['decaywidth_list']\
                                                [(part.get('pdg_code'), True)],
                                            'stable',
                                            'N/A')))
            except KeyError:
                # No reference width available at all.
                # NOTE(review): fallback row reconstructed.
                summary_chart += (str('#%11d %s %s %s\n'\
                                      %(part.get('pdg_code'),
                                        'N/A',
                                        'stable',
                                        'N/A')))

    # Write the summary_chart into the file.
    fdata.write(summary_chart)
    fdata.close()
def write_decay_table(self, mother_card_path, format='normal',name = ''):
    """ Write the decay table of all the particles in this model,
    including the channel information and branching ratios
    (estimate_width_error is called automatically during the
    execution), into a file.

    normal: write only amplitudes
    cmp: add ratio of decay_width to the value in MG4 param_card
    full: also write the channels in each amplitude."""

    # The list of currently allowed formats.
    allow_formats = ['normal','full','cmp']

    # Raise error if format is wrong.
    assert format in allow_formats, \
        "The format must be \'normal\' or \'full\' or \'cmp\'."

    # Write the result into the 'mg5decay' directory.
    path = os.path.join(MG5DIR, 'mg5decay')

    if not name:
        if format == 'full':
            fdata = open(os.path.join(path,
                                      (self['name']+'_decaytable_full.dat')),
                         'w')
            logger.info("\nWrite full decay table to %s\n"\
                            %str(os.path.join(path,
                                 (self['name']+'_decaytable_full.dat'))))
        else:
            fdata = open(os.path.join(path,
                                      (self['name']+'_decaytable.dat')),
                         'w')
            logger.info("\nWrite %s decay table to %s\n"\
                            % (format,
                               str(os.path.join(path,
                                   (self['name']+'_decaytable.dat')))))
    elif isinstance(name, str):
        fdata = open(os.path.join(path, name),'w')
        logger.info("\nWrite %s decay table to %s\n"\
                        % (format,
                           str(os.path.join(path, name))))
    else:
        # FIX: the transcribed message had no %s placeholder, so the
        # % operator itself raised a TypeError instead of this error.
        raise PhysicsObjectError(
            "The file name of the decay table must be str: %s." % str(name))

    # Copy the param_card used into the output first.
    fdata0 = open(mother_card_path, 'r')
    fdata.write(fdata0.read())
    fdata0.close()

    # Write the header of the table.
    seperator = str('#'*80 + '\n')
    fdata.write('\n' + seperator + '#\n'*2 +\
                str('## EST. DECAY TABLE ## \n') +\
                '#\n'*2 + seperator)

    # Header of the summary data.
    summary_chart = (str('# DECAY WIDTH COMPARISON \n') +\
                     str('# model: %s \n' %self['name']) +\
                     str('#'*80 + '\n')+\
                     str('#Particle ID card value apprx. value ratio') +\
                     str(' level err \n'))

    # Header of the stable particle output.
    spart = ('\n' + seperator + \
             '# Stable Particles \n'+ \
             seperator + \
             '#%8s Predicted \n' %'ID')

    # Accumulator for the unstable-particle entries.
    # NOTE(review): any original header of this section was lost in
    # transcription; entries are collected without an extra header.
    nonspart = ''

    for p in self['particles']:
        # Write the table only for particles with finite width.
        if p.get('apx_decaywidth'):
            nonspart += p.decaytable_string(format)
            # Try to calculate the ratio in summary_chart.
            try:
                summary_chart +=(str('#%11d %.4e %.4e %4.2f %3d %.2e\n'\
                                     %(p.get('pdg_code'),
                                       self['decaywidth_list']\
                                           [(p.get('pdg_code'), True)],
                                       p['apx_decaywidth'],
                                       p['apx_decaywidth']/self['decaywidth_list'][(p.get('pdg_code'), True)],
                                       p.get_max_level(),
                                       p['apx_decaywidth_err'])))
            except KeyError:
                # Width not available: do not calculate the ratio.
                # NOTE(review): fallback field values reconstructed.
                summary_chart += (str('#%11d %.4e %.4e %s\n'\
                                      %(p.get('pdg_code'),
                                        0.,
                                        p['apx_decaywidth'],
                                        'N/A')))
            except ZeroDivisionError:
                # Width in the param_card is zero.
                summary_chart += (str('#%11d %.4e %.4e %s\n'\
                                      %(p.get('pdg_code'),
                                        self['decaywidth_list']\
                                            [(p.get('pdg_code'), True)],
                                        p['apx_decaywidth'],
                                        'N/A')))
        else:
            # See if the stable property is predicted.
            if p.get('is_stable'):
                spart += str('#%8d %9s \n' % (p.get('pdg_code'), 'Yes'))
            else:
                spart += str('#%8d %9s \n' % (p.get('pdg_code'), 'No'))

            # Try to calculate the ratio if there is a reference width.
            try:
                if abs(self['decaywidth_list'][(p.get('pdg_code'), True)]) == 0.:
                    # Card width is zero, consistent with a stable
                    # particle: print ratio 1.
                    summary_chart += (str('#%11d %.4e %s %4.2f\n'\
                                          %(p.get('pdg_code'),
                                            self['decaywidth_list']\
                                                [(p.get('pdg_code'), True)],
                                            'stable',
                                            1.)))
                else:
                    # Nonzero card width for a predicted-stable one.
                    # NOTE(review): row reconstructed.
                    summary_chart += (str('#%11d %.4e %s %s\n'\
                                          %(p.get('pdg_code'),
                                            self['decaywidth_list']\
                                                [(p.get('pdg_code'), True)],
                                            'stable',
                                            'N/A')))
            except KeyError:
                # If no width is available, write the ratio as 1.
                # NOTE(review): fallback row reconstructed.
                summary_chart += (str('#%11d %.4e %s %s\n'\
                                      %(p.get('pdg_code'),
                                        0.,
                                        'stable',
                                        '1')))

    # Print summary_chart, stable particles, and finally the unstable
    # particles.
    fdata.write(summary_chart)
    fdata.write(spart)
    fdata.write(nonspart)
    fdata.close()
2881
# NOTE(review): the body of find_nextlevel_ratio was lost in
# transcription -- only the signature and the first docstring line
# remain, and the interleaved bare integers are listing line numbers.
# Recover the implementation from the upstream sources before use.
def find_nextlevel_ratio(self):
2882
""" Find the ratio of matrix element square for channels decay to
2887
# Helper Function for reading MG4 param_card
2888
# And compare with our apx_decaywidth
2889
def read_MG4_param_card_decay(self, param_card):
2890
"""Read the decay width in MG4 param_card and
2891
compare the width with our estimation."""
2893
if not os.path.isfile(param_card):
2894
raise MadGraph5Error, \
2895
"No such file %s" % param_card
2897
# Read in param_card
2898
logger.info("\nRead MG4 param_card: %s \n" % str(param_card))
2899
param_lines = open(param_card, 'r').read().split('\n')
2901
# Define regular expressions
2902
re_decay = re.compile(\
2903
"^decay\s+(?P<pid>\d+)\s+(?P<value>-*\d+\.\d+e(\+|-)\d+)\s*")
2904
re_two_body_decay = re.compile(\
2905
"^\s+(?P<br>-*\d+\.\d+e(\+|-)\d+)\s+(?P<nda>\d+)\s+(?P<pid1>-*\d+)\s+(?P<pid2>-*\d+)")
2906
re_three_body_decay = re.compile(\
2907
"^\s+(?P<br>-*\d+\.\d+e(\+|-)\d+)\s+(?P<nda>\d+)\s+(?P<pid1>-*\d+)\s+(?P<pid2>-*\d+)\s+(?P<pid3>-*\d+)")
2909
# Define the decay pid, total width
2913
# Go through lines in param_card
2914
for line in param_lines:
2915
if not line.strip() or line[0] == '#':
2918
# Look for decay blocks
2919
decay_match = re_decay.match(line)
2921
pid = int(decay_match.group('pid'))
2922
total_width = float(decay_match.group('value'))
2923
self['decaywidth_list'][(pid, True)] = total_width
2925
# If no decay pid available, skip this line.
2929
two_body_match = re_two_body_decay.match(line)
2930
three_body_match = re_three_body_decay.match(line)
2932
# Check three_body first!
2933
# Otherwise it will always to be two body.
2934
if three_body_match:
2935
# record the pids and br
2936
pid1 = int(three_body_match.group('pid1'))
2937
pid2 = int(three_body_match.group('pid2'))
2938
pid3 = int(three_body_match.group('pid3'))
2940
br = float(three_body_match.group('br'))
2941
final_ids = [pid1, pid2, pid3]
2942
amp = self.get_particle(pid).get_amplitude(final_ids)
2943
# If amplitude is found, record the ratio
2945
amp['exa_decaywidth'] = br*total_width
2946
# If not found, show this info
2948
logger.info('No amplitude for %d -> %d %d %d is found.' %\
2949
(pid, pid1, pid2, pid3))
2951
# Jump to next line. Do not match the two_body_decay
2954
# If not three-body, check two-body
2956
# record the pids and br
2957
pid1 = int(two_body_match.group('pid1'))
2958
pid2 = int(two_body_match.group('pid2'))
2959
br = float(two_body_match.group('br'))
2960
final_ids = [pid1, pid2]
2961
amp = self.get_particle(pid).get_amplitude(final_ids)
2962
# If amplitude is found, record the ratio
2964
amp['exa_decaywidth'] = br*total_width
2965
# If not found, show this info
2967
logger.info('No amplitude for %d -> %d %d is found.' %\
2970
# Jump to next line. Do not match the three_body_decay
2974
#===============================================================================
2975
# Channel: A specialized Diagram object for decay
2976
#===============================================================================
2978
"""parameters for estimating the phase space area"""
2981
class Channel(base_objects.Diagram):
2982
"""Channel: a diagram that describes a certain decay channel
2983
with on shell condition, apprximated matrix element,
2984
phase space area, and decay width.
2985
There are several helper static methods.
2986
The check_idlegs will return the identical legs of the
2987
given vertex. The check_channels_equiv will check the
2988
equivalence of two channels.
2991
sorted_keys = ['vertices',
2993
'onshell', 'has_idpart', 'id_part_list',
2994
'apx_matrixelement_sq', 'apx_psarea', 'apx_decaywidth',
2995
'apx_decaywidth_nextlevel', 'apx_width_calculated']
2997
def default_setup(self):
2998
"""Default values for all properties"""
2999
self['vertices'] = base_objects.VertexList()
3004
# This property denotes whether the channel has
3005
# identical particles in it.
3006
self['has_idpart'] = False
3007
# The position of the identicle particles with pid as keys.
3008
self['id_part_list'] = {}
3009
self['final_legs'] = base_objects.LegList()
3010
# Property for optimizing the check_repeat of DecayParticle
3011
self['final_mass_list'] = 0
3012
# Decay width related properties.
3013
self['apx_matrixelement_sq'] = 0.
3014
self['s_factor'] = 1
3015
self['apx_psarea'] = 0.
3016
self['apx_decaywidth'] = 0.
3017
# branch ratio is multiply by 100.
3018
self['apx_decaywidth_nextlevel'] = 0.
3019
self['apx_width_calculated'] = False
3021
# Properties for abstract Channel
3022
# [pseudo_ab_inter_ids, pseudo_ab_pids, pseudo_ab_finalids]
3023
# generated by add_ab_diagram in AbstractModel,
3024
# finalids is in the order of final_legs
3025
self['abstract_type'] = [[], [], []]
3026
self['fermionfactor'] = 1
3029
def filter(self, name, value):
    """Filter for valid diagram property values.

    Width/area-like properties ('apx_matrixelement_sq', 'apx_psarea',
    'apx_decaywidth', 'apx_br', 'apx_decaywidth_nextlevel') must be
    floats; the flags 'onshell', 'has_idpart' and
    'apx_width_calculated' must be bools. Everything else is
    delegated to the base Diagram filter.
    Raises PhysicsObjectError for an invalid value.
    """
    if name in ['apx_matrixelement_sq', 'apx_psarea',
                'apx_decaywidth', 'apx_br',
                'apx_decaywidth_nextlevel']:
        if not isinstance(value, float):
            # Call form of raise: identical to the old-style
            # "raise E, msg" under Python 2, also valid in Python 3.
            raise self.PhysicsObjectError(
                "Value %s is not a float" % str(value))

    if name == 'onshell' or name == 'has_idpart' or \
       name == 'apx_width_calculated':
        if not isinstance(value, bool):
            raise self.PhysicsObjectError(
                "%s is not a valid onshell condition." % str(value))

    return super(Channel, self).filter(name, value)
3047
def get(self, name, model=None):
    """Return the property 'name'.

    Check the onshell condition before the user gets it, and
    recalculate the apx_decaywidth_nextlevel if a model is given.
    (NOTE(review): the tail of the original docstring was lost in
    extraction; wording reconstructed from the visible behavior.)
    """
    if name == 'onshell':
        logger.info("It is suggested to get onshell property from get_onshell function")

    # Refresh the next-level width estimate when a model is provided.
    if name == 'apx_decaywidth_nextlevel' and model:
        return self.get_apx_decaywidth_nextlevel(model)

    return super(Channel, self).get(name)
3061
def get_sorted_keys(self):
    """Return channel property names as a nicely sorted list."""
    return self.sorted_keys
3065
def calculate_orders(self, model):
    """Calculate the actual coupling orders of this channel and
    store them via self.set('orders', ...).

    A negative vertex id is interpreted as its positive (CPT
    counterpart) interaction, hence abs() below. Identity vertices
    (id == 0) carry no interaction and are skipped.
    """
    coupling_orders = {}
    for vertex in self['vertices']:
        # Identity vertex: no interaction attached.
        if vertex.get('id') == 0:
            continue
        # NOTE(review): the continuation of this lookup was lost in
        # extraction; the interaction's 'orders' dict is the source
        # of the per-vertex coupling powers — confirm against VCS.
        couplings = model.get('interaction_dict')[abs(vertex.get('id'))].\
            get('orders')
        for coupling in couplings.keys():
            # Accumulate the power of each coupling over all vertices.
            if coupling in coupling_orders:
                coupling_orders[coupling] += couplings[coupling]
            else:
                coupling_orders[coupling] = couplings[coupling]

    self.set('orders', coupling_orders)
3084
def nice_string(self):
3085
""" Add width/width_nextlevel to the nice_string"""
3086
mystr = super(Channel, self).nice_string()
3087
if self['vertices']:
3089
mystr +=" (width = %.3e)" % self['apx_decaywidth']
3091
mystr +=" (est. further width = %.3e)" % self['apx_decaywidth_nextlevel']
3095
def get_initial_id(self):
    """Return the id of the initial (decaying) particle.

    By construction of a decay channel, the last leg of the last
    vertex is the initial particle.
    """
    return self.get('vertices')[-1].get('legs')[-1].get('id')
3099
def get_final_legs(self):
    """Return a list of the final state legs.

    The result is cached in self['final_legs'] after the first call.
    A leg is collected when its 'number' is greater than 1 (number 1
    is the initial particle) and no leg with the same number has
    been collected already.
    """
    if not self['final_legs']:
        for vert in self.get('vertices'):
            for leg in vert.get('legs'):
                if not leg.get('number') in [l.get('number')
                                             for l in self['final_legs']] \
                   and leg.get('number') > 1:
                    self['final_legs'].append(leg)

    return self['final_legs']
3112
def get_onshell(self, model):
    """Evaluate (and cache) the onshell condition with the aid of
    get_final_legs.

    The channel is onshell when the initial particle mass exceeds
    the sum of the final state masses. Mass strings are eval'd, so
    the model parameter values are assumed to be present in this
    module's namespace (MadGraph convention — confirm with caller).
    """
    if not isinstance(self['onshell'], bool):
        # Check if model is valid
        if not isinstance(model, base_objects.Model):
            raise self.PhysicsObjectError(
                "The argument %s must be a model." % str(model))

        self['final_mass_list'] = \
            sorted([abs(eval(model.get_particle(l.get('id')).get('mass')))
                    for l in self.get_final_legs()])
        ini_mass = abs(eval(model.get_particle(self.get_initial_id()).get('mass')))
        self['onshell'] = ini_mass > sum(self['final_mass_list'])

    return self['onshell']
3128
def get_fermion_factor(self, model):
3129
""" Get the fermion_factor, same as get_fermion_factor
3130
in HelasAmplitude."""
3132
# Record the fermion_order using numbers as keys
3135
# Setup the order_dict of external legs
3136
for l in self.get_final_legs():
3137
if model.get_particle(l.get('id')).is_fermion:
3138
order_dict[l.get('id')] = [l.get('number'), []]
3140
order_dict[l.get('id')] = []
3142
# The initial leg is transformed as outgoing legs
3143
ini_part = model.get_particle(self.get_initial_id())
3144
if ini_part.is_fermion():
3145
order_dict[ini_part.get_anti_pdg_code()] = [1, []]
3147
order_dict[ini_part.get_anti_pdg_code()] = []
3149
# Find fermion_order in vertices except the identical one and
3151
for i, vert in enumerate(self['vertices'][:-1]):
3152
ini_part = model.get_particle(vert['legs'][-1]['id'])
3153
# If the vert ended up with fermion, find its fermion_mother
3154
fermion_mother = None
3155
if ini_part.is_fermion():
3157
inter = model.get_interaction(vert['id'])
3159
inter = model.get_interaction(model['cp_conj_dict'][vert['id']])
3160
pdg_codes = [p.get_pdg_code() for p in inter['particles']]
3161
ini_index = pdg_codes.index(ini_part.get_anti_pdg_code())
3162
# part_index is the index of partner in vertex['legs']
3163
if ini_index % 2 ==0:
3164
part_index = ini_index
3165
mother_code = pdg_codes[ini_index+1]
3167
part_index = ini_index - 1
3168
mother_code = pdg_codes[ini_index-1]
3170
fermion_mother = vert['legs'][part_index]
3172
# Check if this vertex has the right order
3173
if fermion_mother['id'] != mother_code:
3174
raise PhysicsObjectError, \
3175
"The order of particle in interaction is not standard."
3177
# Refind the fermion_order except for initial vertex
3178
if i != len(self['vertices'])-2:
3179
order_dict[vert['legs'][-1]['id']] = \
3180
self.get_fermion_order(vert, order_dict, fermion_mother)
3182
# Find fermion_factor in initial vertex
3183
vert = self['vertices'][-2]
3185
inter = model.get_interaction(vert['id'])
3187
inter = model.get_interaction(model['cp_conj_dict'][vert['id']])
3188
pdg_codes = [p.get_pdg_code() for p in inter['particles']]
3190
# Construct helper_vertex with
3191
# legs = [ final+ initial legs in initial vertex, auxiliary_initial_leg]
3192
help_legs = base_objects.LegList(\
3193
[base_objects.Leg({'id': code}) for code in pdg_codes])
3194
help_legs.append(base_objects.Leg({'id': 0}))
3195
helper_vertex = base_objects.Vertex({'legs': help_legs})
3197
# Using this helper_vertex to get final fermion_order
3198
final_order = self.get_fermion_order(helper_vertex, order_dict, None)
3200
self['fermionfactor'] = self.sign_flips_to_order(final_order)
3203
def get_fermion_order(vert, order_dict, fermion_mother):
3204
""" Get the fermion_order. similar to the get_fermion_order in
3205
HelasWavefunction. """
3209
# Include the order of mother first
3211
new_order.extend(order_dict[fermion_mother['id']][1])
3213
# fermion_order of the fermionic legs
3214
fermions = [order_dict[l['id']] \
3215
for l in vert['legs'][:-1] \
3216
if len(order_dict[l['id']]) == 2 and \
3217
l != fermion_mother]
3218
# fermion_order of the bosonic legs
3219
bosons = [order_dict[l['id']] \
3220
for l in vert['legs'][:-1] \
3221
if len(order_dict[l['id']]) == 1]
3223
# include fermionic legs first
3224
for i in range(0, len(fermions), 2):
3225
new_order.append(fermions[i][0])
3226
new_order.append(fermions[i+1][0])
3227
new_order.extend(fermions[i][1])
3228
new_order.extend(fermions[i+1][1])
3233
# Add mother_number for fermion
3235
return [order_dict[fermion_mother['id']][0], new_order]
3240
def sign_flips_to_order(self, fermions):
    """Gives the sign corresponding to the number of flips needed
    to place the fermion numbers in order.
    N.B. copy from helas_object.py
    (NOTE(review): the flip-counting lines lost in extraction are
    restored following the helas_objects implementation.)
    """
    # Perform bubble sort on the fermions, and keep track of
    # the number of flips that are needed
    nflips = 0

    for i in range(len(fermions) - 1):
        for j in range(i + 1, len(fermions)):
            if fermions[j] < fermions[i]:
                tmp = fermions[i]
                fermions[i] = fermions[j]
                fermions[j] = tmp
                nflips = nflips + 1

    return (-1) ** nflips
3261
def check_idlegs(vert):
    """Helper function to check if the vertex has several identical
    legs.

    If identical legs exist, return a dict of the form
    {particle_id: [leg_index1, index2, ...]}; otherwise the returned
    dict is empty (falsy), so callers may test it directly.
    (NOTE(review): the dict initialisations, occurrence branch and
    return lost in extraction are reconstructed — confirm with VCS.)
    """
    lindex_dict = {}
    id_part_list = {}
    # Record the occurence of each leg.
    for lindex, leg in enumerate(vert.get('legs')):
        if leg['id'] in lindex_dict:
            lindex_dict[leg['id']].append(lindex)
        else:
            lindex_dict[leg['id']] = [lindex]

    for key, indexlist in lindex_dict.items():
        # If more than one index for a key,
        # there are identical particles.
        if len(indexlist) > 1:
            # Record the leg id and the list of leg indices.
            id_part_list[key] = indexlist

    return id_part_list
3287
def get_idpartlist(self):
    """Get the position of identical particles in this channel
    (cached in self['id_part_list']).

    The format of id_part_list is a dictionary keyed by the vertex
    which has identical particles, with the particle id and the leg
    indices of the identical particles. E.g.
    id_part_list = {(vertex_index_1, vertex_id, pid_1):
                    [index_1, index_2, ...],
                    (vertex_index_2, ...): ...}
    Also raises the 'has_idpart' flag when any are found.
    """
    if not self['id_part_list']:
        # Check each vertex by check_idlegs
        for vindex, vert in enumerate(self.get('vertices')):
            # Use the id_part_list given by check_idlegs
            id_part_list = Channel.check_idlegs(vert)
            # NOTE(review): guard reconstructed — check_idlegs may
            # return a falsy value, so .items() needs protection.
            if id_part_list:
                for key, idpartlist in id_part_list.items():
                    # Record the index of vertex, vertex id
                    # (interaction id), leg id and leg indices.
                    self['id_part_list'][(vindex, vert.get('id'),
                                          key)] = id_part_list[key]
                    self['has_idpart'] = True

    return self['id_part_list']
3314
def check_channels_equiv(channel_a, channel_b):
    """Helper function to check if any channel is indeed identical to
    the given channel. (This may happen when there are identical
    particles in a channel.) This function checks the final state
    particles of the two channels, then checks the equivalence by
    the recursive "check_channels_equiv_rec" function.
    """
    # Return if the channel has no identical particle.
    #if not channel_a.get('has_idpart') or not channel_b.get('has_idpart'):
    #    return False

    # Get the final states of channels
    final_pid_a = set([l.get('id') for l in channel_a.get_final_legs()])
    final_pid_b = set([l.get('id') for l in channel_b.get_final_legs()])
    # Different final state content: cannot be equivalent.
    if final_pid_a != final_pid_b:
        return False

    # Recursively check the two channels from the final vertices
    # (the origin of decay.)
    return Channel.check_channels_equiv_rec(channel_a, -1, channel_b, -1)
3337
def check_channels_equiv_rec(channel_a, vindex_a, channel_b, vindex_b):
3338
""" The recursive function to check the equivalence of channels
3339
starting from the given vertex point.
3341
1. Check if the two vertices are the same (in id).
3342
2. Compare each the non-identical legs. Either they are both
3343
final legs or their decay chain are the same. The comparision
3344
of decay chain is via recursive call of check_channels_equiv_rec
3345
3. Check all the identical particle legs, try to match each leg of b
3346
for the legs of a of each kind of identical particles.
3347
If a leg of b is fit for one leg of a, do not match this leg of a
3349
If any one leg of b cannot be matched, return False.
3350
4. If the two channels are the same for all the non-identical legs
3351
and are the same for every kind of identical particle,
3352
the two channels are the same from the given vertices."""
3354
# If vindex_a or vindex_b not in the normal range of index
3355
# convert it. (e.g. vindex_b = -1)
3356
vindex_a = vindex_a % len(channel_a.get('vertices'))
3357
vindex_b = vindex_b % len(channel_b.get('vertices'))
3358
# First compare the id of the two vertices.
3359
# Do not compare them directly because the number property of legs
3361
# If vertex id is the same, then the legs id are all the same!
3362
if channel_a.get('vertices')[vindex_a]['id'] != \
3363
channel_b.get('vertices')[vindex_b]['id']:
3365
# If the vertex is the initial vertex, start from the next vertex
3366
if vindex_a == len(channel_a.get('vertices'))-1 and \
3367
vindex_b == len(channel_b.get('vertices'))-1 :
3368
return Channel.check_channels_equiv_rec(channel_a, -2,
3371
# Find the list of identical particles
3372
id_part_list_a=Channel.check_idlegs(channel_a.get('vertices')[vindex_a])
3375
# For each leg, find their decay chain and compare them.
3377
enumerate(channel_a.get('vertices')[vindex_a].get('legs')[:-1]):
3378
# The two channels are equivalent as long as the decay chain
3379
# of the two legs must be the same if they are not part of
3380
# the identicle particles.
3381
if not leg_a.get('id') in id_part_list_a.keys():
3382
# The corresponding leg in channel_b
3383
leg_b = channel_b.get('vertices')[vindex_b].get('legs')[i]
3385
# If the 'is final' is inconsistent between a and b,
3387
# If both are 'is final', end the comparision of these two legs.
3388
leg_a_isfinal = leg_a in channel_a.get_final_legs()
3389
leg_b_isfinal = leg_b in channel_b.get_final_legs()
3390
if leg_a_isfinal or leg_b_isfinal:
3391
if leg_a_isfinal and leg_b_isfinal:
3394
# Return false if one is final leg
3395
# while the other is not.
3397
# The case with both legs are not final needs
3400
# Find the next vertex index of the decay chain of
3402
for j in range(vindex_a-1, -1, -1):
3403
v = channel_a.get('vertices')[j]
3404
if leg_a in v.get('legs'):
3408
for j in range(vindex_b-1, -1, -1):
3409
v = channel_b.get('vertices')[j]
3410
if leg_b in v.get('legs'):
3414
# Compare the decay chains of the two legs.
3415
# If they are already different, return False
3416
if not Channel.check_channels_equiv_rec(channel_a, new_vid_a,
3417
channel_b, new_vid_b):
3420
# If the check can proceed out of the loop of legs,
3421
# the decay chain is all the same for non-identicle particles.
3422
# Return True if there is no identical particles.
3423
if not id_part_list_a:
3427
# Check each kind of identicle particles
3428
for pid, indices_a in id_part_list_a.items():
3429
indices_b = copy.copy(indices_a)
3430
# Match each leg of channel_b to every leg of channel_a
3431
for index_b in indices_b:
3432
# Suppose the fit fail
3433
this_leg_fit = False
3435
leg_b = channel_b.get('vertices')[vindex_b].get('legs')[index_b]
3436
# Search for match leg in legs from indices_a
3437
for i, index_a in enumerate(indices_a):
3439
leg_a = channel_a.get('vertices')[vindex_a].get('legs')[index_a]
3440
# Similar to non-identicle particles, but
3441
# we could not return False when one is final leg while
3442
# the other is not since this could due to the wrong
3444
# If the leg is fit (both are final) stop the match of this
3446
leg_a_isfinal = leg_a in channel_a.get_final_legs()
3447
leg_b_isfinal = leg_b in channel_b.get_final_legs()
3448
if leg_a_isfinal or leg_b_isfinal:
3449
if leg_a_isfinal and leg_b_isfinal:
3456
# Get the vertex indices for the decay chain of the two
3458
for j in range(vindex_a-1, -1, -1):
3459
v = channel_a.get('vertices')[j]
3460
if leg_a in v.get('legs'):
3463
for j in range(vindex_b-1, -1, -1):
3464
v = channel_b.get('vertices')[j]
3465
if leg_b in v.get('legs'):
3469
# If any one of the pairs (leg_a, leg_b) is matched,
3470
# stop the match of this leg
3471
if Channel.check_channels_equiv_rec(channel_a,new_vid_a,
3472
channel_b,new_vid_b):
3477
# If this_leg_fit is True, continue to match the next leg of
3478
# channel_b. If this_leg_fit remains False, the match of this
3479
# leg cannot match to any leg of channel_a, return False
3480
if not this_leg_fit:
3483
# If no difference is found (i.e. return False),
3484
# the two decay chain are the same eventually.
3487
# Helper function (obselete)
3489
def generate_configs(id_part_list):
    """(Obsolete helper.) Generate all possible configurations for
    the identical particles between the two channels.

    E.g. for legs of id=21 with indices [1, 3, 5], this generates
    {21: [[1,3,5], [1,5,3], [3,1,5], [3,5,1], [5,1,3], [5,3,1]]},
    i.e. all permutations of the identical-leg indices, which gives
    all the possible pairings between the identical legs of the two
    channels.
    """
    id_part_configs = {}
    for leg_id, id_parts in id_part_list.items():
        id_part_configs[leg_id] = [[]]

        # For each index_a, pair it with an index_b.
        for position, index_a in enumerate(id_parts):
            # Initiate the new configs of next stage
            id_part_configs_new = []

            # For each partial configuration, extend it with every
            # index_b that has not been used yet.
            for config in id_part_configs[leg_id]:
                for index_b in id_parts:
                    if not index_b in config:
                        config_new = copy.copy(config)
                        config_new.append(index_b)
                        id_part_configs_new.append(config_new)

            # Finally, replace the configs by the new one.
            id_part_configs[leg_id] = id_part_configs_new

    return id_part_configs
3524
def get_apx_matrixelement_sq(self, model):
3525
""" Calculate the apx_matrixelement_sq, the estimation for each leg
3526
is in get_apx_fnrule.
3527
The color_multiplicity is first searching in the
3528
color_multiplicity_def in model object. If no available result,
3529
use the get_color_multiplicity function.
3530
For off shell decay, this function will estimate the value
3531
as if it is on shell."""
3533
# To ensure the final_mass_list is setup, run get_onshell first
3534
self.get_onshell(model)
3536
# Setup the value of matrix element square and the average energy
3537
# q_dict is to record the flow of energy
3539
ini_part = model.get_particle(self.get_initial_id())
3540
avg_q = (abs(eval(ini_part.get('mass'))) - sum(self['final_mass_list']))/len(self.get_final_legs())
3543
# Estimate the width of normal onshell decay.
3544
if self.get_onshell(model):
3545
# Go through each vertex and assign factors to apx_m
3546
# Do not run the identical vertex
3547
for i, vert in enumerate(self['vertices'][:-1]):
3548
# Total energy of this vertex
3553
# Assign value to apx_m except the mother leg (last leg)
3554
for leg in vert['legs'][:-1]:
3555
# Case for final legs
3556
if leg in self.get_final_legs():
3557
mass = abs(eval(model.get_particle(leg.get('id')).\
3559
q_total += (mass + avg_q)
3560
apx_m *= self.get_apx_fnrule(leg.get('id'),
3561
avg_q+mass, True, model)
3562
# If this is only internal leg, calculate the energy
3564
# (The value of this leg is assigned before.)
3566
q_total += q_dict[(leg.get('id'), leg.get('number'))]
3568
# Record the color content
3569
final_color.append(model.get_particle(leg.get('id')).\
3572
# The energy for mother leg is sum of the energy of its product.
3574
q_dict[(vert.get('legs')[-1].get('id'),
3575
vert.get('legs')[-1].get('number'))] = q_total
3576
# Assign the value if the leg is not inital leg. (propagator)
3577
if i != len(self.get('vertices'))-2:
3578
apx_m *= self.get_apx_fnrule(vert.get('legs')[-1].get('id'),
3579
q_total, False, model)
3580
# Assign the value to initial particle. (q_total should be M.)
3582
apx_m *=self.get_apx_fnrule(vert.get('legs')[-1].get('id'),
3583
abs(eval(ini_part.get('mass'))),
3586
# Evaluate the coupling strength
3587
apx_m *= sum([abs(eval(v)) ** 2 for key, v in \
3588
model.get('interaction_dict')[\
3589
abs(vert.get('id'))]['couplings'].items()])
3591
# If final_color contain non-singlet,
3592
# get the color multiplicity.
3593
if any([i != 1 for i in final_color]):
3594
ini_color = model.get_particle(vert.get('legs')[-1].get('id')).get('color')
3595
# Try to find multiplicity directly from model
3598
color_configs = model.color_multiplicity_def(final_color)
3599
for config in color_configs:
3600
if config[0] == ini_color:
3604
# Call the get_color_multiplicity if no suitable
3605
# configs in the color_dict.
3607
apx_m *= self.get_color_multiplicity(ini_color,
3610
# Call the get_color_multiplicity if the final_color
3611
# cannot be found directly in the color_dict.
3613
apx_m *= self.get_color_multiplicity(ini_color,
3618
# A quick estimate of the next-level decay of a off-shell decay
3619
# Consider all legs are onshell.
3621
M = abs(eval(ini_part.get('mass')))
3622
# The avg_E is lower by one more particle in the next-level.
3623
avg_E = (M/(len(self.get_final_legs())+1.))
3625
# Go through each vertex and assign factors to apx_m
3626
# This will take all propagators into accounts.
3627
# Do not run the identical vertex
3628
for i, vert in enumerate(self['vertices'][:-1]):
3629
# Assign the value if the leg is not inital leg.
3630
# q is assumed as 1M
3631
if i != len(self.get('vertices'))-2:
3632
apx_m *= self.get_apx_fnrule(vert.get('legs')[-1].get('id'),
3633
1*M, False, model, True)
3635
# Assign the value to initial particle.
3637
apx_m *= self.get_apx_fnrule(vert.get('legs')[-1].get('id'),
3641
# Evaluate the coupling strength
3642
apx_m *= sum([abs(eval(v)) ** 2 for key, v in \
3643
model.get('interaction_dict')[\
3644
abs(vert.get('id'))]['couplings'].items()])
3646
# Calculate the contribution from final legs
3647
for leg in self.get_final_legs():
3648
apx_m *= self.get_apx_fnrule(leg.get('id'),
3651
# For both on-shell and off-shell cases,
3652
# Correct the factor of spin/color sum of initial particle (average it)
3653
apx_m *= 1./(ini_part.get('spin'))
3655
self['apx_matrixelement_sq'] = apx_m
3658
def get_apx_fnrule(self, pid, q, onshell, model, est = False):
3659
""" The library that provide the 'approximated Feynmann rule'
3660
q is the energy of the leg. The onshell label is to decide
3661
whether this particle is final or intermediate particle."""
3663
part = model.get('particle_dict')[pid]
3664
mass = abs(eval(part.get('mass')))
3666
# Set the propagator value (square is for square of matrix element)
3667
# The width is included in the propagator.
3672
value = 1./((q ** 2 - mass ** 2) ** 2 + \
3673
mass **2 * part.get('apx_decaywidth') **2)
3674
# Rough estimation on propagator. Avoid the large propagator when
3675
# q is close to mass
3677
m_large = max([q, mass])
3678
value = 1./(0.5* m_large**2)**2
3680
# Set the value according the particle type
3682
if part.get('spin') == 3:
3684
# For massive vector boson
3686
value *= (1+ (q/mass) **2)
3687
# For massless boson
3690
# The numerator of propagator.
3692
value *= (1 - 2* (q/mass) **2 + (q/mass) **4)
3694
elif part.get('spin') == 2:
3700
# Do nothing for scalar
3704
def get_color_multiplicity(self, ini_color, final_color, model, base=False):
3705
""" Get the color multiplicity recursively of the given final_color.
3706
The multiplicity is obtained based on the color_multiplicity_def
3707
funtion in the model.
3708
If the color structure of final_color matches the
3709
color_multiplicity_def, return the multiplicity.
3710
Otherwise, return 1 for the get_color_multiplicity with base = True
3711
or return 0 for the get_color_multiplicity with base = False."""
3713
# Combine the last two color factor to get the possible configs.
3714
color_configs = model.color_multiplicity_def([final_color.pop(),
3718
for config in color_configs:
3719
# The recursion ends when the length of the final_color now is 0.
3720
# (i.e. length = 2 before pop)
3721
if len(final_color) == 0:
3722
# If the final_color is consistent to ini_color, return the
3723
# nonzero multiplicity
3724
if config[0] == ini_color:
3728
# If next_final_color has more than one element,
3729
# creaat a new final_color for recursion.
3730
next_final_color = copy.copy(final_color)
3731
next_final_color.append(config[0])
3733
# Call get_color_multiplicity with next_final_color as argument.
3734
c_factor = config[1]* self.get_color_multiplicity(ini_color,
3738
# If the c_factor is not zero, the color configs match successfully.
3739
# Return the c_factor.
3743
# If no configs are satisfied...
3744
# Raise the warning message and return 1 for base get_color_multiplicity
3746
logger.warning("Color structure %s in interaction is not included!" %str(final_color))
3748
# return 0 for intermediate get_color_multiplicity.
3753
def get_apx_psarea(self, model):
    """Calculate the approximate phase space area.

    For the off-shell case, it only approximates the area as if the
    channel were on-shell, with one extra integration per final
    particle beyond two. For the on-shell case it calls the
    recursive calculate_apx_psarea to get a more accurate
    estimation. The result is cached in self['apx_psarea'].
    """
    M = abs(eval(model.get_particle(self.get_initial_id()).get('mass')))
    # Off-shell channel only estimate the psarea if next level is onshell.
    if not self.get_onshell(model):
        # The power of extra integration for this level is
        # number of current final particle - 2
        # (3-body decay -> 1 integration)
        self['apx_psarea'] = 1/(8*math.pi)*\
            pow((c_psarea*(M/8./math.pi)**2), len(self.get_final_legs())-2)

    # For onshell case and psarea has not been calculated
    elif not self['apx_psarea']:
        # The initial particle mass
        M = abs(eval(model.get_particle(self.get_initial_id()).get('mass')))
        # Copy so that calculate_apx_psarea may pop items freely.
        mass_list = copy.copy(self['final_mass_list'])
        self['apx_psarea'] = self.calculate_apx_psarea(M, mass_list)

    return self['apx_psarea']
3777
def calculate_apx_psarea(self, M, mass_list):
3778
"""Recursive function to calculate the apx_psarea.
3779
For the estimation of integration, it takes the final mass in the
3780
mass_list. The c_psarea is corrected in each integration.
3781
Symmetric factor of final state is corrected.
3784
# For more than 2-body decay, estimate the integration from the
3785
# middle point with c_psarea factor for correction, then
3786
# calls the function itself with reduced mass_list.
3787
if len(mass_list) >2 :
3788
# Mass_n is the mass that use pop out to calculate the ps area.
3789
mass_n = mass_list.pop()
3790
# Mean value of the c.m. mass of the rest particles in mass_list
3791
M_eff_mean = ((M-mass_n) +sum(mass_list))/2
3792
# The range of the c.m. mass square
3793
delta_M_eff_sq = ((M-mass_n) ** 2-sum(mass_list) ** 2)
3794
# Recursive formula for ps area,
3795
# initial mass is replaced as the square root of M_eff_sq_mean
3796
return math.sqrt((M ** 2+mass_n ** 2-M_eff_mean**2) ** 2-\
3797
(2*M *mass_n) ** 2)* \
3798
self.calculate_apx_psarea(M_eff_mean, mass_list)*\
3799
delta_M_eff_sq*c_psarea* \
3800
1./(16*(math.pi ** 2)*(M ** 2))
3802
# for two particle decay the phase space area is known.
3804
# calculate the symmetric factor first
3806
id_list = sorted([l.get('id') for l in self.get_final_legs()])
3808
for i, pid in enumerate(id_list):
3809
if i !=0 and id_list[i-1] == pid:
3812
self['s_factor'] = self['s_factor'] * math.factorial(count)
3815
# This complete the s_factor if the idparticle is in the last part
3818
self['s_factor'] = self['s_factor'] * math.factorial(count)
3819
return math.sqrt((M ** 2+mass_list[0] ** 2-mass_list[1] ** 2) ** 2-\
3820
(2* M *mass_list[0]) ** 2)* \
3821
1./(8*math.pi*(M ** 2)*self['s_factor'])
3824
def get_apx_decaywidth(self, model):
    """Calculate (and cache) the approximate decay width.

    Formula: Gamma = ps_area * |M|^2 * (1/2M).
    Note that it still estimates a value for off-shell decays.
    """
    # Return the value now if width has been calculated.
    if self['apx_width_calculated']:
        return self['apx_decaywidth']

    self['apx_decaywidth'] = self.get_apx_matrixelement_sq(model) * \
        self.get_apx_psarea(model) / \
        (2*abs(eval(model.get_particle(self.get_initial_id()).get('mass'))))
    self['apx_width_calculated'] = True

    return self['apx_decaywidth']
3841
# NOTE(review): garbled fragment — interleaved line numbers, unclosed
# docstring, and missing statements. In particular 'ratio' is multiplied
# below without a visible initialization (presumably 'ratio = 1.' was
# lost), and the get_apx_fnrule(...) calls are missing their trailing
# arguments/parentheses. Restore from upstream before use — TODO confirm.
def get_apx_decaywidth_nextlevel(self, model):
3842
""" Estimate the sum of all the width of the next-level channels
3845
M = abs(eval(model.get_particle(self.get_initial_id()).get('mass')))
3846
m_now = sum(self.get('final_mass_list'))
3847
# Ratio is the width of next-level channels over current channel.
3849
for leg in self.get_final_legs():
3850
# Use only particle not anti-particle because anti-particle has no
3852
part = model.get_particle(abs(leg.get('id')))
3853
# For legs that are possible to decay.
3854
if (not part.get('is_stable')) and (M-m_now+part.get('2body_massdiff')) > 0.:
3855
# Suppose the further decay is two-body.
3856
# Formula: ratio = width_of_this_leg * (M/m_leg)**(-1) *
3857
# (2 * M * 8 * pi * (c_psarea* (M/8/pi)**2)) *
3858
# 1/(leg_mleg(mleg)/leg_mleg(0.5M) *
3859
# Propagator of mleg(M)
3860
ratio *= (1+ part.get('apx_decaywidth')*\
3861
(M/abs(eval(part.get('mass')))) **(-1) *\
3862
(c_psarea*(M **3/4/math.pi)) / \
3863
(self.get_apx_fnrule(leg.get('id'), 0.5*M,
3865
self.get_apx_fnrule(leg.get('id'),
3866
abs(eval(part.get('mass'))),
3868
self.get_apx_fnrule(leg.get('id'), M,
3872
# Subtract 1 to get the real ratio
3874
self['apx_decaywidth_nextlevel'] = self.get_apx_decaywidth(model)*ratio
3876
return self['apx_decaywidth_nextlevel']
3879
#===============================================================================
# ChannelList: List of all possible channels for the decay
#===============================================================================
class ChannelList(base_objects.DiagramList):
    """List of decay Channel objects."""

    def is_valid_element(self, obj):
        """Accept obj as a member of this list only when it is a Channel."""
        return isinstance(obj, Channel)
3892
#===============================================================================
# DecayAmplitude: An Amplitude like object contain Process and Channels
#===============================================================================
class DecayAmplitude(diagram_generation.Amplitude):
    """ DecayAmplitude is derived from Amplitude. It collects channels
        with the same final states and create a Process object to describe it.
        This could be used to generate HELAS amplitude."""

    sorted_keys = ['process', 'diagrams', 'apx_decaywidth', 'apx_br',
                   'exa_decaywidth', 'part_sn_dict', 'inter_sn_dict',
                   'ab2real_dicts']

    def default_setup(self):
        """Default values for all properties. Property 'diagrams' is now
           as ChannelList object."""

        self['process'] = base_objects.Process()
        self['diagrams'] = ChannelList()
        self['apx_decaywidth'] = 0.
        # NOTE(review): restored — 'apx_br' is listed in sorted_keys and is
        # read/written by get(); presumably initialized to 0. in the
        # original before the source was garbled.
        self['apx_br'] = 0.
        self['exa_decaywidth'] = False
        # Properties used in abstract amplitude
        self['part_sn_dict'] = {}
        self['inter_sn_dict'] = {}
        self['ab2real_dicts'] = Ab2RealDictList()
3918
def __init__(self, argument=None, model=None):
    """ Allow initialization with a Channel and DecayModel to create
        the corresponding process."""

    if isinstance(argument, Channel) and isinstance(model, DecayModel):
        # Initialize an empty Amplitude first.
        super(DecayAmplitude, self).__init__()
        # Set the corresponding process.
        self.set_process(argument, model)
        self.set('diagrams', ChannelList([argument]))
    else:
        # NOTE(review): 'else:' restored — the two distinct
        # super().__init__ calls cannot both run; this path forwards the
        # generic argument to the parent constructor.
        super(DecayAmplitude, self).__init__(argument)
3933
def filter(self, name, value):
    """Filter for valid amplitude property values.

    Raises PhysicsObjectError when value has the wrong type for the
    given property name; returns True otherwise."""

    if name == 'process':
        if not isinstance(value, base_objects.Process):
            raise self.PhysicsObjectError(
                "%s is not a valid Process object." % str(value))
        # Reset the width and br
        self.reset_width_br()

    if name == 'diagrams':
        if not isinstance(value, ChannelList):
            raise self.PhysicsObjectError(
                "%s is not a valid ChannelList object." % str(value))
        # Reset the width and br
        self.reset_width_br()

    # BUGFIX: the original condition used 'and' (name == 'apx_decaywidth'
    # and name == 'apx_br'), which can never be true for two different
    # names, so the float check below was unreachable.
    if name == 'apx_decaywidth' or name == 'apx_br':
        if not isinstance(value, float):
            raise self.PhysicsObjectError(
                "%s is not a float." % str(value))

    if name == 'exa_decaywidth':
        if not isinstance(value, float) and not isinstance(value, bool):
            raise self.PhysicsObjectError(
                "%s is not a float." % str(value))

    return True
3962
def get(self, name):
    """Get the value of the property name.

    'apx_decaywidth' and 'apx_br' are lazily recomputed from the stored
    channels when their cached value is falsy."""

    # When apx_decaywidth is requested, recalculate it if needed.
    # Calculate br in the same time.
    if name == 'apx_decaywidth' and not self[name]:
        self['apx_decaywidth'] = sum([c.get('apx_decaywidth') \
                                      for c in self['diagrams']])

    # If apx_br is requested, recalculate from the apx_decaywidth if needed.
    if name == 'apx_br' and not self[name]:
        # NOTE(review): the 'try:' line and the tails of these attribute
        # chains were lost in the garbled source; the denominator is
        # presumably the mother particle's total width — confirm upstream.
        try:
            self['apx_br'] = self.get('apx_decaywidth')/ \
                self['process']['model'].\
                get_particle(self['diagrams'][0].get_initial_id()).\
                get('apx_decaywidth')
        except ZeroDivisionError:
            logger.warning("Try to get branch ratio from a zero width particle %s. No action proceed." % self['process']['model'].\
                get_particle(self['diagrams'][0].get_initial_id()).\
                get('name'))

    return super(DecayAmplitude, self).get(name)
3985
def get_sorted_keys(self):
    """Return DecayProcess property names as a nicely sorted list."""
    keys = self.sorted_keys
    return keys
3990
# NOTE(review): garbled fragment — the Leg constructor dict and the
# sorted(...) call below are missing their trailing arguments/closing
# parentheses (presumably the initial leg's 'state' entry and the sort
# key for the final legs). Restore from upstream before use.
def set_process(self, dia, model=None):
3991
""" Setup the process from the diagram,
3992
used in initial setup."""
3994
# Check the number of initial leg is 1
3995
if dia['vertices'][-1]['legs'][-2]['number'] != 1:
3996
raise PhysicsObjectError, \
3997
"The number of initial leg should be setted as 1."
3999
# Append the initial leg.
4000
leglist = base_objects.LegList([base_objects.Leg({'id': dia.get_initial_id(),
4003
# Extract legs from final legs of Channel.
4004
leglist.extend(base_objects.LegList(\
4005
copy.deepcopy(sorted([l for l in dia.get_final_legs()],
4008
# Set up process and model (if provided).
4009
self.set('process', base_objects.Process({'legs':leglist}))
4011
self['process'].set('model', model)
4013
def add_std_diagram(self, new_dia, model=None):
    """ Add new diagram into amplitude, but check if the number identifiers
        of outgoing legs are consistent with the process."""

    if not isinstance(new_dia, Channel):
        raise self.PhysicsObjectError(
            "The argument should be Channel object.")

    # If this amplitude has no process yet, create it from this diagram.
    if not self['process']['legs']:
        self.set_process(new_dia, model)
        self['diagrams'].append(new_dia)
        # NOTE(review): early 'return' restored — without it this diagram
        # would be appended a second time at the bottom of the method.
        return

    # non_std_number: number of new_dia
    non_std_numbers = [(l.get('id'), l.get('number')) \
                       for l in new_dia.get_final_legs()]
    non_std_numbers.append((new_dia.get_initial_id(), 1))
    non_std_numbers.sort(id_num_cmp)

    # std_number: numbers of legs in process
    std_numbers = [(l.get('id'), l.get('number')) \
                   for l in sorted(self['process']['legs'])]
    std_numbers.sort(id_num_cmp)

    # Return if the numbers in diagram is the same as process
    if non_std_numbers == std_numbers:
        self['diagrams'].append(new_dia)
        # NOTE(review): early 'return' restored for the same reason.
        return

    # Conversion from non_std_number to std_number
    converted_dict = dict([(num[1], std_numbers[i][1]) \
                           for i, num in enumerate(non_std_numbers)])

    # Rewrite every leg number of the new diagram into standard numbering.
    for vert in new_dia.get('vertices'):
        for leg in vert.get('legs'):
            leg['number'] = converted_dict[leg['number']]

    # Add this standard diagram into diagrams
    self['diagrams'].append(new_dia)
4057
def reset_width_br(self):
    """ Reset the value of decay width and branch ratio.
        Automatically done in the set(filter) function.
        This is needed when content of diagrams or process are changed."""

    self['apx_decaywidth'] = 0.
    # Restored: the docstring promises the branch ratio is reset too, and
    # get() only recomputes 'apx_br' when the stored value is falsy.
    self['apx_br'] = 0.
4065
# NOTE(review): garbled fragment — the "if format == 'full':" guard, the
# 'else:' for the error case, and the return statements are missing from
# this span. Restore the branch structure from upstream before use.
def decaytable_string(self, format='normal'):
4066
""" Write the string in the format for decay table.
4067
format = 'normal': show only branching ratio
4068
= 'full' : show branching ratio and all the channels."""
4070
output=' %.5e %d' %(self.get('apx_br'),
4071
len(self['process']['legs'])-1)
4072
output += ''.join(['%11d' %leg.get('id') \
4073
for leg in self['process']['legs'][1:]])
4076
if self.get('exa_decaywidth'):
4077
output += '\t%4.2f' % (self.get('apx_decaywidth')/self.get('exa_decaywidth'))
4081
output += ' #Br(%s)\n' %self.get('process').input_string()
4083
# Output the channels if format is full
4085
# Set indent of the beginning for channels
4086
output += self.get('diagrams').nice_string(6)
4088
# Return process only, get rid off the final \n
4089
elif format == 'normal' or format == 'cmp':
4092
# Raise error if format is wrong
4094
raise self.PhysicsObjectError,\
4095
"Format %s must be \'normal\' or \'full\'." % str(format)
4097
# Return output for full format case.
4101
def abstract_nice_string(self):
    """ Print the nice string of abstract amplitudes,
        which shows the real process of this type."""

    output = super(DecayAmplitude, self).nice_string()
    output += "\nReal process: "
    output += ', '.join([ref['process'].input_string() for ref in self['ab2real_dicts']])
    # NOTE(review): 'return' restored — the assembled string was otherwise
    # discarded in the garbled source.
    return output
4112
# NOTE(review): garbled fragment — the 'else:' branch introducing the
# identical-particle loop ("for pid in real_pid:") is missing, and the
# interaction-coupling lookup near the bottom mixes abstract and real
# dictionaries in a way that should be re-checked against upstream.
def generate_variables_dicts(self, real_amp, ab2real_id = -1):
4113
""" Generate the dictionary of abstract to real variables,
4114
default is to use the latest Ab2RealDict."""
4116
ab_model = self['process']['model']
4117
real_model = real_amp['process']['model']
4118
ab2realdict = self['ab2real_dicts'][ab2real_id]
4120
# Set the initial particle dict
4121
ab2realdict['mass_dict']\
4122
[ab_model.get_particle(self['process'].get_initial_ids()[0])['mass']] = real_model.get_particle(real_amp['process'].get_initial_ids()[0])['mass']
4124
# Set the final particle dict
4125
for ab_pid, real_pid in ab2realdict['final_legs_dict'].items():
4126
if isinstance(real_pid, int):
4127
ab2realdict['mass_dict'][ab_model.get_particle(ab_pid)['mass']] = real_model.get_particle(real_pid)['mass']
4128
# For the case of identical particle
4130
for pid in real_pid:
4131
ab2realdict['mass_dict'][ab_model.get_particle(ab_pid)['mass']] = real_model.get_particle(pid)['mass']
4133
# Set the interaction id dict and intermediate particles
4134
for ab_dia_id, real_dia_id in ab2realdict['dia_sn_dict'].items():
4136
ab_dia = self['diagrams'][ab_dia_id]
4137
real_dia = real_amp['diagrams'][real_dia_id]
4139
# Set the intermediate particle and interaction,
4140
# except for the identical vertex
4141
# (the initial particle will be resetted, but it should be fine.)
4142
for i, v in enumerate(ab_dia['vertices'][:-1]):
4144
# Set intermediate particle
4145
ab2realdict['mass_dict']\
4146
[ab_model.get_particle(v['legs'][-1]['id'])['mass']] \
4147
= real_model.get_particle(real_dia['vertices'][i]['legs'][-1]['id'])['mass']
4149
# Set the interaction
4150
for ab_key, real_coup in \
4151
ab_model['interaction_coupling_dict'][real_dia['vertices'][i]['id']].items():
4153
ab_coup = ab_model.get_interaction(ab_dia['vertices'][i]['id'])['couplings'][ab_key]
4154
ab2realdict['coup_dict'][ab_coup] = real_coup
4156
#===============================================================================
# DecayAmplitudeList: An Amplitude like object contain Process and Channels
#===============================================================================
class DecayAmplitudeList(diagram_generation.AmplitudeList):
    """ List for DecayAmplitude objects."""

    def is_valid_element(self, obj):
        """ Test if the object is a valid DecayAmplitude for the list. """
        return isinstance(obj, DecayAmplitude)

    def get_amplitude(self, final_ids):
        """ Get the amplitudes with the given final particles if
            exist in this list. Otherwise, return None.
            Note: Use stored finallist in Channel!"""
        # NOTE(review): loop header restored — 'amp' is used in the
        # comparison and must iterate over this list.
        for amp in self:
            if sorted([abs(l.get('id')) for l in amp['diagrams'][0]\
                           .get_final_legs()]) == sorted(list_abs(final_ids)):
                return amp
        return None

    def nice_string(self):
        """ Nice string from Amplitude """
        mystr = '\n'.join([a.nice_string() for a in self])
        # NOTE(review): 'return' restored — the string was otherwise lost.
        return mystr

    def abstract_nice_string(self):
        """ Nice string for abstract amplitude. """
        mystr = '\n'.join([a.abstract_nice_string() for a in self])
        return mystr

    def decaytable_string(self, format='normal'):
        """ Decaytable string for Amplitudes """

        # Get the level (n-body) of this amplitudelist and
        # the total width inside this DecayAmplitudeList
        level = len(self[0].get('diagrams')[0].get_final_legs())
        total_width = sum([amp.get('apx_decaywidth') for amp in self])

        # Print the header with the total width
        mystr = '## Contribution to total width of %d-body decay: %.5e \n' \
            %(level, total_width)
        mystr += '\n'.join([a.decaytable_string(format) for a in self])
        return mystr
4208
#===============================================================================
# AbstractModel: A Model class in which objects are abstract
#===============================================================================
# NOTE(review): garbled fragment — the class docstring is cut mid-sentence
# and never closed, and the sorted_keys list is missing its closing
# bracket. Restore from upstream before use.
class AbstractModel(base_objects.Model):
4212
""" Model in which particles are different only in Lorentz structure
4213
and color. Interactions may be duplicated in order to generate amplitude
4214
with the same Lorentz structure vertices or diagrams
4216
sorted_keys = ['name', 'particles', 'parameters', 'interactions',
4217
'couplings', 'lorentz',
4218
'abstract_particles_dict', 'abstract_interactions_dict',
4219
'particle_type_dict',
4220
'interaction_type_dict', 'interaction_coupling_dict',
4221
'ab_matrix_elements'
4224
def default_setup(self):
    """Set default property values; 'particles' becomes a
    DecayParticleList."""

    super(AbstractModel, self).default_setup()
    self['particles'] = DecayParticleList()

    # Object type -> list of abstract objects of the given type
    self['abstract_particles_dict'] = {}
    self['abstract_interactions_dict'] = {}

    # Real pdg code -> (abstract type (spin, color, self-anti), ...);
    # keys are always positive!!!
    self['particle_type_dict'] = {}

    # Real interaction id -> abstract interaction type
    self['interaction_type_dict'] = {}
    self['interaction_coupling_dict'] = {}

    # Store all the matrix elements
    self['ab_matrix_elements'] = AbstractHelasMatrixElementList()

    # One-letter text tag per spin code, used when naming particles.
    self.spin_text_dict = {1: 'S', 2: 'F', 3: 'V', 5: 'T'}
4252
def get_sorted_keys(self):
    """Return the property names of AbstractModel in display order."""
    keys = self.sorted_keys
    return keys
4255
def get_particle_type(self, part, get_code=False):
    """ Return the tuple (spin, color, self_antipart)
        of the given particle, or the pseudo pdg code when get_code
        is True.
        NOTE: bosons are always treated as self-antipart. """

    if isinstance(part, base_objects.Particle):
        # NOTE(review): the 'get_code' branching was lost in the garbled
        # source and is restored here from the int branch's symmetry.
        if get_code:
            return abs(self.get_particle_type_code(part))
        # Bosons are always treated as self-antiparticles (see note);
        # only fermions keep their own self_antipart flag.
        self_antipart = True
        if part.is_fermion():
            self_antipart = part['self_antipart']
        return (part['spin'], part['color'], self_antipart)

    # Use particle_type_dict if input is pdg_code
    elif isinstance(part, int):
        if not get_code:
            return self['particle_type_dict'][abs(part)][0]
        return self['particle_type_dict'][abs(part)][1]

    raise self.PhysicsObjectError(
        "Input should be Particle type or pdg_code(int).")
4282
def get_particle_type_code(self, part):
    """ Return the pseudo pdg_code :
        (9900000+1000*spin+100*color) for non-self-antipart;
        (9910000+1000*spin+100*color) for self-antipart (Majorana fermion)
        of the given particle."""

    if part.is_fermion():
        # Special pdg_code for Majorana fermions
        if part['self_antipart']:
            return 9910000 + 1000*part['spin'] + 100*part['color']
        # Non-self-conjugate fermion: carry the sign of the real pdg code.
        return int(math.copysign(
            9900000 + 1000*part['spin'] + 100*part['color'],
            part.get_pdg_code()))

    # Bosons: plain unsigned pseudo code.
    return 9900000 + 1000*part['spin'] + 100*part['color']
4300
# NOTE(review): garbled fragment — the 'else:' arms (serial number sn = 0
# for a new type), the pieces that assemble name_string/mass_string, and
# several line tails are missing. Restore from upstream before use.
def add_ab_particle(self, pdg_code, force=False):
4301
""" Functions to add new particle according to the given particle
4302
spin and color information. This function will assign the correct
4303
serial number of new particle and change the abstract_particles."""
4305
# Check argument type
4306
if not force and not isinstance(pdg_code, int):
4307
raise self.PhysicsObjectError,\
4308
"Argument must be a pdg_code."
4310
# To ensure pdg_code is positive
4311
pdg_code = abs(pdg_code)
4313
# Setup new particle
4314
ab_part = DecayParticle()
4315
ab_part['spin'] = self.get_particle_type(pdg_code)[0]
4316
ab_part['color'] = self.get_particle_type(pdg_code)[1]
4317
ab_part['self_antipart'] = self.get_particle_type(pdg_code)[2]
4319
# Check the current abstract_particles
4320
if self.get_particle_type(pdg_code) \
4321
in self['abstract_particles_dict'].keys():
4322
# For existing type, record the serial number
4323
sn = len(self['abstract_particles_dict']\
4324
[self.get_particle_type(pdg_code)])
4326
# Setup new type of abstract particle
4327
self['abstract_particles_dict']\
4328
[self.get_particle_type(pdg_code)]= DecayParticleList()
4332
# Set other properties: name = text(spin)+color
4334
name_string = self.spin_text_dict[ab_part.get('spin')]
4340
str(ab_part.get('color')) +\
4343
# mass = M'S or N''spin''color'
4344
# % S for self-antipart, N for non-self-antipart
4346
if ab_part['self_antipart']:
4347
mass_string = mass_string + 'S'
4349
mass_string = mass_string + 'N'
4350
mass_string = mass_string + name_string +\
4351
str(ab_part.get('color')) +\
4353
ab_part['mass'] = mass_string
4355
# pdg_code = 99'1:S ,0: N'00'spin''color'
4356
# e.g. w+: 9903100, gluino: 9912300
4357
ab_part['pdg_code'] = self.get_particle_type(pdg_code, get_code=True)+sn
4359
# Set anti-part properties if this is not a self-antiparticle
4360
if not ab_part['self_antipart']:
4361
ab_part['antiname'] = ab_part.get('name')+'~'
4363
# Create the anti-part
4364
anti_ab_part = DecayParticle(ab_part)
4365
anti_ab_part['is_part'] = False
4367
# Append ab_part into self['particles'] and abstract_particles_dict,
4368
self['particles'].append(ab_part)
4369
self['abstract_particles_dict'][self.get_particle_type(ab_part)].append(ab_part)
4370
logger.info("Add Abstract Particle %s according to #%s" \
4371
%(ab_part.get('name'), pdg_code))
4374
# Reset the particle dictionary
4375
self.get('particle_dict')
4376
self['particle_dict'][ab_part['pdg_code']] = ab_part
4377
if not ab_part['self_antipart']:
4378
self['particle_dict'][-ab_part['pdg_code']] = anti_ab_part
4382
def setup_particle(self, part, force=False):
    """Register one real particle in the AbstractModel and make sure an
    abstract particle of its (spin, color, self_antipart) type exists."""

    # Check argument type
    if not force and not isinstance(part, base_objects.Particle):
        raise self.PhysicsObjectError(
            "Argument must be a Particle object.")

    # Record both the type tuple and the pseudo pdg code for this pdg.
    part_type = self.get_particle_type(part)
    type_code = self.get_particle_type(part, get_code=True)
    self['particle_type_dict'][part['pdg_code']] = (part_type, type_code)

    # Create a new abstract particle when this type is not known yet.
    if part_type not in self['abstract_particles_dict'].keys():
        self.add_ab_particle(part['pdg_code'], force)
4402
def setup_particles(self, part_list, force=False):
    """Convert every real particle of part_list into the abstract
    bookkeeping of this model."""

    # Check argument type
    if not force and not isinstance(part_list, base_objects.ParticleList):
        raise self.PhysicsObjectError(
            "Argument must be a ParticleList.")

    # Register each particle in turn.
    for real_part in part_list:
        self.setup_particle(real_part, force)

    # Reset dictionaries in the model
    self.reset_dictionaries()
4420
# NOTE(review): garbled fragment — several branch headers and call tails
# are missing (the std_output guards, the duplicate-particle branch, the
# append arguments). Two issues visible even through the garbling:
# (1) mutable default argument 'sn_dict={}' is shared across calls;
# (2) a leftover Python 2 debug 'print' statement below should be removed.
def get_particlelist_type(self, pdgcode_list, ignore_dup=True,
4421
sn_dict={}, std_output=True):
4422
""" Return a list of the type of the given particlelist,
4423
and a dictionary records the number of each type of particle.
4424
The pdg_code will start from the number given by serial_number_dict.
4425
Note1: If ignore_dup is True,
4426
the same particles will not be assigned with the same id.
4427
Note2: std_output is used to get the same abstract_pidlist
4428
for a given real_pids, no matter what order is.
4429
Usage e.g.: pids of final_legs in one real amplitude.
4430
Not used for: comparing two real_pids.
4433
pseudo_ab_particlelist = []
4434
# Used the given sn_dict or an empty one
4435
serial_number_dict = copy.copy(sn_dict)
4437
if not pdgcode_list:
4438
return pseudo_ab_particlelist, serial_number_dict
4440
# If standard output is required, input_pdgcode_list
4442
input_pdgcode_list = copy.copy(pdgcode_list)
4444
# List of indices to record the original order
4445
original_order = range(len(pdgcode_list))
4447
# Sort according to (particle_type, real_pid) of input_pdgcode_list
4448
temp = zip(input_pdgcode_list, original_order)
4449
temp.sort(key=lambda item: \
4450
(self.get_particle_type(item[0]),item[0]))
4452
input_pdgcode_list, original_order = zip(*temp)
4454
print original_order, pdgcode_list
4456
# Construct the abstract pid list
4457
for i, pdg_code in enumerate(input_pdgcode_list):
4459
# Use get_particle_type function
4460
part_type = self.get_particle_type(abs(pdg_code))
4461
# The sign of part_type_code depends on whether the
4462
# abstract particle is self-antipart or not
4463
part_type_code = self.get_particle_type(abs(pdg_code), True)
4464
if pdg_code < 0 and not part_type[2]:
4465
part_type_code = -part_type_code
4467
# Add the pseudo pdg code, if it is the first one in its type
4468
# appearing in the list.
4469
# There should be a abstract particle for this type,
4470
# if the setup_particles has been run correctly.
4471
if not part_type in serial_number_dict.keys():
4472
pseudo_ab_particlelist.append(\
4475
serial_number_dict[part_type] = 1
4477
# If not ignore duplicate particles,
4478
# check if there are same particle in the list.
4483
for j, previous_pdgcode in enumerate(input_pdgcode_list):
4484
# find duplicate particle, use the abstract particle
4486
# No need to update the serial_number_dict
4490
if abs(previous_pdgcode) == \
4492
pseudo_ab_particlelist.append(\
4494
pseudo_ab_particlelist[j],
4500
# Append new abstract particle if not appears before
4502
# If the abstract particle of the given serial number
4503
# does not exist, add a new abstract particle and append
4505
if serial_number_dict[part_type] >= \
4506
len(self['abstract_particles_dict'][part_type]):
4507
self.add_ab_particle(pdg_code, True)
4509
# Append the pdg_code into the list,
4510
# starting from the s/n given by serial_number_dict
4511
pseudo_ab_particlelist.append(\
4513
abs(part_type_code)+\
4514
serial_number_dict[part_type],
4518
# Update the serial_number_dict
4519
serial_number_dict[part_type] += 1
4521
# For std_output, reorder the pseudo_ab_particlelist
4522
# to be the same as pdgcode_list
4524
temp = zip(original_order, pseudo_ab_particlelist)
4526
original_order, pseudo_ab_particlelist = zip(*temp)
4527
pseudo_ab_particlelist = list(pseudo_ab_particlelist)
4529
return pseudo_ab_particlelist, serial_number_dict
4532
# NOTE(review): the entire body of this method was lost in the garbled
# source; only the signature and docstring remain. Restore from upstream.
def get_color_string(self, inter):
4533
""" Return the correct color string according to the
4534
sorted order of particle. """
4539
def get_interaction_type(self, inter_id):
    """ Return the tuple (lorentz, particles_type) of the given interaction.
        Raise error if type is not in quick reference dictionary."""

    # Check the quick reference dictionary.
    # If the lorentz type has been imported, use the type
    # provided by the dictionary. Otherwise, raise error.
    # Note: the keys, (lorentz, part_types), are well-sorted in
    # setup_interactions.
    # NOTE(review): the try/except was lost in the garbled source; a
    # missing id presumably surfaced as this PhysicsObjectError — confirm.
    try:
        return self['interaction_type_dict'][inter_id]
    except KeyError:
        raise self.PhysicsObjectError(
            "Interaction #%d has not been imported into AbstractModel." \
            % inter_id)
4556
# NOTE(review): garbled fragment — the 'if ... in' header around the
# abstract_interactions_dict membership test, the 'else:' arms (sn = 0,
# color fallbacks), and the branch that derives type_sn from the existing
# interaction id are missing. Restore from upstream before use.
def add_ab_interaction(self, inter_id, force=False, color = None):
4557
""" Functions to set new interaction according to the given interaction
4558
particles and lorentz sturcture. This function will assign the correct
4559
serial number of new interaction, change the abstract_interactions.
4560
Note: Add NEW type of interaction should use setup."""
4562
# Check argument type
4563
if not force and not isinstance(inter_id, int):
4564
raise self.PhysicsObjectError,\
4565
"Argument must be an Interaction id."
4567
# Setup new interaction
4568
ab_inter = base_objects.Interaction()
4569
inter_type = self.get_interaction_type(inter_id)
4571
# Check the current abstract_interactions
4573
self['abstract_interactions_dict'].keys():
4574
# For existing type, record the serial number
4575
sn = len(self['abstract_interactions_dict'][inter_type])
4577
# Setup the new item, serial number = 0
4578
self['abstract_interactions_dict'][inter_type] = \
4579
base_objects.InteractionList()
4583
# Type_sn is the serial number of the type
4585
type_sn = len(self['abstract_interactions_dict'].keys())
4588
self['abstract_interactions_dict'][inter_type][0]['id']/1000
4591
# | |_> the serial number
4593
# |_> The serial number of the interaction type
4594
ab_inter['id'] = 1000*type_sn + sn
4596
# Get particle from the model.particle_dict
4597
# to ensure that the particle is DecayParticle, not Particle
4598
ab_inter['particles'] = DecayParticleList(\
4599
[self.get_particle(pid) for pid in inter_type[0]])
4600
ab_inter['lorentz'] = list(inter_type[1])
4602
# Retrieve the color information
4604
ab_inter['color'] = color
4606
# Use the information before
4607
ab_inter['color'] = \
4608
self['abstract_interactions_dict'][inter_type][0]['color']
4610
raise self.PhysicsObjectError,\
4611
"Error to add interaction. No color information available."
4613
# couplings = G###cl##
4614
# | || |-> the serial number
4615
# | ||-> the lorentz identifier
4616
# | |-> the color identifier
4617
# |-> The serial number of the interaction type
4618
ab_inter['couplings'] = {}
4619
for i, colr in enumerate(ab_inter['color']):
4620
for j, lorentz in enumerate(ab_inter['lorentz']):
4621
ab_inter['couplings'][(i, j)] = 'G%03d%1d%1d%02d' \
4622
%(type_sn, i, j, sn)
4626
# Append ab_inter into self['interactions'] and
4627
# abstract_interactions_dict
4628
self['interactions'].append(ab_inter)
4629
self['abstract_interactions_dict'][inter_type].append(ab_inter)
4631
# Reset the dictionary
4632
self['interaction_dict'][ab_inter['id']] = ab_inter
4635
# NOTE(review): garbled fragment — multiple statements were lost in this
# span: the 'continue' for skipped interactions, the initialization of
# remove_list, the assignment targets for union_lorentz/union_color, the
# computation of type_sn inside the re-numbering loop, the initialization
# of ab_key, and the right-hand side of the coupling-dict assignment.
# Restore from upstream before use.
def setup_interactions(self, inter_list, anti_dict, force=False):
4636
"""Add real interactions into AbstractModel,
4637
convert into abstract interactions.
4638
The lorentz and color
4639
structures keep to be the union of all correpsonding
4641
Construct the quick reference dictionary,
4642
and setup the coupling constants in the end."""
4644
# Check argument type
4645
if not force and not isinstance(inter_list,
4646
base_objects.InteractionList):
4647
raise self.PhysicsObjectError,\
4648
"Argument must be an InteractionList."
4650
# Add all interactions except for
4651
# 1. radiation interaction, 2. interaction with all stable particles
4652
for inter in inter_list:
4653
# Check validity: remove stable particles
4654
final_list = [p.get('pdg_code')\
4655
for p in inter['particles'] \
4656
if not p['is_stable'] ]
4658
# Check validity: do not count duplicate particles,
4659
# i.e. those cannot decay through this interaction.
4660
__duplicate__ = True
4661
for pid in final_list:
4662
if final_list.count(pid) == 1:
4663
__duplicate__ = False
4666
# Ignore all stable particle interaction (e.g. g g > g g) (len=0)
4667
# and interaction with all duplicate particles
4668
if len(final_list) == 0 or __duplicate__:
4672
# Use the get_particlelist_type to find the particles type of this
4674
# NOTE: particles do not sort,
4675
# ambiguity might happened!!
4676
abpart_types, sn = \
4677
self.get_particlelist_type([p.get_pdg_code() for p in\
4678
inter['particles']])
4680
# The key have to be sorted
4681
new_key = [tuple(abpart_types),
4682
tuple(sorted(inter['lorentz'])),
4683
tuple(sorted([str(colorstring) \
4684
for colorstring in inter['color']]))
4686
color_list = inter['color']
4690
# Check if this type is the subset of known lorentz type,
4691
# or contain the known lorentz type.
4692
# If so, union the two and continue search.
4693
# Do the same merge for color structure
4694
# Note: the abstract_interactions_dict always maintains to have
4695
# its keys that have no intersect lorentz with others.
4696
for key, ab_interlist in self['abstract_interactions_dict'].items():
4697
# Check lorentz if the particles are in the same types
4698
if key[0] == new_key[0]:
4699
# Stop if this lorentz and color are already in
4701
if set(key[1]).issuperset(new_key[1]) and \
4702
set(key[2]).issuperset(new_key[2]):
4705
# Continue if no joit between the lorentz types
4706
# and color strings either.
4707
elif set(key[1]).isdisjoint(new_key[1]) and \
4708
set(key[2]).isdisjoint(new_key[2]):
4710
# This key has a related lorentz structure or
4712
# Create the unions and prepare to remove this key
4715
tuple(sorted(set(key[1]).union(inter['lorentz'])))
4717
tuple(sorted(set(key[2]).union(new_key[2])))
4718
new_key[1] = union_lorentz
4719
new_key[2] = union_color
4721
# Remember to remove this key later
4722
remove_list.append(key)
4724
# Collect the real color string
4725
color_list.extend([c for c in ab_interlist[0]['color']\
4726
if not c in color_list])
4728
# Set temporate interaction_type_dict,
4729
# transform list into tuple in order to be eligible for key.
4730
self['interaction_type_dict'][inter['id']] = tuple(new_key)
4732
# If it is a new key, add interaction
4733
# in abstract_interactions_dict
4736
# Use add_ab_interaction to get the correct format,
4737
# it will find type from interaction_type_dict
4738
self.add_ab_interaction(inter['id'], color = color_list)
4740
# Remove subset, if remove_list is not empty
4741
for remove_key in remove_list:
4742
# Remove old abstract interactions in interaction list
4743
for old_int in self['abstract_interactions_dict'][remove_key]:
4744
self['interactions'].remove(old_int)
4745
del self['abstract_interactions_dict'][remove_key]
4747
# Reset the id of all abstract interactions
4748
# (the deletion could cause some errors.)
4749
for i, ab_inter in enumerate(self['interactions']):
4753
# | |_> the serial number
4755
# |_> The serial number of the interaction type
4756
ab_inter['id'] = 1000*type_sn
4758
# couplings = G_______
4759
# | || |-> the serial number
4760
# | ||-> the lorentz identifier
4761
# | |-> the color identifier
4762
# |-> The serial number of the interaction type
4763
ab_inter['couplings'] = {}
4764
for i, colr in enumerate(ab_inter['color']):
4765
for j, lorentz in enumerate(ab_inter['lorentz']):
4766
ab_inter['couplings'][(i, j)] = 'G%03d%1d%1d00' \
4771
# Update the quick reference dict
4772
# and setup the interaction_coupling_dict
4773
for inter in inter_list:
4775
# Ignore interactions that cannot decay
4776
if inter['id'] not in self['interaction_type_dict'].keys():
4779
# Update the type dict
4780
old_type = self['interaction_type_dict'][inter['id']]
4781
for new_type in self['abstract_interactions_dict'].keys():
4782
if new_type[0] == old_type[0] and \
4783
(set(old_type[1]) < set(new_type[1]) or \
4784
set(old_type[2]) < set(new_type[2])):
4786
self['interaction_type_dict'][inter['id']] = new_type
4789
# Construct the coupling dict
4790
self['interaction_coupling_dict'][inter['id']] = {}
4791
inter_type = self['interaction_type_dict'][inter['id']]
4792
ab_inter = self['abstract_interactions_dict'][inter_type][0]
4793
for key, coup in inter['couplings'].items():
4794
color = inter['color'][key[0]]
4795
lorentz = inter['lorentz'][key[1]]
4797
# Get new key for the coupling
4798
ab_key[0] = ab_inter['color'].index(color)
4799
ab_key[1] = ab_inter['lorentz'].index(lorentz)
4801
self['interaction_coupling_dict'][inter['id']][tuple(ab_key)]\
4805
# Update dict for anti-iteraction
4806
for inter in inter_list:
4807
# For possible anti-interaction,
4808
# Update the interaction_type_dict, interaction_coupling_dict
4809
if inter['id'] in anti_dict.keys():
4810
anti_inter_id = anti_dict[inter['id']]
4814
# property of -(inter_id) -> property of anti_inter_id
4815
self['interaction_type_dict'][-inter['id']] = \
4816
self['interaction_type_dict'][anti_inter_id]
4817
self['interaction_coupling_dict'][-inter['id']] = \
4818
self['interaction_coupling_dict'][anti_inter_id]
4821
# Reset dictionaries in the model
4822
self.reset_dictionaries()
4823
self.get('particle_dict')
4824
self.get('interaction_dict')
4827
def get_interactionlist_type(self, interid_list, ignore_dup=False,
4829
""" Return a list of the type of the given interactions,
4830
and a dictionary records the number of each type of interaction.
4831
The abstract interaction id will start from the number given
4832
by serial_number_dict.
4833
Note: If ignore_dup is True,
4834
the same interactions will not assign the same id.
4837
pseudo_ab_interlist = []
4838
# Used the given sn_dict or an empty one
4839
serial_number_dict = copy.copy(sn_dict)
4841
for i, inter_id in enumerate(interid_list):
4843
# Append identity vertices
4845
pseudo_ab_interlist.append(0)
4848
# Use get_interaction_type function
4849
inter_type = self.get_interaction_type(inter_id)
4851
self['abstract_interactions_dict'][inter_type][0]['id']
4853
# Add the pseudo id, if it is the first one in its type
4854
# appearing in the list.
4855
# There should be a abstract interaction for this type,
4856
# if the setup_interactions has been run correctly.
4857
if not inter_type in serial_number_dict.keys():
4858
pseudo_ab_interlist.append(\
4861
serial_number_dict[inter_type] = 1
4863
# If not ignore duplicate interactions (default),
4864
# check if there are same interactions in the list.
4869
for j, previous_id in enumerate(interid_list):
4870
# find duplicate interaction,
4871
# use the abstract interaction already exists.
4872
# No need to update the serial_number_dict
4878
pseudo_ab_interlist.append(\
4879
pseudo_ab_interlist[j])
4884
# Append new abstract interaction if not appears before
4886
# If the abstract interaction of the given serial number
4887
# does not exist, add a new abstract interaction and append
4889
if serial_number_dict[inter_type] >= \
4890
len(self['abstract_interactions_dict'][inter_type]):
4891
self.add_ab_interaction(inter_id, True)
4893
# Append the pdg_code into the list,
4894
# starting from the s/n given by serial_number_dict
4895
pseudo_ab_interlist.append(\
4897
serial_number_dict[inter_type]
4900
# Update the serial_number_dict
4901
serial_number_dict[inter_type] += 1
4904
return pseudo_ab_interlist, serial_number_dict
4907
# Helper function to construct the AbstractMatrixElement
4908
def compare_diagrams(self, ab_dia, real_dia, ab2realdict=None):
4909
""" Return True if the two diagrams are in the same abstract type.
4910
The ab_dia and real_dia must have the same topology in this algorithm.
4912
a. Compare the pseudo-abstract interaction id list by ORDER
4913
b. Compare the pseudo-abstract pdg_code list by ORDER.
4916
if not isinstance(ab_dia, Channel) or not isinstance(real_dia, Channel):
4917
raise self.PhysicsObjectError,\
4918
"The first two argument are not Channel objects."
4919
if ab2realdict != None and not isinstance(ab2realdict, Ab2RealDict):
4920
raise self.PhysicsObjectError,\
4921
"The final argument should be Ab2RealDict, otherwise should be omitted."
4924
# Interaction id list
4925
ab_inter_id_list = ab_dia['abstract_type'][0]
4926
real_inter_id_list = [v.get('id') for v in real_dia['vertices']]
4928
# Quick compare from the length.
4929
if len(ab_inter_id_list) != len(real_inter_id_list):
4932
# Full comparision of interaction type.
4933
# The duplicated intereactions are taken into consideration.
4934
if self.get_interactionlist_type(real_inter_id_list)[0] != \
4938
# Intermediate Particle id list
4939
real_pdgcode_list = [v.get('legs')[-1]['id'] \
4940
for v in real_dia['vertices'][:-2]]
4943
# Full comparision of intermediate particle type.
4944
# 1. Duplicated particles will be treated as different.
4945
# 2. Do not get the std_output. We want to compare two real pids,
4947
if self.get_particlelist_type(real_pdgcode_list, std_output=False)[0]\
4948
!= ab_dia['abstract_type'][1]:
4951
# Continue to check the final Particle id list
4952
ini_type = self.get_particle_type(real_dia.get_initial_id())
4953
if self.get_particle_type(real_dia.get_initial_id(), get_code=True) \
4954
!= ab_dia.get_initial_id():
4958
real_pdgcode_list = [l['id'] for l in real_dia.get_final_legs()]
4960
# Full comparision of particle type.
4961
# Duplicated particles will be treated as different.
4963
# Try to use final_legs_dict to save time
4964
temp = [ab2realdict['final_legs_dict'][l.get('id')] for l in ab_dia.get_final_legs()]
4966
# Compare the full list, temp may contain sublist,
4967
# if one abstract pid corresponds to several real pids.
4969
if isinstance(item, list):
4970
assert_pidlist.extend(item)
4972
assert_pidlist.append(item)
4974
if assert_pidlist != real_pdgcode_list:
4978
if sorted(self.get_particlelist_type(real_pdgcode_list,
4979
sn_dict = {ini_type: 1})[0]) != sorted(ab_dia['abstract_type'][2]):
4988
def add_ab_diagram(self, ab_amp, real_dia):
4989
""" Add abstract diagram from real one into the abstract amplitude.
4990
The abstract_type of abstract diagram is constructed. """
4992
if not isinstance(ab_amp, DecayAmplitude):
4993
raise self.PhysicsObjectError,\
4994
"The first argument is not DecayAmplitude object."
4995
if not isinstance(real_dia, Channel):
4996
raise self.PhysicsObjectError,\
4997
"The second argument is not Channel object."
4999
ab_dia = Channel({'onshell': True})
5000
ab_dia['vertices'] = copy.deepcopy(real_dia['vertices'])
5002
# Setup the interaction ids
5003
real_inter_id_list = [v.get('id') for v in real_dia['vertices']]
5005
# Setup the abstract interaction id type first
5006
new_inter_ids = self.get_interactionlist_type(real_inter_id_list)[0]
5007
ab_dia['abstract_type'][0] = new_inter_ids
5009
# Setup the abstract interaction ids from inter_sn_dict in ab_amp,
5010
# then recycle the sn record
5011
new_inter_ids, ab_amp['inter_sn_dict'] = \
5012
self.get_interactionlist_type(real_inter_id_list,
5013
sn_dict = ab_amp['inter_sn_dict'])
5014
for i, ab_inter_id in enumerate(new_inter_ids):
5015
ab_dia['vertices'][i]['id'] = ab_inter_id
5018
# Setup the initial legs
5019
ini_type = self.get_particle_type(real_dia['vertices'][-1]['legs'][-1]['id'])
5020
ini_code = self.get_particle_type(real_dia['vertices'][-1]['legs'][-1]['id'], get_code=True)
5021
ab_dia['vertices'][-1]['legs'][-1]['id'] = ini_code
5022
ab_dia['vertices'][-2]['legs'][-1]['id'] = ini_code
5024
ab_dia['vertices'][-1]['legs'][0]['id'] = ini_code
5026
ab_dia['vertices'][-1]['legs'][0]['id'] = -ini_code
5029
# Setup the final abstract particle id
5030
real_pdgcode_list = [l.get('id') for l in real_dia.get_final_legs()]
5031
ab_pid_list = self.get_particlelist_type(real_pdgcode_list,
5032
sn_dict={ini_type:1})[0]
5033
# The abstract diagram is copied from real diagram,
5034
# so the ab_pid_list, given according to the final_legs in
5035
# real diagram, can be used directly to abstract diagram.
5036
for i, leg in enumerate(ab_dia.get_final_legs()):
5037
leg['id'] = ab_pid_list[i]
5039
# Update the abstract_type
5040
ab_dia['abstract_type'][2] = ab_pid_list
5044
# Setup the intermediate Particle id list
5045
real_pdgcode_list = [v.get('legs')[-1]['id'] \
5046
for v in real_dia['vertices'][:-2]]
5048
# Setup the abstract particle id type first
5049
new_part_ids = self.get_particlelist_type(real_pdgcode_list)[0]
5050
ab_dia['abstract_type'][1] = new_part_ids
5052
# Setup the abstract interaction ids from the part_sn_dict of ab_amp,
5053
# then recycle the sn_dict.
5054
new_part_ids, ab_amp['part_sn_dict'] = \
5055
self.get_particlelist_type(real_pdgcode_list,
5056
sn_dict=ab_amp['part_sn_dict'])
5057
for i, ab_pid in enumerate(new_part_ids):
5058
ab_dia['vertices'][i]['legs'][-1]['id'] = ab_pid
5060
# Find the previous leg that connect with this one
5061
for v in ab_dia['vertices'][i+1:]:
5062
# Use the number to identify the same leg
5063
for l in v.get('legs')[:-1]:
5065
ab_dia['vertices'][i]['legs'][-1]['number']:
5070
# Add this diagram into the amplitude.
5071
ab_amp.add_std_diagram(ab_dia, self)
5075
def generate_ab_amplitudes(self, amp_list):
5076
""" Generate the abstract Amplitudes from real amplitudes of the
5077
SAME initial particle,
5078
then generating the AbstractMatrixElement. """
5084
# Get the abstract initial id
5085
ini_pdg = amp_list[0]['process'].get_initial_ids()[0]
5086
ab_ini_pdg = self.get_particle_type(ini_pdg, get_code=True)
5087
ab_ini = self.get_particle(ab_ini_pdg)
5089
for amp in amp_list:
5090
# Check if this abstract amplitude exists
5091
final_ids = [l.get('id') for l in amp['process'].get_final_legs()]
5092
# PDG code start from the initial one
5093
pseudo_pids, ini_sn_dict = \
5094
self.get_particlelist_type(final_ids,\
5095
sn_dict = {self.get_particle_type(ini_pdg): 1})
5096
ab_amp = ab_ini.get_amplitude(pseudo_pids)
5098
# Construct abstract Amplitude if not exist
5100
# Construct the process
5101
new_process = base_objects.Process({'model': self})
5102
new_process['legs'] = copy.deepcopy(amp['process']['legs'])
5104
for new_leg in new_process['legs']:
5106
if new_leg.get('state'):
5107
new_leg['id'] = pseudo_pids[i]
5110
new_leg['id'] = ab_ini_pdg
5112
# Construct Amplitude
5113
ab_amp = DecayAmplitude({'part_sn_dict': ini_sn_dict})
5114
ab_amp['process'] = new_process
5116
ab_ini['decay_amplitudes'][len(final_ids)].append(ab_amp)
5118
ab_ini['decay_amplitudes'][len(final_ids)] = DecayAmplitudeList([ab_amp])
5120
# Create the Ab2RealDict for this real amplitude
5121
# Set the final_legs_dict, using the pseudo_pids.
5122
ab_amp['ab2real_dicts'].append(Ab2RealDict({'process':
5125
ab_amp['process']}))
5126
ab_amp['ab2real_dicts'][-1].set_final_legs_dict()
5128
# Scanning the diagrams in real amplitude,
5129
# Create new diagrams if necessary
5130
for i, dia in enumerate(amp['diagrams']):
5132
# Check diagrams in abstract amplitude
5133
for j, ab_dia in enumerate(ab_amp['diagrams']):
5135
if not j in ab_amp['ab2real_dicts'][-1]['dia_sn_dict'].keys() and self.compare_diagrams(ab_dia, dia, ab_amp['ab2real_dicts'][-1]):
5137
# Update the dia_sn_dict
5138
ab_amp['ab2real_dicts'][-1]['dia_sn_dict'][j] = i
5142
# Create new diagram if necessary
5144
# the new abstract diagram corresponds to this real diagram
5145
ab_amp['ab2real_dicts'][-1]['dia_sn_dict']\
5146
[len(ab_amp['diagrams'])] = i
5147
self.add_ab_diagram(ab_amp, dia)
5150
# Construct the variable dicts
5151
ab_amp.generate_variables_dicts(amp)
5154
def generate_ab_matrixelements(self, ab_amplist):
    """Build one AbstractHelasMatrixElement per abstract amplitude in
    ab_amplist and return them collected in an
    AbstractHelasMatrixElementList."""

    matrix_elements = AbstractHelasMatrixElementList()

    # One abstract matrix element for each abstract amplitude.
    for amplitude in ab_amplist:
        matrix_elements.append(AbstractHelasMatrixElement(amplitude))

    return matrix_elements
5166
def generate_ab_matrixelements_all(self):
    """Generate all abstract matrix elements in this model and
    save them inside 'ab_matrix_elements'.

    Returns the AbstractHelasMatrixElementList that was stored."""

    ab_matrix_elements = AbstractHelasMatrixElementList()

    # Generate all matrix elements in this model.
    # Each particle keeps its decay amplitudes keyed by final-state
    # multiplicity; only the amplitude lists themselves are needed here,
    # so iterate over values() instead of items() (the key was unused).
    for part in self['particles']:
        for amps in part['decay_amplitudes'].values():
            ab_matrix_elements.extend(self.generate_ab_matrixelements(amps))

    # Cache the result on the model for later reuse.
    self['ab_matrix_elements'] = ab_matrix_elements

    return ab_matrix_elements
5181
#===============================================================================
5183
#===============================================================================
5184
class Ab2RealDict(base_objects.PhysicsObject):
5185
"""A Reference dict to mapping the information of an abstract amplitude
5186
to a real amplitude."""
5189
sorted_keys = ['process', 'dia_sn_dict', 'mass_dict', 'coup_dict',
5192
def default_setup(self):
    """Default values for all properties."""

    # The real process and the abstract process this dict connects.
    self['process'] = base_objects.Process()
    self['ab_process'] = base_objects.Process()

    # Mapping from serial numbers of diagrams in the abstract
    # amplitude to the corresponding diagrams in the real amplitude.
    self['dia_sn_dict'] = {}

    # Translation tables between abstract and real quantities
    # (masses, couplings, final-state legs).
    self['mass_dict'] = {}
    self['coup_dict'] = {}
    self['final_legs_dict'] = {}
5204
def set_final_legs_dict(self, ab_object=None, real_dia=None):
5205
""" Setup the final_legs_dict."""
5209
real_pids = [l.get('id') for l in real_dia.get_final_legs()]
5210
ini_pid = real_dia.get_initial_id()
5212
real_pids = [l.get('id') for l in self['process'].get_final_legs()]
5213
ini_pid = self['process'].get_initial_ids()[0]
5217
ab_model = self['ab_process']['model']
5218
ab_pids = ab_model.get_particlelist_type(real_pids, sn_dict = {ab_model.get_particle_type(ini_pid): 1})[0]
5219
elif isinstance(ab_object, AbstractModel):
5220
ab_pids = ab_object.get_particlelist_type(real_pids, sn_dict = {ab_object.get_particle_type(ini_pid): 1})[0]
5222
raise self.PhysicsObjectError, \
5223
"The first argument is not necessary, otherwise it should be AbstractModel."
5225
# Reset final_legs_dict
5226
self['final_legs_dict'] = {}
5228
# So far, each ab_pid only has one real_pid
5229
for k, ab_pid in enumerate(ab_pids):
5230
if not ab_pid in self['final_legs_dict'].keys():
5231
self['final_legs_dict'][ab_pid] = real_pids[k]
5232
elif isinstance(self['final_legs_dict'][ab_pid], list):
5233
logger.warning('multiple real id correspondence')
5234
self['final_legs_dict'][ab_pid].append(real_pids[k])
5236
logger.warning('multiple real id correspondence')
5237
self['final_legs_dict'][ab_pid] = \
5238
[self['final_legs_dict'][ab_pid], real_pids[k]]
5240
#===============================================================================
5242
#===============================================================================
5243
class Ab2RealDictList(base_objects.PhysicsObjectList):
    """A list of Ab2RealDict objects, each recording how an abstract
    amplitude maps onto one of the real amplitudes it represents."""
    # NOTE(review): the previous docstring was copy-pasted from an
    # interaction-list class and described the wrong type.

    def is_valid_element(self, obj):
        """Test if the object is a valid Ab2RealDict for the list."""
        return isinstance(obj, Ab2RealDict)
5252
#===============================================================================
5253
# AbstractHelasMatrixElement
5254
#===============================================================================
5255
class AbstractHelasMatrixElement(helas_objects.HelasMatrixElement):
5256
"""A HelasMatrixElement that contains the Ab2RealDict."""
5259
def default_setup(self):
    """Default values for all properties."""

    # Start from the standard HelasMatrixElement properties, then add
    # the mapping back to the real amplitudes this element stands for.
    super(AbstractHelasMatrixElement, self).default_setup()
    self['ab2real_dicts'] = Ab2RealDictList()
5266
# Customized constructor
5267
def __init__(self, amplitude=None, optimization=1,
5268
decay_ids=[], gen_color=True):
5269
"""Constructor for the AbstractHelasMatrixElement.
5270
If DecayAmplitude is provided, copy the Ab2RealDictList.
5273
super(AbstractHelasMatrixElement, self).__init__()
5275
if isinstance(amplitude, DecayAmplitude):
5276
self['ab2real_dicts'] = amplitude['ab2real_dicts']
5279
#===============================================================================
5280
# AbstractHelasMatrixElementList
5281
#===============================================================================
5282
class AbstractHelasMatrixElementList(helas_objects.HelasMatrixElementList):
    """List of AbstractHelasMatrixElement objects."""

    def is_valid_element(self, obj):
        """Test if object obj is a valid AbstractHelasMatrixElement
        for the list."""
        # NOTE(review): the old docstring claimed plain HelasMatrixElement,
        # but the check requires the Abstract subclass.
        return isinstance(obj, AbstractHelasMatrixElement)
5292
#===============================================================================
5294
#===============================================================================
5297
return [abs(item) for item in list]
5300
"""Define the leg comparison, useful when testEqual is execute"""
5301
mycmp = cmp(x['id'], y['id'])
5303
mycmp = cmp(x['state'], y['state'])
5306
def id_num_cmp(x, y):
5307
"""Define the leg (id, number) sort."""
5308
mycmp = cmp(x[0], y[0])
5310
mycmp = cmp(x[1], y[1])
5313
def channelcmp_width(x, y):
5314
""" Sort the channels by their width."""
5316
mycmp = cmp(x['apx_decaywidth'], y['apx_decaywidth'])
5318
mycmp = cmp(x['apx_decaywidth_nextlevel'], y['apx_decaywidth_nextlevel'])
5321
def channelcmp_final(x, y):
5322
""" Sort the channels by their final_mass_list.
5323
This will be similar to sort by the final state particles."""
5325
mycmp = cmp(x['final_mass_list'], y['final_mass_list'])
5329
def amplitudecmp_width(x, y):
5330
""" Sort the amplitudes by their width."""
5331
mycmp = cmp(x['apx_decaywidth'], y['apx_decaywidth'])
5335
def part_type_cmp(x, y):
5336
""" Sort the abstract particle type."""
5337
mycmp = cmp(x[0], y[0])
5340
mycmp = cmp(x[1], y[1])
5345
""" Sort the particle according to signed pdg_code."""
5347
return cmp(x.get_pdg_code(), y.get_pdg_code())