16 """Methods and classes to export models and matrix elements to Pythia 8
17 and C++ Standalone format."""
18
19 import fractions
20 import glob
21 import itertools
22 import logging
23 from math import fmod
24 import os
25 import re
26 import shutil
27 import subprocess
28
29 import madgraph.core.base_objects as base_objects
30 import madgraph.core.color_algebra as color
31 import madgraph.core.helas_objects as helas_objects
32 import madgraph.iolibs.drawing_eps as draw
33 import madgraph.iolibs.files as files
34 import madgraph.iolibs.helas_call_writers as helas_call_writers
35 import madgraph.iolibs.file_writers as writers
36 import madgraph.iolibs.template_files as template_files
37 import madgraph.iolibs.ufo_expression_parsers as parsers
38 from madgraph import MadGraph5Error, InvalidCmd, MG5DIR
39 from madgraph.iolibs.files import cp, ln, mv
40
41 import madgraph.various.misc as misc
42
43 import aloha.create_aloha as create_aloha
44 import aloha.aloha_writers as aloha_writers
45
46 _file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/'
47 logger = logging.getLogger('madgraph.export_pythia8')
56 """Prepare export_dir as standalone_cpp directory, including:
57 src (for RAMBO, model and ALOHA files + makefile)
58 lib (with compiled libraries from src)
59 SubProcesses (with check_sa.cpp + makefile and Pxxxxx directories)
60 """
61
62 cwd = os.getcwd()
63
64 try:
65 os.mkdir(dirpath)
66 except os.error as error:
67 logger.warning(error.strerror + " " + dirpath)
68
69 try:
70 os.chdir(dirpath)
71 except os.error:
72 logger.error('Could not cd to directory %s' % dirpath)
73 return 0
74
75 logger.info('Creating subdirectories in directory %s' % dirpath)
76
77 try:
78 os.mkdir('src')
79 except os.error as error:
80 logger.warning(error.strerror + " " + dirpath)
81
82 try:
83 os.mkdir('lib')
84 except os.error as error:
85 logger.warning(error.strerror + " " + dirpath)
86
87 try:
88 os.mkdir('Cards')
89 except os.error as error:
90 logger.warning(error.strerror + " " + dirpath)
91
92 try:
93 os.mkdir('SubProcesses')
94 except os.error as error:
95 logger.warning(error.strerror + " " + dirpath)
96
97
98 open(os.path.join("Cards","param_card.dat"), 'w').write(\
99 model.write_param_card())
100
101 src_files = ['rambo.h', 'rambo.cc', 'read_slha.h', 'read_slha.cc']
102
103
104 for f in src_files:
105 cp(_file_path + 'iolibs/template_files/' + f, 'src')
106
107
108 makefile = read_template_file('Makefile_sa_cpp_src') % \
109 {'model': ProcessExporterCPP.get_model_name(model.get('name'))}
110 open(os.path.join('src', 'Makefile'), 'w').write(makefile)
111
112
113 cp(_file_path + 'iolibs/template_files/check_sa.cpp', 'SubProcesses')
114
115
116 makefile = read_template_file('Makefile_sa_cpp_sp') % \
117 {'model': ProcessExporterCPP.get_model_name(model.get('name'))}
118 open(os.path.join('SubProcesses', 'Makefile'), 'w').write(makefile)
119
120
121 os.chdir(cwd)
122
129
130 """Generate the Pxxxxx directory for a subprocess in C++ standalone,
131 including the necessary .h and .cc files"""
132
133 cwd = os.getcwd()
134
135
136 process_exporter_cpp = ProcessExporterCPP(matrix_element,
137 cpp_helas_call_writer)
138
139
140 dirpath = os.path.join(path, \
141 "P%d_%s" % (process_exporter_cpp.process_number,
142 process_exporter_cpp.process_name))
143 try:
144 os.mkdir(dirpath)
145 except os.error as error:
146 logger.warning(error.strerror + " " + dirpath)
147
148 try:
149 os.chdir(dirpath)
150 except os.error:
151 logger.error('Could not cd to directory %s' % dirpath)
152 return 0
153
154 logger.info('Creating files in directory %s' % dirpath)
155
156 process_exporter_cpp.path = dirpath
157
158 process_exporter_cpp.generate_process_files()
159
160 linkfiles = ['check_sa.cpp', 'Makefile']
161
162
163 for file in linkfiles:
164 ln('../%s' % file)
165
166
167 os.chdir(cwd)
168
169 return
170
172 """Make the model library in a C++ standalone directory"""
173
174 source_dir = os.path.join(dir_path, "src")
175
176 logger.info("Running make for src")
177 misc.compile(cwd=source_dir)
178
183 """Class to take care of exporting a set of matrix elements to
184 C++ format."""
185
186
187 process_dir = '.'
188 include_dir = '.'
189 process_template_h = 'cpp_process_h.inc'
190 process_template_cc = 'cpp_process_cc.inc'
191 process_class_template = 'cpp_process_class.inc'
192 process_definition_template = 'cpp_process_function_definitions.inc'
193 process_wavefunction_template = 'cpp_process_wavefunctions.inc'
194 process_sigmaKin_function_template = 'cpp_process_sigmaKin_function.inc'
195
198
199 def __init__(self, matrix_elements, cpp_helas_call_writer, process_string = "",
200 process_number = 0, path = os.getcwd()):
201 """Initiate with matrix elements, helas call writer, process
202 string, path. Generate the process .h and .cc files."""
203
204 if isinstance(matrix_elements, helas_objects.HelasMultiProcess):
205 self.matrix_elements = matrix_elements.get('matrix_elements')
206 elif isinstance(matrix_elements, helas_objects.HelasMatrixElement):
207 self.matrix_elements = \
208 helas_objects.HelasMatrixElementList([matrix_elements])
209 elif isinstance(matrix_elements, helas_objects.HelasMatrixElementList):
210 self.matrix_elements = matrix_elements
211 else:
212 raise base_objects.PhysicsObject.PhysicsObjectError(
213 "Wrong object type for matrix_elements")
214
215 if not self.matrix_elements:
216 raise MadGraph5Error("No matrix elements to export")
217
218 self.model = self.matrix_elements[0].get('processes')[0].get('model')
219 self.model_name = ProcessExporterCPP.get_model_name(self.model.get('name'))
220
221 self.processes = sum([me.get('processes') for \
222 me in self.matrix_elements], [])
223 self.processes.extend(sum([me.get_mirror_processes() for \
224 me in self.matrix_elements], []))
225
226 self.nprocesses = len(self.matrix_elements)
227 if any([m.get('has_mirror_process') for m in self.matrix_elements]):
228 self.nprocesses = 2*len(self.matrix_elements)
229
230 if process_string:
231 self.process_string = process_string
232 else:
233 self.process_string = self.processes[0].base_string()
234
235 if process_number:
236 self.process_number = process_number
237 else:
238 self.process_number = self.processes[0].get('id')
239
240 self.process_name = self.get_process_name()
241 self.process_class = "CPPProcess"
242
243 self.path = path
244 self.helas_call_writer = cpp_helas_call_writer
245
246 if not isinstance(self.helas_call_writer, helas_call_writers.CPPUFOHelasCallWriter):
247 raise self.ProcessExporterCPPError(
248 "helas_call_writer not CPPUFOHelasCallWriter")
249
250 self.nexternal, self.ninitial = \
251 self.matrix_elements[0].get_nexternal_ninitial()
252 self.nfinal = self.nexternal - self.ninitial
253
254
255
256
257 self.single_helicities = True
258
259 hel_matrix = self.get_helicity_matrix(self.matrix_elements[0])
260
261 for me in self.matrix_elements[1:]:
262 if self.get_helicity_matrix(me) != hel_matrix:
263 self.single_helicities = False
264
265 if self.single_helicities:
266
267
268
269
270 self.wavefunctions = []
271 wf_number = 0
272
273 for me in self.matrix_elements:
274 for iwf, wf in enumerate(me.get_all_wavefunctions()):
275 try:
276 old_wf = \
277 self.wavefunctions[self.wavefunctions.index(wf)]
278 wf.set('number', old_wf.get('number'))
279 except ValueError:
280 wf_number += 1
281 wf.set('number', wf_number)
282 self.wavefunctions.append(wf)
283
284
285 self.amplitudes = helas_objects.HelasAmplitudeList()
286 amp_number = 0
287 for me in self.matrix_elements:
288 for iamp, amp in enumerate(me.get_all_amplitudes()):
289 try:
290 old_amp = \
291 self.amplitudes[self.amplitudes.index(amp)]
292 amp.set('number', old_amp.get('number'))
293 except ValueError:
294 amp_number += 1
295 amp.set('number', amp_number)
296 self.amplitudes.append(amp)
297 diagram = helas_objects.HelasDiagram({'amplitudes': self.amplitudes})
298 self.amplitudes = helas_objects.HelasMatrixElement({\
299 'diagrams': helas_objects.HelasDiagramList([diagram])})
300
301
302
304 """Generate the .h and .cc files needed for C++, for the
305 processes described by multi_matrix_element"""
306
307
308 if not os.path.isdir(os.path.join(self.path, self.include_dir)):
309 os.makedirs(os.path.join(self.path, self.include_dir))
310 filename = os.path.join(self.path, self.include_dir,
311 '%s.h' % self.process_class)
312 self.write_process_h_file(writers.CPPWriter(filename))
313
314 if not os.path.isdir(os.path.join(self.path, self.process_dir)):
315 os.makedirs(os.path.join(self.path, self.process_dir))
316 filename = os.path.join(self.path, self.process_dir,
317 '%s.cc' % self.process_class)
318 self.write_process_cc_file(writers.CPPWriter(filename))
319
320 logger.info('Created files %(process)s.h and %(process)s.cc in' % \
321 {'process': self.process_class} + \
322 ' directory %(dir)s' % {'dir': os.path.split(filename)[0]})
323
324
325
326
327
328
330 """Write the class definition (.h) file for the process"""
331
332 if not isinstance(writer, writers.CPPWriter):
333 raise writers.CPPWriter.CPPWriterError(\
334 "writer not CPPWriter")
335
336 replace_dict = {}
337
338
339 info_lines = get_mg5_info_lines()
340 replace_dict['info_lines'] = info_lines
341
342
343 replace_dict['model_name'] = \
344 self.model_name
345
346
347 replace_dict['process_file_name'] = self.process_name
348
349
350 process_class_definitions = self.get_process_class_definitions()
351 replace_dict['process_class_definitions'] = process_class_definitions
352
353 file = read_template_file(self.process_template_h) % replace_dict
354
355
356 writer.writelines(file)
357
358
359
360
362 """Write the class member definition (.cc) file for the process
363 described by matrix_element"""
364
365 if not isinstance(writer, writers.CPPWriter):
366 raise writers.CPPWriter.CPPWriterError(\
367 "writer not CPPWriter")
368
369 replace_dict = {}
370
371
372 info_lines = get_mg5_info_lines()
373 replace_dict['info_lines'] = info_lines
374
375
376 replace_dict['process_file_name'] = self.process_name
377
378
379 replace_dict['model_name'] = self.model_name
380
381
382
383 process_function_definitions = \
384 self.get_process_function_definitions()
385 replace_dict['process_function_definitions'] = \
386 process_function_definitions
387
388 file = read_template_file(self.process_template_cc) % replace_dict
389
390
391 writer.writelines(file)
392
393
394
395
397 """The complete class definition for the process"""
398
399 replace_dict = {}
400
401
402 replace_dict['model_name'] = self.model_name
403
404
405 process_lines = "\n".join([self.get_process_info_lines(me) for me in \
406 self.matrix_elements])
407
408 replace_dict['process_lines'] = process_lines
409
410
411 replace_dict['nfinal'] = self.nfinal
412
413
414 replace_dict['ninitial'] = self.ninitial
415
416
417 replace_dict['process_class_name'] = self.process_name
418
419
420 process_definition = "%s (%s)" % (self.process_string,
421 self.model_name)
422 replace_dict['process_definition'] = process_definition
423
424 process = self.processes[0]
425
426 replace_dict['process_code'] = self.process_number
427 replace_dict['nexternal'] = self.nexternal
428 replace_dict['nprocesses'] = self.nprocesses
429
430 if self.single_helicities:
431 replace_dict['all_sigma_kin_definitions'] = \
432 """// Calculate wavefunctions
433 void calculate_wavefunctions(const int perm[], const int hel[]);
434 static const int nwavefuncs = %d;
435 std::complex<double> w[nwavefuncs][18];
436 static const int namplitudes = %d;
437 std::complex<double> amp[namplitudes];""" % \
438 (len(self.wavefunctions),
439 len(self.amplitudes.get_all_amplitudes()))
440 replace_dict['all_matrix_definitions'] = \
441 "\n".join(["double matrix_%s();" % \
442 me.get('processes')[0].shell_string().\
443 replace("0_", "") \
444 for me in self.matrix_elements])
445
446 else:
447 replace_dict['all_sigma_kin_definitions'] = \
448 "\n".join(["void sigmaKin_%s();" % \
449 me.get('processes')[0].shell_string().\
450 replace("0_", "") \
451 for me in self.matrix_elements])
452 replace_dict['all_matrix_definitions'] = \
453 "\n".join(["double matrix_%s(const int hel[]);" % \
454 me.get('processes')[0].shell_string().\
455 replace("0_", "") \
456 for me in self.matrix_elements])
457
458
459 file = read_template_file(self.process_class_template) % replace_dict
460
461 return file
462
464 """The complete Pythia 8 class definition for the process"""
465
466 replace_dict = {}
467
468
469 replace_dict['model_name'] = self.model_name
470
471
472 replace_dict['process_lines'] = \
473 "\n".join([self.get_process_info_lines(me) for \
474 me in self.matrix_elements])
475
476
477 replace_dict['process_class_name'] = self.process_name
478
479 color_amplitudes = [me.get_color_amplitudes() for me in \
480 self.matrix_elements]
481
482 replace_dict['initProc_lines'] = \
483 self.get_initProc_lines(self.matrix_elements[0],
484 color_amplitudes)
485 replace_dict['reset_jamp_lines'] = \
486 self.get_reset_jamp_lines(color_amplitudes)
487 replace_dict['sigmaKin_lines'] = \
488 self.get_sigmaKin_lines(color_amplitudes)
489 replace_dict['sigmaHat_lines'] = \
490 self.get_sigmaHat_lines()
491
492 replace_dict['all_sigmaKin'] = \
493 self.get_all_sigmaKin_lines(color_amplitudes,
494 'CPPProcess')
495
496 file = read_template_file(self.process_definition_template) %\
497 replace_dict
498
499 return file
500
502 """Return process file name for the process in matrix_element"""
503
504 process_string = self.process_string
505
506
507 proc_number_pattern = re.compile("^(.+)@\s*(\d+)\s*(.*)$")
508 proc_number_re = proc_number_pattern.match(process_string)
509 proc_number = 0
510 if proc_number_re:
511 proc_number = int(proc_number_re.group(2))
512 process_string = proc_number_re.group(1) + \
513 proc_number_re.group(3)
514
515
516 order_pattern = re.compile("^(.+)\s+(\w+)\s*=\s*(\d+)\s*$")
517 order_re = order_pattern.match(process_string)
518 while order_re:
519 process_string = order_re.group(1)
520 order_re = order_pattern.match(process_string)
521
522 process_string = process_string.replace(' ', '')
523 process_string = process_string.replace('>', '_')
524 process_string = process_string.replace('+', 'p')
525 process_string = process_string.replace('-', 'm')
526 process_string = process_string.replace('~', 'x')
527 process_string = process_string.replace('/', '_no_')
528 process_string = process_string.replace('$', '_nos_')
529 process_string = process_string.replace('|', '_or_')
530 if proc_number != 0:
531 process_string = "%d_%s" % (proc_number, process_string)
532
533 process_string = "Sigma_%s_%s" % (self.model_name,
534 process_string)
535 return process_string
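# Illustrative example (not part of the original source): with the hypothetical
# process string 'g g > t t~ @2' and model name 'sm', the substitutions above give
# proc_number = 2 and 'g g > t t~' -> 'gg_ttx', so the method would return
# 'Sigma_sm_2_gg_ttx'.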
536
538 """Return info lines describing the processes for this matrix element"""
539
540 return"\n".join([ "# " + process.nice_string().replace('\n', '\n# * ') \
541 for process in matrix_element.get('processes')])
542
543
545 """Get initProc_lines for function definition for Pythia 8 .cc file"""
546
547 initProc_lines = []
548
549 initProc_lines.append("// Set external particle masses for this matrix element")
550
551 for part in matrix_element.get_external_wavefunctions():
552 initProc_lines.append("mME.push_back(pars->%s);" % part.get('mass'))
553 for i, colamp in enumerate(color_amplitudes):
554 initProc_lines.append("jamp2[%d] = new double[%d];" % \
555 (i, len(colamp)))
556
557 return "\n".join(initProc_lines)
558
560 """Get lines to reset jamps"""
561
562 ret_lines = ""
563 for icol, col_amp in enumerate(color_amplitudes):
564 ret_lines+= """for(int i=0;i < %(ncolor)d; i++)
565 jamp2[%(proc_number)d][i]=0.;\n""" % \
566 {"ncolor": len(col_amp), "proc_number": icol}
567 return ret_lines
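# Illustrative output (assuming two matrix elements with 2 and 1 color flows,
# respectively):
# for(int i=0;i < 2; i++)
# jamp2[0][i]=0.;
# for(int i=0;i < 1; i++)
# jamp2[1][i]=0.;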
568
569
589
590
592 """Get sigmaKin_lines for function definition for Pythia 8 .cc file"""
593
594
595 if self.single_helicities:
596 replace_dict = {}
597
598
599 replace_dict['ncomb'] = \
600 self.matrix_elements[0].get_helicity_combinations()
601
602
603 replace_dict['process_class_name'] = self.process_name
604
605
606 replace_dict['id1'] = self.processes[0].get('legs')[0].get('id')
607 replace_dict['id2'] = self.processes[0].get('legs')[1].get('id')
608
609
610 replace_dict['helicity_matrix'] = \
611 self.get_helicity_matrix(self.matrix_elements[0])
612
613
614 den_factors = [str(me.get_denominator_factor()) for me in \
615 self.matrix_elements]
616 if self.nprocesses != len(self.matrix_elements):
617 den_factors.extend(den_factors)
618 replace_dict['den_factors'] = ",".join(den_factors)
619 replace_dict['get_matrix_t_lines'] = "\n".join(
620 ["t[%(iproc)d]=matrix_%(proc_name)s();" % \
621 {"iproc": i, "proc_name": \
622 me.get('processes')[0].shell_string().replace("0_", "")} \
623 for i, me in enumerate(self.matrix_elements)])
624
625
626 mirror_matrix_lines = ""
627
628 if any([m.get('has_mirror_process') for m in self.matrix_elements]):
629 mirror_matrix_lines += \
630 """ // Mirror initial state momenta for mirror process
631 perm[0]=1;
632 perm[1]=0;
633 // Calculate wavefunctions
634 calculate_wavefunctions(perm, helicities[ihel]);
635 // Mirror back
636 perm[0]=0;
637 perm[1]=1;
638 // Calculate matrix elements
639 """
640
641 mirror_matrix_lines += "\n".join(
642 ["t[%(iproc)d]=matrix_%(proc_name)s();" % \
643 {"iproc": i + len(self.matrix_elements), "proc_name": \
644 me.get('processes')[0].shell_string().replace("0_", "")} \
645 for i, me in enumerate(self.matrix_elements) if me.get('has_mirror_process')])
646
647 replace_dict['get_mirror_matrix_lines'] = mirror_matrix_lines
648
649
650 file = \
651 read_template_file(\
652 self.process_sigmaKin_function_template) %\
653 replace_dict
654
655 return file
656
657 else:
658 ret_lines = "// Call the individual sigmaKin for each process\n"
659 return ret_lines + \
660 "\n".join(["sigmaKin_%s();" % \
661 me.get('processes')[0].shell_string().\
662 replace("0_", "") for \
663 me in self.matrix_elements])
664
666 """Get sigmaKin_process for all subprocesses for Pythia 8 .cc file"""
667
668 ret_lines = []
669 if self.single_helicities:
670 ret_lines.append(\
671 "void %s::calculate_wavefunctions(const int perm[], const int hel[]){" % \
672 class_name)
673 ret_lines.append("// Calculate wavefunctions for all processes")
674 ret_lines.append(self.get_calculate_wavefunctions(\
675 self.wavefunctions, self.amplitudes))
676 ret_lines.append("}")
677 else:
678 ret_lines.extend([self.get_sigmaKin_single_process(i, me) \
679 for i, me in enumerate(self.matrix_elements)])
680 ret_lines.extend([self.get_matrix_single_process(i, me,
681 color_amplitudes[i],
682 class_name) \
683 for i, me in enumerate(self.matrix_elements)])
684 return "\n".join(ret_lines)
685
686
688 """Write sigmaKin for each process"""
689
690
691
692 replace_dict = {}
693
694
695 replace_dict['proc_name'] = \
696 matrix_element.get('processes')[0].shell_string().replace("0_", "")
697
698
699 replace_dict['process_class_name'] = self.process_name
700
701
702 replace_dict['proc_number'] = i
703
704
705 replace_dict['ncomb'] = matrix_element.get_helicity_combinations()
706
707
708 replace_dict['helicity_matrix'] = \
709 self.get_helicity_matrix(matrix_element)
710
711 replace_dict['den_factor'] = matrix_element.get_denominator_factor()
712
713 file = \
714 read_template_file('cpp_process_sigmaKin_subproc_function.inc') %\
715 replace_dict
716
717 return file
718
721 """Write matrix() for each process"""
722
723
724
725 replace_dict = {}
726
727
728 replace_dict['proc_name'] = \
729 matrix_element.get('processes')[0].shell_string().replace("0_", "")
730
731
732
733 if self.single_helicities:
734 replace_dict['matrix_args'] = ""
735 replace_dict['all_wavefunction_calls'] = "int i, j;"
736 else:
737 replace_dict['matrix_args'] = "const int hel[]"
738 wavefunctions = matrix_element.get_all_wavefunctions()
739 replace_dict['all_wavefunction_calls'] = \
740 """const int nwavefuncs = %d;
741 std::complex<double> w[nwavefuncs][18];
742 """ % len(wavefunctions)+ \
743 self.get_calculate_wavefunctions(wavefunctions, [])
744
745
746 replace_dict['process_class_name'] = class_name
747
748
749 replace_dict['proc_number'] = i
750
751
752 replace_dict['ncolor'] = len(color_amplitudes)
753
754 replace_dict['ngraphs'] = matrix_element.get_number_of_amplitudes()
755
756
757 replace_dict['color_matrix_lines'] = \
758 self.get_color_matrix_lines(matrix_element)
759
760 replace_dict['jamp_lines'] = self.get_jamp_lines(color_amplitudes)
761
762 file = read_template_file('cpp_process_matrix.inc') % \
763 replace_dict
764
765 return file
766
767
769 """Get sigmaHat_lines for function definition for Pythia 8 .cc file"""
770
771
772 beams = set([(process.get('legs')[0].get('id'),
773 process.get('legs')[1].get('id')) \
774 for process in self.processes])
775
776 res_lines = []
777
778
779
780 res_lines.append("// Select between the different processes")
781 for ibeam, beam_parts in enumerate(beams):
782
783 if ibeam == 0:
784 res_lines.append("if(id1 == %d && id2 == %d){" % beam_parts)
785 else:
786 res_lines.append("else if(id1 == %d && id2 == %d){" % beam_parts)
787
788
789 beam_processes = [(i, me) for (i, me) in \
790 enumerate(self.matrix_elements) if beam_parts in \
791 [(process.get('legs')[0].get('id'),
792 process.get('legs')[1].get('id')) \
793 for process in me.get('processes')]]
794
795
796 beam_processes.extend([(len(self.matrix_elements) + i, me) for (i, me) in \
797 enumerate(self.matrix_elements) if beam_parts in \
798 [(process.get('legs')[0].get('id'),
799 process.get('legs')[1].get('id')) \
800 for process in me.get_mirror_processes()]])
801
802
803 res_lines.append("// Add matrix elements for processes with beams %s" % \
804 repr(beam_parts))
805 res_lines.append("return %s;" % \
806 ("+".join(["matrix_element[%i]*%i" % \
807 (i, len([proc for proc in \
808 me.get('processes') if beam_parts == \
809 (proc.get('legs')[0].get('id'),
810 proc.get('legs')[1].get('id')) or \
811 me.get('has_mirror_process') and \
812 beam_parts == \
813 (proc.get('legs')[1].get('id'),
814 proc.get('legs')[0].get('id'))])) \
815 for (i, me) in beam_processes]).\
816 replace('*1', '')))
817 res_lines.append("}")
818
819
820 res_lines.append("else {")
821 res_lines.append("// Return 0 if not correct initial state assignment")
822 res_lines.append(" return 0.;}")
823
824 return "\n".join(res_lines)
825
826
828 """Return the Helicity matrix definition lines for this matrix element"""
829
830 helicity_line = "static const int helicities[ncomb][nexternal] = {"
831 helicity_line_list = []
832
833 for helicities in matrix_element.get_helicity_matrix():
834 helicity_line_list.append("{"+",".join(['%d'] * len(helicities)) % \
835 tuple(helicities) + "}")
836
837 return helicity_line + ",".join(helicity_line_list) + "};"
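# Illustrative output for a hypothetical 2->2 process with four spin-1/2 external
# legs (ncomb = 16), each helicity taking the values -1 and +1:
# static const int helicities[ncomb][nexternal] = {{-1,-1,-1,-1},{-1,-1,-1,1},...};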
838
840 """Return the denominator factor line for this matrix element"""
841
842 return "const int denominator = %d;" % \
843 matrix_element.get_denominator_factor()
844
846 """Return the color matrix definition lines for this matrix element. Split
847 rows in chunks of size n."""
848
849 if not matrix_element.get('color_matrix'):
850 return "\n".join(["static const double denom[1] = {1.};",
851 "static const double cf[1][1] = {1.};"])
852 else:
853 color_denominators = matrix_element.get('color_matrix').\
854 get_line_denominators()
855 denom_string = "static const double denom[ncolor] = {%s};" % \
856 ",".join(["%i" % denom for denom in color_denominators])
857
858 matrix_strings = []
859 my_cs = color.ColorString()
860 for index, denominator in enumerate(color_denominators):
861
862 num_list = matrix_element.get('color_matrix').\
863 get_line_numerators(index, denominator)
864
865 matrix_strings.append("{%s}" % \
866 ",".join(["%d" % i for i in num_list]))
867 matrix_string = "static const double cf[ncolor][ncolor] = {" + \
868 ",".join(matrix_strings) + "};"
869 return "\n".join([denom_string, matrix_string])
870
872 """Return the jamp = sum(fermionfactor * amp[i]) lines"""
873
874 res_list = []
875
876 for i, coeff_list in enumerate(color_amplitudes):
877
878 res = "jamp[%i]=" % i
879
880
881
882 list_fracs = [abs(coefficient[0][1]) for coefficient in coeff_list]
883 common_factor = False
884 diff_fracs = list(set(list_fracs))
885 if len(diff_fracs) == 1 and abs(diff_fracs[0]) != 1:
886 common_factor = True
887 global_factor = diff_fracs[0]
888 res = res + '%s(' % coeff(1, global_factor, False, 0)
889
890 for (coefficient, amp_number) in coeff_list:
891 if common_factor:
892 res = res + "%samp[%d]" % (coeff(coefficient[0],
893 coefficient[1] / abs(coefficient[1]),
894 coefficient[2],
895 coefficient[3]),
896 amp_number - 1)
897 else:
898 res = res + "%samp[%d]" % (coeff(coefficient[0],
899 coefficient[1],
900 coefficient[2],
901 coefficient[3]),
902 amp_number - 1)
903
904 if common_factor:
905 res = res + ')'
906
907 res += ';'
908
909 res_list.append(res)
910
911 return "\n".join(res_list)
912
913 @staticmethod
915 """Replace - with _, + with _plus_ in a model name."""
916
917 name = name.replace('-', '_')
918 name = name.replace('+', '_plus_')
919 return name
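# Illustrative examples: get_model_name('sm-ckm') returns 'sm_ckm', and a
# hypothetical model name 'my+model' would become 'my_plus_model'.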
920
921
922
923
924 def generate_process_files_pythia8(multi_matrix_element, cpp_helas_call_writer,
925 process_string = "",
926 process_number = 0, path = os.getcwd()):
927
928 """Generate the .h and .cc files needed for Pythia 8, for the
929 processes described by multi_matrix_element"""
930
931 process_exporter_pythia8 = ProcessExporterPythia8(multi_matrix_element,
932 cpp_helas_call_writer,
933 process_string,
934 process_number,
935 path)
936
937
938 model = process_exporter_pythia8.model
939 model_name = process_exporter_pythia8.model_name
940 process_exporter_pythia8.process_dir = \
941 'Processes_%(model)s' % {'model': \
942 model_name}
943 process_exporter_pythia8.include_dir = process_exporter_pythia8.process_dir
944 process_exporter_pythia8.generate_process_files()
945 return process_exporter_pythia8
946
951 """Class to take care of exporting a set of matrix elements to
952 Pythia 8 format."""
953
954
955 process_template_h = 'pythia8_process_h.inc'
956 process_template_cc = 'pythia8_process_cc.inc'
957 process_class_template = 'pythia8_process_class.inc'
958 process_definition_template = 'pythia8_process_function_definitions.inc'
959 process_wavefunction_template = 'pythia8_process_wavefunctions.inc'
960 process_sigmaKin_function_template = 'pythia8_process_sigmaKin_function.inc'
961
963 """Set process class name"""
964
965 super(ProcessExporterPythia8, self).__init__(*args, **opts)
966
967
968 for me in self.matrix_elements:
969 if me.get_nexternal_ninitial() not in [(3,2),(4,2),(5,2)]:
970 nex,nin = me.get_nexternal_ninitial()
971 raise InvalidCmd(
972 "Pythia 8 can only handle 2->1,2,3 processes, not %d->%d" % \
973 (nin, nex - nin))
974
975 self.process_class = self.process_name
976
977
978
979
980
981
983 """The complete Pythia 8 class definition for the process"""
984
985 replace_dict = {}
986
987
988 replace_dict['model_name'] = self.model_name
989
990
991 process_lines = "\n".join([self.get_process_info_lines(me) for me in \
992 self.matrix_elements])
993
994 replace_dict['process_lines'] = process_lines
995
996
997 replace_dict['nfinal'] = self.nfinal
998
999
1000 replace_dict['process_class_name'] = self.process_name
1001
1002
1003 process_definition = "%s (%s)" % (self.process_string,
1004 self.model_name)
1005 replace_dict['process_definition'] = process_definition
1006
1007 process = self.processes[0]
1008 replace_dict['process_code'] = 10000 + \
1009 100*process.get('id') + \
1010 self.process_number
1011
1012 replace_dict['inFlux'] = self.get_process_influx()
1013
1014 replace_dict['id_masses'] = self.get_id_masses(process)
1015 replace_dict['resonances'] = self.get_resonance_lines()
1016
1017 replace_dict['nexternal'] = self.nexternal
1018 replace_dict['nprocesses'] = self.nprocesses
1019
1020 if self.single_helicities:
1021 replace_dict['all_sigma_kin_definitions'] = \
1022 """// Calculate wavefunctions
1023 void calculate_wavefunctions(const int perm[], const int hel[]);
1024 static const int nwavefuncs = %d;
1025 std::complex<double> w[nwavefuncs][18];
1026 static const int namplitudes = %d;
1027 std::complex<double> amp[namplitudes];""" % \
1028 (len(self.wavefunctions),
1029 len(self.amplitudes.get_all_amplitudes()))
1030 replace_dict['all_matrix_definitions'] = \
1031 "\n".join(["double matrix_%s();" % \
1032 me.get('processes')[0].shell_string().\
1033 replace("0_", "") \
1034 for me in self.matrix_elements])
1035
1036 else:
1037 replace_dict['all_sigma_kin_definitions'] = \
1038 "\n".join(["void sigmaKin_%s();" % \
1039 me.get('processes')[0].shell_string().\
1040 replace("0_", "") \
1041 for me in self.matrix_elements])
1042 replace_dict['all_matrix_definitions'] = \
1043 "\n".join(["double matrix_%s(const int hel[]);" % \
1044 me.get('processes')[0].shell_string().\
1045 replace("0_", "") \
1046 for me in self.matrix_elements])
1047
1048
1049 file = read_template_file('pythia8_process_class.inc') % replace_dict
1050
1051 return file
1052
1054 """The complete Pythia 8 class definition for the process"""
1055
1056 replace_dict = {}
1057
1058
1059 replace_dict['model_name'] = self.model_name
1060
1061
1062 replace_dict['process_lines'] = \
1063 "\n".join([self.get_process_info_lines(me) for \
1064 me in self.matrix_elements])
1065
1066
1067 replace_dict['process_class_name'] = self.process_name
1068
1069 color_amplitudes = [me.get_color_amplitudes() for me in \
1070 self.matrix_elements]
1071
1072 replace_dict['initProc_lines'] = \
1073 self.get_initProc_lines(color_amplitudes)
1074 replace_dict['reset_jamp_lines'] = \
1075 self.get_reset_jamp_lines(color_amplitudes)
1076 replace_dict['sigmaKin_lines'] = \
1077 self.get_sigmaKin_lines(color_amplitudes)
1078 replace_dict['sigmaHat_lines'] = \
1079 self.get_sigmaHat_lines()
1080
1081 replace_dict['setIdColAcol_lines'] = \
1082 self.get_setIdColAcol_lines(color_amplitudes)
1083
1084 replace_dict['weightDecay_lines'] = \
1085 self.get_weightDecay_lines()
1086
1087 replace_dict['all_sigmaKin'] = \
1088 self.get_all_sigmaKin_lines(color_amplitudes,
1089 self.process_name)
1090
1091 file = read_template_file('pythia8_process_function_definitions.inc') %\
1092 replace_dict
1093
1094 return file
1095
1097 """Return process file name for the process in matrix_element"""
1098
1099
1100
1101 beams = set([tuple(sorted([process.get('legs')[0].get('id'),
1102 process.get('legs')[1].get('id')])) \
1103 for process in self.processes])
1104
1105
1106 antiquarks = range(-1, -6, -1)
1107 quarks = range(1,6)
1108 antileptons = range(-11, -17, -1)
1109 leptons = range(11, 17, 1)
1110 allquarks = antiquarks + quarks
1111 antifermions = antiquarks + antileptons
1112 fermions = quarks + leptons
1113 allfermions = allquarks + antileptons + leptons
1114 downfermions = range(-2, -5, -2) + range(-1, -5, -2) + \
1115 range(-12, -17, -2) + range(-11, -17, -2)
1116 upfermions = range(1, 5, 2) + range(2, 5, 2) + \
1117 range(11, 17, 2) + range(12, 17, 2)
1118
1119
1120
1121 set_tuples = [(set([(21, 21)]), "gg"),
1122 (set(list(itertools.product(allquarks, [21]))), "qg"),
1123 (set(zip(antiquarks, quarks)), "qqbarSame"),
1124 (set(list(itertools.product(allquarks,
1125 allquarks))), "qq"),
1126 (set(zip(antifermions, fermions)),"ffbarSame"),
1127 (set(zip(downfermions, upfermions)),"ffbarChg"),
1128 (set(list(itertools.product(allfermions,
1129 allfermions))), "ff"),
1130 (set(list(itertools.product(allfermions, [22]))), "fgm"),
1131 (set([(21, 22)]), "ggm"),
1132 (set([(22, 22)]), "gmgm")]
1133
1134 for set_tuple in set_tuples:
1135 if beams.issubset(set_tuple[0]):
1136 return set_tuple[1]
1137
1138 raise InvalidCmd('Pythia 8 cannot handle incoming flavors %s' %\
1139 repr(beams))
1140
1141 return
1142
1144 """Return the lines which define the ids for the final state particles,
1145 for the Pythia phase space"""
1146
1147 if self.nfinal == 1:
1148 return ""
1149
1150 mass_strings = []
1151 for i in range(2, len(process.get('legs'))):
1152 if self.model.get_particle(process.get('legs')[i].get('id')).\
1153 get('mass') not in ['zero', 'ZERO']:
1154 mass_strings.append("int id%dMass() const {return %d;}" % \
1155 (i + 1, abs(process.get('legs')[i].get('id'))))
1156
1157 return "\n".join(mass_strings)
1158
1160 """Return the lines which define the ids for the final state particles,
1161 for the Pythia phase space"""
1162
1163 if self.nfinal == 1:
1164 return "virtual int resonanceA() const {return %d;}" % \
1165 abs(self.processes[0].get('legs')[2].get('id'))
1166
1167 res_strings = []
1168 res_letters = ['A', 'B']
1169
1170 sids, singleres, schannel = self.get_resonances()
1171
1172 for i, sid in enumerate(sids[:2]):
1173 res_strings.append("virtual int resonance%s() const {return %d;}"\
1174 % (res_letters[i], sid))
1175
1176 if singleres != 0:
1177 res_strings.append("virtual int idSChannel() const {return %d;}" \
1178 % singleres)
1179 if schannel:
1180 res_strings.append("virtual bool isSChannel() const {return true;}")
1181
1182 return "\n".join(res_strings)
1183
1185 """Return the PIDs for any resonances in 2->2 and 2->3 processes."""
1186
1187 resonances = []
1188 model = self.matrix_elements[0].get('processes')[0].get('model')
1189 new_pdg = model.get_first_non_pdg()
1190
1191 diagrams = sum([me.get('diagrams') for me in self.matrix_elements], [])
1192 for diagram in diagrams:
1193 schannels, tchannels = diagram.get('amplitudes')[0].\
1194 get_s_and_t_channels(self.ninitial, new_pdg)
1195
1196 for schannel in schannels:
1197 sid = schannel.get('legs')[-1].get('id')
1198 part = self.model.get_particle(sid)
1199 if part:
1200 width = self.model.get_particle(sid).get('width')
1201 if width.lower() != 'zero':
1202 resonances.append(sid)
1203 resonance_set = set(resonances)
1204
1205 singleres = 0
1206
1207 if len(resonances) == len(diagrams) and len(resonance_set) == 1:
1208 singleres = resonances[0]
1209
1210
1211 resonance_set = list(set([abs(pid) for pid in resonance_set]))
1212
1213
1214
1215 schannel = not any([\
1216 len(d.get('amplitudes')[0].get_s_and_t_channels(self.ninitial, new_pdg)[0])\
1217 == 0 for d in diagrams]) and \
1218 not any(['QCD' in d.calculate_orders() for d in diagrams])
1219
1220 return resonance_set, singleres, schannel
1221
1223 """Get initProc_lines for function definition for Pythia 8 .cc file"""
1224
1225 initProc_lines = []
1226
1227 initProc_lines.append("// Set massive/massless matrix elements for c/b/mu/tau")
1228
1229 if not self.model.get_particle(4) or \
1230 self.model.get_particle(4).get('mass').lower() == 'zero':
1231 cMassiveME = "0."
1232 else:
1233 cMassiveME = "particleDataPtr->m0(4)"
1234 initProc_lines.append("mcME = %s;" % cMassiveME)
1235 if not self.model.get_particle(5) or \
1236 self.model.get_particle(5).get('mass').lower() == 'zero':
1237 bMassiveME = "0."
1238 else:
1239 bMassiveME = "particleDataPtr->m0(5)"
1240 initProc_lines.append("mbME = %s;" % bMassiveME)
1241 if not self.model.get_particle(13) or \
1242 self.model.get_particle(13).get('mass').lower() == 'zero':
1243 muMassiveME = "0."
1244 else:
1245 muMassiveME = "particleDataPtr->m0(13)"
1246 initProc_lines.append("mmuME = %s;" % muMassiveME)
1247 if not self.model.get_particle(15) or \
1248 self.model.get_particle(15).get('mass').lower() == 'zero':
1249 tauMassiveME = "0."
1250 else:
1251 tauMassiveME = "particleDataPtr->m0(15)"
1252 initProc_lines.append("mtauME = %s;" % tauMassiveME)
1253
1254 for i, me in enumerate(self.matrix_elements):
1255 initProc_lines.append("jamp2[%d] = new double[%d];" % \
1256 (i, len(color_amplitudes[i])))
1257
1258 return "\n".join(initProc_lines)
1259
1261 """Generate lines to set final-state id and color info for process"""
1262
1263 res_lines = []
1264
1265
1266 beams = set([(process.get('legs')[0].get('id'),
1267 process.get('legs')[1].get('id')) \
1268 for process in self.processes])
1269
1270
1271 for ibeam, beam_parts in enumerate(beams):
1272 if ibeam == 0:
1273 res_lines.append("if(id1 == %d && id2 == %d){" % beam_parts)
1274 else:
1275 res_lines.append("else if(id1 == %d && id2 == %d){" % beam_parts)
1276
1277 beam_processes = [(i, me) for (i, me) in \
1278 enumerate(self.matrix_elements) if beam_parts in \
1279 [(process.get('legs')[0].get('id'),
1280 process.get('legs')[1].get('id')) \
1281 for process in me.get('processes')]]
1282
1283 beam_mirror_processes = []
1284 if beam_parts[0] != beam_parts[1]:
1285 beam_mirror_processes = [(i, me) for (i, me) in \
1286 enumerate(self.matrix_elements) if beam_parts in \
1287 [(process.get('legs')[1].get('id'),
1288 process.get('legs')[0].get('id')) \
1289 for process in me.get('processes')]]
1290
1291 final_id_list = []
1292 final_mirror_id_list = []
1293 for (i, me) in beam_processes:
1294 final_id_list.extend([tuple([l.get('id') for l in \
1295 proc.get('legs') if l.get('state')]) \
1296 for proc in me.get('processes') \
1297 if beam_parts == \
1298 (proc.get('legs')[0].get('id'),
1299 proc.get('legs')[1].get('id'))])
1300 for (i, me) in beam_mirror_processes:
1301 final_mirror_id_list.extend([tuple([l.get('id') for l in \
1302 proc.get('legs') if l.get('state')]) \
1303 for proc in me.get_mirror_processes() \
1304 if beam_parts == \
1305 (proc.get('legs')[0].get('id'),
1306 proc.get('legs')[1].get('id'))])
1307 final_id_list = set(final_id_list)
1308 final_mirror_id_list = set(final_mirror_id_list)
1309
1310 if final_id_list and final_mirror_id_list or \
1311 not final_id_list and not final_mirror_id_list:
1312 raise self.ProcessExporterCPPError(
1313 "Missing processes, or both process and mirror process")
1314
1315
1316 ncombs = len(final_id_list)+len(final_mirror_id_list)
1317
1318 res_lines.append("// Pick one of the flavor combinations %s" % \
1319 ", ".join([repr(ids) for ids in final_id_list]))
1320
1321 me_weight = []
1322 for final_ids in final_id_list:
1323 items = [(i, len([ p for p in me.get('processes') \
1324 if [l.get('id') for l in \
1325 p.get('legs')] == \
1326 list(beam_parts) + list(final_ids)])) \
1327 for (i, me) in beam_processes]
1328 me_weight.append("+".join(["matrix_element[%i]*%i" % (i, l) for\
1329 (i, l) in items if l > 0]).\
1330 replace('*1', ''))
1331 if any([l>1 for (i, l) in items]):
1332 raise self.ProcessExporterCPPError(
1333 "More than one process with identical " + \
1334 "external particles is not supported")
1335
1336 for final_ids in final_mirror_id_list:
1337 items = [(i, len([ p for p in me.get_mirror_processes() \
1338 if [l.get('id') for l in p.get('legs')] == \
1339 list(beam_parts) + list(final_ids)])) \
1340 for (i, me) in beam_mirror_processes]
1341 me_weight.append("+".join(["matrix_element[%i]*%i" % \
1342 (i+len(self.matrix_elements), l) for\
1343 (i, l) in items if l > 0]).\
1344 replace('*1', ''))
1345 if any([l>1 for (i, l) in items]):
1346 raise self.ProcessExporterCPPError(
1347 "More than one process with identical " + \
1348 "external particles is not supported")
1349
1350 if final_id_list:
1351 res_lines.append("int flavors[%d][%d] = {%s};" % \
1352 (ncombs, self.nfinal,
1353 ",".join(["{" + ",".join([str(id) for id \
1354 in ids]) + "}" for ids \
1355 in final_id_list])))
1356 elif final_mirror_id_list:
1357 res_lines.append("int flavors[%d][%d] = {%s};" % \
1358 (ncombs, self.nfinal,
1359 ",".join(["{" + ",".join([str(id) for id \
1360 in ids]) + "}" for ids \
1361 in final_mirror_id_list])))
1362 res_lines.append("vector<double> probs;")
1363 res_lines.append("double sum = %s;" % "+".join(me_weight))
1364 for me in me_weight:
1365 res_lines.append("probs.push_back(%s/sum);" % me)
1366 res_lines.append("int choice = rndmPtr->pick(probs);")
1367 for i in range(self.nfinal):
1368 res_lines.append("id%d = flavors[choice][%d];" % (i+3, i))
1369
1370 res_lines.append("}")
1371
1372 res_lines.append("setId(%s);" % ",".join(["id%d" % i for i in \
1373 range(1, self.nexternal + 1)]))
1374
1375
1376
1377
1378
1379
1380
1381
1382
1383
1384 res_lines.append("// Pick color flow")
1385
1386 res_lines.append("int ncolor[%d] = {%s};" % \
1387 (len(color_amplitudes),
1388 ",".join([str(len(colamp)) for colamp in \
1389 color_amplitudes])))
1390
1391
1392 for ime, me in enumerate(self.matrix_elements):
1393
1394 res_lines.append("if(%s){" % \
1395 "||".join(["&&".join(["id%d == %d" % \
1396 (i+1, l.get('id')) for (i, l) in \
1397 enumerate(p.get('legs'))])\
1398 for p in me.get('processes')]))
1399 if ime > 0:
1400 res_lines[-1] = "else " + res_lines[-1]
1401
1402 proc = me.get('processes')[0]
1403 if not me.get('color_basis'):
1404
1405 res_lines.append("setColAcol(%s);" % ",".join(["0"]*2*self.nfinal))
1406 else:
1407
1408 repr_dict = {}
1409 legs = proc.get_legs_with_decays()
1410 for l in legs:
1411 repr_dict[l.get('number')] = \
1412 proc.get('model').get_particle(l.get('id')).get_color()
1413
1414 color_flow_list = \
1415 me.get('color_basis').color_flow_decomposition(\
1416 repr_dict, self.ninitial)
1417
1418 ncolor = len(me.get('color_basis'))
1419 res_lines.append("""vector<double> probs;
1420 double sum = %s;
1421 for(int i=0;i<ncolor[%i];i++)
1422 probs.push_back(jamp2[%i][i]/sum);
1423 int ic = rndmPtr->pick(probs);""" % \
1424 ("+".join(["jamp2[%d][%d]" % (ime, i) for i \
1425 in range(ncolor)]), ime, ime))
1426
1427 color_flows = []
1428 for color_flow_dict in color_flow_list:
1429 color_flows.append([int(fmod(color_flow_dict[l.get('number')][i], 500)) \
1430 for (l,i) in itertools.product(legs, [0,1])])
1431
1432
1433 res_lines.append("static int colors[%d][%d] = {%s};" % \
1434 (ncolor, 2 * self.nexternal,
1435 ",".join(["{" + ",".join([str(id) for id \
1436 in flows]) + "}" for flows \
1437 in color_flows])))
1438
1439 res_lines.append("setColAcol(%s);" % \
1440 ",".join(["colors[ic][%d]" % i for i in \
1441 range(2 * self.nexternal)]))
1442 res_lines.append('}')
1443
1444
1445 for ime, me in enumerate(self.matrix_elements):
1446 if not me.get('has_mirror_process'):
1447 continue
1448 res_lines.append("else if(%s){" % \
1449 "||".join(["&&".join(["id%d == %d" % \
1450 (i+1, l.get('id')) for (i, l) in \
1451 enumerate(p.get('legs'))])\
1452 for p in me.get_mirror_processes()]))
1453
1454 proc = me.get('processes')[0]
1455 if not me.get('color_basis'):
1456
1457 res_lines.append("setColAcol(%s);" % ",".join(["0"]*2*self.nfinal))
1458 else:
1459
1460 repr_dict = {}
1461 legs = proc.get_legs_with_decays()
1462 legs[0:2] = [legs[1],legs[0]]
1463 for l in legs:
1464 repr_dict[l.get('number')] = \
1465 proc.get('model').get_particle(l.get('id')).get_color()
1466
1467 color_flow_list = \
1468 me.get('color_basis').color_flow_decomposition(\
1469 repr_dict, self.ninitial)
1470
1471 ncolor = len(me.get('color_basis'))
1472 res_lines.append("""vector<double> probs;
1473 double sum = %s;
1474 for(int i=0;i<ncolor[%i];i++)
1475 probs.push_back(jamp2[%i][i]/sum);
1476 int ic = rndmPtr->pick(probs);""" % \
1477 ("+".join(["jamp2[%d][%d]" % (ime, i) for i \
1478 in range(ncolor)]), ime, ime))
1479
1480 color_flows = []
1481 for color_flow_dict in color_flow_list:
1482 color_flows.append([color_flow_dict[l.get('number')][i] % 500 \
1483 for (l,i) in itertools.product(legs, [0,1])])
1484
1485
1486 res_lines.append("static int colors[%d][%d] = {%s};" % \
1487 (ncolor, 2 * self.nexternal,
1488 ",".join(["{" + ",".join([str(id) for id \
1489 in flows]) + "}" for flows \
1490 in color_flows])))
1491
1492 res_lines.append("setColAcol(%s);" % \
1493 ",".join(["colors[ic][%d]" % i for i in \
1494 range(2 * self.nexternal)]))
1495 res_lines.append('}')
1496
1497 return "\n".join(res_lines)
1498
1499
1501 """Get weightDecay_lines for function definition for Pythia 8 .cc file"""
1502
1503 weightDecay_lines = "// Just use isotropic decay (default)\n"
1504 weightDecay_lines += "return 1.;"
1505
1506 return weightDecay_lines
1507
1513 """Open a template file and return the contents."""
1514
1515 return open(os.path.join(_file_path, \
1516 'iolibs', 'template_files',
1517 filename)).read()
1518
1520 """Return info lines for MG5, suitable to place at beginning of
1521 Fortran files"""
1522
1523 info = misc.get_pkg_info()
1524 info_lines = ""
1525 if info and 'version' in info and 'date' in info:
1526 info_lines = "# MadGraph 5 v. %s, %s\n" % \
1527 (info['version'], info['date'])
1528 info_lines = info_lines + \
1529 "# By the MadGraph Development Team\n" + \
1530 "# Please visit us at https://launchpad.net/madgraph5"
1531 else:
1532 info_lines = "# MadGraph 5\n" + \
1533 "# By the MadGraph Development Team\n" + \
1534 "# Please visit us at https://launchpad.net/madgraph5"
1535
1536 return info_lines
1537
1538 def coeff(ff_number, frac, is_imaginary, Nc_power, Nc_value=3):
1539 """Returns a nicely formatted string for the coefficients in JAMP lines"""
1540
1541 total_coeff = ff_number * frac * fractions.Fraction(Nc_value) ** Nc_power
1542
1543 if total_coeff == 1:
1544 if is_imaginary:
1545 return '+std::complex<double>(0,1)*'
1546 else:
1547 return '+'
1548 elif total_coeff == -1:
1549 if is_imaginary:
1550 return '-std::complex<double>(0,1)*'
1551 else:
1552 return '-'
1553
1554 res_str = '%+i.' % total_coeff.numerator
1555
1556 if total_coeff.denominator != 1:
1557
1558 res_str = res_str + '/%i.' % total_coeff.denominator
1559
1560 if is_imaginary:
1561 res_str = res_str + '*std::complex<double>(0,1)'
1562
1563 return res_str + '*'
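# Illustrative examples of the strings produced by coeff():
# coeff(1, fractions.Fraction(1, 3), False, 0)  ->  '+1./3.*'
# coeff(-1, fractions.Fraction(1, 1), True, 0)  ->  '-std::complex<double>(0,1)*'
# coeff(1, fractions.Fraction(1, 1), False, 1)  ->  '+3.*'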
1564
1565
1566
1567
1568
1569 def convert_model_to_cpp(model, output_dir, wanted_lorentz = [],
1570 wanted_couplings = []):
1571 """Create a full valid Pythia 8 model from an MG5 model (coming from UFO)"""
1572
1573
1574 model_builder = UFOModelConverterCPP(model,
1575 os.path.join(output_dir, 'src'),
1576 wanted_lorentz,
1577 wanted_couplings)
1578 model_builder.write_files()
1579
1585 """ A converter of the UFO-MG5 Model to the C++ format """
1586
1587
1588 output_name = 'C++ Standalone'
1589 namespace = 'MG5'
1590
1591
1592 type_dict = {"real": "double",
1593 "complex": "std::complex<double>"}
1594
1595
1596 compiler_option_re = re.compile('^#\w')
1597 namespace_re = re.compile('^using namespace')
1598
1599 slha_to_depend = {('SMINPUTS', (3,)): ('aS',),
1600 ('SMINPUTS', (1,)): ('aEM',)}
1601
1602
1603 include_dir = '.'
1604 cc_file_dir = '.'
1605 param_template_h = 'cpp_model_parameters_h.inc'
1606 param_template_cc = 'cpp_model_parameters_cc.inc'
1607 aloha_template_h = 'cpp_hel_amps_h.inc'
1608 aloha_template_cc = 'cpp_hel_amps_cc.inc'
1609
1610 copy_include_files = []
1611 copy_cc_files = []
1612
1613 def __init__(self, model, output_path, wanted_lorentz = [],
1614 wanted_couplings = []):
1615 """ initialization of the objects """
1616
1617 self.model = model
1618 self.model_name = ProcessExporterCPP.get_model_name(model['name'])
1619
1620 self.dir_path = output_path
1621
1622
1623 self.wanted_lorentz = wanted_lorentz
1624
1625
1626
1627
1628 self.coups_dep = {}
1629 self.coups_indep = []
1630 self.params_dep = []
1631 self.params_indep = []
1632 self.p_to_cpp = parsers.UFOExpressionParserCPP()
1633
1634
1635 self.prepare_parameters()
1636 self.prepare_couplings(wanted_couplings)
1637
1646
1647
1648
1650 """Extract the parameters from the model, and store them in
1651 the two lists params_indep and params_dep"""
1652
1653
1654 keys = self.model['parameters'].keys()
1655 keys.sort(key=len)
1656 params_ext = []
1657 for key in keys:
1658 if key == ('external',):
1659 params_ext += self.model['parameters'][key]
1660 elif 'aS' in key:
1661 for p in self.model['parameters'][key]:
1662 self.params_dep.append(base_objects.ModelVariable(p.name,
1663 p.name + " = " + \
1664 self.p_to_cpp.parse(p.expr) + ";",
1665 p.type,
1666 p.depend))
1667 else:
1668 for p in self.model['parameters'][key]:
1669 if p.name == 'ZERO':
1670 continue
1671 self.params_indep.append(base_objects.ModelVariable(p.name,
1672 p.name + " = " + \
1673 self.p_to_cpp.parse(p.expr) + ";",
1674 p.type,
1675 p.depend))
1676
1677
1678 while params_ext:
1679 param = params_ext.pop(0)
1680
1681 expression = ""
1682 assert param.value.imag == 0
1683 if len(param.lhacode) == 1:
1684 expression = "%s = slha.get_block_entry(\"%s\", %d, %e);" % \
1685 (param.name, param.lhablock.lower(),
1686 param.lhacode[0], param.value.real)
1687 elif len(param.lhacode) == 2:
1688 expression = "indices[0] = %d;\nindices[1] = %d;\n" % \
1689 (param.lhacode[0], param.lhacode[1])
1690 expression += "%s = slha.get_block_entry(\"%s\", indices, %e);" \
1691 % (param.name, param.lhablock.lower(), param.value.real)
1692 else:
1693 raise MadGraph5Error("Only support for SLHA blocks with 1 or 2 indices")
1694 self.params_indep.insert(0,
1695 base_objects.ModelVariable(param.name,
1696 expression,
1697 'real'))
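# Illustrative expression generated for a hypothetical external parameter named
# 'MT', read from SLHA block MASS with lhacode [6] and value 172.:
# MT = slha.get_block_entry("mass", 6, 1.720000e+02);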
1698
1700 """Extract the couplings from the model, and store them in
1701 the two lists coups_indep and coups_dep"""
1702
1703
1704 keys = self.model['couplings'].keys()
1705 keys.sort(key=len)
1706 for key, coup_list in self.model['couplings'].items():
1707 if "aS" in key:
1708 for c in coup_list:
1709 if not wanted_couplings or c.name in wanted_couplings:
1710 self.coups_dep[c.name] = base_objects.ModelVariable(\
1711 c.name,
1712 c.expr,
1713 c.type,
1714 c.depend)
1715 else:
1716 for c in coup_list:
1717 if not wanted_couplings or c.name in wanted_couplings:
1718 self.coups_indep.append(base_objects.ModelVariable(\
1719 c.name,
1720 c.expr,
1721 c.type,
1722 c.depend))
1723
1724
1725 for coup in self.coups_dep.values() + self.coups_indep:
1726 coup.expr = coup.name + " = " + self.p_to_cpp.parse(coup.expr) + ";"
1727
1728
1729
1731 """Generate the parameters_model.h and parameters_model.cc
1732 files, which have the parameters and couplings for the model."""
1733
1734 if not os.path.isdir(os.path.join(self.dir_path, self.include_dir)):
1735 os.makedirs(os.path.join(self.dir_path, self.include_dir))
1736 if not os.path.isdir(os.path.join(self.dir_path, self.cc_file_dir)):
1737 os.makedirs(os.path.join(self.dir_path, self.cc_file_dir))
1738
1739 parameter_h_file = os.path.join(self.dir_path, self.include_dir,
1740 'Parameters_%s.h' % self.model_name)
1741 parameter_cc_file = os.path.join(self.dir_path, self.cc_file_dir,
1742 'Parameters_%s.cc' % self.model_name)
1743
1744 file_h, file_cc = self.generate_parameters_class_files()
1745
1746
1747 writers.CPPWriter(parameter_h_file).writelines(file_h)
1748 writers.CPPWriter(parameter_cc_file).writelines(file_cc)
1749
1750
1751 for copy_file in self.copy_include_files:
1752 shutil.copy(os.path.join(_file_path, 'iolibs',
1753 'template_files',copy_file),
1754 os.path.join(self.dir_path, self.include_dir))
1755
1756 for copy_file in self.copy_cc_files:
1757 shutil.copy(os.path.join(_file_path, 'iolibs',
1758 'template_files',copy_file),
1759 os.path.join(self.dir_path, self.cc_file_dir))
1760
1761 logger.info("Created files %s and %s in directory" \
1762 % (os.path.split(parameter_h_file)[-1],
1763 os.path.split(parameter_cc_file)[-1]))
1764 logger.info("%s and %s" % \
1765 (os.path.split(parameter_h_file)[0],
1766 os.path.split(parameter_cc_file)[0]))
1767
1769 """Create the content of the Parameters_model.h and .cc files"""
1770
1771 replace_dict = {}
1772
1773 replace_dict['info_lines'] = get_mg5_info_lines()
1774 replace_dict['model_name'] = self.model_name
1775
1776 replace_dict['independent_parameters'] = \
1777 "// Model parameters independent of aS\n" + \
1778 self.write_parameters(self.params_indep)
1779 replace_dict['independent_couplings'] = \
1780 "// Model parameters dependent on aS\n" + \
1781 self.write_parameters(self.params_dep)
1782 replace_dict['dependent_parameters'] = \
1783 "// Model couplings independent of aS\n" + \
1784 self.write_parameters(self.coups_indep)
1785 replace_dict['dependent_couplings'] = \
1786 "// Model couplings dependent on aS\n" + \
1787 self.write_parameters(self.coups_dep.values())
1788
1789 replace_dict['set_independent_parameters'] = \
1790 self.write_set_parameters(self.params_indep)
1791 replace_dict['set_independent_couplings'] = \
1792 self.write_set_parameters(self.coups_indep)
1793 replace_dict['set_dependent_parameters'] = \
1794 self.write_set_parameters(self.params_dep)
1795 replace_dict['set_dependent_couplings'] = \
1796 self.write_set_parameters(self.coups_dep.values())
1797
1798 replace_dict['print_independent_parameters'] = \
1799 self.write_print_parameters(self.params_indep)
1800 replace_dict['print_independent_couplings'] = \
1801 self.write_print_parameters(self.coups_indep)
1802 replace_dict['print_dependent_parameters'] = \
1803 self.write_print_parameters(self.params_dep)
1804 replace_dict['print_dependent_couplings'] = \
1805 self.write_print_parameters(self.coups_dep.values())
1806
1807 file_h = read_template_file(self.param_template_h) % \
1808 replace_dict
1809 file_cc = read_template_file(self.param_template_cc) % \
1810 replace_dict
1811
1812 return file_h, file_cc
1813
1815 """Write out the definitions of parameters"""
1816
1817
1818 type_param_dict = {}
1819
1820 for param in params:
1821 type_param_dict[param.type] = \
1822 type_param_dict.setdefault(param.type, []) + [param.name]
1823
1824
1825
1826 res_strings = []
1827 for key in type_param_dict:
1828 res_strings.append("%s %s;" % (self.type_dict[key],
1829 ",".join(type_param_dict[key])))
1830
1831 return "\n".join(res_strings)
1832
1834 """Write out the lines of independent parameters"""
1835
1836
1837
1838 res_strings = []
1839 for param in params:
1840 res_strings.append("%s" % param.expr)
1841
1842
1843
1844 for particle in self.model.get('particles'):
1845 if particle.is_fermion() and particle.get('self_antipart') and \
1846 particle.get('width').lower() != 'zero':
1847 res_strings.append("if (%s < 0)" % particle.get('mass'))
1848 res_strings.append("%(width)s = -abs(%(width)s);" % \
1849 {"width": particle.get('width')})
1850
1851 return "\n".join(res_strings)
1852
1854 """Write out the lines of independent parameters"""
1855
1856
1857
1858 res_strings = []
1859 for param in params:
1860 res_strings.append("cout << setw(20) << \"%s \" << \"= \" << setiosflags(ios::scientific) << setw(10) << %s << endl;" % (param.name, param.name))
1861
1862 return "\n".join(res_strings)
1863
1864
1865
1867 """Generate the hel_amps_model.h and hel_amps_model.cc files, which
1868 have the complete set of generalized Helas routines for the model"""
1869
1870 if not os.path.isdir(os.path.join(self.dir_path, self.include_dir)):
1871 os.makedirs(os.path.join(self.dir_path, self.include_dir))
1872 if not os.path.isdir(os.path.join(self.dir_path, self.cc_file_dir)):
1873 os.makedirs(os.path.join(self.dir_path, self.cc_file_dir))
1874
1875 model_h_file = os.path.join(self.dir_path, self.include_dir,
1876 'HelAmps_%s.h' % self.model_name)
1877 model_cc_file = os.path.join(self.dir_path, self.cc_file_dir,
1878 'HelAmps_%s.cc' % self.model_name)
1879
1880 replace_dict = {}
1881
1882 replace_dict['output_name'] = self.output_name
1883 replace_dict['info_lines'] = get_mg5_info_lines()
1884 replace_dict['namespace'] = self.namespace
1885 replace_dict['model_name'] = self.model_name
1886
1887
1888
1889 template_h_files = self.read_aloha_template_files(ext = 'h')
1890 template_cc_files = self.read_aloha_template_files(ext = 'cc')
1891
1892 aloha_model = create_aloha.AbstractALOHAModel(self.model.get('name'))
1893
1894 if self.wanted_lorentz:
1895 aloha_model.compute_subset(self.wanted_lorentz)
1896 else:
1897 aloha_model.compute_all(save=False)
1898
1899 for abstracthelas in dict(aloha_model).values():
1900 h_rout, cc_rout = abstracthelas.write(output_dir=None, language='CPP',
1901 compiler_cmd=False)
1902
1903 template_h_files.append(h_rout)
1904 template_cc_files.append(cc_rout)
1905
1906
1907
1908
1909
1910
1911
1912
1913
1914 replace_dict['function_declarations'] = '\n'.join(template_h_files)
1915 replace_dict['function_definitions'] = '\n'.join(template_cc_files)
1916
1917 file_h = read_template_file(self.aloha_template_h) % replace_dict
1918 file_cc = read_template_file(self.aloha_template_cc) % replace_dict
1919
1920
1921 writers.CPPWriter(model_h_file).writelines(file_h)
1922 writers.CPPWriter(model_cc_file).writelines(file_cc)
1923
1924 logger.info("Created files %s and %s in directory" \
1925 % (os.path.split(model_h_file)[-1],
1926 os.path.split(model_cc_file)[-1]))
1927 logger.info("%s and %s" % \
1928 (os.path.split(model_h_file)[0],
1929 os.path.split(model_cc_file)[0]))
1930
1931
1933 """Read all ALOHA template files with extension ext, strip them of
1934 compiler options and namespace options, and return in a list"""
1935
1936 template_files = []
1937 for filename in glob.glob(os.path.join(MG5DIR, 'aloha',
1938 'template_files', '*.%s' % ext)):
1939 file = open(filename, 'r')
1940 template_file_string = ""
1941 while file:
1942 line = file.readline()
1943 if len(line) == 0: break
1944 line = self.clean_line(line)
1945 if not line:
1946 continue
1947 template_file_string += line.strip() + '\n'
1948 template_files.append(template_file_string)
1949
1950 return template_files
1951
1952
1953
1954
1955
1956
1957
1958
1959
1960
1961
1962
1963
1964
1965
1966
1967
1968
1969
1970
1971
1972
1973
1975 """Strip a line of compiler options and namespace options."""
1976
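# self.compiler_option_re and self.namespace_re are regular expressions defined
# elsewhere in this class; lines matching either one (presumably compiler /
# preprocessor directives and "using namespace" statements) are dropped.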
1977 if self.compiler_option_re.match(line) or self.namespace_re.match(line):
1978 return ""
1979
1980 return line
1981
1982
1983
1984
1985 def generate_example_file_pythia8(path,
1986 model_path,
1987 process_names,
1988 exporter,
1989 main_file_name = "",
1990 example_dir = "examples"):
1991 """Generate the main_model_name.cc file and Makefile in the examples dir"""
1992
1993 filepath = os.path.join(path, example_dir)
1994 if not os.path.isdir(filepath):
1995 os.makedirs(filepath)
1996
1997 replace_dict = {}
1998
1999
2000 info_lines = get_mg5_info_lines()
2001 replace_dict['info_lines'] = info_lines
2002
2003
2004 replace_dict['model_name'] = exporter.model_name
2005
2006
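# One #include per generated process class header.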
2007 replace_dict['include_lines'] = \
2008 "\n".join(["#include \"%s.h\"" % proc_name \
2009 for proc_name in process_names])
2010
2011
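# Register each generated process with the Pythia object; for a process class
# named, say, Sigma_sm_gg_ttx (illustrative) this produces:
#   pythia.setSigmaPtr(new Sigma_sm_gg_ttx());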
2012 replace_dict['sigma_pointer_lines'] = \
2013 "\n".join(["pythia.setSigmaPtr(new %s());" % proc_name \
2014 for proc_name in process_names])
2015
2016
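# Location of the param_card written alongside the process library, given
# relative to the examples directory.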
2017 replace_dict['param_card'] = os.path.join(os.path.pardir,model_path,
2018 "param_card_%s.dat" % \
2019 exporter.model_name)
2020
2021
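# Fill the Pythia 8 example main program template.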
2022 file = read_template_file('pythia8_main_example_cc.inc') % \
2023 replace_dict
2024
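# If no name was given, pick the first unused main_<model>_<n> file name.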
2025 if not main_file_name:
2026 num = 1
2027 while os.path.exists(os.path.join(filepath,
2028 'main_%s_%i.cc' % (exporter.model_name, num))):
2029 num += 1
2030 main_file_name = str(num)
2031
2032 main_file = 'main_%s_%s' % (exporter.model_name,
2033 main_file_name)
2034
2035 main_filename = os.path.join(filepath, main_file + '.cc')
2036
2037
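# Write the example main program through the C++ writer.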
2038 writers.CPPWriter(main_filename).writelines(file)
2039
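# Now create the matching Makefile for the example main program.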
2040 replace_dict = {}
2041
2042
2043 replace_dict['info_lines'] = get_mg5_info_lines()
2044
2045 replace_dict['main_file'] = main_file
2046
2047 replace_dict['process_dir'] = model_path
2048
2049 replace_dict['include_dir'] = exporter.include_dir
2050
2051
2052 file = read_template_file('pythia8_main_makefile.inc') % \
2053 replace_dict
2054
2055 make_filename = os.path.join(filepath, 'Makefile_%s_%s' % \
2056 (exporter.model_name, main_file_name))
2057
2058
2059 open(make_filename, 'w').write(file)
2060
2061 logger.info("Created files %s and %s in directory %s" \
2062 % (os.path.split(main_filename)[-1],
2063 os.path.split(make_filename)[-1],
2064 os.path.split(make_filename)[0]))
2065 return main_file, make_filename
2066
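# Rough usage sketch (illustrative only; in the real export flow the process
# classes are generated first, and 'exporter' is the object carrying the
# model_name and include_dir used for this output):
#
#   model_name, process_dir = convert_model_to_pythia8(model, pythia_dir)
#   generate_example_file_pythia8(pythia_dir, process_dir,
#                                 ['Sigma_sm_gg_ttx'], exporter)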
2074 """Create a full valid Pythia 8 model from an MG5 model (coming from UFO)"""
2075
2076 if not os.path.isfile(os.path.join(pythia_dir, 'include', 'Pythia.h')):
2077 logger.warning('Directory %s is not a valid Pythia 8 main dir.' % pythia_dir)
2078
2079
2080 model_builder = UFOModelConverterPythia8(model, pythia_dir)
2081 model_builder.cc_file_dir = "Processes_" + model_builder.model_name
2082 model_builder.include_dir = model_builder.cc_file_dir
2083
2084 model_builder.write_files()
2085
2086 model_builder.write_makefile()
2087
2088 model_builder.write_param_card()
2089 return model_builder.model_name, model_builder.cc_file_dir
2090
2096 """ A converter of the UFO-MG5 Model to the Pythia 8 format """
2097
2098
2099 output_name = 'Pythia 8'
2100 namespace = 'Pythia8'
2101
2102
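# Map from (SLHA block, indices) to the Pythia 8 expression used for the
# corresponding Standard Model input: inverse alpha_EM, G_F, alpha_s and the
# CKM (Cabibbo) mixing.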
2103 slha_to_expr = {('SMINPUTS', (1,)): '1./csm->alphaEM(pow(pd->m0(23),2))',
2104 ('SMINPUTS', (2,)): 'M_PI*csm->alphaEM(pow(pd->m0(23),2))*pow(pd->m0(23),2)/(sqrt(2.)*pow(pd->m0(24),2)*(pow(pd->m0(23),2)-pow(pd->m0(24),2)))',
2105 ('SMINPUTS', (3,)): 'alpS',
2106 ('CKMBLOCK', (1,)): 'csm->VCKMgen(1,2)',
2107 }
2108
2109
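# Template files for the Pythia 8 parameter class header and source.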
2110 param_template_h = 'pythia8_model_parameters_h.inc'
2111 param_template_cc = 'pythia8_model_parameters_cc.inc'
2112
2114 """Extract the model parameters from Pythia 8, and store them in
2115 the two lists params_indep and params_dep"""
2116
2117
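# Internal parameters are split into alpha_s-dependent ones (params_dep) and
# independent ones (params_indep); external (SLHA) parameters are collected in
# params_ext and translated below.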
2118 keys = list(self.model['parameters'].keys())
2119 keys.sort(key=len)
2120 params_ext = []
2121 for key in keys:
2122 if key == ('external',):
2123 params_ext += self.model['parameters'][key]
2124 elif 'aS' in key:
2125 for p in self.model['parameters'][key]:
2126 self.params_dep.append(base_objects.ModelVariable(p.name,
2127 p.name + " = " + \
2128 self.p_to_cpp.parse(p.expr) + ';',
2129 p.type,
2130 p.depend))
2131 else:
2132 for p in self.model['parameters'][key]:
2133 self.params_indep.append(base_objects.ModelVariable(p.name,
2134 p.name + " = " + \
2135 self.p_to_cpp.parse(p.expr) + ';',
2136 p.type,
2137 p.depend))
2138
2139
2140
2141
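# Translate each external parameter: SM inputs and CKM entries use the
# expressions in slha_to_expr, masses, widths and Yukawas are taken from
# Pythia 8's particle data, and anything else is read from the SLHA card
# at run time.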
2142 while params_ext:
2143 param = params_ext.pop(0)
2144 key = (param.lhablock, tuple(param.lhacode))
2145 if 'aS' in self.slha_to_depend.setdefault(key, ()):
2146
2147 self.params_dep.insert(0,
2148 base_objects.ModelVariable(param.name,
2149 param.name + ' = ' + \
2150 self.slha_to_expr[key] + ';',
2151 'real'))
2152 else:
2153 try:
2154
2155 self.params_indep.insert(0,
2156 base_objects.ModelVariable(param.name,
2157 param.name + ' = ' + \
2158 self.slha_to_expr[key] + ';',
2159 'real'))
2160 except KeyError:
2161
2162
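# Parameter not covered by slha_to_expr: masses, widths and Yukawa couplings
# are read directly from Pythia 8's particle data.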
2163 if param.lhablock == 'YUKAWA':
2164 self.slha_to_expr[key] = 'pd->mRun(%i, pd->m0(24))' \
2165 % param.lhacode[0]
2166 if param.lhablock == 'MASS':
2167 self.slha_to_expr[key] = 'pd->m0(%i)' \
2168 % param.lhacode[0]
2169 if param.lhablock == 'DECAY':
2170 self.slha_to_expr[key] = \
2171 'pd->mWidth(%i)' % param.lhacode[0]
2172 if key in self.slha_to_expr:
2173 self.params_indep.insert(0,\
2174 base_objects.ModelVariable(param.name,
2175 param.name + "=" + self.slha_to_expr[key] \
2176 + ';',
2177 'real'))
2178 else:
2179
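# Any other external parameter is read from the SLHA card at run time via
# slhaPtr->getEntry, falling back to the UFO default value with a warning.
# For a one-index parameter named, say, ymb with value 4.7 in block "yukawa"
# (illustrative), the generated C++ looks roughly like:
#   if(!slhaPtr->getEntry<double>("yukawa", 5, ymb)){
#   cout << "Warning, setting ymb to 4.700000e+00" << endl;
#   ymb = 4.700000e+00;}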
2180 if len(param.lhacode) == 1:
2181 expression = "if(!slhaPtr->getEntry<double>(\"%s\", %d, %s)){\n" % \
2182 (param.lhablock.lower(),
2183 param.lhacode[0],
2184 param.name) + \
2185 ("cout << \"Warning, setting %s to %e\" << endl;\n" \
2186 + "%s = %e;}") % (param.name, param.value.real,
2187 param.name, param.value.real)
2188 elif len(param.lhacode) == 2:
2189 expression = "if(!slhaPtr->getEntry<double>(\"%s\", %d, %d, %s)){\n" % \
2190 (param.lhablock.lower(),
2191 param.lhacode[0],
2192 param.lhacode[1],
2193 param.name) + \
2194 ("cout << \"Warning, setting %s to %e\" << endl;\n" \
2195 + "%s = %e;}") % (param.name, param.value.real,
2196 param.name, param.value.real)
2197 elif len(param.lhacode) == 3:
2198 expression = "if(!slhaPtr->getEntry<double>(\"%s\", %d, %d, %d, %s)){\n" % \
2199 (param.lhablock.lower(),
2200 param.lhacode[0],
2201 param.lhacode[1],
2202 param.lhacode[2],
2203 param.name) + \
2204 ("cout << \"Warning, setting %s to %e\" << endl;\n" \
2205 + "%s = %e;}") % (param.name, param.value.real,
2206 param.name, param.value.real)
2207 else:
2208 raise MadGraph5Error("Only SLHA blocks with 1, 2 or 3 indices are supported")
2209 self.params_indep.insert(0,
2210 base_objects.ModelVariable(param.name,
2211 expression,
2212 'real'))
2213
2215 """Generate the Makefile, which creates library files."""
2216
2217 makefilename = os.path.join(self.dir_path, self.cc_file_dir,
2218 'Makefile')
2219
2220 replace_dict = {}
2221
2222 replace_dict['info_lines'] = get_mg5_info_lines()
2223 replace_dict['model'] = self.model_name
2224
2225 makefile = read_template_file('pythia8_makefile.inc') % replace_dict
2226
2227
2228 open(makefilename, 'w').write(makefile)
2229
2230 logger.info("Created %s in directory %s" \
2231 % (os.path.split(makefilename)[-1],
2232 os.path.split(makefilename)[0]))
2233
2235 """Generate the param_card for the model."""
2236
2237 paramcardname = os.path.join(self.dir_path, self.cc_file_dir,
2238 'param_card_%s.dat' % self.model_name)
2239
2240 open(paramcardname, 'w').write(\
2241 self.model.write_param_card())
2242
2243 logger.info("Created %s in directory %s" \
2244 % (os.path.split(paramcardname)[-1],
2245 os.path.split(paramcardname)[0]))
2246