From cc044defbbaf33b69866b67569aa58092f447b77 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Tue, 4 Jun 2024 08:43:31 -0500 Subject: [PATCH 01/85] Move table loading responsibility to individual fluxes --- python/SIREN_Controller.py | 9 ++-- python/__init__.py | 7 +-- python/_util.py | 33 +++++++------ resources/Examples/Example1/DIS_ATLAS.py | 7 +-- .../Examples/Example2/DipolePortal_MINERvA.py | 7 +-- .../Example2/DipolePortal_MiniBooNE.py | 7 +-- .../Fluxes/BNB/BNB-v1.0/FluxCalculator.py | 29 ----------- resources/Fluxes/BNB/BNB-v1.0/flux.py | 47 ++++++++++++++++++ .../Fluxes/HE_SN/HE_SN-v1.0/FluxCalculator.py | 22 --------- resources/Fluxes/HE_SN/HE_SN-v1.0/flux.py | 33 +++++++++++++ .../Fluxes/NUMI/NUMI-v1.0/FluxCalculator.py | 31 ------------ resources/Fluxes/NUMI/NUMI-v1.0/flux.py | 48 +++++++++++++++++++ 12 files changed, 164 insertions(+), 116 deletions(-) delete mode 100644 resources/Fluxes/BNB/BNB-v1.0/FluxCalculator.py create mode 100644 resources/Fluxes/BNB/BNB-v1.0/flux.py delete mode 100644 resources/Fluxes/HE_SN/HE_SN-v1.0/FluxCalculator.py create mode 100644 resources/Fluxes/HE_SN/HE_SN-v1.0/flux.py delete mode 100644 resources/Fluxes/NUMI/NUMI-v1.0/FluxCalculator.py create mode 100644 resources/Fluxes/NUMI/NUMI-v1.0/flux.py diff --git a/python/SIREN_Controller.py b/python/SIREN_Controller.py index 7f714b115..7990dab0d 100644 --- a/python/SIREN_Controller.py +++ b/python/SIREN_Controller.py @@ -1,3 +1,4 @@ +import os import h5py import numpy as np import awkward as ak @@ -413,10 +414,12 @@ def SetInjectorStoppingCondition(self, stopping_condition): self.injector.SetStoppingCondition(stopping_condition) # Initialize the injector, either from an existing .siren_injector file or from controller injection objects - def InitializeInjector(self,filenames=None): - if type(filenames)==str: - filenames = [filenames] + def InitializeInjector(self, filenames=None): + if type(filenames) == str: + if os.path.isfile(filenames): + filenames = [filenames] 
self.injectors=[] + filenames = None if filenames is None: assert(self.primary_injection_process.primary_type is not None) # Use controller injection objects diff --git a/python/__init__.py b/python/__init__.py index 2429378ba..7f0f00e2d 100644 --- a/python/__init__.py +++ b/python/__init__.py @@ -14,8 +14,8 @@ utilities.get_detector_model_path = _util.get_detector_model_path utilities.get_material_model_path = _util.get_material_model_path utilities.get_cross_section_model_path = _util.get_cross_section_model_path -utilities.get_tabulated_flux_model_path = _util.get_tabulated_flux_model_path -utilities.get_tabulated_flux_file = _util.get_tabulated_flux_file +utilities.get_flux_model_path = _util.get_flux_model_path +utilities.load_flux = _util.load_flux def darknews_version(): try: @@ -24,4 +24,5 @@ def darknews_version(): except: print("WARNING: DarkNews is not installed in the local environment") return None -utilities.darknews_version = darknews_version \ No newline at end of file +utilities.darknews_version = darknews_version + diff --git a/python/_util.py b/python/_util.py index 5fa66fef5..2e3b087f7 100644 --- a/python/_util.py +++ b/python/_util.py @@ -1,6 +1,7 @@ import os import re import sys +import uuid import importlib THIS_DIR = os.path.abspath(os.path.dirname(__file__)) @@ -537,19 +538,21 @@ def get_cross_section_model_path(model_name, must_exist=True): return _get_model_path(model_name, prefix="CrossSections", is_file=False, must_exist=must_exist) -def get_tabulated_flux_model_path(model_name, must_exist=True): +def get_flux_model_path(model_name, must_exist=True): return _get_model_path(model_name,prefix="Fluxes", is_file=False, must_exist=must_exist) - - -def get_tabulated_flux_file(model_name, tag, must_exist=True): - abs_flux_dir = get_tabulated_flux_model_path(model_name,must_exist=must_exist) - # require existence of FluxCalculator.py - FluxCalculatorFile = os.path.join(abs_flux_dir,"FluxCalculator.py") - 
assert(os.path.isfile(FluxCalculatorFile)) - spec = importlib.util.spec_from_file_location("FluxCalculator", FluxCalculatorFile) - FluxCalculator = importlib.util.module_from_spec(spec) - sys.modules["FluxCalculator"] = FluxCalculator - spec.loader.exec_module(FluxCalculator) - flux_file = FluxCalculator.MakeFluxFile(tag,abs_flux_dir) - del sys.modules["FluxCalculator"] # remove flux directory from the system - return flux_file \ No newline at end of file + + +def load_flux(model_name, *args, **kwargs): + abs_flux_dir = get_flux_model_path(model_name, must_exist=True) + + # require existence of flux.py + flux_file = os.path.join(abs_flux_dir, "flux.py") + assert(os.path.isfile(flux_file)) + spec = importlib.util.spec_from_file_location("flux", flux_file) + flux = importlib.util.module_from_spec(spec) + module_name = f"siren-flux-{model_name}-{str(uuid.uuid4())}" + sys.modules[module_name] = flux + spec.loader.exec_module(flux) + flux_file = flux.load_flux(*args, **kwargs) + del sys.modules[module_name] # remove flux directory from the system + return flux_file diff --git a/resources/Examples/Example1/DIS_ATLAS.py b/resources/Examples/Example1/DIS_ATLAS.py index fcbe7a7b3..a64e06791 100644 --- a/resources/Examples/Example1/DIS_ATLAS.py +++ b/resources/Examples/Example1/DIS_ATLAS.py @@ -38,9 +38,10 @@ # energy distribution # HE SN flux from ATLAS paper -flux_file = siren.utilities.get_tabulated_flux_file("HE_SN","numu") -edist = siren.distributions.TabulatedFluxDistribution(100, 1e6, flux_file, True) #bool is whether flux is physical -primary_injection_distributions["energy"] = edist +edist = siren.utilities.load_flux("HE_SN", tag="numu", min_energy=100, max_energy=1e6, physically_normalized=True) +edist_gen = siren.utilities.load_flux("HE_SN", tag="numu", min_energy=100, max_energy=1e6, physically_normalized=False) + +primary_injection_distributions["energy"] = edist_gen primary_physical_distributions["energy"] = edist # direction distribution diff --git 
a/resources/Examples/Example2/DipolePortal_MINERvA.py b/resources/Examples/Example2/DipolePortal_MINERvA.py index 536508af8..6a33862a7 100644 --- a/resources/Examples/Example2/DipolePortal_MINERvA.py +++ b/resources/Examples/Example2/DipolePortal_MINERvA.py @@ -41,11 +41,8 @@ primary_physical_distributions = {} # energy distribution -flux_file = siren.utilities.get_tabulated_flux_file("NUMI","FHC_ME_numu") -edist = siren.distributions.TabulatedFluxDistribution(flux_file, True) -edist_gen = siren.distributions.TabulatedFluxDistribution( - model_kwargs["m4"], 20, flux_file, False -) +edist = siren.utilities.load_flux("NUMI", tag="FHC_ME_numu", physically_normalized=True) +edist_gen = siren.utilities.load_flux("NUMI", tag="FHC_ME_numu", min_energy=model_kwargs["m4"], max_energy=20, physically_normalized=False) primary_injection_distributions["energy"] = edist_gen primary_physical_distributions["energy"] = edist diff --git a/resources/Examples/Example2/DipolePortal_MiniBooNE.py b/resources/Examples/Example2/DipolePortal_MiniBooNE.py index 7fd7c62cd..05d022530 100644 --- a/resources/Examples/Example2/DipolePortal_MiniBooNE.py +++ b/resources/Examples/Example2/DipolePortal_MiniBooNE.py @@ -41,11 +41,8 @@ primary_physical_distributions = {} # energy distribution -flux_file = siren.utilities.get_tabulated_flux_file("BNB","FHC_numu") -edist = siren.distributions.TabulatedFluxDistribution(flux_file, True) -edist_gen = siren.distributions.TabulatedFluxDistribution( - model_kwargs["m4"], 10, flux_file, False -) +edist = siren.utilities.load_flux("BNB", tag="FHC_numu", physically_normalized=True) +edist_gen = siren.utilities.load_flux("BNB", tag="FHC_numu", min_energy=model_kwargs["m4"], max_energy=10, physically_normalized=False) primary_injection_distributions["energy"] = edist_gen primary_physical_distributions["energy"] = edist diff --git a/resources/Fluxes/BNB/BNB-v1.0/FluxCalculator.py b/resources/Fluxes/BNB/BNB-v1.0/FluxCalculator.py deleted file mode 100644 index 
d93221ad0..000000000 --- a/resources/Fluxes/BNB/BNB-v1.0/FluxCalculator.py +++ /dev/null @@ -1,29 +0,0 @@ -import os -def MakeFluxFile(tag, abs_flux_dir): - ''' - Accepts the following tags: - {FHC,RHC}_{nue,nuebar,numu,numubar} - ''' - mode,particle = tag.split("_") - if mode not in ["FHC","RHC"]: - print("%s beam mode specified in tag %s is not valid"%(mode,tag)) - exit(0) - if particle not in ["nue","numu","nuebar","numubar"]: - print("%s particle specified in tag %s is not valid"%(particle,tag)) - exit(0) - input_flux_file = os.path.join(abs_flux_dir, - "BNB_%s.dat"%mode) - output_flux_file = os.path.join(abs_flux_dir, - "BNB_%s_%s_flux.txt"%(mode,particle)) - with open(input_flux_file,"r") as fin: - all_lines = fin.readlines() - headers = all_lines[0].strip().split() - data = [line.strip().split() for line in all_lines[1:]] - pid = headers.index(particle) - with open(output_flux_file,"w") as fout: - for row in data: - Elow,Ehigh,bin_flux = float(row[0]),float(row[1]),float(row[pid]) - Emid = (Elow+Ehigh)/2. 
- flux = bin_flux/50*1000*1e4 # put flux in units of nu/m^2/GeV/POT - print(Emid,flux,file=fout) - return output_flux_file \ No newline at end of file diff --git a/resources/Fluxes/BNB/BNB-v1.0/flux.py b/resources/Fluxes/BNB/BNB-v1.0/flux.py new file mode 100644 index 000000000..0d6be7571 --- /dev/null +++ b/resources/Fluxes/BNB/BNB-v1.0/flux.py @@ -0,0 +1,47 @@ +import os +import siren + + +def load_flux(tag=None, min_energy=None, max_energy=None, physically_normalized=True): + """ + Accepts the following tags: + {FHC,RHC}_{nue,nuebar,numu,numubar} + """ + + if tag is None: + raise TypeError("\"tag\" is a required argument") + try: + tag = str(tag) + except: + raise RuntimeError("\"tag\" must convert to a str") + if (min_energy is None) != (max_energy is None): + raise RuntimeError("Neither or both \"min_energy\" and \"max_energy\" must be provided") + has_energy_range = min_energy is not None + + mode, particle = tag.split("_") + if mode not in ["FHC", "RHC"]: + raise ValueError("%s beam mode specified in tag %s is not valid" % (mode, tag)) + if particle not in ["nue", "numu", "nuebar", "numubar"]: + raise ValueError("%s particle specified in tag %s is not valid" % (particle, tag)) + + abs_flux_dir = os.path.dirname(__file__) + input_flux_file = os.path.join(abs_flux_dir, "BNB_%s.dat" % mode) + + all_lines = open(input_flux_file, "r").readlines() + headers = all_lines[0].strip().split() + data = [line.strip().split() for line in all_lines[1:]] + pid = headers.index(particle) + e_low_idx = 0 + e_high_idx = 1 + + energies = [(float(row[e_low_idx]) + float(row[e_high_idx]))/2.0 for row in data] + flux = [float(row[pid]) / 50 * 1000 * 1e4 for row in data] # put flux in units of nu/m^2/GeV/POT + + table = None + if has_energy_range: + table = siren.distributions.TabulatedFluxDistribution(min_energy, max_energy, energies, flux, physically_normalized) + else: + table = siren.distributions.TabulatedFluxDistribution(energies, flux, physically_normalized) + + return table + 
diff --git a/resources/Fluxes/HE_SN/HE_SN-v1.0/FluxCalculator.py b/resources/Fluxes/HE_SN/HE_SN-v1.0/FluxCalculator.py deleted file mode 100644 index 80fbc49de..000000000 --- a/resources/Fluxes/HE_SN/HE_SN-v1.0/FluxCalculator.py +++ /dev/null @@ -1,22 +0,0 @@ -import os - -def MakeFluxFile(tag, abs_flux_dir): - ''' - only supported tag is "numu" - ''' - if tag!="numu": - print("Tag %s not supported for HE SN"%tag) - exit(0) - input_flux_file = os.path.join(abs_flux_dir, - "dN_dE_SNe_2n_D1_0_s20_t100d_NuMu_d10kpc.txt") - output_flux_file = os.path.join(abs_flux_dir, - "HE_SN_numu.txt") - with open(input_flux_file,"r") as fin: - all_lines = fin.readlines() - data = [line.strip().split() for line in all_lines] - with open(output_flux_file,"w") as fout: - for row in data: - E,flux = float(row[0]),float(row[1]) - flux*=1e4 # put flux in units of nu/m^2/GeV/100d - print(E,flux,file=fout) - return output_flux_file \ No newline at end of file diff --git a/resources/Fluxes/HE_SN/HE_SN-v1.0/flux.py b/resources/Fluxes/HE_SN/HE_SN-v1.0/flux.py new file mode 100644 index 000000000..0440f99f6 --- /dev/null +++ b/resources/Fluxes/HE_SN/HE_SN-v1.0/flux.py @@ -0,0 +1,33 @@ +import os +import siren + +def load_flux(tag=None, min_energy=None, max_energy=None, physically_normalized=True): + ''' + only supported tag is "numu" + ''' + if tag!="numu": + raise ValueError("Tag %s not supported for HE SN" % tag) + + has_energy_range = min_energy is not None + + abs_flux_dir = os.path.dirname(__file__) + input_flux_file = os.path.join(abs_flux_dir, + "dN_dE_SNe_2n_D1_0_s20_t100d_NuMu_d10kpc.txt") + + all_lines = open(input_flux_file, "r").readlines() + headers = all_lines[0].strip().split() + data = [line.strip().split() for line in all_lines[1:]] + e_idx = 0 + flux_idx = 1 + + energies = [float(row[e_idx]) for row in data] + flux = [float(row[flux_idx]) * 1e4 for row in data] # put flux in units of nu/m^2/GeV/100d + + table = None + if has_energy_range: + table = 
siren.distributions.TabulatedFluxDistribution(min_energy, max_energy, energies, flux, physically_normalized) + else: + table = siren.distributions.TabulatedFluxDistribution(energies, flux, physically_normalized) + + return table + diff --git a/resources/Fluxes/NUMI/NUMI-v1.0/FluxCalculator.py b/resources/Fluxes/NUMI/NUMI-v1.0/FluxCalculator.py deleted file mode 100644 index de7b13b0f..000000000 --- a/resources/Fluxes/NUMI/NUMI-v1.0/FluxCalculator.py +++ /dev/null @@ -1,31 +0,0 @@ -import os -def MakeFluxFile(tag, abs_flux_dir): - ''' - Accepts the following tags: - {FHC,RHC}_{LE,ME}_{nue,nuebar,numu,numubar} - ''' - mode,energy,particle = tag.split("_") - if mode not in ["FHC","RHC"]: - print("%s beam mode specified in tag %s is not valid"%(mode,tag)) - exit(0) - if energy not in ["LE","ME"]: - print("%s energy mode specified in tag %s is not valid"%(energy,tag)) - exit(0) - if particle not in ["nue","numu","nuebar","numubar"]: - print("%s particle specified in tag %s is not valid"%(particle,tag)) - exit(0) - input_flux_file = os.path.join(abs_flux_dir, - "NUMI_%s_%s.dat"%(mode,energy)) - output_flux_file = os.path.join(abs_flux_dir, - "NUMI_%s_%s_%s_flux.txt"%(mode,energy,particle)) - with open(input_flux_file,"r") as fin: - all_lines = fin.readlines() - headers = all_lines[0].strip().split() - data = [line.strip().split() for line in all_lines[1:]] - pid = headers.index(particle) - with open(output_flux_file,"w") as fout: - for row in data: - E,flux = float(row[0]),float(row[pid]) - flux*=1e4 # put flux in units of nu/m^2/GeV/POT - print(E,flux,file=fout) - return output_flux_file \ No newline at end of file diff --git a/resources/Fluxes/NUMI/NUMI-v1.0/flux.py b/resources/Fluxes/NUMI/NUMI-v1.0/flux.py new file mode 100644 index 000000000..ca54fc77f --- /dev/null +++ b/resources/Fluxes/NUMI/NUMI-v1.0/flux.py @@ -0,0 +1,48 @@ +import os +import siren + + +def load_flux(tag=None, min_energy=None, max_energy=None, physically_normalized=True): + ''' + Accepts the 
following tags: + {FHC,RHC}_{LE,ME}_{nue,nuebar,numu,numubar} + ''' + + if tag is None: + raise TypeError("\"tag\" is a required argument") + try: + tag = str(tag) + except: + raise RuntimeError("\"tag\" must convert to a str") + if (min_energy is None) != (max_energy is None): + raise RuntimeError("Neither or both \"min_energy\" and \"max_energy\" must be provided") + has_energy_range = min_energy is not None + + mode, energy, particle = tag.split("_") + if mode not in ["FHC","RHC"]: + raise ValueError("%s beam mode specified in tag %s is not valid"%(mode,tag)) + if energy not in ["LE","ME"]: + raise ValueError("%s energy mode specified in tag %s is not valid"%(energy,tag)) + if particle not in ["nue","numu","nuebar","numubar"]: + raise ValueError("%s particle specified in tag %s is not valid"%(particle,tag)) + + abs_flux_dir = os.path.dirname(__file__) + input_flux_file = os.path.join(abs_flux_dir, + "NUMI_%s_%s.dat" % (mode, energy)) + + all_lines = open(input_flux_file, "r").readlines() + headers = all_lines[0].strip().split() + data = [line.strip().split() for line in all_lines[1:]] + pid = headers.index(particle) + e_idx = 0 + + energies = [float(row[e_idx]) for row in data] + flux = [float(row[pid]) * 1e4 for row in data] # put flux in units of nu/m^2/GeV/POT + + table = None + if has_energy_range: + table = siren.distributions.TabulatedFluxDistribution(min_energy, max_energy, energies, flux, physically_normalized) + else: + table = siren.distributions.TabulatedFluxDistribution(energies, flux, physically_normalized) + + return table From fa04cb273e6a214a20231040d899a41f6ce8d72e Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Wed, 5 Jun 2024 04:01:37 -0500 Subject: [PATCH 02/85] __repr__ and __str__ for InteractionSignature --- .../private/pybindings/dataclasses.cxx | 31 ++++++++++++++----- 1 file changed, 23 insertions(+), 8 deletions(-) diff --git a/projects/dataclasses/private/pybindings/dataclasses.cxx 
b/projects/dataclasses/private/pybindings/dataclasses.cxx index a3a74f06f..e328b280f 100644 --- a/projects/dataclasses/private/pybindings/dataclasses.cxx +++ b/projects/dataclasses/private/pybindings/dataclasses.cxx @@ -33,17 +33,32 @@ PYBIND11_MODULE(dataclasses,m) { .def_readwrite("helicity",&Particle::helicity) .def("GenerateID",&Particle::GenerateID); - enum_(particle, "ParticleType", arithmetic()) + enum_(particle, "ParticleType", arithmetic()) #define X(a, b) .value( #a , ParticleType:: a ) #include "../../public/SIREN/dataclasses/ParticleTypes.def" #undef X - .export_values(); - - class_>(m, "InteractionSignature") - .def(init<>()) - .def_readwrite("primary_type",&InteractionSignature::primary_type) - .def_readwrite("target_type",&InteractionSignature::target_type) - .def_readwrite("secondary_types",&InteractionSignature::secondary_types); + .export_values(); + + class_>(m, "InteractionSignature") + .def(init<>()) + .def("__str__", [](InteractionSignature const & p) { std::stringstream ss; ss << p; return ss.str(); }) + .def("__repr__", [](InteractionSignature const & s) { + std::stringstream ss; + ss << "InteractionSignature( "; + ss << s.primary_type << " "; + if(s.primary_type == ParticleType::unknown or s.target_type != ParticleType::unknown) { + ss << s.target_type << " "; + } + ss << "-> "; + for(auto const & secondary : s.secondary_types) { + ss << secondary << " "; + } + ss << ")"; + return ss.str(); + }) + .def_readwrite("primary_type",&InteractionSignature::primary_type) + .def_readwrite("target_type",&InteractionSignature::target_type) + .def_readwrite("secondary_types",&InteractionSignature::secondary_types); class_>(m, "PrimaryDistributionRecord") .def(init()) From 322d98798ffa0a48a86be33b625091157a3ff102 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Wed, 5 Jun 2024 04:02:03 -0500 Subject: [PATCH 03/85] Whitespace --- python/SIREN_DarkNews.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/python/SIREN_DarkNews.py 
b/python/SIREN_DarkNews.py index 212366c53..b5c4fb2f5 100644 --- a/python/SIREN_DarkNews.py +++ b/python/SIREN_DarkNews.py @@ -100,9 +100,6 @@ def __init__( self.GenerateCrossSections(use_pickles=use_pickles,**xs_kwargs) self.GenerateDecays(use_pickles=use_pickles) - - - def GenerateCrossSections(self, use_pickles, **kwargs): # Save all unique scattering processes self.cross_sections = [] From 8b5bbcd6d651ceea257a1490939c6c3ac9273b56 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Wed, 5 Jun 2024 04:02:43 -0500 Subject: [PATCH 04/85] load_module function --- python/_util.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/python/_util.py b/python/_util.py index 2e3b087f7..5d4dff1ac 100644 --- a/python/_util.py +++ b/python/_util.py @@ -2,6 +2,7 @@ import re import sys import uuid +import pathlib import importlib THIS_DIR = os.path.abspath(os.path.dirname(__file__)) @@ -169,6 +170,22 @@ def has_module(module_name): return True +def load_module(name, path, persist=True): + """Load a module with a specific name and path""" + url = pathlib.Path(os.path.abspath(path)).as_uri() + module_name = f"{name}-{str(uuid.uuid5(uuid.NAMESPACE_URL, url))}" + if module_name in sys.modules: + return sys.modules[module_name] + spec = importlib.util.spec_from_file_location(name, path) + module = importlib.util.module_from_spec(spec) + sys.modules[module_name] = module + spec.loader.exec_module(module) + module = sys.modules[module_name] + if not persist: + del sys.modules[module_name] + return module + + _VERSION_PATTERN = r""" v? 
(?: @@ -556,3 +573,4 @@ def load_flux(model_name, *args, **kwargs): flux_file = flux.load_flux(*args, **kwargs) del sys.modules[module_name] # remove flux directory from the system return flux_file + From ad97eef0f672e9f50c3030c4688cfdfa12cc7618 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Wed, 5 Jun 2024 10:19:48 -0500 Subject: [PATCH 05/85] Sketch loading logic for DarkNews --- .../DarkNewsTables/DarkNewsCrossSection.py | 503 ++++++++++++++++++ .../DarkNewsTables/DarkNewsDecay.py | 320 +++++++++++ .../CrossSections/DarkNewsTables/logger.py | 11 + .../CrossSections/DarkNewsTables/processes.py | 342 ++++++++++++ 4 files changed, 1176 insertions(+) create mode 100644 resources/CrossSections/DarkNewsTables/DarkNewsCrossSection.py create mode 100644 resources/CrossSections/DarkNewsTables/DarkNewsDecay.py create mode 100644 resources/CrossSections/DarkNewsTables/logger.py create mode 100644 resources/CrossSections/DarkNewsTables/processes.py diff --git a/resources/CrossSections/DarkNewsTables/DarkNewsCrossSection.py b/resources/CrossSections/DarkNewsTables/DarkNewsCrossSection.py new file mode 100644 index 000000000..688da76ad --- /dev/null +++ b/resources/CrossSections/DarkNewsTables/DarkNewsCrossSection.py @@ -0,0 +1,503 @@ +import os +import numpy as np +import functools +from scipy.interpolate import LinearNDInterpolator, PchipInterpolator + +base_path = os.path.dirname(os.path.abspath(__file__)) +loader_file = os.path.join(base_path, "loader.py") +siren._util.load_module("loader", loader_file) + +# SIREN methods +from siren.interactions import DarkNewsCrossSection +from siren import dataclasses +from siren.dataclasses import Particle + +# DarkNews methods +from DarkNews import phase_space + +# A class representing a single ups_case DarkNews class +# Only handles methods concerning the upscattering part +class PyDarkNewsCrossSection(DarkNewsCrossSection): + def __init__( + self, + ups_case, # DarkNews UpscatteringProcess instance + tolerance=1e-6, # 
supposed to represent machine epsilon + interp_tolerance=5e-2, # relative interpolation tolerance + always_interpolate=True, # bool whether to always interpolate the total/differential cross section + ): + DarkNewsCrossSection.__init__(self) # C++ constructor + + self.ups_case = ups_case + self.tolerance = tolerance + self.interp_tolerance = interp_tolerance + self.always_interpolate = always_interpolate + + # 2D table in E, sigma + self.total_cross_section_table = np.empty((0, 2), dtype=float) + # 3D table in E, z, dsigma/dQ2 where z = (Q2 - Q2min) / (Q2max - Q2min) + self.differential_cross_section_table = np.empty((0, 3), dtype=float) + + def load_from_table(self, table_dir): + # Make the table directory where will we store cross section tables + table_dir_exists = False + if os.path.exists(table_dir): + # print("Directory '%s' already exists"%table_dir) + table_dir_exists = True + else: + try: + os.makedirs(table_dir, exist_ok=False) + # print("Directory '%s' created successfully" % table_dir) + except OSError as error: + raise RuntimeError("Directory '%s' cannot be created" % table_dir) + + # Look in table dir and check whether total/differential xsec tables exist + if table_dir_exists: + total_xsec_file = os.path.join(table_dir, "total_cross_sections.npy") + if os.path.exists(total_xsec_file): + self.total_cross_section_table = np.load(total_xsec_file) + diff_xsec_file = os.path.join( + table_dir, "differential_cross_sections.npy" + ) + if os.path.exists(diff_xsec_file): + self.differential_cross_section_table = np.load(diff_xsec_file) + + self.configure() + + + # serialization method + def get_representation(self): + return { + "total_cross_section_table": self.total_cross_section_table, + "differential_cross_section_table": self.differential_cross_section_table, + "ups_case": self.ups_case, + "tolerance": self.tolerance, + "interp_tolerance": self.interp_tolerance, + "always_interpolate": self.always_interpolate, + "is_configured": False, + } + + # 
Configure function to set up member variables + # assumes we have defined the following: + # ups_case, total_cross_section_table, differential_cross_section_table, + # tolerance, interp_tolerance, always_interpolate + # kwargs argument can be used to set any of these + def configure(self, **kwargs): + + for k, v in kwargs.items(): + self.__setattr__(k, v) + + # Define the target particle + # make sure protons are stored as H nuclei + self.target_type = Particle.ParticleType(self.ups_case.nuclear_target.pdgid) + if self.target_type == Particle.ParticleType.PPlus: + self.target_type = Particle.ParticleType.HNucleus + + # Initialize interpolation objects + self.total_cross_section_interpolator = None + self.differential_cross_section_interpolator = None + self._redefine_interpolation_objects(total=True, diff=True) + self.is_configured = True + + # Sorts and redefines scipy interpolation objects + def _redefine_interpolation_objects(self, total=False, diff=False): + if total: + if len(self.total_cross_section_table) <= 1: + return + idxs = np.argsort(self.total_cross_section_table[:, 0]) + self.total_cross_section_table = self.total_cross_section_table[idxs] + self.total_cross_section_interpolator = PchipInterpolator( + self.total_cross_section_table[:, 0], + self.total_cross_section_table[:, 1], + ) + if diff: + if len(self.differential_cross_section_table) <= 1: + return + idxs = np.lexsort( + ( + self.differential_cross_section_table[:, 1], + self.differential_cross_section_table[:, 0], + ) + ) + self.differential_cross_section_table = ( + self.differential_cross_section_table[idxs] + ) + # If we only have two energy points, don't try to construct interpolator + if len(np.unique(self.differential_cross_section_table[:, 0])) <= 2: + return + self.differential_cross_section_interpolator = LinearNDInterpolator( + self.differential_cross_section_table[:, :2], + self.differential_cross_section_table[:, 2], + rescale=True, + ) + + # Check whether we have close-enough 
entries in the intrepolation tables + def _interpolation_flags(self, inputs, mode): + # + # returns UseSinglePoint,Interpolate,closest_idx + # UseSinglePoint: whether to use a single point in table + # Interpolate: whether to interpolate bewteen different points + # closest_idx: index of closest point in table (for UseSinglePoint) + + # Determine which table we are using + if mode == "total": + interp_table = self.total_cross_section_table + elif mode == "differential": + interp_table = self.differential_cross_section_table + else: + print("Invalid interpolation table mode %s" % mode) + exit(0) + + # first check if we have saved table points already + if len(interp_table) == 0: + return False, False, -1 + + # bools to keep track of whether to use a single point or interpolate + UseSinglePoint = False + Interpolate = True + # order events by the relative difference + rel_diff = np.abs((interp_table[:, :-1] - inputs) / inputs) + rel_diff_length = np.sqrt(np.sum(rel_diff**2, axis=-1)) + closest_idx_abs = np.argmin(rel_diff_length, axis=-1) + # First check whether we have a close-enough single point + if np.all(np.abs(rel_diff[closest_idx_abs]) < self.tolerance): + UseSinglePoint = True + # Ensure we have enough points to interpolate + if len(interp_table) < len(inputs) + 1: + Interpolate = False + # Require that we have at least len(inputs)+1 close points to interpolate + else: + close = np.all(rel_diff < self.interp_tolerance, axis=-1) + if sum(close) < len(inputs) + 1: + Interpolate = False + return UseSinglePoint, Interpolate, closest_idx_abs + + # return entries in interpolation table if we have inputs + def _query_interpolation_table(self, inputs, mode): + # + # returns: + # 0 if we are not close enough to any points in the interpolation table + # otherwise, returns the desired interpolated value + + # First make sure we are configured + self._ensure_configured() + + # Determine which table we are using + if mode == "total": + interp_table = 
self.total_cross_section_table + interpolator = self.total_cross_section_interpolator + elif mode == "differential": + interp_table = self.differential_cross_section_table + interpolator = self.differential_cross_section_interpolator + else: + print("Invalid interpolation table mode %s" % mode) + exit(0) + + if self.always_interpolate: + # check if energy is within table range + + if interpolator is None or inputs[0] > interp_table[-1, 0]: + print( + "Requested interpolation at %2.2f GeV. Either this is above the table boundary or the interpolator doesn't yet exist. Filling %s table" + % (inputs[0], mode) + ) + n = self.FillInterpolationTables( + total=(mode == "total"), + diff=(mode == "differential"), + Emax=(1 + self.interp_tolerance) * inputs[0], + ) + print("Added %d points" % n) + if mode == "total": + interpolator = self.total_cross_section_interpolator + elif mode == "differential": + interpolator = self.differential_cross_section_interpolator + elif inputs[0] < interp_table[0, 0]: + print( + "Requested interpolation at %2.2f GeV below table boundary. Requring calculation" + % inputs[0] + ) + return 0 + val = max(0, interpolator(inputs)) + if val < 0: + print( + "WARNING: negative interpolated value for %s-%s %s cross section at," + % ( + self.ups_case.nuclear_target.name, + self.ups_case.scattering_regime, + mode, + ), + inputs, + ) + return val + + UseSinglePoint, Interpolate, closest_idx = self._interpolation_flags( + inputs, mode + ) + + if UseSinglePoint: + if closest_idx < 0: + print( + "Trying to use a single table point, but no closest idx found. Exiting..." 
+ ) + exit(0) + return interp_table[closest_idx, -1] + elif Interpolate: + return interpolator(inputs) + else: + return -1 + + def FillTableAtEnergy(self, E, total=True, diff=True, factor=0.8): + num_added_points = 0 + if total: + xsec = self.ups_case.total_xsec(E) + self.total_cross_section_table = np.append( + self.total_cross_section_table, [[E, xsec]], axis=0 + ) + num_added_points += 1 + if diff: + interaction = dataclasses.InteractionRecord() + interaction.signature.primary_type = self.GetPossiblePrimaries()[ + 0 + ] # only one primary + interaction.signature.target_type = self.GetPossibleTargets()[ + 0 + ] # only one target + interaction.target_mass = self.ups_case.MA + interaction.primary_momentum = [E, 0, 0, 0] + zmin, zmax = self.tolerance, 1 + Q2min = self.Q2Min(interaction) + Q2max = self.Q2Max(interaction) + z = zmin + while z < zmax: + Q2 = Q2min + z * (Q2max - Q2min) + dxsec = self.ups_case.diff_xsec_Q2(E, Q2).item() + self.differential_cross_section_table = np.append( + self.differential_cross_section_table, + [[E, z, dxsec]], + axis=0, + ) + num_added_points += 1 + z *= 1 + factor * self.interp_tolerance + self._redefine_interpolation_objects(total=total, diff=diff) + return num_added_points + + # Fills the total and differential cross section tables within interp_tolerance + def FillInterpolationTables(self, total=True, diff=True, factor=0.8, Emax=None): + increment_factor = 0.5 * factor * self.interp_tolerance + Emin = (1.0 + self.tolerance) * self.ups_case.Ethreshold + if Emax is None: + if ( + len(self.total_cross_section_table) + + len(self.differential_cross_section_table) + ) <= 0: + return 0 + Emax = max( + np.max([0] + list(self.total_cross_section_table[:, 0])), + np.max([0] + list(self.differential_cross_section_table[:, 0])), + ) + num_added_points = 0 + E = Emin + E_existing_total = np.unique(self.total_cross_section_table[:, 0]) + E_existing_diff = np.unique(self.differential_cross_section_table[:, 0]) + while E < Emax: + # sample 
more coarsely past 1.5*threshold + if E > 1.5 * self.ups_case.Ethreshold: + increment_factor = factor * self.interp_tolerance + n = self.FillTableAtEnergy( + E, + total=(total and (E not in E_existing_total)), + diff=(diff and (E not in E_existing_diff)), + factor=factor, + ) + num_added_points += n + E *= 1 + increment_factor + self._redefine_interpolation_objects(total=total, diff=diff) + return num_added_points + + # Saves the tables for the scipy interpolation objects + def SaveInterpolationTables(self, table_dir, total=True, diff=True): + if total: + self._redefine_interpolation_objects(total=True) + with open( + os.path.join(table_dir, "total_cross_sections.npy"), "wb" + ) as f: + np.save(f, self.total_cross_section_table) + if diff: + self._redefine_interpolation_objects(diff=True) + with open( + os.path.join(table_dir, "differential_cross_sections.npy"), "wb" + ) as f: + np.save(f, self.differential_cross_section_table) + + def GetPossiblePrimaries(self): + return [Particle.ParticleType(self.ups_case.nu_projectile.pdgid)] + + def _ensure_configured(self): + if not self.is_configured: + self.configure() + + def GetPossibleTargetsFromPrimary(self, primary_type): + self._ensure_configured() + if Particle.ParticleType(self.ups_case.nu_projectile.pdgid) == primary_type: + return [self.target_type] + return [] + + def GetPossibleTargets(self): + self._ensure_configured() + return [self.target_type] + + def GetPossibleSignatures(self): + self._ensure_configured() + signature = dataclasses.InteractionSignature() + signature.primary_type = Particle.ParticleType( + self.ups_case.nu_projectile.pdgid + ) + signature.target_type = self.target_type + signature.secondary_types = [] + signature.secondary_types.append( + Particle.ParticleType(self.ups_case.nu_upscattered.pdgid) + ) + signature.secondary_types.append(self.target_type) + return [signature] + + def GetPossibleSignaturesFromParents(self, primary_type, target_type): + if ( + 
Particle.ParticleType(self.ups_case.nu_projectile.pdgid) == primary_type + ) and ((self.target_type == target_type)): + signature = dataclasses.InteractionSignature() + signature.primary_type = Particle.ParticleType( + self.ups_case.nu_projectile.pdgid + ) + signature.target_type = self.target_type + secondary_types = [] + secondary_types.append( + Particle.ParticleType(self.ups_case.nu_upscattered.pdgid) + ) + secondary_types.append( + Particle.ParticleType(self.ups_case.nuclear_target.pdgid) + ) + signature.secondary_types = secondary_types + return [signature] + return [] + + def DifferentialCrossSection(self, arg1, target=None, energy=None, Q2=None): + if type(arg1) == dataclasses.InteractionRecord: + interaction = arg1 + # Calculate Q2 assuming we are in the target rest frame + m1sq = interaction.primary_momentum[0] ** 2 - np.sum( + [p**2 for p in interaction.primary_momentum[1:]] + ) + m3sq = interaction.secondary_momenta[0][0] ** 2 - np.sum( + [p**2 for p in interaction.secondary_momenta[0][1:]] + ) + p1p3 = interaction.primary_momentum[0] * interaction.secondary_momenta[0][ + 0 + ] - np.sum( + p1 * p3 + for p1, p3 in zip( + interaction.primary_momentum[1:], + interaction.secondary_momenta[0][1:], + ) + ) + Q2 = -(m1sq + m3sq - 2 * p1p3) + energy = interaction.primary_momentum[0] + else: + primary = arg1 + interaction = dataclasses.InteractionRecord() + interaction.signature.primary_type = primary + interaction.signature.target_type = target + interaction.primary_momentum = [energy, 0, 0, 0] + interaction.target_mass = self.ups_case.MA + if interaction.signature.primary_type != Particle.ParticleType( + self.ups_case.nu_projectile.pdgid + ): + return 0 + if interaction.primary_momentum[0] < self.InteractionThreshold(interaction): + return 0 + Q2min = self.Q2Min(interaction) + Q2max = self.Q2Max(interaction) + if Q2 < Q2min or Q2 > Q2max: + return 0 + z = (Q2 - Q2min) / (Q2max - Q2min) + + if self.always_interpolate: + # Check if we can interpolate + val = 
self._query_interpolation_table([energy, z], mode="differential") + if val >= 0: + # we have recovered the differential cross section from the interpolation table + return val + + # If we have reached this block, we must compute the differential cross section using DarkNews + dxsec = self.ups_case.diff_xsec_Q2(energy, Q2).item() + return dxsec + + def TargetMass(self, target_type): + target_mass = self.ups_case.MA + return target_mass + + def SecondaryMasses(self, secondary_types): + secondary_masses = [] + secondary_masses.append(self.ups_case.m_ups) + secondary_masses.append(self.ups_case.MA) + return secondary_masses + + def SecondaryHelicities(self, record): + secondary_helicities = [] + secondary_helicities.append( + self.ups_case.h_upscattered * record.primary_helicity + ) + secondary_helicities.append(record.target_helicity) + self.h_ups = self.ups_case.m_ups + self.h_target = self.ups_case.MA + return secondary_helicities + + def TotalCrossSection(self, arg1, energy=None, target=None): + # Handle overloaded arguments + if type(arg1) == dataclasses.InteractionRecord: + primary = arg1.signature.primary_type + energy = arg1.primary_momentum[0] + target = arg1.signature.target_type + elif energy is not None and target is not None: + primary = arg1 + else: + print("Incorrect function call to TotalCrossSection!") + exit(0) + if int(primary) != self.ups_case.nu_projectile: + return 0 + interaction = dataclasses.InteractionRecord() + interaction.signature.primary_type = primary + interaction.signature.target_type = target + interaction.primary_momentum[0] = energy + if energy < self.InteractionThreshold(interaction): + # print("Python: energy %2.2f < self.InteractionThreshold(interaction) %2.2f"%(energy,self.InteractionThreshold(interaction))) + return 0 + + # Check if we can interpolate + val = self._query_interpolation_table([energy], mode="total") + if val >= 0: + # we have recovered the cross section from the interpolation table + return val + + # If we have 
reached this block, we must compute the cross section using DarkNews + xsec = self.ups_case.total_xsec(energy) + self.total_cross_section_table = np.append( + self.total_cross_section_table, [[energy, xsec]], axis=0 + ) + self._redefine_interpolation_objects(total=True) + return xsec + + def InteractionThreshold(self, interaction): + return self.ups_case.Ethreshold + + def Q2Min(self, interaction): + return phase_space.upscattering_Q2min( + interaction.primary_momentum[0], + self.ups_case.m_ups, + self.ups_case.MA, + ) + + def Q2Max(self, interaction): + return phase_space.upscattering_Q2max( + interaction.primary_momentum[0], + self.ups_case.m_ups, + self.ups_case.MA, + ) diff --git a/resources/CrossSections/DarkNewsTables/DarkNewsDecay.py b/resources/CrossSections/DarkNewsTables/DarkNewsDecay.py new file mode 100644 index 000000000..5a3b37125 --- /dev/null +++ b/resources/CrossSections/DarkNewsTables/DarkNewsDecay.py @@ -0,0 +1,320 @@ +import os +import numpy as np +import functools + +base_path = os.path.dirname(os.path.abspath(__file__)) +loader_file = os.path.join(base_path, "loader.py") +siren._util.load_module("loader", loader_file) + +# SIREN methods +from siren.interactions import DarkNewsDecay +from siren import dataclasses +from siren.dataclasses import Particle + +# A class representing a single decay_case DarkNews class +# Only handles methods concerning the decay part +class PyDarkNewsDecay(DarkNewsDecay): + def __init__(self, dec_case): + DarkNewsDecay.__init__(self) # C++ constructor + self.dec_case = dec_case + + # Some variables for storing the decay phase space integrator + self.decay_integrator = None + self.decay_norm = None + self.PS_samples = None + self.PS_weights = None + self.PS_weights_CDF = None + self.total_width = None + + def load_from_table(self, table_dir): + if table_dir is None: + print( + "No table_dir specified; will sample from new VEGAS integrator for each decay" + ) + print("WARNING: this will siginficantly slow down event 
generation") + return + + # Make the table directory where will we store cross section integrators + table_dir_exists = False + if os.path.exists(table_dir): + # print("Directory '%s' already exists"%table_dir) + table_dir_exists = True + else: + try: + os.makedirs(table_dir, exist_ok=False) + print("Directory '%s' created successfully" % table_dir) + except OSError as error: + print("Directory '%s' cannot be created" % table_dir) + exit(0) + + # Try to find the decay integrator + int_file = os.path.join(table_dir, "decay_integrator.pkl") + if os.path.isfile(int_file): + with open(int_file, "rb") as ifile: + self.decay_integrator = pickle.load(ifile) + # Try to find the normalization information + norm_file = os.path.join(table_dir, "decay_norm.json") + if os.path.isfile(norm_file): + with open( + norm_file, + ) as nfile: + self.decay_norm = json.load(nfile) + + + # serialization method + def get_representation(self): + return {"decay_integrator":self.decay_integrator, + "decay_norm":self.decay_norm, + "dec_case":self.dec_case, + "PS_samples":self.PS_samples, + "PS_weights":self.PS_weights, + "PS_weights_CDF":self.PS_weights_CDF, + "total_width":self.total_width, + } + + def SetIntegratorAndNorm(self, decay_norm, decay_integrator): + self.decay_norm = decay_norm + self.decay_integrator = decay_integrator + + def GetPossibleSignatures(self): + signature = dataclasses.InteractionSignature() + signature.primary_type = Particle.ParticleType(self.dec_case.nu_parent.pdgid) + signature.target_type = Particle.ParticleType.Decay + secondary_types = [] + secondary_types.append(Particle.ParticleType(self.dec_case.nu_daughter.pdgid)) + for secondary in self.dec_case.secondaries: + secondary_types.append(Particle.ParticleType(secondary.pdgid)) + signature.secondary_types = secondary_types + return [signature] + + def GetPossibleSignaturesFromParent(self, primary_type): + if Particle.ParticleType(self.dec_case.nu_parent.pdgid) == primary_type: + signature = 
dataclasses.InteractionSignature() + signature.primary_type = Particle.ParticleType( + self.dec_case.nu_parent.pdgid + ) + signature.target_type = Particle.ParticleType.Decay + secondary_types = [] + secondary_types.append( + Particle.ParticleType(self.dec_case.nu_daughter.pdgid) + ) + for secondary in self.dec_case.secondaries: + secondary_types.append(Particle.ParticleType(secondary.pdgid)) + signature.secondary_types = secondary_types + return [signature] + return [] + + def DifferentialDecayWidth(self, record): + # Momentum variables of HNL necessary for calculating decay phase space + PN = np.array(record.primary_momentum) + + if type(self.dec_case) == FermionSinglePhotonDecay: + gamma_idx = 0 + for secondary in record.signature.secondary_types: + if secondary == dataclasses.Particle.ParticleType.Gamma: + break + gamma_idx += 1 + if gamma_idx >= len(record.signature.secondary_types): + print("No gamma found in the list of secondaries!") + exit(0) + + Pgamma = np.array(record.secondary_momenta[gamma_idx]) + momenta = np.expand_dims(PN, 0), np.expand_dims(Pgamma, 0) + + elif type(self.dec_case) == FermionDileptonDecay: + lepminus_idx = -1 + lepplus_idx = -1 + nu_idx = -1 + for idx, secondary in enumerate(record.signature.secondary_types): + if secondary in [ + dataclasses.Particle.ParticleType.EMinus, + dataclasses.Particle.ParticleType.MuMinus, + dataclasses.Particle.ParticleType.TauMinus, + ]: + lepminus_idx = idx + elif secondary in [ + dataclasses.Particle.ParticleType.EPlus, + dataclasses.Particle.ParticleType.MuPlus, + dataclasses.Particle.ParticleType.TauPlus, + ]: + lepplus_idx = idx + else: + nu_idx = idx + if -1 in [lepminus_idx, lepplus_idx, nu_idx]: + print("Couldn't find two leptons and a neutrino in the final state!") + exit(0) + Pnu = np.array(record.secondary_momenta[nu_idx]) + Plepminus = np.array(record.secondary_momenta[lepminus_idx]) + Plepplus = np.array(record.secondary_momenta[lepplus_idx]) + momenta = ( + np.expand_dims(PN, 0), + 
np.expand_dims(Plepminus, 0), + np.expand_dims(Plepplus, 0), + np.expand_dims(Pnu, 0), + ) + else: + print("%s is not a valid decay class type!" % type(self.dec_case)) + exit(0) + return self.dec_case.differential_width(momenta) + + def TotalDecayWidth(self, arg1): + if type(arg1) == dataclasses.InteractionRecord: + primary = arg1.signature.primary_type + elif type(arg1) == dataclasses.Particle.ParticleType: + primary = arg1 + else: + print("Incorrect function call to TotalDecayWidth!") + exit(0) + if int(primary) != self.dec_case.nu_parent: + return 0 + if self.total_width is None: + # Need to set the total width + if type(self.dec_case) == FermionDileptonDecay and ( + self.dec_case.vector_off_shell and self.dec_case.scalar_off_shell + ): + # total width calculation requires evaluating an integral + if self.decay_integrator is None or self.decay_norm is None: + # We need to initialize a new VEGAS integrator in DarkNews + self.total_width, dec_norm, dec_integrator = self.dec_case.total_width( + return_norm=True, return_dec=True + ) + self.SetIntegratorAndNorm(dec_norm, dec_integrator) + else: + self.total_width = ( + self.decay_integrator["diff_decay_rate_0"].mean + * self.decay_norm["diff_decay_rate_0"] + ) + else: + self.total_width = self.dec_case.total_width() + return self.total_width + + def TotalDecayWidthForFinalState(self, record): + sig = self.GetPossibleSignatures()[0] + if ( + (record.signature.primary_type != sig.primary_type) + or (record.signature.target_type != sig.target_type) + or (len(record.signature.secondary_types) != len(sig.secondary_types)) + or ( + np.any( + [ + record.signature.secondary_types[i] != sig.secondary_types[i] + for i in range(len(sig.secondary_types)) + ] + ) + ) + ): + return 0 + ret = self.dec_case.total_width() + return ret + + def DensityVariables(self): + if type(self.dec_case) == FermionSinglePhotonDecay: + return "cost" + elif type(self.dec_case) == FermionDileptonDecay: + if self.dec_case.vector_on_shell and 
self.dec_case.scalar_on_shell: + print("Can't have both the scalar and vector on shell") + exit(0) + elif (self.dec_case.vector_on_shell and self.dec_case.scalar_off_shell) or ( + self.dec_case.vector_off_shell and self.dec_case.scalar_on_shell + ): + return "cost" + elif self.dec_case.vector_off_shell and self.dec_case.scalar_off_shell: + return "t,u,c3,phi34" + else: + print("%s is not a valid decay class type!" % type(self.dec_case)) + exit(0) + return "" + + def GetPSSample(self, random): + # Make the PS weight CDF if that hasn't been done + if self.PS_weights_CDF is None: + self.PS_weights_CDF = np.cumsum(self.PS_weights) + + # Random number to determine + x = random.Uniform(0, self.PS_weights_CDF[-1]) + + # find first instance of a CDF entry greater than x + PSidx = np.argmax(x - self.PS_weights_CDF <= 0) + return self.PS_samples[:, PSidx] + + def SampleRecordFromDarkNews(self, record, random): + # First, make sure we have PS samples and weights + if self.PS_samples is None or self.PS_weights is None: + # We need to generate new PS samples + if self.decay_integrator is None or self.decay_norm is None: + # We need to initialize a new VEGAS integrator in DarkNews + (self.PS_samples, PS_weights_dict), dec_norm, dec_integrator = self.dec_case.SamplePS( + return_norm=True, return_dec=True + ) + self.PS_weights = PS_weights_dict["diff_decay_rate_0"] + self.SetIntegratorAndNorm(dec_norm, dec_integrator) + else: + # We already have an integrator, we just need new PS samples + self.PS_samples, PS_weights_dict = self.dec_case.SamplePS( + existing_integrator=self.decay_integrator + ) + self.PS_weights = PS_weights_dict["diff_decay_rate_0"] + + # Now we must sample an PS point on the hypercube + PS = self.GetPSSample(random) + + # Find the four-momenta associated with this point + # Expand dims required to call DarkNews function on signle sample + four_momenta = get_decay_momenta_from_vegas_samples( + np.expand_dims(PS, 0), + self.dec_case, + 
np.expand_dims(np.array(record.primary_momentum), 0), + ) + + secondaries = record.GetSecondaryParticleRecords() + + if type(self.dec_case) == FermionSinglePhotonDecay: + gamma_idx = 0 + for secondary in record.signature.secondary_types: + if secondary == dataclasses.Particle.ParticleType.Gamma: + break + gamma_idx += 1 + if gamma_idx >= len(record.signature.secondary_types): + print("No gamma found in the list of secondaries!") + exit(0) + nu_idx = 1 - gamma_idx + secondaries[gamma_idx].four_momentum = np.squeeze(four_momenta["P_decay_photon"]) + secondaries[gamma_idx].mass = 0 + secondaries[nu_idx].four_momentum = np.squeeze(four_momenta["P_decay_N_daughter"]) + secondaries[nu_idx].mass = 0 + + elif type(self.dec_case) == FermionDileptonDecay: + lepminus_idx = -1 + lepplus_idx = -1 + nu_idx = -1 + for idx, secondary in enumerate(record.signature.secondary_types): + if secondary in [ + dataclasses.Particle.ParticleType.EMinus, + dataclasses.Particle.ParticleType.MuMinus, + dataclasses.Particle.ParticleType.TauMinus, + ]: + lepminus_idx = idx + elif secondary in [ + dataclasses.Particle.ParticleType.EPlus, + dataclasses.Particle.ParticleType.MuPlus, + dataclasses.Particle.ParticleType.TauPlus, + ]: + lepplus_idx = idx + else: + nu_idx = idx + if -1 in [lepminus_idx, lepplus_idx, nu_idx]: + print([lepminus_idx, lepplus_idx, nu_idx]) + print(record.signature.secondary_types) + print("Couldn't find two leptons and a neutrino in the final state!") + exit(0) + secondaries[lepminus_idx].four_momentum = ( + np.squeeze(four_momenta["P_decay_ell_minus"]) + ) + secondaries[lepplus_idx].four_momentum = ( + np.squeeze(four_momenta["P_decay_ell_plus"]) + ) + secondaries[nu_idx].four_momentum = ( + np.squeeze(four_momenta["P_decay_N_daughter"]) + ) + return record + diff --git a/resources/CrossSections/DarkNewsTables/logger.py b/resources/CrossSections/DarkNewsTables/logger.py new file mode 100644 index 000000000..e8ebef6f6 --- /dev/null +++ 
b/resources/CrossSections/DarkNewsTables/logger.py @@ -0,0 +1,12 @@ +# Monkey patch DarkNews logger to hide printouts + +import functools +from DarkNews.ModelContainer import ModelContainer +ModelContainer_configure_logger = ModelContainer.configure_logger + +@functools.wraps(ModelContainer.configure_logger) +def suppress_info(self, logger, loglevel="INFO", prettyprinter=None, logfile=None, verbose=False): + return ModelContainer_configure_logger(self, logger, loglevel="WARNING", prettyprinter=prettyprinter, logfile=logfile, verbose=verbose) + +ModelContainer.configure_logger = suppress_info + diff --git a/resources/CrossSections/DarkNewsTables/processes.py b/resources/CrossSections/DarkNewsTables/processes.py new file mode 100644 index 000000000..c26e0aad7 --- /dev/null +++ b/resources/CrossSections/DarkNewsTables/processes.py @@ -0,0 +1,342 @@ +import os +import siren + +base_path = os.path.dirname(os.path.abspath(__file__)) +loader_file = os.path.join(base_path, "loader.py") +siren._util.load_module("loader", loader_file) + +from DarkNews.ModelContainer import ModelContainer + +xs_path = siren.utilities.get_cross_section_model_path( + f"DarkNewsTables-v{siren.utilities.darknews_version()}", must_exist=False +) + +def GetDetectorModelTargets(detector_model): + """ + Determines the targets that exist inside the detector model + :return: lists of targets and strings + :rtype: (list, list) + """ + count = 0 + targets = [] + target_strs = [] + while detector_model.Materials.HasMaterial(count): + for target in detector_model.Materials.GetMaterialTargets(count): + if target not in targets: + targets.append(target) + if str(target).find("Nucleus") == -1: + continue + else: + target_str = str(target)[ + str(target).find("Type") + 5 : str(target).find("Nucleus") + ] + if target_str == "H": + target_str = "H1" + if target_str not in target_strs: + target_strs.append(target_str) + count += 1 + return targets, target_strs + + +def load_cross_section( + model_container, + upscattering_key, + 
tolerance=1e-6, + interp_tolerance=5e-2, + always_interpolate=True, +): + if upscattering_key not in model_container.ups_cases: + raise KeyError( + f'Upscattering key "{upscattering_key}" not present in model_container.ups_cases' + ) + upscattering_model = model_container.ups_cases[upscattering_key] + return PyDarkNewsCrossSection( + upscattering_model, + tolerance=tolerance, + interp_tolerance=interp_tolerance, + always_interpolate=always_interpolate, + ) + + +def load_cross_section_from_table( + model_container, + upscattering_key, + table_dir, + tolerance=1e-6, + interp_tolerance=5e-2, + always_interpolate=True, +): + subdir = "_".join(["CrossSection"] + [str(x) for x in upscattering_key]) + table_subdir = os.path.join(table_dir, subdir) + + cross_section = load_cross_section( + model_container, + upscattering_key, + tolerance=tolerance, + interp_tolerance=interp_tolerance, + always_interpolate=always_interpolate, + ) + cross_section.load_from_table(table_subdir) + return cross_section + + +def load_cross_section_from_pickle( + upscattering_key, + table_dir, + tolerance=1e-6, + interp_tolerance=5e-2, + always_interpolate=True, +): + subdir = "_".join(["CrossSection"] + [str(x) for x in upscattering_key]) + table_subdir = os.path.join(table_dir, subdir) + fname = os.path.join(table_dir, "xs_object.pkl") + with open(fname, "rb") as f: + xs_obj = pickle.load(f) + xs_obj.configure( + tolerance=tolerance, + interp_tolerance=interp_tolerance, + always_interpolate=always_interpolate, + ) + return xs_obj + + +def attempt_to_load_cross_section( + models, + ups_key, + tabel_dir, + preferences, +): + if len(preferences) == 0: + raise ValueError("preferences must have at least one entry") + + subdir = "_".join(["CrossSection"] + [str(x) for x in ups_key]) + loaded = False + cross_section = None + for p in preferences: + if p == "table": + table_subdir = os.path.join(table_dir, subdir) + if os.path.isdir(table_subdir): + try: + cross_section = append( + 
load_cross_section_from_table( + models, + ups_key, + table_subdir, + tolerance=tolerance, + interp_tolerance=interp_tolerance, + always_interpolate=always_interpolate, + ) + ) + loaded = True + except Exception as e: + print( + "Encountered exception while loading DN cross section from table" + ) + raise e from None + break + elif p == "pickle": + table_subdir = os.path.join(table_dir, subdir) + if os.path.isdir(table_subdir): + try: + cross_section = append( + load_cross_section_from_pickle( + ups_key, + table_subdir, + tolerance=tolerance, + interp_tolerance=interp_tolerance, + always_interpolate=always_interpolate, + ) + ) + loaded = True + except Exception as e: + print( + "Encountered exception while loading DN cross section from pickle" + ) + raise e from None + break + elif p == "normal": + try: + cross_sections = append( + load_cross_section( + models, + ups_key, + tolerance=tolerance, + interp_tolerance=interp_tolerance, + always_interpolate=always_interpolate, + ) + ) + loaded = True + except Exception as e: + print("Encountered exception while loading DN cross section normally") + raise e from None + break + + if not loaded: + raise RuntimeError("Not able to load DN cross section with any strategy") + return cross_section + + +def load_cross_sections( + model_kwargs, + table_dir=None, + tolerance=1e-6, + interp_tolerance=5e-2, + always_interpolate=True, + preferences=None, +): + if preferences is None: + preferences = ["table", "pickle", "normal"] + + models = ModelContainer(**model_kwargs) + + if table_dir is None: + table_dir = "" + + cross_sections = [] + for ups_key, ups_case in models.ups_cases.items(): + cross_sections.append( + attempt_to_load_cross_section(models, ups_key, table_dir, preferences) + ) + + return cross_sections + + +def load_processes( + primary_type=None, + target_types=None, + fill_tables_at_start=False, + Emax=None, + m4=None, + mu_tr_mu4=None, # GeV^-1 + UD4=0, + Umu4=0, + epsilon=0.0, + gD=0.0, + decay_product="photon", + 
noHC=True, + HNLtype="dirac", + nuclear_targets=None, + detector_model=None, + tolerance=1e-6, # supposed to represent machine epsilon + interp_tolerance=5e-2, # relative interpolation tolerance + always_interpolate=True, # bool whether to always interpolate the total/differential cross section +): + + if nuclear_targets is None and detector_model is None: + raise ValueError( + 'Either "nuclear_targets" or "detector_model" must be provided' + ) + + if nuclear_targets is None: + nuclear_targets = GetDetectorModelTargets(detector_model)[1] + + base_path = os.path.dirname(os.path.abspath(__file__)) + table_dir = os.path.join(base_path, "Dipole_M%2.2e_mu%2.2e" % (m4, mu_tr_mu4)) + + model_kwargs = { + "m4": m4, + "mu_tr_mu4": mu_tr_mu4, + "UD4": UD4, + "Umu4": Umu4, + "epsilon": epsilon, + "gD": gD, + "decay_product": decay_product, + "noHC": noHC, + } + + cross_sections = load_cross_sections( + model_kwargs, + table_dir=None, + tolerance=tolerance, + interp_tolerance=interp_tolerance, + always_interpolate=always_interpolate, + ) + + if fill_tables_at_start: + if Emax is None: + print( + "WARNING: Cannot fill cross section tables without specifying a maximum energy" + ) + else: + for cross_section in cross_sections: + cross_section.FillInterpolationTables(Emax=Emax) + + # Initialize primary InteractionCollection + # Loop over available cross sections and save those which match primary type + primary_cross_sections = [] + for cross_section in self.DN_processes.cross_sections: + if primary_type == _dataclasses.Particle.ParticleType( + cross_section.ups_case.nu_projectile.pdgid + ): + primary_cross_sections.append(cross_section) + primary_interaction_collection = _interactions.InteractionCollection( + primary_type, primary_cross_sections + ) + + # Initialize secondary processes and define secondary InteractionCollection objects + secondary_decays = {} + # Also keep track of the minimum decay width for defining the position distribution later + self.DN_min_decay_width = 
np.inf + # Loop over available decays, group by parent type + for decay in self.DN_processes.decays: + secondary_type = _dataclasses.Particle.ParticleType( + decay.dec_case.nu_parent.pdgid + ) + if secondary_type not in secondary_decays.keys(): + secondary_decays[secondary_type] = [] + secondary_decays[secondary_type].append(decay) + total_decay_width = decay.TotalDecayWidth(secondary_type) + if total_decay_width < self.DN_min_decay_width: + self.DN_min_decay_width = total_decay_width + # Now make the list of secondary cross section collections + # Add new secondary injection and physical processes at the same time + secondary_interaction_collections = [] + for secondary_type, decay_list in secondary_decays.items(): + # Define a sedcondary injection distribution + secondary_injection_process = _injection.SecondaryInjectionProcess() + secondary_physical_process = _injection.PhysicalProcess() + secondary_injection_process.primary_type = secondary_type + secondary_physical_process.primary_type = secondary_type + + # Add the secondary position distribution + if self.fid_vol is not None: + secondary_injection_process.AddSecondaryInjectionDistribution( + _distributions.SecondaryBoundedVertexDistribution(self.fid_vol) + ) + else: + secondary_injection_process.AddSecondaryInjectionDistribution( + _distributions.SecondaryPhysicalVertexDistribution() + ) + + self.secondary_injection_processes.append(secondary_injection_process) + self.secondary_physical_processes.append(secondary_physical_process) + + secondary_interaction_collections.append( + _interactions.InteractionCollection(secondary_type, decay_list) + ) + + self.SetInteractions( + primary_interaction_collection, secondary_interaction_collections + ) + + +def GetFiducialVolume(self): + """ + :return: identified fiducial volume for the experiment, None if not found + """ + detector_model_file = _util.get_detector_model_path(self.experiment) + with open(detector_model_file) as file: + fiducial_line = None + 
detector_line = None + for line in file: + data = line.split() + if len(data) <= 0: + continue + elif data[0] == "fiducial": + fiducial_line = line + elif data[0] == "detector": + detector_line = line + if fiducial_line is None or detector_line is None: + return None + return _detector.DetectorModel.ParseFiducialVolume(fiducial_line, detector_line) + return None From 226746d4506d1ebb1a5d2465dd4cde487cce82d6 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Thu, 6 Jun 2024 08:35:43 -0500 Subject: [PATCH 06/85] Sketch DN decay logic and fix some obvious issues in the DN xs logic --- .../DarkNewsTables/DarkNewsCrossSection.py | 52 +++-- .../DarkNewsTables/DarkNewsDecay.py | 52 ++--- .../CrossSections/DarkNewsTables/processes.py | 182 +++++++++++++++--- 3 files changed, 193 insertions(+), 93 deletions(-) diff --git a/resources/CrossSections/DarkNewsTables/DarkNewsCrossSection.py b/resources/CrossSections/DarkNewsTables/DarkNewsCrossSection.py index 688da76ad..6843972f7 100644 --- a/resources/CrossSections/DarkNewsTables/DarkNewsCrossSection.py +++ b/resources/CrossSections/DarkNewsTables/DarkNewsCrossSection.py @@ -39,30 +39,37 @@ def __init__( def load_from_table(self, table_dir): # Make the table directory where will we store cross section tables - table_dir_exists = False - if os.path.exists(table_dir): - # print("Directory '%s' already exists"%table_dir) - table_dir_exists = True - else: + if not os.path.exists(table_dir): try: os.makedirs(table_dir, exist_ok=False) - # print("Directory '%s' created successfully" % table_dir) except OSError as error: raise RuntimeError("Directory '%s' cannot be created" % table_dir) # Look in table dir and check whether total/differential xsec tables exist - if table_dir_exists: - total_xsec_file = os.path.join(table_dir, "total_cross_sections.npy") - if os.path.exists(total_xsec_file): - self.total_cross_section_table = np.load(total_xsec_file) - diff_xsec_file = os.path.join( - table_dir, "differential_cross_sections.npy" - ) 
- if os.path.exists(diff_xsec_file): - self.differential_cross_section_table = np.load(diff_xsec_file) + total_xsec_file = os.path.join(table_dir, "total_cross_sections.npy") + if os.path.exists(total_xsec_file): + self.total_cross_section_table = np.load(total_xsec_file) + diff_xsec_file = os.path.join( + table_dir, "differential_cross_sections.npy" + ) + if os.path.exists(diff_xsec_file): + self.differential_cross_section_table = np.load(diff_xsec_file) self.configure() + def save_to_table(self, table_dir, total=True, diff=True): + if total: + self._redefine_interpolation_objects(total=True) + with open( + os.path.join(table_dir, "total_cross_sections.npy"), "wb" + ) as f: + np.save(f, self.total_cross_section_table) + if diff: + self._redefine_interpolation_objects(diff=True) + with open( + os.path.join(table_dir, "differential_cross_sections.npy"), "wb" + ) as f: + np.save(f, self.differential_cross_section_table) # serialization method def get_representation(self): @@ -313,21 +320,6 @@ def FillInterpolationTables(self, total=True, diff=True, factor=0.8, Emax=None): self._redefine_interpolation_objects(total=total, diff=diff) return num_added_points - # Saves the tables for the scipy interpolation objects - def SaveInterpolationTables(self, table_dir, total=True, diff=True): - if total: - self._redefine_interpolation_objects(total=True) - with open( - os.path.join(table_dir, "total_cross_sections.npy"), "wb" - ) as f: - np.save(f, self.total_cross_section_table) - if diff: - self._redefine_interpolation_objects(diff=True) - with open( - os.path.join(table_dir, "differential_cross_sections.npy"), "wb" - ) as f: - np.save(f, self.differential_cross_section_table) - def GetPossiblePrimaries(self): return [Particle.ParticleType(self.ups_case.nu_projectile.pdgid)] diff --git a/resources/CrossSections/DarkNewsTables/DarkNewsDecay.py b/resources/CrossSections/DarkNewsTables/DarkNewsDecay.py index 5a3b37125..c21d947a3 100644 --- 
a/resources/CrossSections/DarkNewsTables/DarkNewsDecay.py +++ b/resources/CrossSections/DarkNewsTables/DarkNewsDecay.py @@ -27,49 +27,35 @@ def __init__(self, dec_case): self.total_width = None def load_from_table(self, table_dir): - if table_dir is None: - print( - "No table_dir specified; will sample from new VEGAS integrator for each decay" - ) - print("WARNING: this will siginficantly slow down event generation") - return - # Make the table directory where will we store cross section integrators - table_dir_exists = False - if os.path.exists(table_dir): - # print("Directory '%s' already exists"%table_dir) - table_dir_exists = True - else: + if not os.path.exists(table_dir): try: os.makedirs(table_dir, exist_ok=False) - print("Directory '%s' created successfully" % table_dir) except OSError as error: - print("Directory '%s' cannot be created" % table_dir) - exit(0) + raise RuntimeError("Directory '%s' cannot be created" % table_dir) # Try to find the decay integrator - int_file = os.path.join(table_dir, "decay_integrator.pkl") - if os.path.isfile(int_file): - with open(int_file, "rb") as ifile: - self.decay_integrator = pickle.load(ifile) - # Try to find the normalization information - norm_file = os.path.join(table_dir, "decay_norm.json") - if os.path.isfile(norm_file): - with open( - norm_file, - ) as nfile: - self.decay_norm = json.load(nfile) + decay_file = os.path.join(table_dir, "decay.pkl") + if os.path.isfile(decay_file): + with open(decay_file, "rb") as f: + self.decay_norm, self.decay_integrator = pickle.load(f) + def save_to_table(self, table_dir): + with open(os.path.join(table_dir, "decay.pkl"), "wb") as f: + pickle.dump( + (self.decay_norm, self.decay_integrator), + f, + ) # serialization method def get_representation(self): - return {"decay_integrator":self.decay_integrator, - "decay_norm":self.decay_norm, - "dec_case":self.dec_case, - "PS_samples":self.PS_samples, - "PS_weights":self.PS_weights, - 
"PS_weights_CDF":self.PS_weights_CDF, - "total_width":self.total_width, + return {"decay_integrator": self.decay_integrator, + "decay_norm": self.decay_norm, + "dec_case": self.dec_case, + "PS_samples": self.PS_samples, + "PS_weights": self.PS_weights, + "PS_weights_CDF": self.PS_weights_CDF, + "total_width": self.total_width, } def SetIntegratorAndNorm(self, decay_norm, decay_integrator): diff --git a/resources/CrossSections/DarkNewsTables/processes.py b/resources/CrossSections/DarkNewsTables/processes.py index c26e0aad7..1fbfaf7bb 100644 --- a/resources/CrossSections/DarkNewsTables/processes.py +++ b/resources/CrossSections/DarkNewsTables/processes.py @@ -11,6 +11,7 @@ f"DarkNewsTables-v{siren.utilities.darknews_version()}", must_exist=False ) + def GetDetectorModelTargets(detector_model): """ Determines the targets that exist inside the detector model @@ -117,15 +118,13 @@ def attempt_to_load_cross_section( table_subdir = os.path.join(table_dir, subdir) if os.path.isdir(table_subdir): try: - cross_section = append( - load_cross_section_from_table( - models, - ups_key, - table_subdir, - tolerance=tolerance, - interp_tolerance=interp_tolerance, - always_interpolate=always_interpolate, - ) + cross_section = load_cross_section_from_table( + models, + ups_key, + table_subdir, + tolerance=tolerance, + interp_tolerance=interp_tolerance, + always_interpolate=always_interpolate, ) loaded = True except Exception as e: @@ -138,14 +137,12 @@ def attempt_to_load_cross_section( table_subdir = os.path.join(table_dir, subdir) if os.path.isdir(table_subdir): try: - cross_section = append( - load_cross_section_from_pickle( - ups_key, - table_subdir, - tolerance=tolerance, - interp_tolerance=interp_tolerance, - always_interpolate=always_interpolate, - ) + cross_section = load_cross_section_from_pickle( + ups_key, + table_subdir, + tolerance=tolerance, + interp_tolerance=interp_tolerance, + always_interpolate=always_interpolate, ) loaded = True except Exception as e: @@ -156,14 
+153,12 @@ def attempt_to_load_cross_section( break elif p == "normal": try: - cross_sections = append( - load_cross_section( - models, - ups_key, - tolerance=tolerance, - interp_tolerance=interp_tolerance, - always_interpolate=always_interpolate, - ) + cross_sections = load_cross_section( + models, + ups_key, + tolerance=tolerance, + interp_tolerance=interp_tolerance, + always_interpolate=always_interpolate, ) loaded = True except Exception as e: @@ -201,6 +196,125 @@ def load_cross_sections( return cross_sections +def load_decay( + model_container, + decay_key, +): + if decay_key not in model_container.dec_cases: + raise KeyError( + f'Decay key "{decay_key}" not present in model_container.dec_cases' + ) + decay_model = model_container.dec_cases[decay_key] + return PyDarkNewsDecay( + decay_model, + ) + + +def load_decay_from_table( + model_container, + decay_key, + table_dir, +): + subdir = "_".join(["Decay"] + [str(x) for x in decay_key]) + table_subdir = os.path.join(table_dir, subdir) + + decay = load_decay( + model_container, + decay_key, + ) + decay.load_from_table(table_subdir) + return decay + + +def load_decay_from_pickle( + decay_key, + table_dir, +): + subdir = "_".join(["Decay"] + [str(x) for x in decay_key]) + table_subdir = os.path.join(table_dir, subdir) + fname = os.path.join(table_dir, "dec_object.pkl") + with open(fname, "rb") as f: + dec_obj = pickle.load(f) + return dec_obj + + +def attempt_to_load_decay( + models, + dec_key, + tabel_dir, + preferences, +): + if len(preferences) == 0: + raise ValueError("preferences must have at least one entry") + + subdir = "_".join(["Decay"] + [str(x) for x in dec_key]) + loaded = False + decay = None + for p in preferences: + if p == "table": + table_subdir = os.path.join(table_dir, subdir) + if os.path.isdir(table_subdir): + try: + decay = load_decay_from_table( + models, + dec_key, + table_subdir, + ) + loaded = True + except Exception as e: + print("Encountered exception while loading DN decay from 
table") + raise e from None + break + elif p == "pickle": + table_subdir = os.path.join(table_dir, subdir) + if os.path.isdir(table_subdir): + try: + decay = load_decay_from_pickle( + ups_key, + table_dir, + ) + loaded = True + except Exception as e: + print("Encountered exception while loading DN decay from pickle") + raise e from None + break + elif p == "normal": + try: + decay = load_decay( + models, + dec_key, + ) + loaded = True + except Exception as e: + print("Encountered exception while loading DN decay normally") + raise e from None + break + + if not loaded: + raise RuntimeError("Not able to load DN decay with any strategy") + return decay + + +def load_decays( + model_kwargs, + table_dir=None, + preferences=None, +): + if preferences is None: + preferences = ["table", "pickle", "normal"] + + models = ModelContainer(**model_kwargs) + + if table_dir is None: + table_dir = "" + + decays = [] + for dec_key, dec_case in models.dec_cases.items(): + decays.append(attempt_to_load_decy(models, dec_key, table_dir, preferences)) + + return decays + + def load_processes( primary_type=None, target_types=None, @@ -245,11 +359,19 @@ def load_processes( } cross_sections = load_cross_sections( - model_kwargs, - table_dir=None, - tolerance=tolerance, - interp_tolerance=interp_tolerance, - always_interpolate=always_interpolate, + model_kwargs, + table_dir=None, + tolerance=tolerance, + interp_tolerance=interp_tolerance, + always_interpolate=always_interpolate, + ) + + decays = load_decays( + model_kwargs, + table_dir=None, + tolerance=tolerance, + interp_tolerance=interp_tolerance, + always_interpolate=always_interpolate, ) if fill_tables_at_start: From e88fee6964b2226db043258cdc4e740d547313ee Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Thu, 6 Jun 2024 09:28:58 -0500 Subject: [PATCH 07/85] Return cross sections and decays --- .../CrossSections/DarkNewsTables/processes.py | 120 +++--------------- 1 file changed, 19 insertions(+), 101 deletions(-) diff --git 
a/resources/CrossSections/DarkNewsTables/processes.py b/resources/CrossSections/DarkNewsTables/processes.py index 1fbfaf7bb..cb4d95edc 100644 --- a/resources/CrossSections/DarkNewsTables/processes.py +++ b/resources/CrossSections/DarkNewsTables/processes.py @@ -172,7 +172,7 @@ def attempt_to_load_cross_section( def load_cross_sections( - model_kwargs, + models, table_dir=None, tolerance=1e-6, interp_tolerance=5e-2, @@ -182,8 +182,6 @@ def load_cross_sections( if preferences is None: preferences = ["table", "pickle", "normal"] - models = ModelContainer(**model_kwargs) - if table_dir is None: table_dir = "" @@ -296,15 +294,13 @@ def attempt_to_load_decay( def load_decays( - model_kwargs, + models, table_dir=None, preferences=None, ): if preferences is None: preferences = ["table", "pickle", "normal"] - models = ModelContainer(**model_kwargs) - if table_dir is None: table_dir = "" @@ -347,33 +343,32 @@ def load_processes( base_path = os.path.dirname(os.path.abspath(__file__)) table_dir = os.path.join(base_path, "Dipole_M%2.2e_mu%2.2e" % (m4, mu_tr_mu4)) - model_kwargs = { - "m4": m4, - "mu_tr_mu4": mu_tr_mu4, - "UD4": UD4, - "Umu4": Umu4, - "epsilon": epsilon, - "gD": gD, - "decay_product": decay_product, - "noHC": noHC, - } + models = ModelContainer( + m4=m4, + mu_tr_mu4=mu_tr_mu4, + UD4=UD4, + Umu4=Umu4, + epsilon=epsilon, + gD=gD, + decay_product=decay_product, + noHC=noHC, + ) cross_sections = load_cross_sections( - model_kwargs, - table_dir=None, + models, + table_dir=table_dir, tolerance=tolerance, interp_tolerance=interp_tolerance, always_interpolate=always_interpolate, ) decays = load_decays( - model_kwargs, - table_dir=None, - tolerance=tolerance, - interp_tolerance=interp_tolerance, - always_interpolate=always_interpolate, + models, + table_dir=table_dir, ) + cross_sections = [xs for xs in cross_sections if len([s for s in xs.GetPossibleSignatures() if s.primary_type == primary_type])>0] + if fill_tables_at_start: if Emax is None: print( @@ -383,82 +378,5 @@ 
def load_processes( for cross_section in cross_sections: cross_section.FillInterpolationTables(Emax=Emax) - # Initialize primary InteractionCollection - # Loop over available cross sections and save those which match primary type - primary_cross_sections = [] - for cross_section in self.DN_processes.cross_sections: - if primary_type == _dataclasses.Particle.ParticleType( - cross_section.ups_case.nu_projectile.pdgid - ): - primary_cross_sections.append(cross_section) - primary_interaction_collection = _interactions.InteractionCollection( - primary_type, primary_cross_sections - ) - - # Initialize secondary processes and define secondary InteractionCollection objects - secondary_decays = {} - # Also keep track of the minimum decay width for defining the position distribution later - self.DN_min_decay_width = np.inf - # Loop over available decays, group by parent type - for decay in self.DN_processes.decays: - secondary_type = _dataclasses.Particle.ParticleType( - decay.dec_case.nu_parent.pdgid - ) - if secondary_type not in secondary_decays.keys(): - secondary_decays[secondary_type] = [] - secondary_decays[secondary_type].append(decay) - total_decay_width = decay.TotalDecayWidth(secondary_type) - if total_decay_width < self.DN_min_decay_width: - self.DN_min_decay_width = total_decay_width - # Now make the list of secondary cross section collections - # Add new secondary injection and physical processes at the same time - secondary_interaction_collections = [] - for secondary_type, decay_list in secondary_decays.items(): - # Define a sedcondary injection distribution - secondary_injection_process = _injection.SecondaryInjectionProcess() - secondary_physical_process = _injection.PhysicalProcess() - secondary_injection_process.primary_type = secondary_type - secondary_physical_process.primary_type = secondary_type - - # Add the secondary position distribution - if self.fid_vol is not None: - secondary_injection_process.AddSecondaryInjectionDistribution( - 
_distributions.SecondaryBoundedVertexDistribution(self.fid_vol) - ) - else: - secondary_injection_process.AddSecondaryInjectionDistribution( - _distributions.SecondaryPhysicalVertexDistribution() - ) - - self.secondary_injection_processes.append(secondary_injection_process) - self.secondary_physical_processes.append(secondary_physical_process) - - secondary_interaction_collections.append( - _interactions.InteractionCollection(secondary_type, decay_list) - ) - - self.SetInteractions( - primary_interaction_collection, secondary_interaction_collections - ) - + return cross_sections + decays -def GetFiducialVolume(self): - """ - :return: identified fiducial volume for the experiment, None if not found - """ - detector_model_file = _util.get_detector_model_path(self.experiment) - with open(detector_model_file) as file: - fiducial_line = None - detector_line = None - for line in file: - data = line.split() - if len(data) <= 0: - continue - elif data[0] == "fiducial": - fiducial_line = line - elif data[0] == "detector": - detector_line = line - if fiducial_line is None or detector_line is None: - return None - return _detector.DetectorModel.ParseFiducialVolume(fiducial_line, detector_line) - return None From c19596f7c57ad851129605a6c1a0a198a7db556e Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Thu, 6 Jun 2024 09:47:15 -0500 Subject: [PATCH 08/85] Introspect package version in __init__.py --- python/__init__.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/python/__init__.py b/python/__init__.py index 7f0f00e2d..aec0b49c8 100644 --- a/python/__init__.py +++ b/python/__init__.py @@ -9,6 +9,16 @@ from . 
import _util +# Introspect package version +import sys +if sys.version_info >= (3, 8): + from importlib import metadata +else: + import importlib_metadata as metadata +__version__ = metadata.version(__package__) +del sys +del metadata + # set up some public-facing utilities functions utilities.get_resource_package_dir = _util.resource_package_dir utilities.get_detector_model_path = _util.get_detector_model_path From 79f1b345c9542f937f96ef4d7b111adf293e740d Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Thu, 6 Jun 2024 10:05:50 -0500 Subject: [PATCH 09/85] Start writing logic to generically load resources --- python/SIREN_DarkNews.py | 2 +- python/_util.py | 55 +++++++++++++++++++++++++++------------- 2 files changed, 38 insertions(+), 19 deletions(-) diff --git a/python/SIREN_DarkNews.py b/python/SIREN_DarkNews.py index b5c4fb2f5..bedbd7137 100644 --- a/python/SIREN_DarkNews.py +++ b/python/SIREN_DarkNews.py @@ -57,7 +57,7 @@ def __init__( if self.table_dir is None: self.table_dir = os.path.join( resources_dir, - "CrossSections", + "Processes", "DarkNewsTables", datetime.datetime.now().strftime("%Y_%m_%d__%H:%M"), ) diff --git a/python/_util.py b/python/_util.py index 5d4dff1ac..780faee1a 100644 --- a/python/_util.py +++ b/python/_util.py @@ -543,34 +543,53 @@ def _get_model_path(model_name, prefix=None, suffix=None, is_file=True, must_exi return os.path.join(base_dir, model_name, model_file_name) -def get_detector_model_path(model_name, must_exist=True): +def get_detector_model_file_path(model_name, must_exist=True): return _get_model_path(model_name, prefix="Detectors/densities", suffix=".dat", is_file=True, must_exist=must_exist) -def get_material_model_path(model_name, must_exist=True): +def get_material_model_file_path(model_name, must_exist=True): return _get_model_path(model_name, prefix="Detectors/materials", suffix=".dat", is_file=True, must_exist=must_exist) -def get_cross_section_model_path(model_name, must_exist=True): - return 
_get_model_path(model_name, prefix="CrossSections", is_file=False, must_exist=must_exist) +_resource_folder_by_name = { + "flux": "Fluxes", + "detector": "Detectors", + "processes": "Processes", +} def get_flux_model_path(model_name, must_exist=True): - return _get_model_path(model_name,prefix="Fluxes", is_file=False, must_exist=must_exist) + return _get_model_path(model_name, prefix=_resource_folder_by_name["flux"], is_file=False, must_exist=must_exist) + + +def get_detector_model_path(model_name, must_exist=True): + return _get_model_path(model_name, prefix=_resource_folder_by_name["detector"], is_file=False, must_exist=must_exist) + + +def get_processes_model_path(model_name, must_exist=True): + return _get_model_path(model_name, prefix=_resource_folder_by_name["processes"], is_file=False, must_exist=must_exist) + + +def load_resource(resource_name, resource_type, *args, **kwargs): + folder = _resource_folder_by_name[resource_type] + + abs_dir = _get_model_path(model_name, prefix=folder, is_file=False, must_exist=True) + + fname = os.path.join(abs_flux_dir, f"{resource_name}.py") + assert(os.path.isfile(fname)) + resource_module = load_module(f"siren-{resource_type}-{model_name}", fname, persist=False) + loader = getattr(resource_module, f"load_{resource_name}") + resource = loader(*args, **kwargs) + return resource def load_flux(model_name, *args, **kwargs): - abs_flux_dir = get_flux_model_path(model_name, must_exist=True) - - # require existence of flux.py - flux_file = os.path.join(abs_flux_dir, "flux.py") - assert(os.path.isfile(flux_file)) - spec = importlib.util.spec_from_file_location("flux", flux_file) - flux = importlib.util.module_from_spec(spec) - module_name = f"siren-flux-{model_name}-{str(uuid.uuid4())}" - sys.modules[module_name] = flux - spec.loader.exec_module(flux) - flux_file = flux.load_flux(*args, **kwargs) - del sys.modules[module_name] # remove flux directory from the system - return flux_file + return load_resource("flux", model_name, 
*args, **kwargs) + + +def load_detector(model_name, *args, **kwargs): + return load_resource("detector", model_name, *args, **kwargs) + +def load_processes(model_name, *args, **kwargs): + return load_resource("processes", model_name, *args, **kwargs) From d5bbec454b2d3e8a95af1932ccabc768f6108cf5 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Tue, 27 Aug 2024 14:05:29 -0600 Subject: [PATCH 10/85] Remove material model path method --- python/__init__.py | 1 - 1 file changed, 1 deletion(-) diff --git a/python/__init__.py b/python/__init__.py index aec0b49c8..b83d12fbe 100644 --- a/python/__init__.py +++ b/python/__init__.py @@ -22,7 +22,6 @@ # set up some public-facing utilities functions utilities.get_resource_package_dir = _util.resource_package_dir utilities.get_detector_model_path = _util.get_detector_model_path -utilities.get_material_model_path = _util.get_material_model_path utilities.get_cross_section_model_path = _util.get_cross_section_model_path utilities.get_flux_model_path = _util.get_flux_model_path utilities.load_flux = _util.load_flux From c9a09d1f32cfd6538e699348c00cce1408703afb Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Wed, 28 Aug 2024 15:48:34 -0600 Subject: [PATCH 11/85] Needs functools --- resources/CrossSections/DarkNewsTables/logger.py | 1 + 1 file changed, 1 insertion(+) diff --git a/resources/CrossSections/DarkNewsTables/logger.py b/resources/CrossSections/DarkNewsTables/logger.py index e8ebef6f6..bdd03c096 100644 --- a/resources/CrossSections/DarkNewsTables/logger.py +++ b/resources/CrossSections/DarkNewsTables/logger.py @@ -1,4 +1,5 @@ # Monkey patch DarkNews logger to hide printouts +import functools from DarkNews.ModelContainer import ModelContainer ModelContainer_configure_logger = ModelContainer.configure_logger From 844e0cde467ba0bfcedc390cdefe09dc804500d7 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Wed, 28 Aug 2024 16:01:22 -0600 Subject: [PATCH 12/85] import struct. 
Properly return the module from sys.modules. Call exec_module with the actual module. Swap the argument order in load_resource. Fix variable names in load_resource. --- python/_util.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/python/_util.py b/python/_util.py index 780faee1a..4a50c531f 100644 --- a/python/_util.py +++ b/python/_util.py @@ -133,6 +133,7 @@ def get_platform(): sys.platform does. The result can be: linux32, linux64, win32, win64, osx32, osx64. Other platforms may be added in the future. """ + import struct # Get platform if sys.platform.startswith("linux"): plat = "linux%i" @@ -175,11 +176,11 @@ def load_module(name, path, persist=True): url = pathlib.Path(os.path.abspath(path)).as_uri() module_name = f"{name}-{str(uuid.uuid5(uuid.NAMESPACE_URL, url))}" if module_name in sys.modules: - return module + return sys.modules[module_name] spec = importlib.util.spec_from_file_location(name, path) module = importlib.util.module_from_spec(spec) sys.modules[module_name] = module - spec.loader.exec_module(module_name) + spec.loader.exec_module(module) module = sys.modules[module_name] if not persist: del sys.modules[module_name] @@ -570,14 +571,14 @@ def get_processes_model_path(model_name, must_exist=True): return _get_model_path(model_name, prefix=_resource_folder_by_name["processes"], is_file=False, must_exist=must_exist) -def load_resource(resource_name, resource_type, *args, **kwargs): +def load_resource(resource_type, resource_name, *args, **kwargs): folder = _resource_folder_by_name[resource_type] - abs_dir = _get_model_path(model_name, prefix=folder, is_file=False, must_exist=True) + abs_dir = _get_model_path(resource_name, prefix=folder, is_file=False, must_exist=True) - fname = os.path.join(abs_flux_dir, f"{resource_name}.py") + fname = os.path.join(abs_dir, f"{resource_name}.py") assert(os.path.isfile(fname)) - resource_module = load_module(f"siren-{resource_type}-{model_name}", fname, persist=False) + 
resource_module = load_module(f"siren-{resource_type}-{resource_name}", fname, persist=False) loader = getattr(resource_module, f"load_{resource_name}") resource = loader(*args, **kwargs) return resource From 5940daeddb00df8127c3dcc4d338de58983f23b9 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Wed, 28 Aug 2024 16:01:52 -0600 Subject: [PATCH 13/85] docstrings, type hints, and variable names --- .../CrossSections/DarkNewsTables/processes.py | 177 +++++++++++++----- 1 file changed, 133 insertions(+), 44 deletions(-) diff --git a/resources/CrossSections/DarkNewsTables/processes.py b/resources/CrossSections/DarkNewsTables/processes.py index cb4d95edc..1d0832fcb 100644 --- a/resources/CrossSections/DarkNewsTables/processes.py +++ b/resources/CrossSections/DarkNewsTables/processes.py @@ -1,4 +1,5 @@ import os +from typing import Tuple, List, Any, Optional import siren base_path = os.path.dirname(os.path.abspath(__file__)) @@ -7,16 +8,30 @@ from DarkNews.ModelContainer import ModelContainer +# Import PyDarkNewsDecay and PyDarkNewsCrossSection +decay_file = os.path.join(base_path, "DarkNewsDecay.py") +cross_section_file = os.path.join(base_path, "DarkNewsCrossSection.py") +siren._util.load_module("DarkNewsDecay", decay_file) +siren._util.load_module("DarkNewsCrossSection", cross_section_file) + +from DarkNewsDecay import PyDarkNewsDecay +from DarkNewsCrossSection import PyDarkNewsCrossSection + xs_path = siren.utilities.get_cross_section_model_path( f"DarkNewsTables-v{siren.utilities.darknews_version()}", must_exist=False ) - -def GetDetectorModelTargets(detector_model): +def GetDetectorModelTargets(detector_model: siren.detector.DetectorModel) -> Tuple[List[siren.dataclasses.Particle.ParticleType], List[str]]: """ - Determines the targets that exist inside the detector model - :return: lists of targets and strings - :rtype: (list, list) + Determines the targets that exist inside the detector model. 
+ + Args: + detector_model (siren.detector.DetectorModel): The detector model object. + + Returns: + Tuple[List[siren.dataclasses.Particle.ParticleType], List[str]]: A tuple containing two lists: + - List of target objects (ParticleType) + - List of target strings """ count = 0 targets = [] @@ -40,12 +55,28 @@ def GetDetectorModelTargets(detector_model): def load_cross_section( - model_container, - upscattering_key, - tolerance=1e-6, - interp_tolerance=5e-2, - always_interpolate=True, -): + model_container: ModelContainer, + upscattering_key: Any, + tolerance: float = 1e-6, + interp_tolerance: float = 5e-2, + always_interpolate: bool = True, +) -> PyDarkNewsCrossSection: + """ + Loads a cross-section object based on the given parameters. + + Args: + model_container (ModelContainer): The model container object. + upscattering_key (Any): The key for the upscattering model. + tolerance (float, optional): Tolerance for calculations. Defaults to 1e-6. + interp_tolerance (float, optional): Interpolation tolerance. Defaults to 5e-2. + always_interpolate (bool, optional): Whether to always interpolate. Defaults to True. + + Returns: + PyDarkNewsCrossSection: The loaded cross-section object. + + Raises: + KeyError: If the upscattering key is not present in model_container.ups_cases. 
+ """ if upscattering_key not in model_container.ups_cases: raise KeyError( f'Upscattering key "{upscattering_key}" not present in model_container.ups_cases' @@ -88,6 +119,7 @@ def load_cross_section_from_pickle( interp_tolerance=5e-2, always_interpolate=True, ): + import pickle subdir = "_".join(["CrossSection"] + [str(x) for x in upscattering_key]) table_subdir = os.path.join(table_dir, subdir) fname = os.path.join(table_dir, "xs_object.pkl") @@ -102,11 +134,33 @@ def load_cross_section_from_pickle( def attempt_to_load_cross_section( - models, - ups_key, - tabel_dir, - preferences, -): + models: ModelContainer, + ups_key: Any, + table_dir: str, + preferences: List[str], + tolerance: float = 1e-6, + interp_tolerance: float = 5e-2, + always_interpolate: bool = True, +) -> PyDarkNewsCrossSection: + """ + Attempts to load a cross-section object using different strategies based on preferences. + + Args: + models (ModelContainer): The model container object. + ups_key (Any): The key for the upscattering model. + table_dir (str): Directory path for the tables. + preferences (List[str]): List of loading preferences (e.g., ["table", "pickle", "normal"]). + tolerance (float, optional): Tolerance for calculations. Defaults to 1e-6. + interp_tolerance (float, optional): Interpolation tolerance. Defaults to 5e-2. + always_interpolate (bool, optional): Whether to always interpolate. Defaults to True. + + Returns: + PyDarkNewsCrossSection: The loaded cross-section object. + + Raises: + ValueError: If preferences list is empty. + RuntimeError: If unable to load the cross-section with any strategy. 
+ """ if len(preferences) == 0: raise ValueError("preferences must have at least one entry") @@ -188,7 +242,12 @@ def load_cross_sections( cross_sections = [] for ups_key, ups_case in models.ups_cases.items(): cross_sections.append( - attempt_to_load_cross_section(models, ups_key, table_dir, preferences) + attempt_to_load_cross_section(models, ups_key, + table_dir, + preferences, + tolerance, + interp_tolerance, + always_interpolate) ) return cross_sections @@ -228,6 +287,7 @@ def load_decay_from_pickle( decay_key, table_dir, ): + import pickle subdir = "_".join(["Decay"] + [str(x) for x in decay_key]) table_subdir = os.path.join(table_dir, subdir) fname = os.path.join(table_dir, "dec_object.pkl") @@ -238,14 +298,14 @@ def load_decay_from_pickle( def attempt_to_load_decay( models, - dec_key, - tabel_dir, + decay_key, + table_dir, preferences, ): if len(preferences) == 0: raise ValueError("preferences must have at least one entry") - subdir = "_".join(["Decay"] + [str(x) for x in dec_key]) + subdir = "_".join(["Decay"] + [str(x) for x in decay_key]) loaded = False decay = None for p in preferences: @@ -255,7 +315,7 @@ def attempt_to_load_decay( try: decay = load_decay_from_table( models, - dec_key, + decay_key, table_subdir, ) loaded = True @@ -268,7 +328,7 @@ def attempt_to_load_decay( if os.path.isdir(table_subdir): try: decay = load_decay_from_pickle( - ups_key, + decay_key, table_dir, ) loaded = True @@ -280,7 +340,7 @@ def attempt_to_load_decay( try: decay = load_decay( models, - dec_key, + decay_key, ) loaded = True except Exception as e: @@ -305,32 +365,61 @@ def load_decays( table_dir = "" decays = [] - for dec_key, dec_case in models.dec_cases.items(): - decays.append(attempt_to_load_decy(models, dec_key, table_dir, preferences)) + for decay_key, dec_case in models.dec_cases.items(): + decays.append(attempt_to_load_decay(models, decay_key, table_dir, preferences)) return decays def load_processes( - primary_type=None, - target_types=None, - 
fill_tables_at_start=False, - Emax=None, - m4=None, - mu_tr_mu4=None, # GeV^-1 - UD4=0, - Umu4=0, - epsilon=0.0, - gD=0.0, - decay_product="photon", - noHC=True, - HNLtype="dirac", - nuclear_targets=None, - detector_model=None, - tolerance=1e-6, # supposed to represent machine epsilon - interp_tolerance=5e-2, # relative interpolation tolerance - always_interpolate=True, # bool whether to always interpolate the total/differential cross section -): + primary_type: Optional[Any] = None, + target_types: Optional[List[Any]] = None, + fill_tables_at_start: bool = False, + Emax: Optional[float] = None, + m4: Optional[float] = None, + mu_tr_mu4: Optional[float] = None, + UD4: float = 0, + Umu4: float = 0, + epsilon: float = 0.0, + gD: float = 0.0, + decay_product: str = "photon", + noHC: bool = True, + HNLtype: str = "dirac", + nuclear_targets: Optional[List[str]] = None, + detector_model: Optional[Any] = None, + tolerance: float = 1e-6, + interp_tolerance: float = 5e-2, + always_interpolate: bool = True, +) -> List[Any]: + """ + Loads and returns a list of cross-section and decay objects based on the given parameters. + + Args: + primary_type (Optional[Any]): The primary particle type. + target_types (Optional[List[Any]]): List of target particle types. + fill_tables_at_start (bool): Whether to fill interpolation tables at start. + Emax (Optional[float]): Maximum energy for table filling. + m4 (Optional[float]): Mass parameter. + mu_tr_mu4 (Optional[float]): Transition magnetic moment parameter. + UD4 (float): UD4 parameter. + Umu4 (float): Umu4 parameter. + epsilon (float): Epsilon parameter. + gD (float): gD parameter. + decay_product (str): Type of decay product. + noHC (bool): noHC parameter. + HNLtype (str): Type of HNL (e.g., "dirac"). + nuclear_targets (Optional[List[str]]): List of nuclear targets. + detector_model (Optional[Any]): Detector model object. + tolerance (float): Tolerance for calculations. + interp_tolerance (float): Interpolation tolerance. 
+ always_interpolate (bool): Whether to always interpolate. + + Returns: + List[Any]: A list of loaded cross-section and decay objects. + + Raises: + ValueError: If neither nuclear_targets nor detector_model is provided. + """ if nuclear_targets is None and detector_model is None: raise ValueError( From e33c191da13579f60cbbf5fa24c2597653ff5696 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Wed, 28 Aug 2024 21:21:01 -0600 Subject: [PATCH 14/85] Move cross sections. Fix string formatting --- python/_util.py | 2 +- .../CSMSDISSplines-v1.0/dsdxdy_nu_CC_iso.fits | Bin .../CSMSDISSplines-v1.0/dsdxdy_nu_NC_iso.fits | Bin .../CSMSDISSplines-v1.0/dsdxdy_nubar_CC_iso.fits | Bin .../CSMSDISSplines-v1.0/dsdxdy_nubar_NC_iso.fits | Bin .../CSMSDISSplines-v1.0/sigma_nu_CC_iso.fits | Bin .../CSMSDISSplines-v1.0/sigma_nu_NC_iso.fits | Bin .../CSMSDISSplines-v1.0/sigma_nubar_CC_iso.fits | Bin .../CSMSDISSplines-v1.0/sigma_nubar_NC_iso.fits | Bin .../DarkNewsTables/DarkNewsCrossSection.py | 0 .../DarkNewsTables/DarkNewsDecay.py | 0 .../DarkNewsTables/README.md | 0 .../DarkNewsTables/logger.py | 0 .../DarkNewsTables/processes.py | 0 14 files changed, 1 insertion(+), 1 deletion(-) rename resources/{CrossSections => Processes}/CSMSDISSplines/CSMSDISSplines-v1.0/dsdxdy_nu_CC_iso.fits (100%) rename resources/{CrossSections => Processes}/CSMSDISSplines/CSMSDISSplines-v1.0/dsdxdy_nu_NC_iso.fits (100%) rename resources/{CrossSections => Processes}/CSMSDISSplines/CSMSDISSplines-v1.0/dsdxdy_nubar_CC_iso.fits (100%) rename resources/{CrossSections => Processes}/CSMSDISSplines/CSMSDISSplines-v1.0/dsdxdy_nubar_NC_iso.fits (100%) rename resources/{CrossSections => Processes}/CSMSDISSplines/CSMSDISSplines-v1.0/sigma_nu_CC_iso.fits (100%) rename resources/{CrossSections => Processes}/CSMSDISSplines/CSMSDISSplines-v1.0/sigma_nu_NC_iso.fits (100%) rename resources/{CrossSections => Processes}/CSMSDISSplines/CSMSDISSplines-v1.0/sigma_nubar_CC_iso.fits (100%) rename resources/{CrossSections => 
Processes}/CSMSDISSplines/CSMSDISSplines-v1.0/sigma_nubar_NC_iso.fits (100%) rename resources/{CrossSections => Processes}/DarkNewsTables/DarkNewsCrossSection.py (100%) rename resources/{CrossSections => Processes}/DarkNewsTables/DarkNewsDecay.py (100%) rename resources/{CrossSections => Processes}/DarkNewsTables/README.md (100%) rename resources/{CrossSections => Processes}/DarkNewsTables/logger.py (100%) rename resources/{CrossSections => Processes}/DarkNewsTables/processes.py (100%) diff --git a/python/_util.py b/python/_util.py index 4a50c531f..f03a565dc 100644 --- a/python/_util.py +++ b/python/_util.py @@ -503,7 +503,7 @@ def _get_model_path(model_name, prefix=None, suffix=None, is_file=True, must_exi # Raise an error if no model file is found and we require it to exist if len(model_versions) == 0 and must_exist: raise ValueError( - "No model found for {}\nSearched in ".format( + "No model found for {}\nSearched in {}".format( model_name, os.path.join(base_dir, model_name) ) ) diff --git a/resources/CrossSections/CSMSDISSplines/CSMSDISSplines-v1.0/dsdxdy_nu_CC_iso.fits b/resources/Processes/CSMSDISSplines/CSMSDISSplines-v1.0/dsdxdy_nu_CC_iso.fits similarity index 100% rename from resources/CrossSections/CSMSDISSplines/CSMSDISSplines-v1.0/dsdxdy_nu_CC_iso.fits rename to resources/Processes/CSMSDISSplines/CSMSDISSplines-v1.0/dsdxdy_nu_CC_iso.fits diff --git a/resources/CrossSections/CSMSDISSplines/CSMSDISSplines-v1.0/dsdxdy_nu_NC_iso.fits b/resources/Processes/CSMSDISSplines/CSMSDISSplines-v1.0/dsdxdy_nu_NC_iso.fits similarity index 100% rename from resources/CrossSections/CSMSDISSplines/CSMSDISSplines-v1.0/dsdxdy_nu_NC_iso.fits rename to resources/Processes/CSMSDISSplines/CSMSDISSplines-v1.0/dsdxdy_nu_NC_iso.fits diff --git a/resources/CrossSections/CSMSDISSplines/CSMSDISSplines-v1.0/dsdxdy_nubar_CC_iso.fits b/resources/Processes/CSMSDISSplines/CSMSDISSplines-v1.0/dsdxdy_nubar_CC_iso.fits similarity index 100% rename from 
resources/CrossSections/CSMSDISSplines/CSMSDISSplines-v1.0/dsdxdy_nubar_CC_iso.fits rename to resources/Processes/CSMSDISSplines/CSMSDISSplines-v1.0/dsdxdy_nubar_CC_iso.fits diff --git a/resources/CrossSections/CSMSDISSplines/CSMSDISSplines-v1.0/dsdxdy_nubar_NC_iso.fits b/resources/Processes/CSMSDISSplines/CSMSDISSplines-v1.0/dsdxdy_nubar_NC_iso.fits similarity index 100% rename from resources/CrossSections/CSMSDISSplines/CSMSDISSplines-v1.0/dsdxdy_nubar_NC_iso.fits rename to resources/Processes/CSMSDISSplines/CSMSDISSplines-v1.0/dsdxdy_nubar_NC_iso.fits diff --git a/resources/CrossSections/CSMSDISSplines/CSMSDISSplines-v1.0/sigma_nu_CC_iso.fits b/resources/Processes/CSMSDISSplines/CSMSDISSplines-v1.0/sigma_nu_CC_iso.fits similarity index 100% rename from resources/CrossSections/CSMSDISSplines/CSMSDISSplines-v1.0/sigma_nu_CC_iso.fits rename to resources/Processes/CSMSDISSplines/CSMSDISSplines-v1.0/sigma_nu_CC_iso.fits diff --git a/resources/CrossSections/CSMSDISSplines/CSMSDISSplines-v1.0/sigma_nu_NC_iso.fits b/resources/Processes/CSMSDISSplines/CSMSDISSplines-v1.0/sigma_nu_NC_iso.fits similarity index 100% rename from resources/CrossSections/CSMSDISSplines/CSMSDISSplines-v1.0/sigma_nu_NC_iso.fits rename to resources/Processes/CSMSDISSplines/CSMSDISSplines-v1.0/sigma_nu_NC_iso.fits diff --git a/resources/CrossSections/CSMSDISSplines/CSMSDISSplines-v1.0/sigma_nubar_CC_iso.fits b/resources/Processes/CSMSDISSplines/CSMSDISSplines-v1.0/sigma_nubar_CC_iso.fits similarity index 100% rename from resources/CrossSections/CSMSDISSplines/CSMSDISSplines-v1.0/sigma_nubar_CC_iso.fits rename to resources/Processes/CSMSDISSplines/CSMSDISSplines-v1.0/sigma_nubar_CC_iso.fits diff --git a/resources/CrossSections/CSMSDISSplines/CSMSDISSplines-v1.0/sigma_nubar_NC_iso.fits b/resources/Processes/CSMSDISSplines/CSMSDISSplines-v1.0/sigma_nubar_NC_iso.fits similarity index 100% rename from resources/CrossSections/CSMSDISSplines/CSMSDISSplines-v1.0/sigma_nubar_NC_iso.fits rename to 
resources/Processes/CSMSDISSplines/CSMSDISSplines-v1.0/sigma_nubar_NC_iso.fits diff --git a/resources/CrossSections/DarkNewsTables/DarkNewsCrossSection.py b/resources/Processes/DarkNewsTables/DarkNewsCrossSection.py similarity index 100% rename from resources/CrossSections/DarkNewsTables/DarkNewsCrossSection.py rename to resources/Processes/DarkNewsTables/DarkNewsCrossSection.py diff --git a/resources/CrossSections/DarkNewsTables/DarkNewsDecay.py b/resources/Processes/DarkNewsTables/DarkNewsDecay.py similarity index 100% rename from resources/CrossSections/DarkNewsTables/DarkNewsDecay.py rename to resources/Processes/DarkNewsTables/DarkNewsDecay.py diff --git a/resources/CrossSections/DarkNewsTables/README.md b/resources/Processes/DarkNewsTables/README.md similarity index 100% rename from resources/CrossSections/DarkNewsTables/README.md rename to resources/Processes/DarkNewsTables/README.md diff --git a/resources/CrossSections/DarkNewsTables/logger.py b/resources/Processes/DarkNewsTables/logger.py similarity index 100% rename from resources/CrossSections/DarkNewsTables/logger.py rename to resources/Processes/DarkNewsTables/logger.py diff --git a/resources/CrossSections/DarkNewsTables/processes.py b/resources/Processes/DarkNewsTables/processes.py similarity index 100% rename from resources/CrossSections/DarkNewsTables/processes.py rename to resources/Processes/DarkNewsTables/processes.py From 1642d85d620616ede69f008be6ca5635ecc14b41 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Wed, 28 Aug 2024 22:48:48 -0600 Subject: [PATCH 15/85] cross_Section -> processes --- python/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/__init__.py b/python/__init__.py index b83d12fbe..14689b726 100644 --- a/python/__init__.py +++ b/python/__init__.py @@ -22,7 +22,7 @@ # set up some public-facing utilities functions utilities.get_resource_package_dir = _util.resource_package_dir utilities.get_detector_model_path = 
_util.get_detector_model_path -utilities.get_cross_section_model_path = _util.get_cross_section_model_path +utilities.get_processes_model_path = _util.get_processes_model_path utilities.get_flux_model_path = _util.get_flux_model_path utilities.load_flux = _util.load_flux From 34976e92dc88320a2175853653a8b96cd33a1de3 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Wed, 28 Aug 2024 22:50:34 -0600 Subject: [PATCH 16/85] Refactor _get_model_path. Add support for top-level loaders --- python/_util.py | 195 ++++++++++++++++++++++++++---------------------- 1 file changed, 105 insertions(+), 90 deletions(-) diff --git a/python/_util.py b/python/_util.py index f03a565dc..40d4db571 100644 --- a/python/_util.py +++ b/python/_util.py @@ -223,6 +223,24 @@ def load_module(name, path, persist=True): re.VERBOSE | re.IGNORECASE, ) +_UNVERSIONED_MODEL_PATTERN = ( + r""" + (?P + (?: + [a-zA-Z0-9]+ + ) + | + (?: + (?:[a-zA-Z0-9]+(?:[-_\.][a-zA-Z0-9]+)*(?:[-_\.][a-zA-Z]+[a-zA-Z0-9]*))? + ) + ) + (?: + - + (?P""" + + _VERSION_PATTERN + + r"))?" 
+) + _MODEL_PATTERN = ( r""" (?P @@ -418,128 +436,123 @@ def tokenize_version(version): return tuple(token_list) -def _get_model_path(model_name, prefix=None, suffix=None, is_file=True, must_exist=True): - # Get the path to the model file - _model_regex = re.compile( - r"^\s*" + _MODEL_PATTERN + ("" if suffix is None else r"(?:" + suffix + r")?") + r"\s*$", - re.VERBOSE | re.IGNORECASE, - ) - if suffix is None: - suffix = "" - # Get the path to the resources directory - resources_dir = resource_package_dir() +def _get_base_directory(resources_dir, prefix): base_dir = resources_dir - - # Add prefix if present if prefix is not None: base_dir = os.path.join(base_dir, prefix) + return base_dir - # Get the model name and version - d = _model_regex.match(model_name) - if d is None: - raise ValueError("Invalid model name: {}".format(model_name)) - d = d.groupdict() - model_name = d["model_name"] - version = d["version"] - - # Search for the model folder in the resources directory +def _find_model_folder_and_file(base_dir, model_name, must_exist, specific_file=None): model_names = [ f for f in os.listdir(base_dir) if not os.path.isfile(os.path.join(base_dir, f)) ] - model_names = [f for f in model_names if f.lower().startswith(model_name.lower())] - folder_exists = False + exact_model_names = [f for f in model_names if f.lower() == model_name.lower()] + + if len(exact_model_names) == 0: + model_names = [f for f in model_names if f.lower().startswith(model_name.lower())] + else: + model_names = exact_model_names if len(model_names) == 0 and must_exist: - # Whoops, we didn't find the model folder! 
- raise ValueError( - "No model folders found for {}\nSearched in ".format(model_name, base_dir) - ) + raise ValueError(f"No model folders found for {model_name}\nSearched in {base_dir}") elif len(model_names) == 0 and not must_exist: - # Let's use the provided model name as the folder name - model_name = model_name + return model_name, False, None elif len(model_names) == 1: - # We found the model folder! - folder_exists = True - model_name = model_names[0] + name = model_names[0] + if specific_file is not None: + specific_file_path = os.path.join(base_dir, name, specific_file) + if os.path.isfile(specific_file_path): + return name, True, specific_file_path + else: + return name, True, None else: - # Multiple model folders found, we cannot decide which one to use - raise ValueError( - "Multiple directories found for {}\nSearched in ".format( - model_name, base_dir - ) - ) + raise ValueError(f"Multiple directories found for {model_name}\nSearched in {base_dir}") +def _get_model_files(base_dir, model_name, is_file, folder_exists, version=None): if folder_exists: - # Search for the model file in the model folder - model_files = [ - f - for f in os.listdir(os.path.join(base_dir, model_name)) + if version: + version_dir = os.path.join(base_dir, model_name, f"v{version}") + if os.path.isdir(version_dir): + return [ + f for f in os.listdir(version_dir) + if is_file == os.path.isfile(os.path.join(version_dir, f)) + ] + return [ + f for f in os.listdir(os.path.join(base_dir, model_name)) if is_file == os.path.isfile(os.path.join(base_dir, model_name, f)) ] - else: - model_files = [] + return [] - # From the found model files, extract the model versions +def _extract_model_versions(model_files, model_regex, model_name): model_versions = [] for f in model_files: - d = _model_regex.match(f) + d = model_regex.match(f) if d is not None: if d.groupdict()["version"] is not None: model_versions.append(normalize_version(d.groupdict()["version"])) else: - print(ValueError( - "Input 
model file has no version: {}\nSearched in ".format( - f, os.path.join(base_dir, model_name) - ) - )) + print(f"Warning: Input model file has no version: {f}") elif f.lower().startswith(model_name.lower()): - print(ValueError( - "Unable to parse version from {}\nFound in ".format( - f, os.path.join(base_dir, model_name) - ) - )) - - # Raise an error if no model file is found and we require it to exist - if len(model_versions) == 0 and must_exist: - raise ValueError( - "No model found for {}\nSearched in {}".format( - model_name, os.path.join(base_dir, model_name) - ) - ) + print(f"Warning: Unable to parse version from {f}") + return model_versions +def _get_model_file_name(version, model_versions, model_files, model_name, suffix, must_exist): if version is None and must_exist: - # If no version is provided, use the latest version - version_idx, version = max( - enumerate(model_versions), key=lambda x: tokenize_version(x[1]) - ) - model_file_name = model_files[version_idx] + version_idx, version = max(enumerate(model_versions), key=lambda x: tokenize_version(x[1])) + return model_files[version_idx] elif version is None and not must_exist: - # If no version is provided and we don't require it to exist, default to v1 version = "v1" - model_file_name = "{}-v{}{}".format(model_name, version, suffix) + return f"{model_name}-v{version}{suffix}" else: - # A version is provided version = normalize_version(version) if must_exist: - # If the version must exist, raise an error if it doesn't if version not in model_versions: - raise ValueError( - "No model found for {}-{}\nSearched in ".format( - model_name, version, os.path.join(base_dir, model_name) - ) - ) + raise ValueError(f"No model found for {model_name}-{version}") version_idx = model_versions.index(version) - model_file_name = model_files[version_idx] + return model_files[version_idx] else: - # The version doesn't have to exist if version in model_versions: - # If the version exists, use it version_idx = 
model_versions.index(version) - model_file_name = model_files[version_idx] + return model_files[version_idx] else: - # Otherwise use the provided version - model_file_name = "{}-v{}{}".format(model_name, version, suffix) + return f"{model_name}-v{version}{suffix}" + +def _get_model_path(model_name, prefix=None, suffix=None, is_file=True, must_exist=True, specific_file=None): + _model_regex = re.compile( + r"^\s*" + _MODEL_PATTERN + ("" if suffix is None else r"(?:" + suffix + r")?") + r"\s*$", + re.VERBOSE | re.IGNORECASE, + ) + suffix = "" if suffix is None else suffix + + resources_dir = resource_package_dir() + base_dir = _get_base_directory(resources_dir, prefix) + + d = _model_regex.match(model_name) + if d is None: + raise ValueError(f"Invalid model name: {model_name}") + d = d.groupdict() + model_name, version = d["model_name"], d["version"] + + model_name, folder_exists, specific_file_path = _find_model_folder_and_file(base_dir, model_name, must_exist, specific_file) + + if specific_file_path and not version: + return os.path.dirname(specific_file_path) + + model_files = _get_model_files(base_dir, model_name, is_file, folder_exists, version) + model_versions = _extract_model_versions(model_files, _model_regex, model_name) + + if len(model_versions) == 0 and must_exist: + if specific_file_path: + return os.path.dirname(specific_file_path) + raise ValueError(f"No model found for {model_name}\nSearched in {os.path.join(base_dir, model_name)}") + + model_file_name = _get_model_file_name(version, model_versions, model_files, model_name, suffix, must_exist) + + if version: + version_dir = os.path.join(base_dir, model_name, f"v{version}") + if os.path.isdir(version_dir): + return os.path.join(version_dir, model_file_name) return os.path.join(base_dir, model_name, model_file_name) @@ -560,26 +573,28 @@ def get_material_model_file_path(model_name, must_exist=True): def get_flux_model_path(model_name, must_exist=True): - return _get_model_path(model_name, 
prefix=_resource_folder_by_name["flux"], is_file=False, must_exist=must_exist) + return _get_model_path(model_name, prefix=_resource_folder_by_name["flux"], is_file=False, must_exist=must_exist, specific_file=f"flux.py") def get_detector_model_path(model_name, must_exist=True): - return _get_model_path(model_name, prefix=_resource_folder_by_name["detector"], is_file=False, must_exist=must_exist) + return _get_model_path(model_name, prefix=_resource_folder_by_name["detector"], is_file=False, must_exist=must_exist, specific_file=f"detector.py") def get_processes_model_path(model_name, must_exist=True): - return _get_model_path(model_name, prefix=_resource_folder_by_name["processes"], is_file=False, must_exist=must_exist) + return _get_model_path(model_name, prefix=_resource_folder_by_name["processes"], is_file=False, must_exist=must_exist, specific_file="processes.py") def load_resource(resource_type, resource_name, *args, **kwargs): folder = _resource_folder_by_name[resource_type] + specific_file = f"{resource_type}.py" - abs_dir = _get_model_path(resource_name, prefix=folder, is_file=False, must_exist=True) + abs_dir = _get_model_path(resource_name, prefix=folder, is_file=False, must_exist=True, specific_file=specific_file) - fname = os.path.join(abs_dir, f"{resource_name}.py") + fname = os.path.join(abs_dir, f"{resource_type}.py") + print(fname) assert(os.path.isfile(fname)) resource_module = load_module(f"siren-{resource_type}-{resource_name}", fname, persist=False) - loader = getattr(resource_module, f"load_{resource_name}") + loader = getattr(resource_module, f"load_{resource_type}") resource = loader(*args, **kwargs) return resource From 16a7b61bc0952c5c3713d71083bd4a14e4ece56d Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Wed, 28 Aug 2024 22:51:20 -0600 Subject: [PATCH 17/85] Fix imports. 
loader -> logger --- resources/Processes/DarkNewsTables/processes.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/resources/Processes/DarkNewsTables/processes.py b/resources/Processes/DarkNewsTables/processes.py index 1d0832fcb..ff0ce8d66 100644 --- a/resources/Processes/DarkNewsTables/processes.py +++ b/resources/Processes/DarkNewsTables/processes.py @@ -3,21 +3,21 @@ import siren base_path = os.path.dirname(os.path.abspath(__file__)) -loader_file = os.path.join(base_path, "loader.py") -siren._util.load_module("loader", loader_file) +logger_file = os.path.join(base_path, "logger.py") +siren._util.load_module("logger", logger_file) -from DarkNews.ModelContainer import ModelContainer +from siren.DNModelContainer import ModelContainer # Import PyDarkNewsDecay and PyDarkNewsCrossSection decay_file = os.path.join(base_path, "DarkNewsDecay.py") cross_section_file = os.path.join(base_path, "DarkNewsCrossSection.py") -siren._util.load_module("DarkNewsDecay", decay_file) -siren._util.load_module("DarkNewsCrossSection", cross_section_file) +DarkNewsDecay = siren._util.load_module("DarkNewsDecay", decay_file) +DarkNewsCrossSection = siren._util.load_module("DarkNewsCrossSection", cross_section_file) -from DarkNewsDecay import PyDarkNewsDecay -from DarkNewsCrossSection import PyDarkNewsCrossSection +PyDarkNewsCrossSection = DarkNewsCrossSection.PyDarkNewsCrossSection +PyDarkNewsDecay = DarkNewsDecay.PyDarkNewsDecay -xs_path = siren.utilities.get_cross_section_model_path( +xs_path = siren.utilities.get_processes_model_path( f"DarkNewsTables-v{siren.utilities.darknews_version()}", must_exist=False ) From b81b555a58ff15c8355c5ab60b0073b26d4ca896 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Wed, 28 Aug 2024 22:51:46 -0600 Subject: [PATCH 18/85] Fix imports --- resources/Processes/DarkNewsTables/DarkNewsCrossSection.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git 
a/resources/Processes/DarkNewsTables/DarkNewsCrossSection.py b/resources/Processes/DarkNewsTables/DarkNewsCrossSection.py index 6843972f7..694ccb099 100644 --- a/resources/Processes/DarkNewsTables/DarkNewsCrossSection.py +++ b/resources/Processes/DarkNewsTables/DarkNewsCrossSection.py @@ -3,9 +3,11 @@ import functools from scipy.interpolate import LinearNDInterpolator, PchipInterpolator +from siren import _util + base_path = os.path.dirname(os.path.abspath(__file__)) -loader_file = os.path.join(base_path, "loader.py") -siren._util.load_module("loader", loader_file) +logger_file = os.path.join(base_path, "logger.py") +_util.load_module("logger", logger_file) # SIREN methods from siren.interactions import DarkNewsCrossSection From c9dc192798fc0479434899b643e8b250601d979c Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Wed, 28 Aug 2024 22:52:45 -0600 Subject: [PATCH 19/85] Fix imports. loader -> logger. Instance checking instead of type checking. non-zero CDF check. --- .../Processes/DarkNewsTables/DarkNewsDecay.py | 45 +++++++++++++------ 1 file changed, 32 insertions(+), 13 deletions(-) diff --git a/resources/Processes/DarkNewsTables/DarkNewsDecay.py b/resources/Processes/DarkNewsTables/DarkNewsDecay.py index c21d947a3..9c076b3d4 100644 --- a/resources/Processes/DarkNewsTables/DarkNewsDecay.py +++ b/resources/Processes/DarkNewsTables/DarkNewsDecay.py @@ -1,10 +1,12 @@ import os import numpy as np -import functools +import pickle + +from siren import _util base_path = os.path.dirname(os.path.abspath(__file__)) -loader_file = os.path.join(base_path, "loader.py") -siren._util.load_module("loader", loader_file) +logger_file = os.path.join(base_path, "logger.py") +_util.load_module("logger", logger_file) # SIREN methods from siren.interactions import DarkNewsDecay @@ -41,7 +43,7 @@ def load_from_table(self, table_dir): self.decay_norm, self.decay_integrator = pickle.load(f) def save_to_table(self, table_dir): - with open(os.path.join(table_dir, "decay.pkl") as f: + 
with open(os.path.join(table_dir, "decay.pkl")) as f: pickle.dump(f, { "decay_integrator": self.decay_integrator, "decay_norm": self.decay_norm @@ -94,7 +96,7 @@ def DifferentialDecayWidth(self, record): # Momentum variables of HNL necessary for calculating decay phase space PN = np.array(record.primary_momentum) - if type(self.dec_case) == FermionSinglePhotonDecay: + if isinstance(self.dec_case, FermionSinglePhotonDecay): gamma_idx = 0 for secondary in record.signature.secondary_types: if secondary == dataclasses.Particle.ParticleType.Gamma: @@ -107,7 +109,7 @@ def DifferentialDecayWidth(self, record): Pgamma = np.array(record.secondary_momenta[gamma_idx]) momenta = np.expand_dims(PN, 0), np.expand_dims(Pgamma, 0) - elif type(self.dec_case) == FermionDileptonDecay: + elif isinstance(self.dec_case, FermionDileptonDecay): lepminus_idx = -1 lepplus_idx = -1 nu_idx = -1 @@ -144,9 +146,9 @@ def DifferentialDecayWidth(self, record): return self.dec_case.differential_width(momenta) def TotalDecayWidth(self, arg1): - if type(arg1) == dataclasses.InteractionRecord: + if isinstance(arg1, dataclasses.InteractionRecord): primary = arg1.signature.primary_type - elif type(arg1) == dataclasses.Particle.ParticleType: + elif isinstance(arg1, dataclasses.Particle.ParticleType): primary = arg1 else: print("Incorrect function call to TotalDecayWidth!") @@ -155,7 +157,7 @@ def TotalDecayWidth(self, arg1): return 0 if self.total_width is None: # Need to set the total width - if type(self.dec_case) == FermionDileptonDecay and ( + if isinstance(self.dec_case, FermionDileptonDecay) and ( self.dec_case.vector_off_shell and self.dec_case.scalar_off_shell ): # total width calculation requires evaluating an integral @@ -194,9 +196,9 @@ def TotalDecayWidthForFinalState(self, record): return ret def DensityVariables(self): - if type(self.dec_case) == FermionSinglePhotonDecay: + if isinstance(self.dec_case, FermionSinglePhotonDecay): return "cost" - elif type(self.dec_case) == 
FermionDileptonDecay: + elif isinstance(self.dec_case, FermionDileptonDecay): if self.dec_case.vector_on_shell and self.dec_case.scalar_on_shell: print("Can't have both the scalar and vector on shell") exit(0) @@ -223,6 +225,23 @@ def GetPSSample(self, random): PSidx = np.argmax(x - self.PS_weights_CDF <= 0) return self.PS_samples[:, PSidx] + def GetPSSample(self, random): + # Make the PS weight CDF if that hasn't been done + if self.PS_weights_CDF is None: + self.PS_weights_CDF = np.cumsum(self.PS_weights) + + # Check that the CDF makes sense + total_weight = self.PS_weights_CDF[-1] + if total_weight == 0: + raise ValueError("Total weight is zero, cannot sample") + + # Random number to determine + x = random.Uniform(0, total_weight) + + # find first instance of a CDF entry greater than x + PSidx = np.argmax(x - self.PS_weights_CDF <= 0) + return self.PS_samples[:, PSidx] + def SampleRecordFromDarkNews(self, record, random): # First, make sure we have PS samples and weights if self.PS_samples is None or self.PS_weights is None: @@ -254,7 +273,7 @@ def SampleRecordFromDarkNews(self, record, random): secondaries = record.GetSecondaryParticleRecords() - if type(self.dec_case) == FermionSinglePhotonDecay: + if isinstance(self.dec_case, FermionSinglePhotonDecay): gamma_idx = 0 for secondary in record.signature.secondary_types: if secondary == dataclasses.Particle.ParticleType.Gamma: @@ -269,7 +288,7 @@ def SampleRecordFromDarkNews(self, record, random): secondaries[nu_idx].four_momentum = np.squeeze(four_momenta["P_decay_N_daughter"]) secondaries[nu_idx].mass = 0 - elif type(self.dec_case) == FermionDileptonDecay: + elif isinstance(self.dec_case, FermionDileptonDecay): lepminus_idx = -1 lepplus_idx = -1 nu_idx = -1 From fa099a43f53eee2b4db9e84d20eee17632714302 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Wed, 28 Aug 2024 23:02:36 -0600 Subject: [PATCH 20/85] Avoid repeated use of np.append --- .../DarkNewsTables/DarkNewsCrossSection.py | 51 ++++++++++--------- 
1 file changed, 27 insertions(+), 24 deletions(-) diff --git a/resources/Processes/DarkNewsTables/DarkNewsCrossSection.py b/resources/Processes/DarkNewsTables/DarkNewsCrossSection.py index 694ccb099..dfb5dc482 100644 --- a/resources/Processes/DarkNewsTables/DarkNewsCrossSection.py +++ b/resources/Processes/DarkNewsTables/DarkNewsCrossSection.py @@ -2,6 +2,7 @@ import numpy as np import functools from scipy.interpolate import LinearNDInterpolator, PchipInterpolator +from typing import List, Tuple from siren import _util @@ -17,6 +18,7 @@ # DarkNews methods from DarkNews import phase_space + # A class representing a single ups_case DarkNews class # Only handles methods concerning the upscattering part class PyDarkNewsCrossSection(DarkNewsCrossSection): @@ -51,9 +53,7 @@ def load_from_table(self, table_dir): total_xsec_file = os.path.join(table_dir, "total_cross_sections.npy") if os.path.exists(total_xsec_file): self.total_cross_section_table = np.load(total_xsec_file) - diff_xsec_file = os.path.join( - table_dir, "differential_cross_sections.npy" - ) + diff_xsec_file = os.path.join(table_dir, "differential_cross_sections.npy") if os.path.exists(diff_xsec_file): self.differential_cross_section_table = np.load(diff_xsec_file) @@ -62,9 +62,7 @@ def load_from_table(self, table_dir): def save_to_table(self, table_dir, total=True, diff=True): if total: self._redefine_interpolation_objects(total=True) - with open( - os.path.join(table_dir, "total_cross_sections.npy"), "wb" - ) as f: + with open(os.path.join(table_dir, "total_cross_sections.npy"), "wb") as f: np.save(f, self.total_cross_section_table) if diff: self._redefine_interpolation_objects(diff=True) @@ -91,7 +89,6 @@ def get_representation(self): # tolerance, interp_tolerance, always_interpolate # kwargs argument can be used to set any of these def configure(self, **kwargs): - for k, v in kwargs.items(): self.__setattr__(k, v) @@ -254,22 +251,22 @@ def _query_interpolation_table(self, inputs, mode): else: return -1 - 
def FillTableAtEnergy(self, E, total=True, diff=True, factor=0.8): + def FillTableAtEnergy( + self, E: float, total: bool = True, diff: bool = True, factor: float = 0.8 + ) -> int: num_added_points = 0 + new_total_points: List[Tuple[float, float]] = [] + new_diff_points: List[Tuple[float, float, float]] = [] + if total: xsec = self.ups_case.total_xsec(E) - self.total_cross_section_table = np.append( - self.total_cross_section_table, [[E, xsec]], axis=0 - ) + new_total_points.append((E, xsec)) num_added_points += 1 + if diff: interaction = dataclasses.InteractionRecord() - interaction.signature.primary_type = self.GetPossiblePrimaries()[ - 0 - ] # only one primary - interaction.signature.target_type = self.GetPossibleTargets()[ - 0 - ] # only one target + interaction.signature.primary_type = self.GetPossiblePrimaries()[0] + interaction.signature.target_type = self.GetPossibleTargets()[0] interaction.target_mass = self.ups_case.MA interaction.primary_momentum = [E, 0, 0, 0] zmin, zmax = self.tolerance, 1 @@ -279,13 +276,19 @@ def FillTableAtEnergy(self, E, total=True, diff=True, factor=0.8): while z < zmax: Q2 = Q2min + z * (Q2max - Q2min) dxsec = self.ups_case.diff_xsec_Q2(E, Q2).item() - self.differential_cross_section_table = np.append( - self.differential_cross_section_table, - [[E, z, dxsec]], - axis=0, - ) + new_diff_points.append((E, z, dxsec)) num_added_points += 1 z *= 1 + factor * self.interp_tolerance + + if new_total_points: + self.total_cross_section_table = np.vstack( + (self.total_cross_section_table, new_total_points) + ) + if new_diff_points: + self.differential_cross_section_table = np.vstack( + (self.differential_cross_section_table, new_diff_points) + ) + self._redefine_interpolation_objects(total=total, diff=diff) return num_added_points @@ -473,8 +476,8 @@ def TotalCrossSection(self, arg1, energy=None, target=None): # If we have reached this block, we must compute the cross section using DarkNews xsec = self.ups_case.total_xsec(energy) - 
self.total_cross_section_table = np.append( - self.total_cross_section_table, [[energy, xsec]], axis=0 + self.total_cross_section_table = np.vstack( + (self.total_cross_section_table, [[energy, xsec]]) ) self._redefine_interpolation_objects(total=True) return xsec From 5fbf8e4d43bcf8f7b4b93ef822894c6fe2823cfb Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Wed, 28 Aug 2024 23:09:11 -0600 Subject: [PATCH 21/85] Override logging in our own copy of ModelContainer --- python/DNModelContainer.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/python/DNModelContainer.py b/python/DNModelContainer.py index 73f6f779c..49482b060 100644 --- a/python/DNModelContainer.py +++ b/python/DNModelContainer.py @@ -552,8 +552,15 @@ def configure_logger( ValueError: _description_ """ - loglevel = loglevel.upper() - _numeric_level = getattr(logging, loglevel, None) + # override default loglevel + loglevel = logging.WARNING + + if isinstance(loglevel, int): + _numeric_level = loglevel + elif isinstance(loglevel, str): + loglevel = loglevel.upper() + _numeric_level = getattr(logging, loglevel, None) + if not isinstance(_numeric_level, int): raise ValueError("Invalid log level: %s" % self.loglevel) logger.setLevel(_numeric_level) From 811780d2f66188e0dc1188e38dfea8340c6b43bd Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Wed, 28 Aug 2024 23:12:12 -0600 Subject: [PATCH 22/85] Check for DarkNews.ModelContainer.configure_logger in case it is removed in the future. 
--- resources/Processes/DarkNewsTables/logger.py | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/resources/Processes/DarkNewsTables/logger.py b/resources/Processes/DarkNewsTables/logger.py index bdd03c096..5f9ee80ba 100644 --- a/resources/Processes/DarkNewsTables/logger.py +++ b/resources/Processes/DarkNewsTables/logger.py @@ -1,12 +1,18 @@ # Monkey patch DarkNews logger to hide printouts import functools -from DarkNews.ModelContainer import ModelContainer -ModelContainer_configure_logger = ModelContainer.configure_logger +dn_has_modelcontainer_logger = False +try: + from DarkNews.ModelContainer import ModelContainer + ModelContainer_configure_logger = ModelContainer.configure_logger + dn_has_modelcontainer_logger = True +except: + pass -@functools.wraps(ModelContainer.configure_logger) -def suppress_info(self, logger, loglevel="INFO", prettyprinter=None, logfile=None, verbose=False): - return ModelContainer_configure_logger(self, logger, loglevel="WARNING", prettyprinter=prettyprinter, logfile=logfile, verbose=verbose) +if dn_has_modelcontainer_logger: + @functools.wraps(ModelContainer.configure_logger) + def suppress_info(self, logger, loglevel="INFO", prettyprinter=None, logfile=None, verbose=False): + return ModelContainer_configure_logger(self, logger, loglevel="WARNING", prettyprinter=prettyprinter, logfile=logfile, verbose=verbose) -ModelContainer.configure_logger = suppress_info + ModelContainer.configure_logger = suppress_info From 0b6f53dcfe845bc4060e9306e4ec52a0432696f3 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Thu, 29 Aug 2024 21:27:44 -0600 Subject: [PATCH 23/85] Move detector files --- .../densities_ATLAS-v1.dat} | 0 .../materials_ATLAS-v1.dat} | 0 .../CCM-v1.dat => CCM/densities_CCM-v1.dat} | 0 .../CCM-v2.dat => CCM/densities_CCM-v2.dat} | 0 .../CCM-v1.dat => CCM/materials_CCM-v1.dat} | 0 resources/Detectors/CCM/materials_CCM-v2.dat | 54 +++++++++++++++++++ .../densities_DUNEFD-v1.dat} | 0 
.../materials_DUNEFD-v1.dat} | 0 .../densities_HyperK-v1.dat} | 0 .../materials_HyperK-v1.dat} | 0 .../densities_IceCube-v1.dat} | 0 .../materials_IceCube-v1.dat} | 0 .../densities_MINERvA-v1.dat} | 0 .../materials_MINERvA-v1.dat} | 0 .../densities_MiniBooNE-v1.dat} | 0 .../materials_MiniBooNE-v1.dat} | 0 .../{densities/README => README_densities} | 0 .../{materials/README => README_materials} | 0 18 files changed, 54 insertions(+) rename resources/Detectors/{densities/ATLAS/ATLAS-v1.dat => ATLAS/densities_ATLAS-v1.dat} (100%) rename resources/Detectors/{materials/ATLAS/ATLAS-v1.dat => ATLAS/materials_ATLAS-v1.dat} (100%) rename resources/Detectors/{densities/CCM/CCM-v1.dat => CCM/densities_CCM-v1.dat} (100%) rename resources/Detectors/{densities/CCM/CCM-v2.dat => CCM/densities_CCM-v2.dat} (100%) rename resources/Detectors/{materials/CCM/CCM-v1.dat => CCM/materials_CCM-v1.dat} (100%) create mode 100644 resources/Detectors/CCM/materials_CCM-v2.dat rename resources/Detectors/{densities/DUNEFD/DUNEFD-v1.dat => DUNEFD/densities_DUNEFD-v1.dat} (100%) rename resources/Detectors/{materials/DUNEFD/DUNEFD-v1.dat => DUNEFD/materials_DUNEFD-v1.dat} (100%) rename resources/Detectors/{densities/HyperK/HyperK-v1.dat => HyperK/densities_HyperK-v1.dat} (100%) rename resources/Detectors/{materials/HyperK/HyperK-v1.dat => HyperK/materials_HyperK-v1.dat} (100%) rename resources/Detectors/{densities/IceCube/IceCube-v1.dat => IceCube/densities_IceCube-v1.dat} (100%) rename resources/Detectors/{materials/IceCube/IceCube-v1.dat => IceCube/materials_IceCube-v1.dat} (100%) rename resources/Detectors/{densities/MINERvA/MINERvA-v1.dat => MINERvA/densities_MINERvA-v1.dat} (100%) rename resources/Detectors/{materials/MINERvA/MINERvA-v1.dat => MINERvA/materials_MINERvA-v1.dat} (100%) rename resources/Detectors/{densities/MiniBooNE/MiniBooNE-v1.dat => MiniBooNE/densities_MiniBooNE-v1.dat} (100%) rename resources/Detectors/{materials/MiniBooNE/MiniBooNE-v1.dat => 
MiniBooNE/materials_MiniBooNE-v1.dat} (100%) rename resources/Detectors/{densities/README => README_densities} (100%) rename resources/Detectors/{materials/README => README_materials} (100%) diff --git a/resources/Detectors/densities/ATLAS/ATLAS-v1.dat b/resources/Detectors/ATLAS/densities_ATLAS-v1.dat similarity index 100% rename from resources/Detectors/densities/ATLAS/ATLAS-v1.dat rename to resources/Detectors/ATLAS/densities_ATLAS-v1.dat diff --git a/resources/Detectors/materials/ATLAS/ATLAS-v1.dat b/resources/Detectors/ATLAS/materials_ATLAS-v1.dat similarity index 100% rename from resources/Detectors/materials/ATLAS/ATLAS-v1.dat rename to resources/Detectors/ATLAS/materials_ATLAS-v1.dat diff --git a/resources/Detectors/densities/CCM/CCM-v1.dat b/resources/Detectors/CCM/densities_CCM-v1.dat similarity index 100% rename from resources/Detectors/densities/CCM/CCM-v1.dat rename to resources/Detectors/CCM/densities_CCM-v1.dat diff --git a/resources/Detectors/densities/CCM/CCM-v2.dat b/resources/Detectors/CCM/densities_CCM-v2.dat similarity index 100% rename from resources/Detectors/densities/CCM/CCM-v2.dat rename to resources/Detectors/CCM/densities_CCM-v2.dat diff --git a/resources/Detectors/materials/CCM/CCM-v1.dat b/resources/Detectors/CCM/materials_CCM-v1.dat similarity index 100% rename from resources/Detectors/materials/CCM/CCM-v1.dat rename to resources/Detectors/CCM/materials_CCM-v1.dat diff --git a/resources/Detectors/CCM/materials_CCM-v2.dat b/resources/Detectors/CCM/materials_CCM-v2.dat new file mode 100644 index 000000000..a1c3649e2 --- /dev/null +++ b/resources/Detectors/CCM/materials_CCM-v2.dat @@ -0,0 +1,54 @@ +# Material model file +# Detector: CCM +# Version: v2 +# Date: 2023-03-18 +# Authors: Nicholas Kamp +# Notes: +# Uses PREM model of the Earth, assumes a single far detector with the liquid argon embedded directly in the roc + +ARGON 1 # CCM Argon +1000180400 1.0000000 # Ar40 100.0%% + +STEEL 4 # Mostly Fe +1000060120 0.0013000 # C12 0.13% 
+1000140280 0.0020000 # Si28 0.2% +1000250550 0.0100000 # Mn55 1.0% +1000260560 0.9870000 # Fe56 98.7% + +LEAD 2 # Mostly Pb +1000822080 0.9995000 # Pb208 99.95% +1000290630 0.0005000 # Cu63 0.05% + +GRAPHITE 1 # Essentially Carbon +1000060120 1.0000000 # C12 100% + +WATER 2 # H20 +1000010010 0.1150000 # H1 11.5% +1000080160 0.8850000 # 016 88.5% + +AIR 2 # N2 + O2 +1000070140 0.7562326 # N2 78% in volume +1000080160 0.2437674 # O2 22% in volume + +# Guess based on https://mcnp.lanl.gov/pdf_files/la-ur-07-5898.pdf +CONCRETE 7 # mostly SiO2 +1000010010 0.0045300 # H1 0.453% +1000080160 0.5126000 # 016 51.26% +1000110230 0.0152700 # Na23 1.527% +1000130270 0.0355500 # Al27 3.555% +1000140280 0.3603600 # Si28 36.036% +1000200400 0.0579100 # Ca40 5.791% +1000260560 0.0137800 # Fe56 1.378% + +ALUMINUM 1 # pure Al +1000130270 1.0000000 # Al27 100% + +BERYLLIUM 1 # pure Be +1000040090 1.0000000 # Be9 100% + +TUNGSTEN 1 # pure W +1000741830 1.0000000 # W183 100% + +POLY 2 # C2H4 +1000060120 0.8571400 # C12 85.714% +1000010010 0.1428600 # H1 14.286% diff --git a/resources/Detectors/densities/DUNEFD/DUNEFD-v1.dat b/resources/Detectors/DUNEFD/densities_DUNEFD-v1.dat similarity index 100% rename from resources/Detectors/densities/DUNEFD/DUNEFD-v1.dat rename to resources/Detectors/DUNEFD/densities_DUNEFD-v1.dat diff --git a/resources/Detectors/materials/DUNEFD/DUNEFD-v1.dat b/resources/Detectors/DUNEFD/materials_DUNEFD-v1.dat similarity index 100% rename from resources/Detectors/materials/DUNEFD/DUNEFD-v1.dat rename to resources/Detectors/DUNEFD/materials_DUNEFD-v1.dat diff --git a/resources/Detectors/densities/HyperK/HyperK-v1.dat b/resources/Detectors/HyperK/densities_HyperK-v1.dat similarity index 100% rename from resources/Detectors/densities/HyperK/HyperK-v1.dat rename to resources/Detectors/HyperK/densities_HyperK-v1.dat diff --git a/resources/Detectors/materials/HyperK/HyperK-v1.dat b/resources/Detectors/HyperK/materials_HyperK-v1.dat similarity index 100% rename from 
resources/Detectors/materials/HyperK/HyperK-v1.dat rename to resources/Detectors/HyperK/materials_HyperK-v1.dat diff --git a/resources/Detectors/densities/IceCube/IceCube-v1.dat b/resources/Detectors/IceCube/densities_IceCube-v1.dat similarity index 100% rename from resources/Detectors/densities/IceCube/IceCube-v1.dat rename to resources/Detectors/IceCube/densities_IceCube-v1.dat diff --git a/resources/Detectors/materials/IceCube/IceCube-v1.dat b/resources/Detectors/IceCube/materials_IceCube-v1.dat similarity index 100% rename from resources/Detectors/materials/IceCube/IceCube-v1.dat rename to resources/Detectors/IceCube/materials_IceCube-v1.dat diff --git a/resources/Detectors/densities/MINERvA/MINERvA-v1.dat b/resources/Detectors/MINERvA/densities_MINERvA-v1.dat similarity index 100% rename from resources/Detectors/densities/MINERvA/MINERvA-v1.dat rename to resources/Detectors/MINERvA/densities_MINERvA-v1.dat diff --git a/resources/Detectors/materials/MINERvA/MINERvA-v1.dat b/resources/Detectors/MINERvA/materials_MINERvA-v1.dat similarity index 100% rename from resources/Detectors/materials/MINERvA/MINERvA-v1.dat rename to resources/Detectors/MINERvA/materials_MINERvA-v1.dat diff --git a/resources/Detectors/densities/MiniBooNE/MiniBooNE-v1.dat b/resources/Detectors/MiniBooNE/densities_MiniBooNE-v1.dat similarity index 100% rename from resources/Detectors/densities/MiniBooNE/MiniBooNE-v1.dat rename to resources/Detectors/MiniBooNE/densities_MiniBooNE-v1.dat diff --git a/resources/Detectors/materials/MiniBooNE/MiniBooNE-v1.dat b/resources/Detectors/MiniBooNE/materials_MiniBooNE-v1.dat similarity index 100% rename from resources/Detectors/materials/MiniBooNE/MiniBooNE-v1.dat rename to resources/Detectors/MiniBooNE/materials_MiniBooNE-v1.dat diff --git a/resources/Detectors/densities/README b/resources/Detectors/README_densities similarity index 100% rename from resources/Detectors/densities/README rename to resources/Detectors/README_densities diff --git 
a/resources/Detectors/materials/README b/resources/Detectors/README_materials similarity index 100% rename from resources/Detectors/materials/README rename to resources/Detectors/README_materials From 8a39ee4946c8f89953a2cf522fab8a3c8ed94172 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Thu, 29 Aug 2024 21:36:53 -0600 Subject: [PATCH 24/85] Move detector files into subfolders --- .../ATLAS/{densities_ATLAS-v1.dat => ATLAS-v1/densities.dat} | 0 .../ATLAS/{materials_ATLAS-v1.dat => ATLAS-v1/materials.dat} | 0 .../Detectors/CCM/{densities_CCM-v1.dat => CCM-v1/densities.dat} | 0 .../Detectors/CCM/{materials_CCM-v1.dat => CCM-v1/materials.dat} | 0 .../DUNEFD/{densities_DUNEFD-v1.dat => DUNEFD-v1/densities.dat} | 0 .../DUNEFD/{materials_DUNEFD-v1.dat => DUNEFD-v1/materials.dat} | 0 .../HyperK/{densities_HyperK-v1.dat => HyperK-v1/densities.dat} | 0 .../HyperK/{materials_HyperK-v1.dat => HyperK-v1/materials.dat} | 0 .../{densities_IceCube-v1.dat => IceCube-v1/densities.dat} | 0 .../{materials_IceCube-v1.dat => IceCube-v1/materials.dat} | 0 .../{densities_MINERvA-v1.dat => MINERvA-v1/densities.dat} | 0 .../{materials_MINERvA-v1.dat => MINERvA-v1/materials.dat} | 0 .../{densities_MiniBooNE-v1.dat => MiniBooNE-v1/densities.dat} | 0 .../{materials_MiniBooNE-v1.dat => MiniBooNE-v1/materials.dat} | 0 14 files changed, 0 insertions(+), 0 deletions(-) rename resources/Detectors/ATLAS/{densities_ATLAS-v1.dat => ATLAS-v1/densities.dat} (100%) rename resources/Detectors/ATLAS/{materials_ATLAS-v1.dat => ATLAS-v1/materials.dat} (100%) rename resources/Detectors/CCM/{densities_CCM-v1.dat => CCM-v1/densities.dat} (100%) rename resources/Detectors/CCM/{materials_CCM-v1.dat => CCM-v1/materials.dat} (100%) rename resources/Detectors/DUNEFD/{densities_DUNEFD-v1.dat => DUNEFD-v1/densities.dat} (100%) rename resources/Detectors/DUNEFD/{materials_DUNEFD-v1.dat => DUNEFD-v1/materials.dat} (100%) rename resources/Detectors/HyperK/{densities_HyperK-v1.dat => HyperK-v1/densities.dat} 
(100%) rename resources/Detectors/HyperK/{materials_HyperK-v1.dat => HyperK-v1/materials.dat} (100%) rename resources/Detectors/IceCube/{densities_IceCube-v1.dat => IceCube-v1/densities.dat} (100%) rename resources/Detectors/IceCube/{materials_IceCube-v1.dat => IceCube-v1/materials.dat} (100%) rename resources/Detectors/MINERvA/{densities_MINERvA-v1.dat => MINERvA-v1/densities.dat} (100%) rename resources/Detectors/MINERvA/{materials_MINERvA-v1.dat => MINERvA-v1/materials.dat} (100%) rename resources/Detectors/MiniBooNE/{densities_MiniBooNE-v1.dat => MiniBooNE-v1/densities.dat} (100%) rename resources/Detectors/MiniBooNE/{materials_MiniBooNE-v1.dat => MiniBooNE-v1/materials.dat} (100%) diff --git a/resources/Detectors/ATLAS/densities_ATLAS-v1.dat b/resources/Detectors/ATLAS/ATLAS-v1/densities.dat similarity index 100% rename from resources/Detectors/ATLAS/densities_ATLAS-v1.dat rename to resources/Detectors/ATLAS/ATLAS-v1/densities.dat diff --git a/resources/Detectors/ATLAS/materials_ATLAS-v1.dat b/resources/Detectors/ATLAS/ATLAS-v1/materials.dat similarity index 100% rename from resources/Detectors/ATLAS/materials_ATLAS-v1.dat rename to resources/Detectors/ATLAS/ATLAS-v1/materials.dat diff --git a/resources/Detectors/CCM/densities_CCM-v1.dat b/resources/Detectors/CCM/CCM-v1/densities.dat similarity index 100% rename from resources/Detectors/CCM/densities_CCM-v1.dat rename to resources/Detectors/CCM/CCM-v1/densities.dat diff --git a/resources/Detectors/CCM/materials_CCM-v1.dat b/resources/Detectors/CCM/CCM-v1/materials.dat similarity index 100% rename from resources/Detectors/CCM/materials_CCM-v1.dat rename to resources/Detectors/CCM/CCM-v1/materials.dat diff --git a/resources/Detectors/DUNEFD/densities_DUNEFD-v1.dat b/resources/Detectors/DUNEFD/DUNEFD-v1/densities.dat similarity index 100% rename from resources/Detectors/DUNEFD/densities_DUNEFD-v1.dat rename to resources/Detectors/DUNEFD/DUNEFD-v1/densities.dat diff --git 
a/resources/Detectors/DUNEFD/materials_DUNEFD-v1.dat b/resources/Detectors/DUNEFD/DUNEFD-v1/materials.dat similarity index 100% rename from resources/Detectors/DUNEFD/materials_DUNEFD-v1.dat rename to resources/Detectors/DUNEFD/DUNEFD-v1/materials.dat diff --git a/resources/Detectors/HyperK/densities_HyperK-v1.dat b/resources/Detectors/HyperK/HyperK-v1/densities.dat similarity index 100% rename from resources/Detectors/HyperK/densities_HyperK-v1.dat rename to resources/Detectors/HyperK/HyperK-v1/densities.dat diff --git a/resources/Detectors/HyperK/materials_HyperK-v1.dat b/resources/Detectors/HyperK/HyperK-v1/materials.dat similarity index 100% rename from resources/Detectors/HyperK/materials_HyperK-v1.dat rename to resources/Detectors/HyperK/HyperK-v1/materials.dat diff --git a/resources/Detectors/IceCube/densities_IceCube-v1.dat b/resources/Detectors/IceCube/IceCube-v1/densities.dat similarity index 100% rename from resources/Detectors/IceCube/densities_IceCube-v1.dat rename to resources/Detectors/IceCube/IceCube-v1/densities.dat diff --git a/resources/Detectors/IceCube/materials_IceCube-v1.dat b/resources/Detectors/IceCube/IceCube-v1/materials.dat similarity index 100% rename from resources/Detectors/IceCube/materials_IceCube-v1.dat rename to resources/Detectors/IceCube/IceCube-v1/materials.dat diff --git a/resources/Detectors/MINERvA/densities_MINERvA-v1.dat b/resources/Detectors/MINERvA/MINERvA-v1/densities.dat similarity index 100% rename from resources/Detectors/MINERvA/densities_MINERvA-v1.dat rename to resources/Detectors/MINERvA/MINERvA-v1/densities.dat diff --git a/resources/Detectors/MINERvA/materials_MINERvA-v1.dat b/resources/Detectors/MINERvA/MINERvA-v1/materials.dat similarity index 100% rename from resources/Detectors/MINERvA/materials_MINERvA-v1.dat rename to resources/Detectors/MINERvA/MINERvA-v1/materials.dat diff --git a/resources/Detectors/MiniBooNE/densities_MiniBooNE-v1.dat b/resources/Detectors/MiniBooNE/MiniBooNE-v1/densities.dat 
similarity index 100% rename from resources/Detectors/MiniBooNE/densities_MiniBooNE-v1.dat rename to resources/Detectors/MiniBooNE/MiniBooNE-v1/densities.dat diff --git a/resources/Detectors/MiniBooNE/materials_MiniBooNE-v1.dat b/resources/Detectors/MiniBooNE/MiniBooNE-v1/materials.dat similarity index 100% rename from resources/Detectors/MiniBooNE/materials_MiniBooNE-v1.dat rename to resources/Detectors/MiniBooNE/MiniBooNE-v1/materials.dat From 6c7d3465011d8d81dde10fb5df42e696a2e8e47c Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Fri, 30 Aug 2024 13:52:46 -0600 Subject: [PATCH 25/85] Add load_* methods to siren.utilities --- python/__init__.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/python/__init__.py b/python/__init__.py index 14689b726..411d6aa37 100644 --- a/python/__init__.py +++ b/python/__init__.py @@ -25,6 +25,8 @@ utilities.get_processes_model_path = _util.get_processes_model_path utilities.get_flux_model_path = _util.get_flux_model_path utilities.load_flux = _util.load_flux +utilities.load_detector = _util.load_detector +utilities.load_processes = _util.load_processes def darknews_version(): try: From 049325538a3d709bce3aea48947393bbf9905144 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Fri, 30 Aug 2024 13:53:55 -0600 Subject: [PATCH 26/85] Refactor model path search to just handle folders. 
Add special load_ function for the detector models --- python/_util.py | 130 +++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 129 insertions(+), 1 deletion(-) diff --git a/python/_util.py b/python/_util.py index 40d4db571..131ef679f 100644 --- a/python/_util.py +++ b/python/_util.py @@ -464,6 +464,8 @@ def _find_model_folder_and_file(base_dir, model_name, must_exist, specific_file= specific_file_path = os.path.join(base_dir, name, specific_file) if os.path.isfile(specific_file_path): return name, True, specific_file_path + else: + return name, True, None else: return name, True, None else: @@ -557,6 +559,108 @@ def _get_model_path(model_name, prefix=None, suffix=None, is_file=True, must_exi return os.path.join(base_dir, model_name, model_file_name) +def _get_model_folder(base_dir, model_name, must_exist): + model_names = [ + f for f in os.listdir(base_dir) if os.path.isdir(os.path.join(base_dir, f)) + ] + + exact_model_names = [f for f in model_names if f.lower() == model_name.lower()] + + if len(exact_model_names) == 0: + model_names = [f for f in model_names if f.lower().startswith(model_name.lower())] + else: + model_names = exact_model_names + + if len(model_names) == 0 and must_exist: + raise ValueError(f"No model folders found for {model_name}\nSearched in {base_dir}") + elif len(model_names) == 0 and not must_exist: + return model_name, False + elif len(model_names) == 1: + return model_names[0], True + else: + raise ValueError(f"Multiple directories found for {model_name}\nSearched in {base_dir}") + +def _get_model_subfolders(base_dir, model_regex): + model_subfolders = [ + f for f in os.listdir(base_dir) if os.path.isdir(os.path.join(base_dir, f)) + ] + model_subfolders = [ + f for f in model_subfolders if model_regex.match(f) is not None + ] + return model_subfolders + + +def _get_model_path(model_name, prefix=None, suffix=None, is_file=True, must_exist=True, specific_file=None): + _model_regex = re.compile( + r"^\s*" + _MODEL_PATTERN + 
("" if suffix is None else r"(?:" + suffix + r")?") + r"\s*$", + re.VERBOSE | re.IGNORECASE, + ) + suffix = "" if suffix is None else suffix + + resources_dir = resource_package_dir() + base_dir = _get_base_directory(resources_dir, prefix) + + d = _model_regex.match(model_name) + if d is None: + raise ValueError(f"Invalid model name: {model_name}") + d = d.groupdict() + model_search_name, version = d["model_name"], d["version"] + + if version is not None: + version = normalize_version(version) + + found_model_name, folder_exists = _get_model_folder(base_dir, model_search_name, must_exist) + + model_dir = os.path.join(base_dir, found_model_name) + + if not must_exist and not folder_exists: + if version is None: + version = "v1" + + model_dir = os.path.join(model_dir, f"{found_model_name}-v{version}") + return model_dir + + + model_subfolders = _get_model_subfolders(model_dir, _model_regex) + + if len(model_subfolders) == 0: + if must_exist: + raise ValueError(f"No model folders found for {model_search_name}\nSearched in {model_dir}") + else: + if version is None: + version = "v1" + + model_dir = os.path.join(model_dir, f"{found_model_name}-v{version}") + return model_dir + + models_and_versions = [] + for f in model_subfolders: + d = _model_regex.match(f).groupdict() + if d["version"] is not None: + models_and_versions.append((f, normalize_version(d["version"]))) + + matching_models = [(m, v) for m, v in models_and_versions if v == version] + + if len(matching_models) == 1: + model_dir = os.path.join(model_dir, matching_models[0][0]) + return model_dir + elif len(matching_models) > 1: + raise ValueError(f"Multiple directories found for {model_search_name} with version {version}\nSearched in {model_dir}") + + top_level_has_specific_file = specific_file is not None and os.path.isfile(os.path.join(model_dir, specific_file)) + + if top_level_has_specific_file: + return model_dir + + if len(matching_models) == 0: + if must_exist and version is not None: + raise 
ValueError(f"No model folders found for {model_search_name} with version {version}\nSearched in {model_dir}") + + found_model_subfolder, subfolder_version = max(models_and_versions, key=lambda x: tokenize_version(x[1])) + + return os.path.join(model_dir, found_model_subfolder) + + def get_detector_model_file_path(model_name, must_exist=True): return _get_model_path(model_name, prefix="Detectors/densities", suffix=".dat", is_file=True, must_exist=must_exist) @@ -604,7 +708,31 @@ def load_flux(model_name, *args, **kwargs): def load_detector(model_name, *args, **kwargs): - return load_resource("detector", model_name, *args, **kwargs) + resource_type = "detector" + resource_name = model_name + folder = _resource_folder_by_name[resource_type] + specific_file = f"{resource_type}.py" + + abs_dir = _get_model_path(resource_name, prefix=folder, is_file=False, must_exist=True, specific_file=specific_file) + + script_fname = os.path.join(abs_dir, f"{resource_type}.py") + if os.path.isfile(script_fname): + resource_module = load_module(f"siren-{resource_type}-{resource_name}", script_fname, persist=False) + loader = getattr(resource_module, f"load_{resource_type}") + resource = loader(*args, **kwargs) + return resource + + densities_fname = os.path.join(abs_dir, "densities.dat") + materials_fname = os.path.join(abs_dir, "materials.dat") + + if os.path.isfile(densities_fname) and os.path.isfile(materials_fname): + from . 
import detector as _detector + detector_model = _detector.DetectorModel() + detector_model.LoadMaterialModel(materials_fname) + detector_model.LoadDetectorModel(densities_fname) + return detector_model + + raise ValueError("Could not find detector loading script \"{script_fname}\" or densities and materials files \"{densities_fname}\", \"materials_fname\"") def load_processes(model_name, *args, **kwargs): From b59d9d6270aeda31436c54830d58383f37988dee Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Fri, 30 Aug 2024 15:48:48 -0600 Subject: [PATCH 27/85] Need to defined is_configured ahead of time --- resources/Processes/DarkNewsTables/DarkNewsCrossSection.py | 1 + 1 file changed, 1 insertion(+) diff --git a/resources/Processes/DarkNewsTables/DarkNewsCrossSection.py b/resources/Processes/DarkNewsTables/DarkNewsCrossSection.py index dfb5dc482..fbc4476c1 100644 --- a/resources/Processes/DarkNewsTables/DarkNewsCrossSection.py +++ b/resources/Processes/DarkNewsTables/DarkNewsCrossSection.py @@ -31,6 +31,7 @@ def __init__( ): DarkNewsCrossSection.__init__(self) # C++ constructor + self.is_configured = False self.ups_case = ups_case self.tolerance = tolerance self.interp_tolerance = interp_tolerance From 8cb75a59258b44db3d77d8173025aa3114649634 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Fri, 30 Aug 2024 15:51:37 -0600 Subject: [PATCH 28/85] Pass nuclear targets to ModelContainer. 
Typo "s" --- resources/Processes/DarkNewsTables/processes.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/resources/Processes/DarkNewsTables/processes.py b/resources/Processes/DarkNewsTables/processes.py index ff0ce8d66..d5bcce35b 100644 --- a/resources/Processes/DarkNewsTables/processes.py +++ b/resources/Processes/DarkNewsTables/processes.py @@ -207,7 +207,7 @@ def attempt_to_load_cross_section( break elif p == "normal": try: - cross_sections = load_cross_section( + cross_section = load_cross_section( models, ups_key, tolerance=tolerance, @@ -433,6 +433,7 @@ def load_processes( table_dir = os.path.join(base_path, "Dipole_M%2.2e_mu%2.2e" % (m4, mu_tr_mu4)) models = ModelContainer( + nuclear_targets=nuclear_targets, m4=m4, mu_tr_mu4=mu_tr_mu4, UD4=UD4, From a271cff4e4f01af91f75dfa0c7ca85756d98f7ae Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Fri, 30 Aug 2024 16:47:33 -0600 Subject: [PATCH 29/85] Inherit from a common Interaction class to enable a more concise constructor --- .../interactions/private/CrossSection.cxx | 2 +- .../private/InteractionCollection.cxx | 18 ++++++++++++++ .../public/SIREN/interactions/CrossSection.h | 5 +++- .../public/SIREN/interactions/Decay.h | 5 +++- .../public/SIREN/interactions/Interaction.h | 24 +++++++++++++++++++ .../interactions/InteractionCollection.h | 1 + 6 files changed, 52 insertions(+), 3 deletions(-) create mode 100644 projects/interactions/public/SIREN/interactions/Interaction.h diff --git a/projects/interactions/private/CrossSection.cxx b/projects/interactions/private/CrossSection.cxx index dfcbf49f6..2d0a3fdac 100644 --- a/projects/interactions/private/CrossSection.cxx +++ b/projects/interactions/private/CrossSection.cxx @@ -31,4 +31,4 @@ bool CrossSection::operator==(CrossSection const & other) const { } } // namespace interactions -} // namespace siren \ No newline at end of file +} // namespace siren diff --git a/projects/interactions/private/InteractionCollection.cxx 
b/projects/interactions/private/InteractionCollection.cxx index 0f68772ba..9a3464a73 100644 --- a/projects/interactions/private/InteractionCollection.cxx +++ b/projects/interactions/private/InteractionCollection.cxx @@ -7,6 +7,7 @@ #include // for vector #include // for pair +#include "SIREN/interactions/Interaction.h" // for Interaction #include "SIREN/interactions/CrossSection.h" // for CrossSe... #include "SIREN/interactions/Decay.h" // for Decay #include "SIREN/dataclasses/InteractionRecord.h" // for Interac... @@ -60,6 +61,23 @@ InteractionCollection::InteractionCollection(siren::dataclasses::ParticleType pr InitializeTargetTypes(); } +InteractionCollection::InteractionCollection(siren::dataclasses::ParticleType primary_type, std::vector> interactions) : primary_type(primary_type) { + for(auto interaction : interactions) { + std::shared_ptr xs = std::dynamic_pointer_cast(interaction); + if(xs) { + cross_sections.push_back(xs); + } else { + std::shared_ptr dec = std::dynamic_pointer_cast(interaction); + if(dec) { + decays.push_back(dec); + } else { + throw std::runtime_error("InteractionCollection: Interaction is neither a CrossSection nor a Decay"); + } + } + } + InitializeTargetTypes(); +} + bool InteractionCollection::operator==(InteractionCollection const & other) const { return std::tie(primary_type, target_types, cross_sections, decays) diff --git a/projects/interactions/public/SIREN/interactions/CrossSection.h b/projects/interactions/public/SIREN/interactions/CrossSection.h index 57ef742ea..0512218c9 100644 --- a/projects/interactions/public/SIREN/interactions/CrossSection.h +++ b/projects/interactions/public/SIREN/interactions/CrossSection.h @@ -17,6 +17,7 @@ #include "SIREN/dataclasses/Particle.h" // for Particle #include "SIREN/dataclasses/InteractionSignature.h" // for InteractionSignature #include "SIREN/utilities/Random.h" // for SIREN_random +#include "SIREN/interactions/Interaction.h" // for Interaction namespace siren { namespace dataclasses { 
class InteractionRecord; } } namespace siren { namespace dataclasses { class CrossSectionDistributionRecord; } } @@ -26,7 +27,7 @@ namespace siren { namespace utilities { class SIREN_random; } } namespace siren { namespace interactions { -class CrossSection { +class CrossSection : public Interaction { friend cereal::access; private: void SampleFinalState(dataclasses::InteractionRecord &, std::shared_ptr) const; @@ -59,6 +60,8 @@ friend cereal::access; } // namespace siren CEREAL_CLASS_VERSION(siren::interactions::CrossSection, 0); +CEREAL_REGISTER_TYPE(siren::interactions::CrossSection); +CEREAL_REGISTER_POLYMORPHIC_RELATION(siren::interactions::Interaction, siren::interactions::CrossSection); #endif // SIREN_CrossSection_H diff --git a/projects/interactions/public/SIREN/interactions/Decay.h b/projects/interactions/public/SIREN/interactions/Decay.h index 3c3512664..e547a4051 100644 --- a/projects/interactions/public/SIREN/interactions/Decay.h +++ b/projects/interactions/public/SIREN/interactions/Decay.h @@ -17,6 +17,7 @@ #include "SIREN/dataclasses/Particle.h" // for Particle #include "SIREN/dataclasses/InteractionSignature.h" // for InteractionSignature #include "SIREN/utilities/Random.h" // for SIREN_random +#include "SIREN/interactions/Interaction.h" // for Interaction namespace siren { namespace dataclasses { class InteractionRecord; } } namespace siren { namespace dataclasses { class CrossSectionDistributionRecord; } } @@ -26,7 +27,7 @@ namespace siren { namespace utilities { class SIREN_random; } } namespace siren { namespace interactions { -class Decay { +class Decay : public Interaction { friend cereal::access; private: public: @@ -56,5 +57,7 @@ friend cereal::access; } // namespace siren CEREAL_CLASS_VERSION(siren::interactions::Decay, 0); +CEREAL_REGISTER_TYPE(siren::interactions::Decay); +CEREAL_REGISTER_POLYMORPHIC_RELATION(siren::interactions::Interaction, siren::interactions::Decay); #endif // SIREN_Decay_H diff --git 
a/projects/interactions/public/SIREN/interactions/Interaction.h b/projects/interactions/public/SIREN/interactions/Interaction.h new file mode 100644 index 000000000..042a6d0a8 --- /dev/null +++ b/projects/interactions/public/SIREN/interactions/Interaction.h @@ -0,0 +1,24 @@ +#pragma once +#ifndef SIREN_Interaction_H +#define SIREN_Interaction_H + +#include +#include +#include +#include +#include + +namespace siren { +namespace interactions { + +class Interaction { +public: + virtual ~Interaction() = default; +}; + +}; +}; + +CEREAL_CLASS_VERSION(siren::interactions::Interaction, 0); + +#endif // SIREN_Interaction_H diff --git a/projects/interactions/public/SIREN/interactions/InteractionCollection.h b/projects/interactions/public/SIREN/interactions/InteractionCollection.h index 3a8e8f80b..243b41142 100644 --- a/projects/interactions/public/SIREN/interactions/InteractionCollection.h +++ b/projects/interactions/public/SIREN/interactions/InteractionCollection.h @@ -48,6 +48,7 @@ class InteractionCollection { InteractionCollection(siren::dataclasses::ParticleType primary_type, std::vector> cross_sections); InteractionCollection(siren::dataclasses::ParticleType primary_type, std::vector> decays); InteractionCollection(siren::dataclasses::ParticleType primary_type, std::vector> cross_sections, std::vector> decays); + InteractionCollection(siren::dataclasses::ParticleType primary_type, std::vector> interactions); bool operator==(InteractionCollection const & other) const; std::vector> const & GetCrossSections() const {return cross_sections;} std::vector> const & GetDecays() const {return decays;} From a2a3dbb21b4e682a3a7f28d820ffcd7ea6be6773 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Sat, 31 Aug 2024 19:02:20 -0600 Subject: [PATCH 30/85] load_process returns maps from particle type to interactions --- .../Processes/DarkNewsTables/processes.py | 35 ++++++++++++++++++- 1 file changed, 34 insertions(+), 1 deletion(-) diff --git 
a/resources/Processes/DarkNewsTables/processes.py b/resources/Processes/DarkNewsTables/processes.py index d5bcce35b..1a57ca933 100644 --- a/resources/Processes/DarkNewsTables/processes.py +++ b/resources/Processes/DarkNewsTables/processes.py @@ -1,6 +1,7 @@ import os from typing import Tuple, List, Any, Optional import siren +import collections base_path = os.path.dirname(os.path.abspath(__file__)) logger_file = os.path.join(base_path, "logger.py") @@ -370,6 +371,17 @@ def load_decays( return decays +# This class is a hacky workaround for an issue with the python reference counting of classes derived +# from a pybind11 trampoline class i.e. python cross-section classes and python decay classes that +# inherit from siren.interactions.CrossSection or siren.interactions.Decay. If these python classes +# are passed to the InteractionCollection constructor, but a python-side reference to them is not +# maintained, then their python side state/memory will be destroyed/deallocated. This class maintains +# a python-side reference to all PyDarkNewsCrossSection and PyDarkNewsDecay instances created by +# load_processes(...) 
to avoid this issue +class Holder: + holders = [] + def __init__(self): + Holder.holders.append(self) def load_processes( primary_type: Optional[Any] = None, @@ -468,5 +480,26 @@ def load_processes( for cross_section in cross_sections: cross_section.FillInterpolationTables(Emax=Emax) - return cross_sections + decays + primary_processes = collections.defaultdict(list) + # Loop over available cross sections and save those which match primary type + for cross_section in cross_sections: + if primary_type == siren.dataclasses.Particle.ParticleType( + cross_section.ups_case.nu_projectile.pdgid + ): + primary_processes[primary_type].append(cross_section) + + secondary_processes = collections.defaultdict(list) + # Loop over available decays, group by parent type + for decay in decays: + secondary_type = siren.dataclasses.Particle.ParticleType( + decay.dec_case.nu_parent.pdgid + ) + secondary_processes[secondary_type].append(decay) + + + holder = Holder() + holder.primary_processes = primary_processes + holder.secondary_processes = secondary_processes + + return dict(primary_processes), dict(secondary_processes) From 5492b695706f47c0a9edfe1726b645689ec1f533 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Sat, 31 Aug 2024 19:41:57 -0600 Subject: [PATCH 31/85] Loader for CSMS splines --- .../CSMSDISSplines-v1.0/processes.py | 109 ++++++++++++++++++ 1 file changed, 109 insertions(+) create mode 100644 resources/Processes/CSMSDISSplines/CSMSDISSplines-v1.0/processes.py diff --git a/resources/Processes/CSMSDISSplines/CSMSDISSplines-v1.0/processes.py b/resources/Processes/CSMSDISSplines/CSMSDISSplines-v1.0/processes.py new file mode 100644 index 000000000..394153550 --- /dev/null +++ b/resources/Processes/CSMSDISSplines/CSMSDISSplines-v1.0/processes.py @@ -0,0 +1,109 @@ +import os +from typing import Tuple, List, Any, Optional +import siren +import collections + +base_path = os.path.dirname(os.path.abspath(__file__)) + +neutrinos = [ + 
siren.dataclasses.Particle.ParticleType.NuE, + siren.dataclasses.Particle.ParticleType.NuMu, + siren.dataclasses.Particle.ParticleType.NuTau, +] +antineutrinos = [ + siren.dataclasses.Particle.ParticleType.NuEBar, + siren.dataclasses.Particle.ParticleType.NuMuBar, + siren.dataclasses.Particle.ParticleType.NuTauBar, +] +processes = ["CC", "NC"] + + +def _get_primary_types(primary_types): + if primary_types is None: + primary_types = [ + siren.dataclasses.Particle.ParticleType.NuE, + siren.dataclasses.Particle.ParticleType.NuMu, + siren.dataclasses.Particle.ParticleType.NuTau, + siren.dataclasses.Particle.ParticleType.NuEBar, + siren.dataclasses.Particle.ParticleType.NuMuBar, + siren.dataclasses.Particle.ParticleType.NuTauBar, + ] + + supported_primaries = neutrinos + antineutrinos + for i, p in enumerate(primary_types): + if p not in supported_primaries: + raise ValueError(f"primary_types[{i}] \"{p}\" not supported") + + if len(primary_types) == 0: + print("Warning: len(primary_types) == 0") + + return primary_types + +def _get_isoscalar(isoscalar): + if isoscalar is None: + isoscalar = True + + if not isoscalar: + raise ValueError("Non-isoscalar splines are not supported for CSMSDISSplines-v1.0") + + return isoscalar + + +def _get_target_types(isoscalar, target_types): + if target_types is None: + if isoscalar: + target_types = [siren.dataclasses.Particle.ParticleType.Nucleon] + else: + target_types = [siren.dataclasses.Particle.ParticleType.PPlus, siren.dataclasses.Particle.ParticleType.Neutron] + + if len(target_types) == 0: + print("Warning: len(target_types) == 0") + + return target_types + +def _get_process_types(process_types): + if process_types is None: + process_types = ["CC", "NC"] + + for i, p in enumerate(process_types): + if p not in processes: + raise ValueError(f"process_types[{i}] \"{p}\" not supported") + + if len(process_types) == 0: + print("Warning: len(process_types) == 0") + + return process_types + + +def load_processes( + primary_types: 
Optional[List[siren.dataclasses.Particle.ParticleType]] = None, + target_types: Optional[List[siren.dataclasses.Particle.ParticleType]] = None, + isoscalar: Optional[bool] = None, + process_types: Optional[List[str]] = None, + ): + + primary_types = _get_primary_types(primary_types) + isoscalar = _get_isoscalar(isoscalar) + target_types = _get_target_types(isoscalar, target_types) + process_types = _get_process_types(process_types) + + neutrino_types = [t for t in primary_types if t in neutrinos] + antineutrino_types = [t for t in primary_types if t in antineutrinos] + + primary_processes = [] + primary_processes_dict = collections.defaultdict(list) + + for process_type in process_types: + for primaries, nunubar in [[neutrinos, "nu"], [antineutrinos, "nubar"]]: + if isoscalar: + dxs_file = os.path.join(base_path, f"dsdxdy_{nunubar}_{process_type}_iso.fits") + xs_file = os.path.join(base_path, f"sigma_{nunubar}_{process_type}_iso.fits") + xs = siren.interactions.DISFromSpline(dxs_file, xs_file, primaries, target_types, "m") + primary_processes.append(xs) + for primary_type in primaries: + primary_processes_dict[primary_type].append(xs) + else: + pass + + return dict(primary_processes_dict), {} + From ea69003813403ccb2268620996bb14d44c250d92 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Sun, 1 Sep 2024 18:00:25 -0600 Subject: [PATCH 32/85] Move resources --- .../ATLAS/ATLAS-v1/densities.dat | 0 .../ATLAS/ATLAS-v1/materials.dat | 0 .../CCM/CCM-v1/densities.dat | 0 .../CCM/CCM-v1/materials.dat | 0 .../CCM/densities_CCM-v2.dat | 0 .../CCM/materials_CCM-v2.dat | 0 .../DUNEFD/DUNEFD-v1/densities.dat | 0 .../DUNEFD/DUNEFD-v1/materials.dat | 0 .../HyperK/HyperK-v1/densities.dat | 0 .../HyperK/HyperK-v1/materials.dat | 0 .../IceCube/IceCube-v1/densities.dat | 0 .../IceCube/IceCube-v1/materials.dat | 0 .../MINERvA/MINERvA-v1/densities.dat | 0 .../MINERvA/MINERvA-v1/materials.dat | 0 .../MiniBooNE/MiniBooNE-v1/densities.dat | 0 .../MiniBooNE/MiniBooNE-v1/materials.dat 
| 0 resources/{Detectors => detectors}/README_densities | 0 resources/{Detectors => detectors}/README_materials | 0 .../AdditionalPaperPlots/PaperPlots.ipynb | 0 .../{Examples => examples}/Example1/DIS_ATLAS.py | 0 .../{Examples => examples}/Example1/DIS_DUNE.py | 0 .../{Examples => examples}/Example1/DIS_IceCube.py | 0 .../Example1/PaperPlots.ipynb | 0 .../Example2/DipolePortal_CCM.py | 0 .../Example2/DipolePortal_MINERvA.py | 0 .../Example2/DipolePortal_MiniBooNE.py | 0 .../Example2/DipolePortal_ND280UPGRD.py | 0 .../Example2/PaperPlots.ipynb | 0 resources/{Examples => examples}/figures.mplstyle | 0 .../legacy_examples/compiling_c_example.md | 0 .../legacy_examples/convert_to_i3.py | 0 .../legacy_examples/inject_muons.cpp | 0 .../legacy_examples/inject_muons.py | 0 .../{Fluxes => fluxes}/BNB/BNB-v1.0/BNB_FHC.dat | 0 .../{Fluxes => fluxes}/BNB/BNB-v1.0/BNB_RHC.dat | 0 resources/{Fluxes => fluxes}/BNB/BNB-v1.0/flux.py | 0 .../dN_dE_SNe_2n_D1_0_s20_t100d_NuMu_d10kpc.txt | 0 .../{Fluxes => fluxes}/HE_SN/HE_SN-v1.0/flux.py | 0 .../NUMI/NUMI-v1.0/NUMI_FHC_LE.dat | 0 .../NUMI/NUMI-v1.0/NUMI_FHC_ME.dat | 0 .../NUMI/NUMI-v1.0/NUMI_FHC_ME_unofficial.dat | 0 .../NUMI/NUMI-v1.0/NUMI_RHC_LE.dat | 0 .../NUMI/NUMI-v1.0/NUMI_RHC_ME.dat | 0 .../NUMI/NUMI-v1.0/NUMI_RHC_ME_unofficial.dat | 0 resources/{Fluxes => fluxes}/NUMI/NUMI-v1.0/flux.py | 0 .../CSMSDISSplines-v1.0/dsdxdy_nu_CC_iso.fits | Bin .../CSMSDISSplines-v1.0/dsdxdy_nu_NC_iso.fits | Bin .../CSMSDISSplines-v1.0/dsdxdy_nubar_CC_iso.fits | Bin .../CSMSDISSplines-v1.0/dsdxdy_nubar_NC_iso.fits | Bin .../CSMSDISSplines/CSMSDISSplines-v1.0/processes.py | 0 .../CSMSDISSplines-v1.0/sigma_nu_CC_iso.fits | Bin .../CSMSDISSplines-v1.0/sigma_nu_NC_iso.fits | Bin .../CSMSDISSplines-v1.0/sigma_nubar_CC_iso.fits | Bin .../CSMSDISSplines-v1.0/sigma_nubar_NC_iso.fits | Bin .../DarkNewsTables/DarkNewsCrossSection.py | 0 .../DarkNewsTables/DarkNewsDecay.py | 0 .../DarkNewsTables/README.md | 0 .../DarkNewsTables/logger.py | 0 
.../DarkNewsTables/processes.py | 0 59 files changed, 0 insertions(+), 0 deletions(-) rename resources/{Detectors => detectors}/ATLAS/ATLAS-v1/densities.dat (100%) rename resources/{Detectors => detectors}/ATLAS/ATLAS-v1/materials.dat (100%) rename resources/{Detectors => detectors}/CCM/CCM-v1/densities.dat (100%) rename resources/{Detectors => detectors}/CCM/CCM-v1/materials.dat (100%) rename resources/{Detectors => detectors}/CCM/densities_CCM-v2.dat (100%) rename resources/{Detectors => detectors}/CCM/materials_CCM-v2.dat (100%) rename resources/{Detectors => detectors}/DUNEFD/DUNEFD-v1/densities.dat (100%) rename resources/{Detectors => detectors}/DUNEFD/DUNEFD-v1/materials.dat (100%) rename resources/{Detectors => detectors}/HyperK/HyperK-v1/densities.dat (100%) rename resources/{Detectors => detectors}/HyperK/HyperK-v1/materials.dat (100%) rename resources/{Detectors => detectors}/IceCube/IceCube-v1/densities.dat (100%) rename resources/{Detectors => detectors}/IceCube/IceCube-v1/materials.dat (100%) rename resources/{Detectors => detectors}/MINERvA/MINERvA-v1/densities.dat (100%) rename resources/{Detectors => detectors}/MINERvA/MINERvA-v1/materials.dat (100%) rename resources/{Detectors => detectors}/MiniBooNE/MiniBooNE-v1/densities.dat (100%) rename resources/{Detectors => detectors}/MiniBooNE/MiniBooNE-v1/materials.dat (100%) rename resources/{Detectors => detectors}/README_densities (100%) rename resources/{Detectors => detectors}/README_materials (100%) rename resources/{Examples => examples}/AdditionalPaperPlots/PaperPlots.ipynb (100%) rename resources/{Examples => examples}/Example1/DIS_ATLAS.py (100%) rename resources/{Examples => examples}/Example1/DIS_DUNE.py (100%) rename resources/{Examples => examples}/Example1/DIS_IceCube.py (100%) rename resources/{Examples => examples}/Example1/PaperPlots.ipynb (100%) rename resources/{Examples => examples}/Example2/DipolePortal_CCM.py (100%) rename resources/{Examples => 
examples}/Example2/DipolePortal_MINERvA.py (100%) rename resources/{Examples => examples}/Example2/DipolePortal_MiniBooNE.py (100%) rename resources/{Examples => examples}/Example2/DipolePortal_ND280UPGRD.py (100%) rename resources/{Examples => examples}/Example2/PaperPlots.ipynb (100%) rename resources/{Examples => examples}/figures.mplstyle (100%) rename resources/{Examples => examples}/legacy_examples/compiling_c_example.md (100%) rename resources/{Examples => examples}/legacy_examples/convert_to_i3.py (100%) rename resources/{Examples => examples}/legacy_examples/inject_muons.cpp (100%) rename resources/{Examples => examples}/legacy_examples/inject_muons.py (100%) rename resources/{Fluxes => fluxes}/BNB/BNB-v1.0/BNB_FHC.dat (100%) rename resources/{Fluxes => fluxes}/BNB/BNB-v1.0/BNB_RHC.dat (100%) rename resources/{Fluxes => fluxes}/BNB/BNB-v1.0/flux.py (100%) rename resources/{Fluxes => fluxes}/HE_SN/HE_SN-v1.0/dN_dE_SNe_2n_D1_0_s20_t100d_NuMu_d10kpc.txt (100%) rename resources/{Fluxes => fluxes}/HE_SN/HE_SN-v1.0/flux.py (100%) rename resources/{Fluxes => fluxes}/NUMI/NUMI-v1.0/NUMI_FHC_LE.dat (100%) rename resources/{Fluxes => fluxes}/NUMI/NUMI-v1.0/NUMI_FHC_ME.dat (100%) rename resources/{Fluxes => fluxes}/NUMI/NUMI-v1.0/NUMI_FHC_ME_unofficial.dat (100%) rename resources/{Fluxes => fluxes}/NUMI/NUMI-v1.0/NUMI_RHC_LE.dat (100%) rename resources/{Fluxes => fluxes}/NUMI/NUMI-v1.0/NUMI_RHC_ME.dat (100%) rename resources/{Fluxes => fluxes}/NUMI/NUMI-v1.0/NUMI_RHC_ME_unofficial.dat (100%) rename resources/{Fluxes => fluxes}/NUMI/NUMI-v1.0/flux.py (100%) rename resources/{Processes => processes}/CSMSDISSplines/CSMSDISSplines-v1.0/dsdxdy_nu_CC_iso.fits (100%) rename resources/{Processes => processes}/CSMSDISSplines/CSMSDISSplines-v1.0/dsdxdy_nu_NC_iso.fits (100%) rename resources/{Processes => processes}/CSMSDISSplines/CSMSDISSplines-v1.0/dsdxdy_nubar_CC_iso.fits (100%) rename resources/{Processes => 
processes}/CSMSDISSplines/CSMSDISSplines-v1.0/dsdxdy_nubar_NC_iso.fits (100%) rename resources/{Processes => processes}/CSMSDISSplines/CSMSDISSplines-v1.0/processes.py (100%) rename resources/{Processes => processes}/CSMSDISSplines/CSMSDISSplines-v1.0/sigma_nu_CC_iso.fits (100%) rename resources/{Processes => processes}/CSMSDISSplines/CSMSDISSplines-v1.0/sigma_nu_NC_iso.fits (100%) rename resources/{Processes => processes}/CSMSDISSplines/CSMSDISSplines-v1.0/sigma_nubar_CC_iso.fits (100%) rename resources/{Processes => processes}/CSMSDISSplines/CSMSDISSplines-v1.0/sigma_nubar_NC_iso.fits (100%) rename resources/{Processes => processes}/DarkNewsTables/DarkNewsCrossSection.py (100%) rename resources/{Processes => processes}/DarkNewsTables/DarkNewsDecay.py (100%) rename resources/{Processes => processes}/DarkNewsTables/README.md (100%) rename resources/{Processes => processes}/DarkNewsTables/logger.py (100%) rename resources/{Processes => processes}/DarkNewsTables/processes.py (100%) diff --git a/resources/Detectors/ATLAS/ATLAS-v1/densities.dat b/resources/detectors/ATLAS/ATLAS-v1/densities.dat similarity index 100% rename from resources/Detectors/ATLAS/ATLAS-v1/densities.dat rename to resources/detectors/ATLAS/ATLAS-v1/densities.dat diff --git a/resources/Detectors/ATLAS/ATLAS-v1/materials.dat b/resources/detectors/ATLAS/ATLAS-v1/materials.dat similarity index 100% rename from resources/Detectors/ATLAS/ATLAS-v1/materials.dat rename to resources/detectors/ATLAS/ATLAS-v1/materials.dat diff --git a/resources/Detectors/CCM/CCM-v1/densities.dat b/resources/detectors/CCM/CCM-v1/densities.dat similarity index 100% rename from resources/Detectors/CCM/CCM-v1/densities.dat rename to resources/detectors/CCM/CCM-v1/densities.dat diff --git a/resources/Detectors/CCM/CCM-v1/materials.dat b/resources/detectors/CCM/CCM-v1/materials.dat similarity index 100% rename from resources/Detectors/CCM/CCM-v1/materials.dat rename to resources/detectors/CCM/CCM-v1/materials.dat diff --git 
a/resources/Detectors/CCM/densities_CCM-v2.dat b/resources/detectors/CCM/densities_CCM-v2.dat similarity index 100% rename from resources/Detectors/CCM/densities_CCM-v2.dat rename to resources/detectors/CCM/densities_CCM-v2.dat diff --git a/resources/Detectors/CCM/materials_CCM-v2.dat b/resources/detectors/CCM/materials_CCM-v2.dat similarity index 100% rename from resources/Detectors/CCM/materials_CCM-v2.dat rename to resources/detectors/CCM/materials_CCM-v2.dat diff --git a/resources/Detectors/DUNEFD/DUNEFD-v1/densities.dat b/resources/detectors/DUNEFD/DUNEFD-v1/densities.dat similarity index 100% rename from resources/Detectors/DUNEFD/DUNEFD-v1/densities.dat rename to resources/detectors/DUNEFD/DUNEFD-v1/densities.dat diff --git a/resources/Detectors/DUNEFD/DUNEFD-v1/materials.dat b/resources/detectors/DUNEFD/DUNEFD-v1/materials.dat similarity index 100% rename from resources/Detectors/DUNEFD/DUNEFD-v1/materials.dat rename to resources/detectors/DUNEFD/DUNEFD-v1/materials.dat diff --git a/resources/Detectors/HyperK/HyperK-v1/densities.dat b/resources/detectors/HyperK/HyperK-v1/densities.dat similarity index 100% rename from resources/Detectors/HyperK/HyperK-v1/densities.dat rename to resources/detectors/HyperK/HyperK-v1/densities.dat diff --git a/resources/Detectors/HyperK/HyperK-v1/materials.dat b/resources/detectors/HyperK/HyperK-v1/materials.dat similarity index 100% rename from resources/Detectors/HyperK/HyperK-v1/materials.dat rename to resources/detectors/HyperK/HyperK-v1/materials.dat diff --git a/resources/Detectors/IceCube/IceCube-v1/densities.dat b/resources/detectors/IceCube/IceCube-v1/densities.dat similarity index 100% rename from resources/Detectors/IceCube/IceCube-v1/densities.dat rename to resources/detectors/IceCube/IceCube-v1/densities.dat diff --git a/resources/Detectors/IceCube/IceCube-v1/materials.dat b/resources/detectors/IceCube/IceCube-v1/materials.dat similarity index 100% rename from resources/Detectors/IceCube/IceCube-v1/materials.dat 
rename to resources/detectors/IceCube/IceCube-v1/materials.dat diff --git a/resources/Detectors/MINERvA/MINERvA-v1/densities.dat b/resources/detectors/MINERvA/MINERvA-v1/densities.dat similarity index 100% rename from resources/Detectors/MINERvA/MINERvA-v1/densities.dat rename to resources/detectors/MINERvA/MINERvA-v1/densities.dat diff --git a/resources/Detectors/MINERvA/MINERvA-v1/materials.dat b/resources/detectors/MINERvA/MINERvA-v1/materials.dat similarity index 100% rename from resources/Detectors/MINERvA/MINERvA-v1/materials.dat rename to resources/detectors/MINERvA/MINERvA-v1/materials.dat diff --git a/resources/Detectors/MiniBooNE/MiniBooNE-v1/densities.dat b/resources/detectors/MiniBooNE/MiniBooNE-v1/densities.dat similarity index 100% rename from resources/Detectors/MiniBooNE/MiniBooNE-v1/densities.dat rename to resources/detectors/MiniBooNE/MiniBooNE-v1/densities.dat diff --git a/resources/Detectors/MiniBooNE/MiniBooNE-v1/materials.dat b/resources/detectors/MiniBooNE/MiniBooNE-v1/materials.dat similarity index 100% rename from resources/Detectors/MiniBooNE/MiniBooNE-v1/materials.dat rename to resources/detectors/MiniBooNE/MiniBooNE-v1/materials.dat diff --git a/resources/Detectors/README_densities b/resources/detectors/README_densities similarity index 100% rename from resources/Detectors/README_densities rename to resources/detectors/README_densities diff --git a/resources/Detectors/README_materials b/resources/detectors/README_materials similarity index 100% rename from resources/Detectors/README_materials rename to resources/detectors/README_materials diff --git a/resources/Examples/AdditionalPaperPlots/PaperPlots.ipynb b/resources/examples/AdditionalPaperPlots/PaperPlots.ipynb similarity index 100% rename from resources/Examples/AdditionalPaperPlots/PaperPlots.ipynb rename to resources/examples/AdditionalPaperPlots/PaperPlots.ipynb diff --git a/resources/Examples/Example1/DIS_ATLAS.py b/resources/examples/Example1/DIS_ATLAS.py similarity index 100% 
rename from resources/Examples/Example1/DIS_ATLAS.py rename to resources/examples/Example1/DIS_ATLAS.py diff --git a/resources/Examples/Example1/DIS_DUNE.py b/resources/examples/Example1/DIS_DUNE.py similarity index 100% rename from resources/Examples/Example1/DIS_DUNE.py rename to resources/examples/Example1/DIS_DUNE.py diff --git a/resources/Examples/Example1/DIS_IceCube.py b/resources/examples/Example1/DIS_IceCube.py similarity index 100% rename from resources/Examples/Example1/DIS_IceCube.py rename to resources/examples/Example1/DIS_IceCube.py diff --git a/resources/Examples/Example1/PaperPlots.ipynb b/resources/examples/Example1/PaperPlots.ipynb similarity index 100% rename from resources/Examples/Example1/PaperPlots.ipynb rename to resources/examples/Example1/PaperPlots.ipynb diff --git a/resources/Examples/Example2/DipolePortal_CCM.py b/resources/examples/Example2/DipolePortal_CCM.py similarity index 100% rename from resources/Examples/Example2/DipolePortal_CCM.py rename to resources/examples/Example2/DipolePortal_CCM.py diff --git a/resources/Examples/Example2/DipolePortal_MINERvA.py b/resources/examples/Example2/DipolePortal_MINERvA.py similarity index 100% rename from resources/Examples/Example2/DipolePortal_MINERvA.py rename to resources/examples/Example2/DipolePortal_MINERvA.py diff --git a/resources/Examples/Example2/DipolePortal_MiniBooNE.py b/resources/examples/Example2/DipolePortal_MiniBooNE.py similarity index 100% rename from resources/Examples/Example2/DipolePortal_MiniBooNE.py rename to resources/examples/Example2/DipolePortal_MiniBooNE.py diff --git a/resources/Examples/Example2/DipolePortal_ND280UPGRD.py b/resources/examples/Example2/DipolePortal_ND280UPGRD.py similarity index 100% rename from resources/Examples/Example2/DipolePortal_ND280UPGRD.py rename to resources/examples/Example2/DipolePortal_ND280UPGRD.py diff --git a/resources/Examples/Example2/PaperPlots.ipynb b/resources/examples/Example2/PaperPlots.ipynb similarity index 100% rename 
from resources/Examples/Example2/PaperPlots.ipynb rename to resources/examples/Example2/PaperPlots.ipynb diff --git a/resources/Examples/figures.mplstyle b/resources/examples/figures.mplstyle similarity index 100% rename from resources/Examples/figures.mplstyle rename to resources/examples/figures.mplstyle diff --git a/resources/Examples/legacy_examples/compiling_c_example.md b/resources/examples/legacy_examples/compiling_c_example.md similarity index 100% rename from resources/Examples/legacy_examples/compiling_c_example.md rename to resources/examples/legacy_examples/compiling_c_example.md diff --git a/resources/Examples/legacy_examples/convert_to_i3.py b/resources/examples/legacy_examples/convert_to_i3.py similarity index 100% rename from resources/Examples/legacy_examples/convert_to_i3.py rename to resources/examples/legacy_examples/convert_to_i3.py diff --git a/resources/Examples/legacy_examples/inject_muons.cpp b/resources/examples/legacy_examples/inject_muons.cpp similarity index 100% rename from resources/Examples/legacy_examples/inject_muons.cpp rename to resources/examples/legacy_examples/inject_muons.cpp diff --git a/resources/Examples/legacy_examples/inject_muons.py b/resources/examples/legacy_examples/inject_muons.py similarity index 100% rename from resources/Examples/legacy_examples/inject_muons.py rename to resources/examples/legacy_examples/inject_muons.py diff --git a/resources/Fluxes/BNB/BNB-v1.0/BNB_FHC.dat b/resources/fluxes/BNB/BNB-v1.0/BNB_FHC.dat similarity index 100% rename from resources/Fluxes/BNB/BNB-v1.0/BNB_FHC.dat rename to resources/fluxes/BNB/BNB-v1.0/BNB_FHC.dat diff --git a/resources/Fluxes/BNB/BNB-v1.0/BNB_RHC.dat b/resources/fluxes/BNB/BNB-v1.0/BNB_RHC.dat similarity index 100% rename from resources/Fluxes/BNB/BNB-v1.0/BNB_RHC.dat rename to resources/fluxes/BNB/BNB-v1.0/BNB_RHC.dat diff --git a/resources/Fluxes/BNB/BNB-v1.0/flux.py b/resources/fluxes/BNB/BNB-v1.0/flux.py similarity index 100% rename from 
resources/Fluxes/BNB/BNB-v1.0/flux.py rename to resources/fluxes/BNB/BNB-v1.0/flux.py diff --git a/resources/Fluxes/HE_SN/HE_SN-v1.0/dN_dE_SNe_2n_D1_0_s20_t100d_NuMu_d10kpc.txt b/resources/fluxes/HE_SN/HE_SN-v1.0/dN_dE_SNe_2n_D1_0_s20_t100d_NuMu_d10kpc.txt similarity index 100% rename from resources/Fluxes/HE_SN/HE_SN-v1.0/dN_dE_SNe_2n_D1_0_s20_t100d_NuMu_d10kpc.txt rename to resources/fluxes/HE_SN/HE_SN-v1.0/dN_dE_SNe_2n_D1_0_s20_t100d_NuMu_d10kpc.txt diff --git a/resources/Fluxes/HE_SN/HE_SN-v1.0/flux.py b/resources/fluxes/HE_SN/HE_SN-v1.0/flux.py similarity index 100% rename from resources/Fluxes/HE_SN/HE_SN-v1.0/flux.py rename to resources/fluxes/HE_SN/HE_SN-v1.0/flux.py diff --git a/resources/Fluxes/NUMI/NUMI-v1.0/NUMI_FHC_LE.dat b/resources/fluxes/NUMI/NUMI-v1.0/NUMI_FHC_LE.dat similarity index 100% rename from resources/Fluxes/NUMI/NUMI-v1.0/NUMI_FHC_LE.dat rename to resources/fluxes/NUMI/NUMI-v1.0/NUMI_FHC_LE.dat diff --git a/resources/Fluxes/NUMI/NUMI-v1.0/NUMI_FHC_ME.dat b/resources/fluxes/NUMI/NUMI-v1.0/NUMI_FHC_ME.dat similarity index 100% rename from resources/Fluxes/NUMI/NUMI-v1.0/NUMI_FHC_ME.dat rename to resources/fluxes/NUMI/NUMI-v1.0/NUMI_FHC_ME.dat diff --git a/resources/Fluxes/NUMI/NUMI-v1.0/NUMI_FHC_ME_unofficial.dat b/resources/fluxes/NUMI/NUMI-v1.0/NUMI_FHC_ME_unofficial.dat similarity index 100% rename from resources/Fluxes/NUMI/NUMI-v1.0/NUMI_FHC_ME_unofficial.dat rename to resources/fluxes/NUMI/NUMI-v1.0/NUMI_FHC_ME_unofficial.dat diff --git a/resources/Fluxes/NUMI/NUMI-v1.0/NUMI_RHC_LE.dat b/resources/fluxes/NUMI/NUMI-v1.0/NUMI_RHC_LE.dat similarity index 100% rename from resources/Fluxes/NUMI/NUMI-v1.0/NUMI_RHC_LE.dat rename to resources/fluxes/NUMI/NUMI-v1.0/NUMI_RHC_LE.dat diff --git a/resources/Fluxes/NUMI/NUMI-v1.0/NUMI_RHC_ME.dat b/resources/fluxes/NUMI/NUMI-v1.0/NUMI_RHC_ME.dat similarity index 100% rename from resources/Fluxes/NUMI/NUMI-v1.0/NUMI_RHC_ME.dat rename to resources/fluxes/NUMI/NUMI-v1.0/NUMI_RHC_ME.dat diff --git 
a/resources/Fluxes/NUMI/NUMI-v1.0/NUMI_RHC_ME_unofficial.dat b/resources/fluxes/NUMI/NUMI-v1.0/NUMI_RHC_ME_unofficial.dat similarity index 100% rename from resources/Fluxes/NUMI/NUMI-v1.0/NUMI_RHC_ME_unofficial.dat rename to resources/fluxes/NUMI/NUMI-v1.0/NUMI_RHC_ME_unofficial.dat diff --git a/resources/Fluxes/NUMI/NUMI-v1.0/flux.py b/resources/fluxes/NUMI/NUMI-v1.0/flux.py similarity index 100% rename from resources/Fluxes/NUMI/NUMI-v1.0/flux.py rename to resources/fluxes/NUMI/NUMI-v1.0/flux.py diff --git a/resources/Processes/CSMSDISSplines/CSMSDISSplines-v1.0/dsdxdy_nu_CC_iso.fits b/resources/processes/CSMSDISSplines/CSMSDISSplines-v1.0/dsdxdy_nu_CC_iso.fits similarity index 100% rename from resources/Processes/CSMSDISSplines/CSMSDISSplines-v1.0/dsdxdy_nu_CC_iso.fits rename to resources/processes/CSMSDISSplines/CSMSDISSplines-v1.0/dsdxdy_nu_CC_iso.fits diff --git a/resources/Processes/CSMSDISSplines/CSMSDISSplines-v1.0/dsdxdy_nu_NC_iso.fits b/resources/processes/CSMSDISSplines/CSMSDISSplines-v1.0/dsdxdy_nu_NC_iso.fits similarity index 100% rename from resources/Processes/CSMSDISSplines/CSMSDISSplines-v1.0/dsdxdy_nu_NC_iso.fits rename to resources/processes/CSMSDISSplines/CSMSDISSplines-v1.0/dsdxdy_nu_NC_iso.fits diff --git a/resources/Processes/CSMSDISSplines/CSMSDISSplines-v1.0/dsdxdy_nubar_CC_iso.fits b/resources/processes/CSMSDISSplines/CSMSDISSplines-v1.0/dsdxdy_nubar_CC_iso.fits similarity index 100% rename from resources/Processes/CSMSDISSplines/CSMSDISSplines-v1.0/dsdxdy_nubar_CC_iso.fits rename to resources/processes/CSMSDISSplines/CSMSDISSplines-v1.0/dsdxdy_nubar_CC_iso.fits diff --git a/resources/Processes/CSMSDISSplines/CSMSDISSplines-v1.0/dsdxdy_nubar_NC_iso.fits b/resources/processes/CSMSDISSplines/CSMSDISSplines-v1.0/dsdxdy_nubar_NC_iso.fits similarity index 100% rename from resources/Processes/CSMSDISSplines/CSMSDISSplines-v1.0/dsdxdy_nubar_NC_iso.fits rename to resources/processes/CSMSDISSplines/CSMSDISSplines-v1.0/dsdxdy_nubar_NC_iso.fits diff 
--git a/resources/Processes/CSMSDISSplines/CSMSDISSplines-v1.0/processes.py b/resources/processes/CSMSDISSplines/CSMSDISSplines-v1.0/processes.py similarity index 100% rename from resources/Processes/CSMSDISSplines/CSMSDISSplines-v1.0/processes.py rename to resources/processes/CSMSDISSplines/CSMSDISSplines-v1.0/processes.py diff --git a/resources/Processes/CSMSDISSplines/CSMSDISSplines-v1.0/sigma_nu_CC_iso.fits b/resources/processes/CSMSDISSplines/CSMSDISSplines-v1.0/sigma_nu_CC_iso.fits similarity index 100% rename from resources/Processes/CSMSDISSplines/CSMSDISSplines-v1.0/sigma_nu_CC_iso.fits rename to resources/processes/CSMSDISSplines/CSMSDISSplines-v1.0/sigma_nu_CC_iso.fits diff --git a/resources/Processes/CSMSDISSplines/CSMSDISSplines-v1.0/sigma_nu_NC_iso.fits b/resources/processes/CSMSDISSplines/CSMSDISSplines-v1.0/sigma_nu_NC_iso.fits similarity index 100% rename from resources/Processes/CSMSDISSplines/CSMSDISSplines-v1.0/sigma_nu_NC_iso.fits rename to resources/processes/CSMSDISSplines/CSMSDISSplines-v1.0/sigma_nu_NC_iso.fits diff --git a/resources/Processes/CSMSDISSplines/CSMSDISSplines-v1.0/sigma_nubar_CC_iso.fits b/resources/processes/CSMSDISSplines/CSMSDISSplines-v1.0/sigma_nubar_CC_iso.fits similarity index 100% rename from resources/Processes/CSMSDISSplines/CSMSDISSplines-v1.0/sigma_nubar_CC_iso.fits rename to resources/processes/CSMSDISSplines/CSMSDISSplines-v1.0/sigma_nubar_CC_iso.fits diff --git a/resources/Processes/CSMSDISSplines/CSMSDISSplines-v1.0/sigma_nubar_NC_iso.fits b/resources/processes/CSMSDISSplines/CSMSDISSplines-v1.0/sigma_nubar_NC_iso.fits similarity index 100% rename from resources/Processes/CSMSDISSplines/CSMSDISSplines-v1.0/sigma_nubar_NC_iso.fits rename to resources/processes/CSMSDISSplines/CSMSDISSplines-v1.0/sigma_nubar_NC_iso.fits diff --git a/resources/Processes/DarkNewsTables/DarkNewsCrossSection.py b/resources/processes/DarkNewsTables/DarkNewsCrossSection.py similarity index 100% rename from 
resources/Processes/DarkNewsTables/DarkNewsCrossSection.py rename to resources/processes/DarkNewsTables/DarkNewsCrossSection.py diff --git a/resources/Processes/DarkNewsTables/DarkNewsDecay.py b/resources/processes/DarkNewsTables/DarkNewsDecay.py similarity index 100% rename from resources/Processes/DarkNewsTables/DarkNewsDecay.py rename to resources/processes/DarkNewsTables/DarkNewsDecay.py diff --git a/resources/Processes/DarkNewsTables/README.md b/resources/processes/DarkNewsTables/README.md similarity index 100% rename from resources/Processes/DarkNewsTables/README.md rename to resources/processes/DarkNewsTables/README.md diff --git a/resources/Processes/DarkNewsTables/logger.py b/resources/processes/DarkNewsTables/logger.py similarity index 100% rename from resources/Processes/DarkNewsTables/logger.py rename to resources/processes/DarkNewsTables/logger.py diff --git a/resources/Processes/DarkNewsTables/processes.py b/resources/processes/DarkNewsTables/processes.py similarity index 100% rename from resources/Processes/DarkNewsTables/processes.py rename to resources/processes/DarkNewsTables/processes.py From 7e5e5f179d20420317e313128b8a8b760d5c3473 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Sun, 1 Sep 2024 18:01:31 -0600 Subject: [PATCH 33/85] un-capitalize folders --- python/_util.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/python/_util.py b/python/_util.py index 131ef679f..7ab1cca90 100644 --- a/python/_util.py +++ b/python/_util.py @@ -662,17 +662,17 @@ def _get_model_path(model_name, prefix=None, suffix=None, is_file=True, must_exi def get_detector_model_file_path(model_name, must_exist=True): - return _get_model_path(model_name, prefix="Detectors/densities", suffix=".dat", is_file=True, must_exist=must_exist) + return _get_model_path(model_name, prefix="detectors/densities", suffix=".dat", is_file=True, must_exist=must_exist) def get_material_model_file_path(model_name, must_exist=True): - return 
_get_model_path(model_name, prefix="Detectors/materials", suffix=".dat", is_file=True, must_exist=must_exist) + return _get_model_path(model_name, prefix="detectors/materials", suffix=".dat", is_file=True, must_exist=must_exist) _resource_folder_by_name = { - "flux": "Fluxes", - "detector": "Detectors", - "processes": "Processes", + "flux": "fluxes", + "detector": "detectors", + "processes": "processes", } From 55d8dc66a8d055f7d70ac973230ce5a1246c5d42 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Wed, 11 Sep 2024 20:09:40 -0600 Subject: [PATCH 34/85] First pass at Injector wrapper --- python/Injector.py | 203 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 203 insertions(+) create mode 100644 python/Injector.py diff --git a/python/Injector.py b/python/Injector.py new file mode 100644 index 000000000..6b0bc233d --- /dev/null +++ b/python/Injector.py @@ -0,0 +1,203 @@ +from . import utilities as _utilities +from . import math as _math +from . import dataclasses as _dataclasses +from . import geometry as _geometry +from . import detector as _detector +from . import interactions as _interactions +from . import distributions as _distributions +from . 
import injection as _injection + +import collections +from functools import wraps + +from typing import Tuple, List, Dict, Optional, Union +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + import siren + +_Injector = _injection.Injector + +ParticleType = _dataclasses.Particle.ParticleType +CrossSection = _interactions.CrossSection +Decay = _interactions.Decay +Interaction = _interactions.Interaction +DetectorModel = _detector.DetectorModel +SIREN_random = _utilities.SIREN_random +PrimaryInjectionDistribution = _distributions.PrimaryInjectionDistribution +SecondaryInjectionDistribution = _distributions.SecondaryInjectionDistribution +SecondaryInjectionProcess = _injection.SecondaryInjectionProcess + +class Injector: + def __init__( + self, + number_of_events: int, + detector_model: "DetectorModel", + random: "SIREN_random", + primary_interactions: Dict["ParticleType", List[Union["CrossSection", "Decay", "Interaction"]]], + primary_injection_distributions: List["PrimaryInjectionDistribution"], + secondary_interactions: Optional[Dict["ParticleType", List[Union["CrossSection", "Decay", "Interaction"]]]] = None, + secondary_injection_distributions: Optional[Dict["ParticleType", List["SecondaryInjectionDistribution"]]] = None, + ): + self.number_of_events = number_of_events + + self.detector_model = detector_model + + if len(primary_interactions) != 1: + raise ValueError(f"len(primary_interactions) != 1") + + if (secondary_interactions is None) != (secondary_injection_distributions is None): + raise ValueError("Neither or both of secondary_interactions and secondary_injection_distributions must be provided") + + if secondary_interactions is None: + secondary_interactions = dict() + secondary_injection_distributions = dict() + + self.primary_interactions = primary_interactions + self.primary_injection_distributions = primary_injection_distributions + + primary_type, primary_interactions = list(primary_interactions.items())[0] + + self.primary_interaction_collection 
= _interactions.InteractionCollection( + primary_type, primary_interactions + ) + self.primary_process = _injection.PrimaryInjectionProcess( + primary_type, self.primary_interaction_collection + ) + for dist in primary_injection_distributions: + self.primary_process.AddPrimaryInjectionDistribution(dist) + + + self.secondary_interactions = secondary_interactions + self.secondary_injection_distributions = secondary_injection_distributions + + self.secondary_interaction_collections = [] + self.secondary_processes = [] + for secondary_type, secondary_interactions in secondary_interactions.items(): + secondary_distributions = self.secondary_injection_distributions[secondary_type] + secondary_process = SecondaryInjectionProcess(secondary_type, secondary_interactions) + for dist in secondary_distributions: + secondary_process.AddSecondaryInjectionDistribution(dist) + self.secondary_processes.append(secondary_process) + + self.injector = _injection.Injector( + self.number_of_events, + self.detector_model, + self.primary_process, + self.secondary_processes, + self.random, + ) + + # TODO define wrapper functions that modify the internal state of the python object + @wraps(_Injector.SetPrimaryProcess) + def SetPrimaryProcess(self, primary_process): + # Get the internals first + primary_injection_distributions = primary_process.GetPrimaryInjectionDistributions() + primary_interaction_collection = primary_process.GetInteractionCollection() + primary_interactions = list(primary_interaction_collection.GetCrossSections()) + list(primary_interaction_collection.GetDecays()) + + # Now we can overwite things + self.injector.SetPrimaryProcess(primary_process) + self.primary_process = primary_process + self.primary_injection_distributions = primary_injection_distributions + self.primary_interaction_collection = primary_interaction_collection + self.primary_interactions = {primary_process.primary_type: primary_interactions} + + @wraps(_Injector.SetStoppingCondition) + def 
SetStoppingCondition(self, stopping_condition): + self.stopping_condition = stopping_condition + self.injector.SetStoppingCondition(stopping_condition) + + @wraps(_Injector.AddSecondaryProcess) + def AddSecondaryProcess(self, secondary_process): + # Update internal state + secondary_type = secondary_process.secondary_type + secondary_distributions = secondary_process.GetSecondaryInjectionDistributions() + secondary_interaction_collection = secondary_process.GetInteractionCollection() + secondary_interactions = list(secondary_interaction_collection.GetCrossSections()) + list(secondary_interaction_collection.GetDecays()) + + # Update class attributes + self.secondary_processes.append(secondary_process) + if secondary_type not in self.secondary_interactions: + self.secondary_interactions[secondary_type] = [] + self.secondary_interactions[secondary_type].extend(secondary_interactions) + if secondary_type not in self.secondary_injection_distributions: + self.secondary_injection_distributions[secondary_type] = [] + self.secondary_injection_distributions[secondary_type].extend(secondary_distributions) + + # Update the underlying C++ object + self.injector.AddSecondaryProcess(secondary_process) + + @wraps(_Injector.GetPrimaryProcess) + def GetPrimaryProcess(self): + return self.primary_process + + @wraps(_Injector.GetSecondaryProcessMap) + def GetSecondaryProcesses(self): + return self.secondary_processes + + @wraps(_Injector.NewRecord) + def NewRecord(self): + return self.injector.NewRecord() + + @wraps(_Injector.SetRandom) + def SetRandom(self, random): + self.injector.SetRandom(random) + + @wraps(_Injector.GenerateEvent) + def GenerateEvent(self): + return self.injector.GenerateEvent() + + @wraps(_Injector.DensityVariables) + def DensityVariables(self): + return self.injector.DensityVariables() + + @wraps(_Injector.Name) + def Name(self): + return self.injector.Name() + + @wraps(_Injector.GetPrimaryInjectionDistributions) + def GetPrimaryInjectionDistributions(self): + 
return self.primary_injection_distributions + + @wraps(_Injector.GetDetectorModel) + def GetDetectorModel(self): + return self.detector_model + + @wraps(_Injector.GetInteractions) + def GetInteractions(self): + return self.injector.GetInteractions() + + @wraps(_Injector.InjectedEvents) + def InjectedEvents(self): + return self.injector.InjectedEvents() + + @wraps(_Injector.EventsToInject) + def EventsToInject(self): + return self.injector.EventsToInject() + + @wraps(_Injector.ResetInjectedEvents) + def ResetInjectedEvents(self): + self.injector.ResetInjectedEvents() + + @wraps(_Injector.SaveInjector) + def SaveInjector(self, filename): + self.injector.SaveInjector(filename) + + @wraps(_Injector.LoadInjector) + def LoadInjector(self, filename): + self.injector.LoadInjector(filename) + # Update Python object state after loading + self.primary_process = self.injector.GetPrimaryProcess() + self.secondary_processes = self.injector.GetSecondaryProcesses() + self.primary_injection_distributions = self.primary_process.GetPrimaryInjectionDistributions() + self.primary_interaction_collection = self.primary_process.GetInteractionCollection() + self.primary_interactions = {self.primary_process.primary_type: list(self.primary_interaction_collection.GetCrossSections()) + list(self.primary_interaction_collection.GetDecays())} + # Update secondary interactions and distributions + self.secondary_interactions = {} + self.secondary_injection_distributions = {} + for process in self.secondary_processes: + secondary_type = process.secondary_type + self.secondary_interactions[secondary_type] = list(process.GetInteractionCollection().GetCrossSections()) + list(process.GetInteractionCollection().GetDecays()) + self.secondary_injection_distributions[secondary_type] = process.GetSecondaryInjectionDistributions() + From e9443fb9ae40e3ea2b66cc1afeabc506895a27dd Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Wed, 11 Sep 2024 20:09:59 -0600 Subject: [PATCH 35/85] Remove the holder --- 
resources/processes/DarkNewsTables/processes.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/resources/processes/DarkNewsTables/processes.py b/resources/processes/DarkNewsTables/processes.py index 1a57ca933..d9e4b802e 100644 --- a/resources/processes/DarkNewsTables/processes.py +++ b/resources/processes/DarkNewsTables/processes.py @@ -497,9 +497,9 @@ def load_processes( secondary_processes[secondary_type].append(decay) - holder = Holder() - holder.primary_processes = primary_processes - holder.secondary_processes = secondary_processes + #holder = Holder() + #holder.primary_processes = primary_processes + #holder.secondary_processes = secondary_processes return dict(primary_processes), dict(secondary_processes) From e01cdca854e3c0510ba3e9f20171aaa91422046a Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Wed, 11 Sep 2024 20:10:24 -0600 Subject: [PATCH 36/85] particle type properties for the processes --- projects/injection/private/pybindings/injection.cxx | 3 +++ 1 file changed, 3 insertions(+) diff --git a/projects/injection/private/pybindings/injection.cxx b/projects/injection/private/pybindings/injection.cxx index 044ccd06d..2dfc7bdf4 100644 --- a/projects/injection/private/pybindings/injection.cxx +++ b/projects/injection/private/pybindings/injection.cxx @@ -43,16 +43,19 @@ PYBIND11_MODULE(injection,m) { class_, Process>(m, "PhysicalProcess") .def(init<>()) + .def_property("primary_type", &Process::GetPrimaryType, &Process::SetPrimaryType) .def("AddPhysicalDistribution",&PhysicalProcess::AddPhysicalDistribution) .def("GetPhysicalDistributions",&PhysicalProcess::GetPhysicalDistributions); class_, Process>(m, "PrimaryInjectionProcess") .def(init<>()) + .def_property("primary_type", &Process::GetPrimaryType, &Process::SetPrimaryType) .def("AddPrimaryInjectionDistribution",&PrimaryInjectionProcess::AddPrimaryInjectionDistribution) .def("GetPrimaryInjectionDistributions",&PrimaryInjectionProcess::GetPrimaryInjectionDistributions); 
class_, Process>(m, "SecondaryInjectionProcess") .def(init<>()) + .def_property("secondary_type", &Process::GetSecondaryType, &Process::SetSecondaryType) .def("AddSecondaryInjectionDistribution",&SecondaryInjectionProcess::AddSecondaryInjectionDistribution) .def("GetSecondaryInjectionDistributions",&SecondaryInjectionProcess::GetSecondaryInjectionDistributions); From 6e8a5b1c5206953f9e7c6a02ee892dcba770cefd Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Wed, 11 Sep 2024 20:10:56 -0600 Subject: [PATCH 37/85] New access methods for process subclasses --- projects/injection/private/Process.cxx | 18 ++++++++++++++++++ .../injection/public/SIREN/injection/Process.h | 4 ++++ 2 files changed, 22 insertions(+) diff --git a/projects/injection/private/Process.cxx b/projects/injection/private/Process.cxx index 524161c47..93c9228a7 100644 --- a/projects/injection/private/Process.cxx +++ b/projects/injection/private/Process.cxx @@ -90,6 +90,16 @@ std::vector> const & Phys return physical_distributions; } +void SetPhysicalDistributions(std::vector> const & distributions) { + for(std::vector>::const_iterator it_1 = distributions.begin(); it_1 != distributions.end(); ++it_1) { + for(std::vector>::const_iterator it_2 = it_1 + 1; it_2 != distributions.end(); ++it_2) { + if((*it_1) == (*it_2)) + throw std::runtime_error("Cannot add duplicate WeightableDistributions"); + } + } + physical_distributions = distributions; +} + PrimaryInjectionProcess::PrimaryInjectionProcess(siren::dataclasses::ParticleType _primary_type, std::shared_ptr _interactions) : PhysicalProcess(_primary_type, _interactions) {}; PrimaryInjectionProcess::PrimaryInjectionProcess(PrimaryInjectionProcess const & other) : PhysicalProcess(other), primary_injection_distributions(other.primary_injection_distributions) {}; @@ -144,6 +154,14 @@ SecondaryInjectionProcess & SecondaryInjectionProcess::operator=(SecondaryInject return *this; }; +void 
SecondaryInjectionProcess::SetSecondaryType(siren::dataclasses::ParticleType _primary_type) { + primary_type = _primary_type; +} + +siren::dataclasses::ParticleType SecondaryInjectionProcess::GetSecondaryType() const { + return primary_type; +} + void SecondaryInjectionProcess::AddPhysicalDistribution(std::shared_ptr dist) { throw std::runtime_error("Cannot add a physical distribution to an SecondaryInjectionProcess"); } diff --git a/projects/injection/public/SIREN/injection/Process.h b/projects/injection/public/SIREN/injection/Process.h index f6c429eaf..5510f0d28 100644 --- a/projects/injection/public/SIREN/injection/Process.h +++ b/projects/injection/public/SIREN/injection/Process.h @@ -68,6 +68,7 @@ class PhysicalProcess : public Process { virtual ~PhysicalProcess() = default; virtual void AddPhysicalDistribution(std::shared_ptr dist); std::vector> const & GetPhysicalDistributions() const; + virtual void SetPhysicalDistributions(std::vector> const & distributions); template void serialize(Archive & archive, std::uint32_t const version) { if(version == 0) { @@ -116,6 +117,9 @@ class SecondaryInjectionProcess : public PhysicalProcess { SecondaryInjectionProcess(SecondaryInjectionProcess && other); SecondaryInjectionProcess & operator=(SecondaryInjectionProcess const & other); SecondaryInjectionProcess & operator=(SecondaryInjectionProcess && other); + void SetSecondaryType(siren::dataclasses::ParticleType _primary_type); + siren::dataclasses::ParticleType GetSecondaryType() const; + virtual ~SecondaryInjectionProcess() = default; virtual void AddPhysicalDistribution(std::shared_ptr dist) override; virtual void AddSecondaryInjectionDistribution(std::shared_ptr dist); From 23c169a6194420ae5a2d87c0a1904a8711cad902 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Wed, 11 Sep 2024 20:47:41 -0600 Subject: [PATCH 38/85] Add more accessor method for processes --- projects/injection/private/Injector.cxx | 4 +++ projects/injection/private/Process.cxx | 34 
+++++++++++++++++-- .../private/pybindings/injection.cxx | 17 ++++++---- .../public/SIREN/injection/Injector.h | 1 + .../public/SIREN/injection/Process.h | 2 ++ 5 files changed, 48 insertions(+), 10 deletions(-) diff --git a/projects/injection/private/Injector.cxx b/projects/injection/private/Injector.cxx index 877a729e6..e7f15b656 100644 --- a/projects/injection/private/Injector.cxx +++ b/projects/injection/private/Injector.cxx @@ -461,6 +461,10 @@ std::shared_ptr Injector::GetDetectorModel() con return detector_model; } +void Injector::SetDetectorModel(std::shared_ptr detector_model) { + this->detector_model = detector_model; +} + std::shared_ptr Injector::GetInteractions() const { return primary_process->GetInteractions(); } diff --git a/projects/injection/private/Process.cxx b/projects/injection/private/Process.cxx index 93c9228a7..0c467df92 100644 --- a/projects/injection/private/Process.cxx +++ b/projects/injection/private/Process.cxx @@ -90,7 +90,7 @@ std::vector> const & Phys return physical_distributions; } -void SetPhysicalDistributions(std::vector> const & distributions) { +void PhysicalProcess::SetPhysicalDistributions(std::vector> const & distributions) { for(std::vector>::const_iterator it_1 = distributions.begin(); it_1 != distributions.end(); ++it_1) { for(std::vector>::const_iterator it_2 = it_1 + 1; it_2 != distributions.end(); ++it_2) { if((*it_1) == (*it_2)) @@ -130,6 +130,20 @@ void PrimaryInjectionProcess::AddPrimaryInjectionDistribution(std::shared_ptr(dist)); } +void PrimaryInjectionProcess::SetPrimaryInjectionDistributions(std::vector> const & distributions) { + for(std::vector>::const_iterator it_1 = distributions.begin(); it_1 != distributions.end(); ++it_1) { + for(std::vector>::const_iterator it_2 = it_1 + 1; it_2 != distributions.end(); ++it_2) { + if((*it_1) == (*it_2)) + throw std::runtime_error("Cannot add duplicate PrimaryInjectionDistributions"); + } + } + primary_injection_distributions = distributions; + 
physical_distributions.clear(); + for(auto dist: primary_injection_distributions) { + physical_distributions.push_back(std::static_pointer_cast(dist)); + } +} + std::vector> const & PrimaryInjectionProcess::GetPrimaryInjectionDistributions() const { return primary_injection_distributions; } @@ -155,11 +169,11 @@ SecondaryInjectionProcess & SecondaryInjectionProcess::operator=(SecondaryInject }; void SecondaryInjectionProcess::SetSecondaryType(siren::dataclasses::ParticleType _primary_type) { - primary_type = _primary_type; + SetPrimaryType(_primary_type); } siren::dataclasses::ParticleType SecondaryInjectionProcess::GetSecondaryType() const { - return primary_type; + return GetPrimaryType(); } void SecondaryInjectionProcess::AddPhysicalDistribution(std::shared_ptr dist) { @@ -175,6 +189,20 @@ void SecondaryInjectionProcess::AddSecondaryInjectionDistribution(std::shared_pt secondary_injection_distributions.push_back(dist); } +void SecondaryInjectionProcess::SetSecondaryInjectionDistributions(std::vector> const & distributions) { + for(std::vector>::const_iterator it_1 = distributions.begin(); it_1 != distributions.end(); ++it_1) { + for(std::vector>::const_iterator it_2 = it_1 + 1; it_2 != distributions.end(); ++it_2) { + if((*it_1) == (*it_2)) + throw std::runtime_error("Cannot add duplicate SecondaryInjectionDistributions"); + } + } + secondary_injection_distributions = distributions; + physical_distributions.clear(); + for(auto dist: secondary_injection_distributions) { + physical_distributions.push_back(std::static_pointer_cast(dist)); + } +} + std::vector> const & SecondaryInjectionProcess::GetSecondaryInjectionDistributions() const { return secondary_injection_distributions; } diff --git a/projects/injection/private/pybindings/injection.cxx b/projects/injection/private/pybindings/injection.cxx index 2dfc7bdf4..494f9f884 100644 --- a/projects/injection/private/pybindings/injection.cxx +++ b/projects/injection/private/pybindings/injection.cxx @@ -44,20 +44,23 @@ 
PYBIND11_MODULE(injection,m) { class_, Process>(m, "PhysicalProcess") .def(init<>()) .def_property("primary_type", &Process::GetPrimaryType, &Process::SetPrimaryType) - .def("AddPhysicalDistribution",&PhysicalProcess::AddPhysicalDistribution) - .def("GetPhysicalDistributions",&PhysicalProcess::GetPhysicalDistributions); + .def_property("interactions", &Process::GetInteractions, &Process::SetInteractions) + .def_property("distributions", &PhysicalProcess::GetPhysicalDistributions, &PhysicalProcess::SetPhysicalDistributions) + .def("AddPhysicalDistribution",&PhysicalProcess::AddPhysicalDistribution); class_, Process>(m, "PrimaryInjectionProcess") .def(init<>()) .def_property("primary_type", &Process::GetPrimaryType, &Process::SetPrimaryType) - .def("AddPrimaryInjectionDistribution",&PrimaryInjectionProcess::AddPrimaryInjectionDistribution) - .def("GetPrimaryInjectionDistributions",&PrimaryInjectionProcess::GetPrimaryInjectionDistributions); + .def_property("interactions", &Process::GetInteractions, &Process::SetInteractions) + .def_property("distributions", &PrimaryInjectionProcess::GetPrimaryInjectionDistributions, &PrimaryInjectionProcess::SetPrimaryInjectionDistributions) + .def("AddPrimaryInjectionDistribution",&PrimaryInjectionProcess::AddPrimaryInjectionDistribution); class_, Process>(m, "SecondaryInjectionProcess") .def(init<>()) - .def_property("secondary_type", &Process::GetSecondaryType, &Process::SetSecondaryType) - .def("AddSecondaryInjectionDistribution",&SecondaryInjectionProcess::AddSecondaryInjectionDistribution) - .def("GetSecondaryInjectionDistributions",&SecondaryInjectionProcess::GetSecondaryInjectionDistributions); + .def_property("secondary_type", &SecondaryInjectionProcess::GetSecondaryType, &SecondaryInjectionProcess::SetSecondaryType) + .def_property("interactions", &Process::GetInteractions, &Process::SetInteractions) + .def_property("distributions", &SecondaryInjectionProcess::GetSecondaryInjectionDistributions, 
&SecondaryInjectionProcess::SetSecondaryInjectionDistributions) + .def("AddSecondaryInjectionDistribution",&SecondaryInjectionProcess::AddSecondaryInjectionDistribution); // Injection diff --git a/projects/injection/public/SIREN/injection/Injector.h b/projects/injection/public/SIREN/injection/Injector.h index c7249eb9d..c776c9971 100644 --- a/projects/injection/public/SIREN/injection/Injector.h +++ b/projects/injection/public/SIREN/injection/Injector.h @@ -103,6 +103,7 @@ friend cereal::access; virtual std::tuple SecondaryInjectionBounds(siren::dataclasses::InteractionRecord const & interaction) const; virtual std::vector> GetPrimaryInjectionDistributions() const; virtual std::shared_ptr GetDetectorModel() const; + virtual void SetDetectorModel(std::shared_ptr detector_model); virtual std::shared_ptr GetInteractions() const; unsigned int InjectedEvents() const; unsigned int EventsToInject() const; diff --git a/projects/injection/public/SIREN/injection/Process.h b/projects/injection/public/SIREN/injection/Process.h index 5510f0d28..c3d926400 100644 --- a/projects/injection/public/SIREN/injection/Process.h +++ b/projects/injection/public/SIREN/injection/Process.h @@ -94,6 +94,7 @@ class PrimaryInjectionProcess : public PhysicalProcess { virtual ~PrimaryInjectionProcess() = default; virtual void AddPhysicalDistribution(std::shared_ptr dist) override; virtual void AddPrimaryInjectionDistribution(std::shared_ptr dist); + void SetPrimaryInjectionDistributions(std::vector> const & distributions); std::vector> const & GetPrimaryInjectionDistributions() const; template void serialize(Archive & archive, std::uint32_t const version) { @@ -123,6 +124,7 @@ class SecondaryInjectionProcess : public PhysicalProcess { virtual ~SecondaryInjectionProcess() = default; virtual void AddPhysicalDistribution(std::shared_ptr dist) override; virtual void AddSecondaryInjectionDistribution(std::shared_ptr dist); + void SetSecondaryInjectionDistributions(std::vector> const & distributions); 
std::vector> const & GetSecondaryInjectionDistributions() const; template void serialize(Archive & archive, std::uint32_t const version) { From c97cd103c4ac97ef344b1ff98cf474e831cf7fdb Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Wed, 11 Sep 2024 22:21:46 -0600 Subject: [PATCH 39/85] Additional setters and getters --- projects/injection/private/Injector.cxx | 10 ++++++++++ projects/injection/private/pybindings/injection.cxx | 2 ++ projects/injection/public/SIREN/injection/Injector.h | 2 ++ 3 files changed, 14 insertions(+) diff --git a/projects/injection/private/Injector.cxx b/projects/injection/private/Injector.cxx index e7f15b656..d6e8ff059 100644 --- a/projects/injection/private/Injector.cxx +++ b/projects/injection/private/Injector.cxx @@ -135,6 +135,16 @@ void Injector::AddSecondaryProcess(std::shared_ptrGetPrimaryType(), vtx_dist}); } +void Injector::SetSecondaryProcesses(std::vector> secondaries) { + secondary_processes.clear(); + secondary_position_distributions.clear(); + secondary_process_map.clear(); + secondary_position_distribution_map.clear(); + for(auto secondary : secondaries) { + AddSecondaryProcess(secondary); + } +} + siren::dataclasses::InteractionRecord Injector::NewRecord() const { siren::dataclasses::InteractionRecord record; record.signature.primary_type = primary_process->GetPrimaryType(); diff --git a/projects/injection/private/pybindings/injection.cxx b/projects/injection/private/pybindings/injection.cxx index 494f9f884..97c3bc1c8 100644 --- a/projects/injection/private/pybindings/injection.cxx +++ b/projects/injection/private/pybindings/injection.cxx @@ -71,6 +71,7 @@ PYBIND11_MODULE(injection,m) { .def(init, std::shared_ptr, std::shared_ptr>()) .def(init, std::shared_ptr, std::vector>, std::shared_ptr>()) .def("SetStoppingCondition",&Injector::SetStoppingCondition) + .def("GetStoppingCondition",&Injector::GetStoppingCondition) .def("SetPrimaryProcess",&Injector::SetPrimaryProcess) 
.def("AddSecondaryProcess",&Injector::AddSecondaryProcess) .def("GetPrimaryProcess",&Injector::GetPrimaryProcess) @@ -83,6 +84,7 @@ PYBIND11_MODULE(injection,m) { .def("Name",&Injector::Name) .def("GetPrimaryInjectionDistributions",&Injector::GetPrimaryInjectionDistributions) .def("GetDetectorModel",&Injector::GetDetectorModel) + .def("SetDetectorModel",&Injector::SetDetectorModel) .def("GetInteractions",&Injector::GetInteractions) .def("InjectedEvents",&Injector::InjectedEvents) .def("EventsToInject",&Injector::EventsToInject) diff --git a/projects/injection/public/SIREN/injection/Injector.h b/projects/injection/public/SIREN/injection/Injector.h index c776c9971..bd82e28bf 100644 --- a/projects/injection/public/SIREN/injection/Injector.h +++ b/projects/injection/public/SIREN/injection/Injector.h @@ -78,6 +78,7 @@ friend cereal::access; Injector(unsigned int events_to_inject, std::shared_ptr detector_model, std::shared_ptr primary_process, std::vector> secondary_processes, std::shared_ptr random); void SetStoppingCondition(std::function, size_t)> f_in) {stopping_condition = f_in;} + std::function, size_t)> GetStoppingCondition() {return stopping_condition;} std::shared_ptr FindPrimaryVertexDistribution(std::shared_ptr process); std::shared_ptr FindSecondaryVertexDistribution(std::shared_ptr process); void SetPrimaryProcess(std::shared_ptr primary); @@ -85,6 +86,7 @@ friend cereal::access; std::vector> GetSecondaryProcesses() {return secondary_processes;} std::map> GetSecondaryProcessMap() {return secondary_process_map;} void AddSecondaryProcess(std::shared_ptr secondary); + void SetSecondaryProcesses(std::vector> secondary_processes); virtual siren::dataclasses::InteractionRecord NewRecord() const; // set primary type from primary process; void SetRandom(std::shared_ptr random); virtual void SampleCrossSection(siren::dataclasses::InteractionRecord & record) const; From b4e7f6f5b4b413db5d1eeff9b36da8a2fbd97d17 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: 
Wed, 11 Sep 2024 22:22:30 -0600 Subject: [PATCH 40/85] Set primary type when setting interactions --- projects/injection/private/Process.cxx | 3 +++ 1 file changed, 3 insertions(+) diff --git a/projects/injection/private/Process.cxx b/projects/injection/private/Process.cxx index 0c467df92..0543f7aa0 100644 --- a/projects/injection/private/Process.cxx +++ b/projects/injection/private/Process.cxx @@ -25,6 +25,7 @@ Process & Process::operator=(Process && other) { void Process::SetInteractions(std::shared_ptr _interactions) { interactions = _interactions; + primary_type = interactions->GetPrimaryType(); } std::shared_ptr Process::GetInteractions() const { @@ -33,6 +34,8 @@ std::shared_ptr Process::GetInteractions() void Process::SetPrimaryType(siren::dataclasses::ParticleType _primary_type) { primary_type = _primary_type; + if(interactions) + interactions->SetPrimaryType(_primary_type); } siren::dataclasses::ParticleType Process::GetPrimaryType() const { From 488548607e1db100b1f34ae384ac821c892723b9 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Wed, 11 Sep 2024 22:22:56 -0600 Subject: [PATCH 41/85] Additional setters and getters --- projects/interactions/private/InteractionCollection.cxx | 8 ++++++++ .../private/pybindings/InteractionCollection.h | 2 ++ .../public/SIREN/interactions/InteractionCollection.h | 2 ++ 3 files changed, 12 insertions(+) diff --git a/projects/interactions/private/InteractionCollection.cxx b/projects/interactions/private/InteractionCollection.cxx index 9a3464a73..af6f0e411 100644 --- a/projects/interactions/private/InteractionCollection.cxx +++ b/projects/interactions/private/InteractionCollection.cxx @@ -116,6 +116,14 @@ bool InteractionCollection::MatchesPrimary(dataclasses::InteractionRecord const return primary_type == record.signature.primary_type; } +siren::dataclasses::ParticleType InteractionCollection::GetPrimaryType() const { + return primary_type; +} + +void 
InteractionCollection::SetPrimaryType(siren::dataclasses::ParticleType primary_type) { + this->primary_type = primary_type; +} + std::map InteractionCollection::TotalCrossSectionByTarget(siren::dataclasses::InteractionRecord const & record) const { std::map result; for(siren::dataclasses::ParticleType target : target_types) { diff --git a/projects/interactions/private/pybindings/InteractionCollection.h b/projects/interactions/private/pybindings/InteractionCollection.h index 5d8314acc..1df424399 100644 --- a/projects/interactions/private/pybindings/InteractionCollection.h +++ b/projects/interactions/private/pybindings/InteractionCollection.h @@ -35,5 +35,7 @@ void register_InteractionCollection(pybind11::module_ & m) { .def("TotalDecayWidth",&InteractionCollection::TotalDecayWidth) .def("TotalDecayLength",&InteractionCollection::TotalDecayLength) .def("MatchesPrimary",&InteractionCollection::MatchesPrimary) + .def("GetPrimaryType",&InteractionCollection::GetPrimaryType) + .def("SetPrimaryType",&InteractionCollection::SetPrimaryType) ; } diff --git a/projects/interactions/public/SIREN/interactions/InteractionCollection.h b/projects/interactions/public/SIREN/interactions/InteractionCollection.h index 243b41142..ecab54659 100644 --- a/projects/interactions/public/SIREN/interactions/InteractionCollection.h +++ b/projects/interactions/public/SIREN/interactions/InteractionCollection.h @@ -63,6 +63,8 @@ class InteractionCollection { }; double TotalDecayWidth(siren::dataclasses::InteractionRecord const & record) const; double TotalDecayLength(siren::dataclasses::InteractionRecord const & record) const; + siren::dataclasses::ParticleType GetPrimaryType() const; + void SetPrimaryType(siren::dataclasses::ParticleType primary_type); virtual bool MatchesPrimary(dataclasses::InteractionRecord const & record) const; std::map TotalCrossSectionByTarget(siren::dataclasses::InteractionRecord const & record) const; std::map 
TotalCrossSectionByTargetAllFinalStates(siren::dataclasses::InteractionRecord const & record) const; From 3b62df20b247f6261f91118345052c9619da263d Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Wed, 11 Sep 2024 22:24:13 -0600 Subject: [PATCH 42/85] Produce a random seed by default. Getter for seed --- projects/utilities/private/Random.cxx | 31 +++++++-- .../private/pybindings/utilities.cxx | 4 +- .../utilities/public/SIREN/utilities/Random.h | 64 ++++++++++--------- 3 files changed, 64 insertions(+), 35 deletions(-) diff --git a/projects/utilities/private/Random.cxx b/projects/utilities/private/Random.cxx index 78a2e0f2f..214d2d12a 100644 --- a/projects/utilities/private/Random.cxx +++ b/projects/utilities/private/Random.cxx @@ -1,19 +1,27 @@ #include "SIREN/utilities/Random.h" +#include +#include #include +#include // for uint32_t #include +#include + +namespace { + std::mutex global_seed_lock; +} namespace siren { namespace utilities { - SIREN_random::SIREN_random(void){ + SIREN_random::SIREN_random() { // default to boring seed - seed = 1; + seed = generate_seed(); configuration = std::default_random_engine(seed); generator = std::uniform_real_distribution( 0.0, 1.0); } - SIREN_random::SIREN_random( unsigned int _seed ){ + SIREN_random::SIREN_random(uint64_t _seed) { seed = _seed; configuration = std::default_random_engine(seed); generator = std::uniform_real_distribution( 0.0, 1.0); @@ -40,10 +48,25 @@ namespace utilities { } // reconfigures the generator with a new seed - void SIREN_random::set_seed( unsigned int new_seed) { + void SIREN_random::set_seed(uint64_t new_seed) { seed = new_seed; this->configuration = std::default_random_engine(seed); } + uint64_t SIREN_random::get_seed() const { + return seed; + } + + uint64_t SIREN_random::generate_seed() { + std::atomic_thread_fence(std::memory_order_acquire); + std::lock_guard lg(global_seed_lock); + std::hash string_hash; + std::stringstream s; + s << time(0) << getpid() << gethostid(); + 
std::atomic_thread_fence(std::memory_order_release); + uint64_t seed = string_hash(s.str()); + return seed; + } + } // namespace utilities } // namespace siren diff --git a/projects/utilities/private/pybindings/utilities.cxx b/projects/utilities/private/pybindings/utilities.cxx index 0feafd28f..c2581980c 100644 --- a/projects/utilities/private/pybindings/utilities.cxx +++ b/projects/utilities/private/pybindings/utilities.cxx @@ -16,5 +16,7 @@ PYBIND11_MODULE(utilities,m) { .def(init<>()) .def(init()) .def("Uniform",&SIREN_random::Uniform) - .def("set_seed",&SIREN_random::set_seed); + .def("set_seed",&SIREN_random::set_seed) + .def("get_seed",&SIREN_random::get_seed) + .def_static("generate_seed",&SIREN_random::generate_seed); } diff --git a/projects/utilities/public/SIREN/utilities/Random.h b/projects/utilities/public/SIREN/utilities/Random.h index f6ae1e9db..599e23146 100644 --- a/projects/utilities/public/SIREN/utilities/Random.h +++ b/projects/utilities/public/SIREN/utilities/Random.h @@ -8,6 +8,7 @@ // this implements a class to sample numbers just like in an i3 service #include // default_random_engine, uniform_real_distribution +#include // for uint32_t #include #include @@ -22,43 +23,46 @@ namespace siren { namespace utilities { - class SIREN_random{ - public: - SIREN_random(); - SIREN_random( unsigned int _seed ); +class SIREN_random{ +public: + SIREN_random(); + SIREN_random(uint64_t _seed); - // this naming convention is used to - double Uniform( double from=0.0, double to=1.0); - double PowerLaw(double min, double max, double n); + // this naming convention is used to + double Uniform( double from=0.0, double to=1.0); + double PowerLaw(double min, double max, double n); - // in case this is set up without a seed! - void set_seed(unsigned int new_seed); + // in case this is set up without a seed! 
+ void set_seed(uint64_t new_seed); + uint64_t get_seed() const; - template - void save(Archive & archive, std::uint32_t const version) const { - if(version == 0) { - archive(::cereal::make_nvp("Seed", seed)); - } else { - throw std::runtime_error("SIREN_random only supports version <= 0!"); - } - }; + static uint64_t generate_seed(); - template - void load(Archive & archive, std::uint32_t const version) { - if(version == 0) { - archive(::cereal::make_nvp("Seed", seed)); - set_seed(seed); - } else { - throw std::runtime_error("SIREN_random only supports version <= 0!"); - } - }; + template + void save(Archive & archive, std::uint32_t const version) const { + if(version == 0) { + archive(::cereal::make_nvp("Seed", seed)); + } else { + throw std::runtime_error("SIREN_random only supports version <= 0!"); + } + }; - private: - unsigned int seed; - std::default_random_engine configuration; - std::uniform_real_distribution generator; + template + void load(Archive & archive, std::uint32_t const version) { + if(version == 0) { + archive(::cereal::make_nvp("Seed", seed)); + set_seed(seed); + } else { + throw std::runtime_error("SIREN_random only supports version <= 0!"); + } }; +private: + uint64_t seed; + std::default_random_engine configuration; + std::uniform_real_distribution generator; +}; + } // namespace utilities } // namespace siren From 61dabf328b60532c13efc57816b6893d492cb4f5 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Wed, 11 Sep 2024 22:26:14 -0600 Subject: [PATCH 43/85] Rework Injector wrapper with @property --- python/Injector.py | 361 +++++++++++++++++++++++++++++---------------- 1 file changed, 232 insertions(+), 129 deletions(-) diff --git a/python/Injector.py b/python/Injector.py index 6b0bc233d..2b69ccb1a 100644 --- a/python/Injector.py +++ b/python/Injector.py @@ -31,56 +31,40 @@ class Injector: def __init__( self, - number_of_events: int, - detector_model: "DetectorModel", - random: "SIREN_random", + number_of_events: Optional[int] = None, 
+ detector_model: Optional["DetectorModel"] = None, + seed: Optional[int] = None, primary_interactions: Dict["ParticleType", List[Union["CrossSection", "Decay", "Interaction"]]], primary_injection_distributions: List["PrimaryInjectionDistribution"], secondary_interactions: Optional[Dict["ParticleType", List[Union["CrossSection", "Decay", "Interaction"]]]] = None, secondary_injection_distributions: Optional[Dict["ParticleType", List["SecondaryInjectionDistribution"]]] = None, ): - self.number_of_events = number_of_events + self.__seed = None + self.__number_of_events = 0 + self.__detector_model = None - self.detector_model = detector_model + self.__primary_type = None + self.__primary_interactions = [] + self.__primary_injection_distributions = [] + + self.__secondary_interactions = [] + self.__secondary_injection_distributions = [] + self.__stopping_condition = None + + self.__injector = None if len(primary_interactions) != 1: raise ValueError(f"len(primary_interactions) != 1") if (secondary_interactions is None) != (secondary_injection_distributions is None): - raise ValueError("Neither or both of secondary_interactions and secondary_injection_distributions must be provided") + raise ValueError("Both or neither secondary_interactions and secondary_injection_distributions must be provided") if secondary_interactions is None: secondary_interactions = dict() secondary_injection_distributions = dict() - self.primary_interactions = primary_interactions - self.primary_injection_distributions = primary_injection_distributions - - primary_type, primary_interactions = list(primary_interactions.items())[0] - - self.primary_interaction_collection = _interactions.InteractionCollection( - primary_type, primary_interactions - ) - self.primary_process = _injection.PrimaryInjectionProcess( - primary_type, self.primary_interaction_collection - ) - for dist in primary_injection_distributions: - self.primary_process.AddPrimaryInjectionDistribution(dist) - - 
self.secondary_interactions = secondary_interactions - self.secondary_injection_distributions = secondary_injection_distributions - - self.secondary_interaction_collections = [] - self.secondary_processes = [] - for secondary_type, secondary_interactions in secondary_interactions.items(): - secondary_distributions = self.secondary_injection_distributions[secondary_type] - secondary_process = SecondaryInjectionProcess(secondary_type, secondary_interactions) - for dist in secondary_distributions: - secondary_process.AddSecondaryInjectionDistribution(dist) - self.secondary_processes.append(secondary_process) - - self.injector = _injection.Injector( + self.__injector = _injection.Injector( self.number_of_events, self.detector_model, self.primary_process, @@ -88,116 +72,235 @@ def __init__( self.random, ) - # TODO define wrapper functions that modify the internal state of the python object - @wraps(_Injector.SetPrimaryProcess) - def SetPrimaryProcess(self, primary_process): - # Get the internals first - primary_injection_distributions = primary_process.GetPrimaryInjectionDistributions() - primary_interaction_collection = primary_process.GetInteractionCollection() - primary_interactions = list(primary_interaction_collection.GetCrossSections()) + list(primary_interaction_collection.GetDecays()) - - # Now we can overwite things - self.injector.SetPrimaryProcess(primary_process) - self.primary_process = primary_process - self.primary_injection_distributions = primary_injection_distributions - self.primary_interaction_collection = primary_interaction_collection - self.primary_interactions = {primary_process.primary_type: primary_interactions} - - @wraps(_Injector.SetStoppingCondition) - def SetStoppingCondition(self, stopping_condition): - self.stopping_condition = stopping_condition - self.injector.SetStoppingCondition(stopping_condition) - - @wraps(_Injector.AddSecondaryProcess) - def AddSecondaryProcess(self, secondary_process): - # Update internal state - secondary_type 
= secondary_process.secondary_type - secondary_distributions = secondary_process.GetSecondaryInjectionDistributions() - secondary_interaction_collection = secondary_process.GetInteractionCollection() - secondary_interactions = list(secondary_interaction_collection.GetCrossSections()) + list(secondary_interaction_collection.GetDecays()) - - # Update class attributes - self.secondary_processes.append(secondary_process) - if secondary_type not in self.secondary_interactions: - self.secondary_interactions[secondary_type] = [] - self.secondary_interactions[secondary_type].extend(secondary_interactions) - if secondary_type not in self.secondary_injection_distributions: - self.secondary_injection_distributions[secondary_type] = [] - self.secondary_injection_distributions[secondary_type].extend(secondary_distributions) - - # Update the underlying C++ object - self.injector.AddSecondaryProcess(secondary_process) - - @wraps(_Injector.GetPrimaryProcess) - def GetPrimaryProcess(self): - return self.primary_process - - @wraps(_Injector.GetSecondaryProcessMap) - def GetSecondaryProcesses(self): - return self.secondary_processes + def __initialize_injector(self): + if self.__seed is None: + random = _utilities.SIREN_random() + self.__seed = random.get_seed() + else: + random = _utilities.SIREN_random(self.__seed) - @wraps(_Injector.NewRecord) - def NewRecord(self): - return self.injector.NewRecord() + if self.__number_of_events is None: + raise ValueError("number_of_events must be provided") + elif self.__number_of_events <= 0: + raise ValueError("number_of_events must be positive") - @wraps(_Injector.SetRandom) - def SetRandom(self, random): - self.injector.SetRandom(random) + if self.__detector_model is None: + raise ValueError("detector_model must be provided") - @wraps(_Injector.GenerateEvent) - def GenerateEvent(self): - return self.injector.GenerateEvent() + if self.__primary_type is None: + raise ValueError("primary_type must be provided") - 
@wraps(_Injector.DensityVariables) - def DensityVariables(self): - return self.injector.DensityVariables() + if len(self.__primary_interactions) == 0: + raise ValueError("primary_interactions must be provided") - @wraps(_Injector.Name) - def Name(self): - return self.injector.Name() - - @wraps(_Injector.GetPrimaryInjectionDistributions) - def GetPrimaryInjectionDistributions(self): - return self.primary_injection_distributions + if len(self.__primary_injection_distributions) == 0: + raise ValueError("primary_injection_distributions must be provided") - @wraps(_Injector.GetDetectorModel) - def GetDetectorModel(self): - return self.detector_model + if len(self.__secondary_interactions) == 0: + raise ValueError("secondary_interactions must be provided") - @wraps(_Injector.GetInteractions) - def GetInteractions(self): - return self.injector.GetInteractions() + if len(self.__secondary_injection_distributions) == 0: + raise ValueError("secondary_injection_distributions must be provided") - @wraps(_Injector.InjectedEvents) - def InjectedEvents(self): - return self.injector.InjectedEvents() + if list(sorted(self.__secondary_interactions.keys())) != list(sorted(self.__secondary_injection_distributions.keys())): + raise ValueError("secondary_interactions and secondary_injection_distributions must have the same keys") - @wraps(_Injector.EventsToInject) - def EventsToInject(self): - return self.injector.EventsToInject() + primary_type = self.primary_type + primary_interaction_collection = _interactions.InteractionCollection( + primary_type, self.primary_interactions + ) + primary_process = _injection.PrimaryInjectionProcess( + primary_type, primary_interaction_collection + ) + primary_process.distributions = self.primary_injection_distributions + + secondary_interactions = self.secondary_interactions + secondary_injection_distributions = self.secondary_injection_distributions + + secondary_interaction_collections = [] + secondary_processes = [] + for secondary_type, 
secondary_interactions in secondary_interactions.items(): + secondary_interaction_collection = _interactions.InteractionCollection( + secondary_type, secondary_interactions + ) + secondary_process = SecondaryInjectionProcess( + secondary_type, secondary_interaction_collection + ) + secondary_process.distributions = secondary_injection_distributions[secondary_type] + secondary_processes.append(secondary_process) + + self.__injector = _Injector( + self.number_of_events, + self.detector_model, + primary_process, + secondary_processes, + random, + ) + + if self.__stopping_condition is not None: + self.__injector.SetStoppingCondition(self.__stopping_condition) + + @property + def seed(self): + return self.__seed + + @property.setter + def seed(self, seed): + self.__seed = seed + if self.__injector is not None: + self.__injector.GetRandom().set_seed(seed) + + @property + def number_of_events(self): + if self.__injector is not None: + return self.__injector.EventsToInject() + return self.__number_of_events + + @property + def detector_model(self): + if self.__injector is not None: + return self.__injector.GetDetectorModel() + return self.__detector_model + + @property.setter + def detector_model(self, detector_model): + if self.__injector is not None: + self.__injector.SetDetectorModel(detector_model) + self.__detector_model = detector_model + + @property + def primary_type(self): + return self.__primary_type + + @property.setter + def primary_type(self, primary_type): + if self.__injector is not None: + primary_process = self.__injector.GetPrimaryProcess() + primary_process.primary_type = primary_type + self.__primary_type = primary_type + + @property + def primary_interactions(self): + return self.__primary_interactions + + @property.setter + def primary_interactions(self, primary_interactions): + if self.__injector is not None: + primary_process = self.__injector.GetPrimaryProcess() + primary_interaction_collection = _interactions.InteractionCollection( + 
self.primary_type, primary_interactions + ) + primary_process.interactions = primary_interaction_collection + self.__primary_interactions = primary_interactions + + @property + def primary_injection_distributions(self): + return self.__primary_injection_distributions + + @property.setter + def primary_injection_distributions(self, primary_injection_distributions): + if self.__injector is not None: + primary_process = self.__injector.GetPrimaryProcess() + primary_process.distributions = primary_injection_distributions + self.__primary_injection_distributions = primary_injection_distributions + + @property + def secondary_interactions(self): + return self.__secondary_interactions + + @property.setter + def secondary_interactions(self, secondary_interactions): + if self.__injector is not None: + secondary_processes = self.__injector.GetSecondaryProcessMap() + current_secondary_types = sorted(list(secondary_processes.keys())) + new_secondary_types = sorted(list(secondary_interactions.keys())) + if current_secondary_types != new_secondary_types: + raise ValueError("Cannot change the secondary types after initialization") + for secondary_type, secondary_process in secondary_processes.items(): + secondary_process.interactions = secondary_interactions[secondary_type] + self.__secondary_interactions = secondary_interactions + + @property + def secondary_injection_distributions(self): + return self.__secondary_injection_distributions + + @property.setter + def secondary_injection_distributions(self, secondary_injection_distributions): + if self.__injector is not None: + secondary_processes = self.__injector.GetSecondaryProcesses() + current_secondary_types = sorted(list(secondary_processes.keys())) + new_secondary_types = sorted(list(secondary_injection_distributions.keys())) + if current_secondary_types != new_secondary_types: + raise ValueError("Cannot change the secondary types after initialization") + for secondary_type, secondary_process in 
secondary_injection_distributions.items(): + secondary_process.distributions = secondary_distributions[secondary_type] + self.__secondary_injection_distributions = secondary_injection_distributions + + @property + def stopping_condition(self): + return self.__stopping_condition + + @property.setter + def stopping_condition(self, stopping_condition): + if self.__injector is not None: + self.__injector.SetStoppingCondition(stopping_condition) + self.__stopping_condition = stopping_condition + + @wraps(_Injector.NewRecord) + def new_record(self): + return self.__injector.NewRecord() + self.new_record.__name__ = "new_record" + self.new_record.__doc__ = _Injector.NewRecord.__doc__.replace("NewRecord", "new_record") + + @wraps(_Injector.GenerateEvent) + def generate_event(self): + if self.__injector is None: + self.__initialize_injector() + return self.__injector.GenerateEvent() + self.generate_event.__name__ = "generate_event" + self.generate_event.__doc__ = _Injector.GenerateEvent.__doc__.replace("GenerateEvent", "generate_event") + + @property + def density_variables(self): + if self.__injector is not None: + return self.__injector.DensityVariables() + return None + + @property + def injected_events(self): + if self.__injector is not None: + return self.__injector.InjectedEvents() + return 0 @wraps(_Injector.ResetInjectedEvents) - def ResetInjectedEvents(self): - self.injector.ResetInjectedEvents() + def reset_injected_events(self): + if self.__injector is not None: + self.__injector.ResetInjectedEvents() + self.reset_injected_events.__name__ = "reset_injected_events" + self.reset_injected_events.__doc__ = _Injector.ResetInjectedEvents.__doc__.replace("ResetInjectedEvents", "reset_injected_events") @wraps(_Injector.SaveInjector) - def SaveInjector(self, filename): - self.injector.SaveInjector(filename) + def save(self, filename): + self.__injector.SaveInjector(filename) + self.save.__name__ = "save" + self.save.__doc__ = 
_Injector.SaveInjector.__doc__.replace("SaveInjector", "save") @wraps(_Injector.LoadInjector) - def LoadInjector(self, filename): - self.injector.LoadInjector(filename) + def load(self, filename): + self.__injector.LoadInjector(filename) # Update Python object state after loading - self.primary_process = self.injector.GetPrimaryProcess() - self.secondary_processes = self.injector.GetSecondaryProcesses() - self.primary_injection_distributions = self.primary_process.GetPrimaryInjectionDistributions() - self.primary_interaction_collection = self.primary_process.GetInteractionCollection() - self.primary_interactions = {self.primary_process.primary_type: list(self.primary_interaction_collection.GetCrossSections()) + list(self.primary_interaction_collection.GetDecays())} - # Update secondary interactions and distributions - self.secondary_interactions = {} - self.secondary_injection_distributions = {} - for process in self.secondary_processes: - secondary_type = process.secondary_type - self.secondary_interactions[secondary_type] = list(process.GetInteractionCollection().GetCrossSections()) + list(process.GetInteractionCollection().GetDecays()) - self.secondary_injection_distributions[secondary_type] = process.GetSecondaryInjectionDistributions() + self.__number_of_events = self.__injector.EventsToInject() + self.__detector_model = self.__injector.GetDetectorModel() + primary_process = self.__injector.GetPrimaryProcess() + self.__primary_type = primary_process.primary_type + self.__primary_interactions = list(primary_process.interactions.GetCrossSections()) + list(primary_process.interactions.GetDecays()) + self.__primary_injection_distributions = list(primary_process.distributions) + + self.__secondary_interactions = {} + self.__secondary_injection_distributions = {} + for secondary_type, secondary_process in self.__injector.GetSecondaryProcessMap(): + self.__secondary_interactions[secondary_type] = list(secondary_process.interactions.GetCrossSections()) + 
list(secondary_process.interactions.GetDecays()) + self.__secondary_injection_distributions[secondary_type] = list(secondary_process.distributions) + + self.__stopping_condition = self.__injector.GetStoppingCondition() From 324699c8c58c16dac42b313ab0a743870aafb51d Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Wed, 11 Sep 2024 22:43:07 -0600 Subject: [PATCH 44/85] Fix some obvious runtime issues --- python/Injector.py | 88 +++++++++++++++++++++++----------------------- 1 file changed, 44 insertions(+), 44 deletions(-) diff --git a/python/Injector.py b/python/Injector.py index 2b69ccb1a..3a02dcaba 100644 --- a/python/Injector.py +++ b/python/Injector.py @@ -10,7 +10,7 @@ import collections from functools import wraps -from typing import Tuple, List, Dict, Optional, Union +from typing import Tuple, List, Dict, Optional, Union, Callable from typing import TYPE_CHECKING if TYPE_CHECKING: @@ -21,23 +21,24 @@ ParticleType = _dataclasses.Particle.ParticleType CrossSection = _interactions.CrossSection Decay = _interactions.Decay -Interaction = _interactions.Interaction DetectorModel = _detector.DetectorModel SIREN_random = _utilities.SIREN_random PrimaryInjectionDistribution = _distributions.PrimaryInjectionDistribution SecondaryInjectionDistribution = _distributions.SecondaryInjectionDistribution SecondaryInjectionProcess = _injection.SecondaryInjectionProcess +InteractionTreeDatum = _dataclasses.InteractionTreeDatum class Injector: def __init__( self, number_of_events: Optional[int] = None, - detector_model: Optional["DetectorModel"] = None, + detector_model: Optional[_detector.DetectorModel] = None, seed: Optional[int] = None, - primary_interactions: Dict["ParticleType", List[Union["CrossSection", "Decay", "Interaction"]]], - primary_injection_distributions: List["PrimaryInjectionDistribution"], - secondary_interactions: Optional[Dict["ParticleType", List[Union["CrossSection", "Decay", "Interaction"]]]] = None, - secondary_injection_distributions: 
Optional[Dict["ParticleType", List["SecondaryInjectionDistribution"]]] = None, + primary_interactions: Dict[_dataclasses.Particle.ParticleType, List[Union[_interactions.CrossSection, _interactions.Decay]]] = None, + primary_injection_distributions: List[_distributions.PrimaryInjectionDistribution] = None, + secondary_interactions: Optional[Dict[_dataclasses.Particle.ParticleType, List[Union[_interactions.CrossSection, _interactions.Decay]]]] = None, + secondary_injection_distributions: Optional[Dict[_dataclasses.Particle.ParticleType, List[_distributions.SecondaryInjectionDistribution]]] = None, + stopping_condition: Optional[Callable[[_dataclasses.InteractionTreeDatum, int], bool]] = None, ): self.__seed = None self.__number_of_events = 0 @@ -47,31 +48,30 @@ def __init__( self.__primary_interactions = [] self.__primary_injection_distributions = [] - self.__secondary_interactions = [] - self.__secondary_injection_distributions = [] + self.__secondary_interactions = {} + self.__secondary_injection_distributions = {} self.__stopping_condition = None self.__injector = None - if len(primary_interactions) != 1: - raise ValueError(f"len(primary_interactions) != 1") - - if (secondary_interactions is None) != (secondary_injection_distributions is None): - raise ValueError("Both or neither secondary_interactions and secondary_injection_distributions must be provided") - - if secondary_interactions is None: - secondary_interactions = dict() - secondary_injection_distributions = dict() + if seed is not None: + self.__seed = seed + if number_of_events is not None: + self.__number_of_events = number_of_events + if detector_model is not None: + self.__detector_model = detector_model + if primary_interactions is not None: + self.__primary_interactions = primary_interactions + if primary_injection_distributions is not None: + self.__primary_injection_distributions = primary_injection_distributions + if secondary_interactions is not None: + self.__secondary_interactions = 
secondary_interactions + if secondary_injection_distributions is not None: + self.__secondary_injection_distributions = secondary_injection_distributions + if stopping_condition is not None: + self.__stopping_condition = stopping_condition - self.__injector = _injection.Injector( - self.number_of_events, - self.detector_model, - self.primary_process, - self.secondary_processes, - self.random, - ) - def __initialize_injector(self): if self.__seed is None: random = _utilities.SIREN_random() @@ -90,8 +90,8 @@ def __initialize_injector(self): if self.__primary_type is None: raise ValueError("primary_type must be provided") - if len(self.__primary_interactions) == 0: - raise ValueError("primary_interactions must be provided") + if len(self.__primary_interactions) != 1: + raise ValueError("primary_interactions must have exactly one key") if len(self.__primary_injection_distributions) == 0: raise ValueError("primary_injection_distributions must be provided") @@ -144,7 +144,7 @@ def __initialize_injector(self): def seed(self): return self.__seed - @property.setter + @seed.setter def seed(self, seed): self.__seed = seed if self.__injector is not None: @@ -162,7 +162,7 @@ def detector_model(self): return self.__injector.GetDetectorModel() return self.__detector_model - @property.setter + @detector_model.setter def detector_model(self, detector_model): if self.__injector is not None: self.__injector.SetDetectorModel(detector_model) @@ -172,7 +172,7 @@ def detector_model(self, detector_model): def primary_type(self): return self.__primary_type - @property.setter + @primary_type.setter def primary_type(self, primary_type): if self.__injector is not None: primary_process = self.__injector.GetPrimaryProcess() @@ -183,7 +183,7 @@ def primary_type(self, primary_type): def primary_interactions(self): return self.__primary_interactions - @property.setter + @primary_interactions.setter def primary_interactions(self, primary_interactions): if self.__injector is not None: 
primary_process = self.__injector.GetPrimaryProcess() @@ -197,7 +197,7 @@ def primary_interactions(self, primary_interactions): def primary_injection_distributions(self): return self.__primary_injection_distributions - @property.setter + @primary_injection_distributions.setter def primary_injection_distributions(self, primary_injection_distributions): if self.__injector is not None: primary_process = self.__injector.GetPrimaryProcess() @@ -208,7 +208,7 @@ def primary_injection_distributions(self, primary_injection_distributions): def secondary_interactions(self): return self.__secondary_interactions - @property.setter + @secondary_interactions.setter def secondary_interactions(self, secondary_interactions): if self.__injector is not None: secondary_processes = self.__injector.GetSecondaryProcessMap() @@ -224,7 +224,7 @@ def secondary_interactions(self, secondary_interactions): def secondary_injection_distributions(self): return self.__secondary_injection_distributions - @property.setter + @secondary_injection_distributions.setter def secondary_injection_distributions(self, secondary_injection_distributions): if self.__injector is not None: secondary_processes = self.__injector.GetSecondaryProcesses() @@ -240,7 +240,7 @@ def secondary_injection_distributions(self, secondary_injection_distributions): def stopping_condition(self): return self.__stopping_condition - @property.setter + @stopping_condition.setter def stopping_condition(self, stopping_condition): if self.__injector is not None: self.__injector.SetStoppingCondition(stopping_condition) @@ -249,16 +249,16 @@ def stopping_condition(self, stopping_condition): @wraps(_Injector.NewRecord) def new_record(self): return self.__injector.NewRecord() - self.new_record.__name__ = "new_record" - self.new_record.__doc__ = _Injector.NewRecord.__doc__.replace("NewRecord", "new_record") + new_record.__name__ = "new_record" + new_record.__doc__ = _Injector.NewRecord.__doc__.replace("NewRecord", "new_record") 
@wraps(_Injector.GenerateEvent) def generate_event(self): if self.__injector is None: self.__initialize_injector() return self.__injector.GenerateEvent() - self.generate_event.__name__ = "generate_event" - self.generate_event.__doc__ = _Injector.GenerateEvent.__doc__.replace("GenerateEvent", "generate_event") + generate_event.__name__ = "generate_event" + generate_event.__doc__ = _Injector.GenerateEvent.__doc__.replace("GenerateEvent", "generate_event") @property def density_variables(self): @@ -276,14 +276,14 @@ def injected_events(self): def reset_injected_events(self): if self.__injector is not None: self.__injector.ResetInjectedEvents() - self.reset_injected_events.__name__ = "reset_injected_events" - self.reset_injected_events.__doc__ = _Injector.ResetInjectedEvents.__doc__.replace("ResetInjectedEvents", "reset_injected_events") + reset_injected_events.__name__ = "reset_injected_events" + reset_injected_events.__doc__ = _Injector.ResetInjectedEvents.__doc__.replace("ResetInjectedEvents", "reset_injected_events") @wraps(_Injector.SaveInjector) def save(self, filename): self.__injector.SaveInjector(filename) - self.save.__name__ = "save" - self.save.__doc__ = _Injector.SaveInjector.__doc__.replace("SaveInjector", "save") + save.__name__ = "save" + save.__doc__ = _Injector.SaveInjector.__doc__.replace("SaveInjector", "save") @wraps(_Injector.LoadInjector) def load(self, filename): From 9cecedb4382305f7962908b481fa8c6c20c56bca Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Wed, 11 Sep 2024 22:43:25 -0600 Subject: [PATCH 45/85] Replace Injector with python wrapper --- python/__init__.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/python/__init__.py b/python/__init__.py index 411d6aa37..c0a2760b7 100644 --- a/python/__init__.py +++ b/python/__init__.py @@ -8,6 +8,7 @@ from . import injection from . import _util +from . 
import Injector # Intropspect package version import sys @@ -28,6 +29,11 @@ utilities.load_detector = _util.load_detector utilities.load_processes = _util.load_processes +# Override the Injector with the python wrapper +injection._Injector = injection.Injector +injection.Injector = Injector.Injector +del Injector + def darknews_version(): try: import DarkNews From aae1f782ba0d9b5b05bfc845a41ba5cd76252628 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Thu, 12 Sep 2024 12:37:07 -0600 Subject: [PATCH 46/85] Fiducial volume utilities --- python/__init__.py | 1 + python/_util.py | 22 ++++++++++++++++++++++ 2 files changed, 23 insertions(+) diff --git a/python/__init__.py b/python/__init__.py index c0a2760b7..b6b0fb225 100644 --- a/python/__init__.py +++ b/python/__init__.py @@ -28,6 +28,7 @@ utilities.load_flux = _util.load_flux utilities.load_detector = _util.load_detector utilities.load_processes = _util.load_processes +utilities.get_fiducial_volume = _util.get_fiducial_volume # Override the Injector with the python wrapper injection._Injector = injection.Injector diff --git a/python/_util.py b/python/_util.py index 7ab1cca90..abbea95ae 100644 --- a/python/_util.py +++ b/python/_util.py @@ -737,3 +737,25 @@ def load_detector(model_name, *args, **kwargs): def load_processes(model_name, *args, **kwargs): return load_resource("processes", model_name, *args, **kwargs) + +def get_fiducial_volume(experiment): + """ + :return: identified fiducial volume for the experiment, None if not found + """ + detector_model_file = get_detector_model_path(experiment) + "/densities.dat" + with open(detector_model_file) as file: + fiducial_line = None + detector_line = None + for line in file: + data = line.split() + if len(data) <= 0: + continue + elif data[0] == "fiducial": + fiducial_line = line + elif data[0] == "detector": + detector_line = line + if fiducial_line is None or detector_line is None: + return None + from . 
import detector as _detector + return _detector.DetectorModel.ParseFiducialVolume(fiducial_line, detector_line) + return None From d1902ef7e111a91e5e1e55668ea29593625b09d4 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Thu, 12 Sep 2024 12:37:40 -0600 Subject: [PATCH 47/85] Setter for number of events. Fix initialization checks --- python/Injector.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/python/Injector.py b/python/Injector.py index 3a02dcaba..017d7c644 100644 --- a/python/Injector.py +++ b/python/Injector.py @@ -90,18 +90,12 @@ def __initialize_injector(self): if self.__primary_type is None: raise ValueError("primary_type must be provided") - if len(self.__primary_interactions) != 1: - raise ValueError("primary_interactions must have exactly one key") + if len(self.__primary_interactions) == 0: + raise ValueError("primary_interactions must be provided") if len(self.__primary_injection_distributions) == 0: raise ValueError("primary_injection_distributions must be provided") - if len(self.__secondary_interactions) == 0: - raise ValueError("secondary_interactions must be provided") - - if len(self.__secondary_injection_distributions) == 0: - raise ValueError("secondary_injection_distributions must be provided") - if list(sorted(self.__secondary_interactions.keys())) != list(sorted(self.__secondary_injection_distributions.keys())): raise ValueError("secondary_interactions and secondary_injection_distributions must have the same keys") @@ -156,6 +150,12 @@ def number_of_events(self): return self.__injector.EventsToInject() return self.__number_of_events + @number_of_events.setter + def number_of_events(self, number_of_events): + if self.__injector is not None: + raise ValueError("Cannot change the number of events after initialization") + self.__number_of_events = number_of_events + @property def detector_model(self): if self.__injector is not None: From 6fd09b9cfbcd8298d846e09277e9d500f7840dc3 Mon Sep 17 00:00:00 2001 
From: Austin Schneider Date: Thu, 12 Sep 2024 12:51:09 -0600 Subject: [PATCH 48/85] Remove requirement for target_types --- .../primary/vertex/ColumnDepthPositionDistribution.cxx | 8 ++++---- .../primary/vertex/PointSourcePositionDistribution.cxx | 9 ++++----- .../distributions/private/pybindings/distributions.cxx | 4 ++-- .../primary/vertex/ColumnDepthPositionDistribution.h | 7 ++----- .../primary/vertex/PointSourcePositionDistribution.h | 7 ++----- 5 files changed, 14 insertions(+), 21 deletions(-) diff --git a/projects/distributions/private/primary/vertex/ColumnDepthPositionDistribution.cxx b/projects/distributions/private/primary/vertex/ColumnDepthPositionDistribution.cxx index a4eed9b3c..fe901fd85 100644 --- a/projects/distributions/private/primary/vertex/ColumnDepthPositionDistribution.cxx +++ b/projects/distributions/private/primary/vertex/ColumnDepthPositionDistribution.cxx @@ -173,7 +173,7 @@ double ColumnDepthPositionDistribution::GenerationProbability(std::shared_ptr depth_function, std::set target_types) : radius(radius), endcap_length(endcap_length), depth_function(depth_function), target_types(target_types) {} +ColumnDepthPositionDistribution::ColumnDepthPositionDistribution(double radius, double endcap_length, std::shared_ptr depth_function) : radius(radius), endcap_length(endcap_length), depth_function(depth_function) {} std::string ColumnDepthPositionDistribution::Name() const { return "ColumnDepthPositionDistribution"; @@ -215,7 +215,7 @@ bool ColumnDepthPositionDistribution::equal(WeightableDistribution const & other (depth_function and x->depth_function and *depth_function == *x->depth_function) or (!depth_function and !x->depth_function) ) - and target_types == x->target_types); + ); } bool ColumnDepthPositionDistribution::less(WeightableDistribution const & other) const { @@ -226,9 +226,9 @@ bool ColumnDepthPositionDistribution::less(WeightableDistribution const & other) and *depth_function < *x->depth_function); // Less than bool f = false; 
return - std::tie(radius, endcap_length, f, target_types) + std::tie(radius, endcap_length, f) < - std::tie(radius, x->endcap_length, depth_less, x->target_types); + std::tie(radius, x->endcap_length, depth_less); } } // namespace distributions diff --git a/projects/distributions/private/primary/vertex/PointSourcePositionDistribution.cxx b/projects/distributions/private/primary/vertex/PointSourcePositionDistribution.cxx index 977ae58da..db194e978 100644 --- a/projects/distributions/private/primary/vertex/PointSourcePositionDistribution.cxx +++ b/projects/distributions/private/primary/vertex/PointSourcePositionDistribution.cxx @@ -144,7 +144,7 @@ double PointSourcePositionDistribution::GenerationProbability(std::shared_ptr target_types) : origin(origin), max_distance(max_distance), target_types(target_types) {} +PointSourcePositionDistribution::PointSourcePositionDistribution(siren::math::Vector3D origin, double max_distance) : origin(origin), max_distance(max_distance) {} std::string PointSourcePositionDistribution::Name() const { return "PointSourcePositionDistribution"; @@ -177,16 +177,15 @@ bool PointSourcePositionDistribution::equal(WeightableDistribution const & other return false; else return (origin == x->origin - and max_distance == x->max_distance - and target_types == x->target_types); + and max_distance == x->max_distance); } bool PointSourcePositionDistribution::less(WeightableDistribution const & other) const { const PointSourcePositionDistribution* x = dynamic_cast(&other); return - std::tie(origin, max_distance, target_types) + std::tie(origin, max_distance) < - std::tie(origin, x->max_distance, x->target_types); + std::tie(origin, x->max_distance); } } // namespace distributions diff --git a/projects/distributions/private/pybindings/distributions.cxx b/projects/distributions/private/pybindings/distributions.cxx index e698f5a15..f70348f0f 100644 --- a/projects/distributions/private/pybindings/distributions.cxx +++ 
b/projects/distributions/private/pybindings/distributions.cxx @@ -187,7 +187,7 @@ PYBIND11_MODULE(distributions,m) { .def("Name",&CylinderVolumePositionDistribution::Name); class_, VertexPositionDistribution>(m, "ColumnDepthPositionDistribution") - .def(init, std::set>()) + .def(init>()) .def("GenerationProbability",&ColumnDepthPositionDistribution::GenerationProbability) .def("InjectionBounds",&ColumnDepthPositionDistribution::InjectionBounds) .def("Name",&ColumnDepthPositionDistribution::Name) @@ -202,7 +202,7 @@ PYBIND11_MODULE(distributions,m) { class_, VertexPositionDistribution>(m, "PointSourcePositionDistribution") .def(init<>()) - .def(init>()) + .def(init()) .def("GenerationProbability",&PointSourcePositionDistribution::GenerationProbability) .def("InjectionBounds",&PointSourcePositionDistribution::InjectionBounds) .def("Name",&PointSourcePositionDistribution::Name); diff --git a/projects/distributions/public/SIREN/distributions/primary/vertex/ColumnDepthPositionDistribution.h b/projects/distributions/public/SIREN/distributions/primary/vertex/ColumnDepthPositionDistribution.h index d0669dafc..ab067713b 100644 --- a/projects/distributions/public/SIREN/distributions/primary/vertex/ColumnDepthPositionDistribution.h +++ b/projects/distributions/public/SIREN/distributions/primary/vertex/ColumnDepthPositionDistribution.h @@ -38,7 +38,6 @@ friend cereal::access; double radius; double endcap_length; std::shared_ptr depth_function; - std::set target_types; siren::math::Vector3D SampleFromDisk(std::shared_ptr rand, siren::math::Vector3D const & dir) const; @@ -46,7 +45,7 @@ friend cereal::access; public: std::tuple GetSamplePosition(std::shared_ptr rand, std::shared_ptr detector_model, std::shared_ptr interactions, siren::dataclasses::PrimaryDistributionRecord & record); virtual double GenerationProbability(std::shared_ptr detector_model, std::shared_ptr interactions, siren::dataclasses::InteractionRecord const & record) const override; - 
ColumnDepthPositionDistribution(double radius, double endcap_length, std::shared_ptr depth_function, std::set target_types); + ColumnDepthPositionDistribution(double radius, double endcap_length, std::shared_ptr depth_function); std::string Name() const override; virtual std::shared_ptr clone() const override; virtual std::tuple InjectionBounds(std::shared_ptr detector_model, std::shared_ptr interactions, siren::dataclasses::InteractionRecord const & interaction) const override; @@ -56,7 +55,6 @@ friend cereal::access; archive(::cereal::make_nvp("Radius", radius)); archive(::cereal::make_nvp("EndcapLength", endcap_length)); archive(::cereal::make_nvp("DepthFunction", depth_function)); - archive(::cereal::make_nvp("TargetTypes", target_types)); archive(cereal::virtual_base_class(this)); } else { throw std::runtime_error("ColumnDepthPositionDistribution only supports version <= 0!"); @@ -72,8 +70,7 @@ friend cereal::access; archive(::cereal::make_nvp("Radius", r)); archive(::cereal::make_nvp("EndcapLength", l)); archive(::cereal::make_nvp("DepthFunction", f)); - archive(::cereal::make_nvp("TargetTypes", t)); - construct(r, l, f, t); + construct(r, l, f); archive(cereal::virtual_base_class(construct.ptr())); } else { throw std::runtime_error("ColumnDepthPositionDistribution only supports version <= 0!"); diff --git a/projects/distributions/public/SIREN/distributions/primary/vertex/PointSourcePositionDistribution.h b/projects/distributions/public/SIREN/distributions/primary/vertex/PointSourcePositionDistribution.h index d14279ac3..dd05add34 100644 --- a/projects/distributions/public/SIREN/distributions/primary/vertex/PointSourcePositionDistribution.h +++ b/projects/distributions/public/SIREN/distributions/primary/vertex/PointSourcePositionDistribution.h @@ -34,7 +34,6 @@ friend cereal::access; private: siren::math::Vector3D origin; double max_distance; - std::set target_types; siren::math::Vector3D SampleFromDisk(std::shared_ptr rand, siren::math::Vector3D const & dir) 
const; @@ -43,7 +42,7 @@ friend cereal::access; virtual double GenerationProbability(std::shared_ptr detector_model, std::shared_ptr interactions, siren::dataclasses::InteractionRecord const & record) const override; PointSourcePositionDistribution(); PointSourcePositionDistribution(const PointSourcePositionDistribution &) = default; - PointSourcePositionDistribution(siren::math::Vector3D origin, double max_distance, std::set target_types); + PointSourcePositionDistribution(siren::math::Vector3D origin, double max_distance); std::string Name() const override; virtual std::tuple InjectionBounds(std::shared_ptr detector_model, std::shared_ptr interactions, siren::dataclasses::InteractionRecord const & interaction) const override; virtual std::shared_ptr clone() const override; @@ -52,7 +51,6 @@ friend cereal::access; if(version == 0) { archive(::cereal::make_nvp("Origin", origin)); archive(::cereal::make_nvp("MaxDistance", max_distance)); - archive(::cereal::make_nvp("TargetTypes", target_types)); archive(cereal::virtual_base_class(this)); } else { throw std::runtime_error("PointSourcePositionDistribution only supports version <= 0!"); @@ -66,8 +64,7 @@ friend cereal::access; std::set t; archive(::cereal::make_nvp("Origin", r)); archive(::cereal::make_nvp("MaxDistance", l)); - archive(::cereal::make_nvp("TargetTypes", t)); - construct(r, l, t); + construct(r, l); archive(cereal::virtual_base_class(construct.ptr())); } else { throw std::runtime_error("PointSourcePositionDistribution only supports version <= 0!"); From bf999b822d6f23c6d558a44df8b558435c8a6ad7 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Thu, 12 Sep 2024 12:51:41 -0600 Subject: [PATCH 49/85] Add constructor with contents to pybindings --- projects/injection/private/pybindings/injection.cxx | 3 +++ 1 file changed, 3 insertions(+) diff --git a/projects/injection/private/pybindings/injection.cxx b/projects/injection/private/pybindings/injection.cxx index 97c3bc1c8..74a2ce20b 100644 --- 
a/projects/injection/private/pybindings/injection.cxx +++ b/projects/injection/private/pybindings/injection.cxx @@ -43,6 +43,7 @@ PYBIND11_MODULE(injection,m) { class_, Process>(m, "PhysicalProcess") .def(init<>()) + .def(init>()) .def_property("primary_type", &Process::GetPrimaryType, &Process::SetPrimaryType) .def_property("interactions", &Process::GetInteractions, &Process::SetInteractions) .def_property("distributions", &PhysicalProcess::GetPhysicalDistributions, &PhysicalProcess::SetPhysicalDistributions) @@ -50,6 +51,7 @@ PYBIND11_MODULE(injection,m) { class_, Process>(m, "PrimaryInjectionProcess") .def(init<>()) + .def(init>()) .def_property("primary_type", &Process::GetPrimaryType, &Process::SetPrimaryType) .def_property("interactions", &Process::GetInteractions, &Process::SetInteractions) .def_property("distributions", &PrimaryInjectionProcess::GetPrimaryInjectionDistributions, &PrimaryInjectionProcess::SetPrimaryInjectionDistributions) @@ -57,6 +59,7 @@ PYBIND11_MODULE(injection,m) { class_, Process>(m, "SecondaryInjectionProcess") .def(init<>()) + .def(init>()) .def_property("secondary_type", &SecondaryInjectionProcess::GetSecondaryType, &SecondaryInjectionProcess::SetSecondaryType) .def_property("interactions", &Process::GetInteractions, &Process::SetInteractions) .def_property("distributions", &SecondaryInjectionProcess::GetSecondaryInjectionDistributions, &SecondaryInjectionProcess::SetSecondaryInjectionDistributions) From 990e474036e623f4471d13627332fd4007cf8413 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Fri, 13 Sep 2024 18:13:03 -0600 Subject: [PATCH 50/85] Move examples --- .../ND280UPGRD-v1.dat => ND280UPGRD/densities_ND280UPGRD-v1.dat} | 0 .../ND280UPGRD-v1.dat => ND280UPGRD/materials_ND280UPGRD-v1.dat} | 0 resources/examples/{Example1 => example1}/DIS_ATLAS.py | 0 resources/examples/{Example1 => example1}/DIS_DUNE.py | 0 resources/examples/{Example1 => example1}/DIS_IceCube.py | 0 resources/examples/{Example1 => 
example1}/PaperPlots.ipynb | 0 resources/examples/{Example2 => example2}/DipolePortal_CCM.py | 0 resources/examples/{Example2 => example2}/DipolePortal_MINERvA.py | 0 .../examples/{Example2 => example2}/DipolePortal_MiniBooNE.py | 0 .../examples/{Example2 => example2}/DipolePortal_ND280UPGRD.py | 0 resources/examples/{Example2 => example2}/PaperPlots.ipynb | 0 11 files changed, 0 insertions(+), 0 deletions(-) rename resources/Detectors/{densities/ND280UPGRD/ND280UPGRD-v1.dat => ND280UPGRD/densities_ND280UPGRD-v1.dat} (100%) rename resources/Detectors/{materials/ND280UPGRD/ND280UPGRD-v1.dat => ND280UPGRD/materials_ND280UPGRD-v1.dat} (100%) rename resources/examples/{Example1 => example1}/DIS_ATLAS.py (100%) rename resources/examples/{Example1 => example1}/DIS_DUNE.py (100%) rename resources/examples/{Example1 => example1}/DIS_IceCube.py (100%) rename resources/examples/{Example1 => example1}/PaperPlots.ipynb (100%) rename resources/examples/{Example2 => example2}/DipolePortal_CCM.py (100%) rename resources/examples/{Example2 => example2}/DipolePortal_MINERvA.py (100%) rename resources/examples/{Example2 => example2}/DipolePortal_MiniBooNE.py (100%) rename resources/examples/{Example2 => example2}/DipolePortal_ND280UPGRD.py (100%) rename resources/examples/{Example2 => example2}/PaperPlots.ipynb (100%) diff --git a/resources/Detectors/densities/ND280UPGRD/ND280UPGRD-v1.dat b/resources/Detectors/ND280UPGRD/densities_ND280UPGRD-v1.dat similarity index 100% rename from resources/Detectors/densities/ND280UPGRD/ND280UPGRD-v1.dat rename to resources/Detectors/ND280UPGRD/densities_ND280UPGRD-v1.dat diff --git a/resources/Detectors/materials/ND280UPGRD/ND280UPGRD-v1.dat b/resources/Detectors/ND280UPGRD/materials_ND280UPGRD-v1.dat similarity index 100% rename from resources/Detectors/materials/ND280UPGRD/ND280UPGRD-v1.dat rename to resources/Detectors/ND280UPGRD/materials_ND280UPGRD-v1.dat diff --git a/resources/examples/Example1/DIS_ATLAS.py 
b/resources/examples/example1/DIS_ATLAS.py similarity index 100% rename from resources/examples/Example1/DIS_ATLAS.py rename to resources/examples/example1/DIS_ATLAS.py diff --git a/resources/examples/Example1/DIS_DUNE.py b/resources/examples/example1/DIS_DUNE.py similarity index 100% rename from resources/examples/Example1/DIS_DUNE.py rename to resources/examples/example1/DIS_DUNE.py diff --git a/resources/examples/Example1/DIS_IceCube.py b/resources/examples/example1/DIS_IceCube.py similarity index 100% rename from resources/examples/Example1/DIS_IceCube.py rename to resources/examples/example1/DIS_IceCube.py diff --git a/resources/examples/Example1/PaperPlots.ipynb b/resources/examples/example1/PaperPlots.ipynb similarity index 100% rename from resources/examples/Example1/PaperPlots.ipynb rename to resources/examples/example1/PaperPlots.ipynb diff --git a/resources/examples/Example2/DipolePortal_CCM.py b/resources/examples/example2/DipolePortal_CCM.py similarity index 100% rename from resources/examples/Example2/DipolePortal_CCM.py rename to resources/examples/example2/DipolePortal_CCM.py diff --git a/resources/examples/Example2/DipolePortal_MINERvA.py b/resources/examples/example2/DipolePortal_MINERvA.py similarity index 100% rename from resources/examples/Example2/DipolePortal_MINERvA.py rename to resources/examples/example2/DipolePortal_MINERvA.py diff --git a/resources/examples/Example2/DipolePortal_MiniBooNE.py b/resources/examples/example2/DipolePortal_MiniBooNE.py similarity index 100% rename from resources/examples/Example2/DipolePortal_MiniBooNE.py rename to resources/examples/example2/DipolePortal_MiniBooNE.py diff --git a/resources/examples/Example2/DipolePortal_ND280UPGRD.py b/resources/examples/example2/DipolePortal_ND280UPGRD.py similarity index 100% rename from resources/examples/Example2/DipolePortal_ND280UPGRD.py rename to resources/examples/example2/DipolePortal_ND280UPGRD.py diff --git a/resources/examples/Example2/PaperPlots.ipynb 
b/resources/examples/example2/PaperPlots.ipynb similarity index 100% rename from resources/examples/Example2/PaperPlots.ipynb rename to resources/examples/example2/PaperPlots.ipynb From 1b347514319b976bce7a0af0dc7f3e65b404613d Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Fri, 13 Sep 2024 19:31:03 -0600 Subject: [PATCH 51/85] Utilities for string manipulation (mainly inserting tabs) --- projects/utilities/CMakeLists.txt | 1 + .../utilities/private/StringManipulation.cxx | 41 +++++++++++++++++++ .../SIREN/utilities/StringManipulation.h | 17 ++++++++ 3 files changed, 59 insertions(+) create mode 100644 projects/utilities/private/StringManipulation.cxx create mode 100644 projects/utilities/public/SIREN/utilities/StringManipulation.h diff --git a/projects/utilities/CMakeLists.txt b/projects/utilities/CMakeLists.txt index f0dec7b97..5ad24dfaa 100644 --- a/projects/utilities/CMakeLists.txt +++ b/projects/utilities/CMakeLists.txt @@ -3,6 +3,7 @@ LIST (APPEND utilities_SOURCES ${PROJECT_SOURCE_DIR}/projects/utilities/private/Interpolator.cxx ${PROJECT_SOURCE_DIR}/projects/utilities/private/Random.cxx + ${PROJECT_SOURCE_DIR}/projects/utilities/private/StringManipulation.cxx ) add_library(SIREN_utilities OBJECT ${utilities_SOURCES}) set_property(TARGET SIREN_utilities PROPERTY POSITION_INDEPENDENT_CODE ON) diff --git a/projects/utilities/private/StringManipulation.cxx b/projects/utilities/private/StringManipulation.cxx new file mode 100644 index 000000000..96f009f65 --- /dev/null +++ b/projects/utilities/private/StringManipulation.cxx @@ -0,0 +1,41 @@ +#include +#include +#include + +#include "SIREN/utilities/StringManipulation.h" + +namespace siren { +namespace utilities { + +std::string add_prefix(std::string const & input, std::string const & prefix) { + std::istringstream iss(input); + std::vector lines; + std::string line; + ssize_t last_non_empty_line = -1; + size_t line_number = 0; + + // Read each line and track the last non-empty line + while 
(std::getline(iss, line)) { + lines.push_back(line); + if (!line.empty()) { + last_non_empty_line = line_number; + } + line_number++; + } + + std::ostringstream oss; + + // Add prefix to each line up to the last non-empty line + if (last_non_empty_line >= 0) { + for (size_t i = 0; i <= static_cast(last_non_empty_line); ++i) { + oss << prefix << lines[i] << '\n'; + } + // Ensure the string ends with an empty newline + oss << '\n'; + } + + return oss.str(); +} + +} // namespace utilities +} // namespace siren diff --git a/projects/utilities/public/SIREN/utilities/StringManipulation.h b/projects/utilities/public/SIREN/utilities/StringManipulation.h new file mode 100644 index 000000000..ef3d90a76 --- /dev/null +++ b/projects/utilities/public/SIREN/utilities/StringManipulation.h @@ -0,0 +1,17 @@ +#pragma once +#ifndef SIREN_StringMapulation_H +#define SIREN_StringMapulation_H + +#include + +namespace siren { +namespace utilities { + +constexpr char const * tab = " "; + +std::string add_prefix(std::string const & input, std::string const & prefix); + +} // namespace utilities +} // namespace siren + +#endif // SIREN_StringMapulation_H From bb2776d3619a6ff7eb0d77f34db715df8a9ba3db Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Fri, 13 Sep 2024 19:33:29 -0600 Subject: [PATCH 52/85] A few pythonic changes to dataclasses pybindings. Moving ParticleType into dataclasses. 
Clean up InteractionSignature __str__ and __repr__ --- .../private/InteractionSignature.cxx | 41 +++++++++++++----- .../private/pybindings/dataclasses.cxx | 42 +++++++------------ .../SIREN/dataclasses/InteractionSignature.h | 9 ++-- python/Injector.py | 8 ++-- python/__init__.py | 2 + 5 files changed, 58 insertions(+), 44 deletions(-) diff --git a/projects/dataclasses/private/InteractionSignature.cxx b/projects/dataclasses/private/InteractionSignature.cxx index 1cf3a381b..b0b54ebc0 100644 --- a/projects/dataclasses/private/InteractionSignature.cxx +++ b/projects/dataclasses/private/InteractionSignature.cxx @@ -1,4 +1,5 @@ #include "SIREN/dataclasses/InteractionSignature.h" +#include "SIREN/utilities/StringManipulation.h" #include // for tie, operator<, operator==, tuple #include // for operator<<, char_traits, basic_ostream, endl, ost... @@ -23,20 +24,38 @@ bool InteractionSignature::operator<(InteractionSignature const & other) const { } // namespace dataclasses } // namespace siren -std::ostream& operator<<(std::ostream& os, siren::dataclasses::InteractionSignature const& signature) { - std::stringstream ss; - ss << "InteractionSignature (" << &signature << ") "; - os << ss.str() << '\n'; - +std::ostream& operator<<(std::ostream& os, siren::dataclasses::InteractionSignature const & signature) { + os << to_repr(signature); + return os; +} - os << "PrimaryType: " << signature.primary_type << "\n"; - os << "TargetType: " << signature.target_type << "\n"; - os << "SecondaryTypes:"; +std::string to_str(siren::dataclasses::InteractionSignature const & signature) { + using siren::utilities::tab; + std::stringstream ss; + ss << "[ InteractionSignature (" << &signature << ") \n"; + ss << tab << "PrimaryType: " << signature.primary_type << '\n'; + ss << tab << "TargetType: " << signature.target_type << '\n'; + ss << tab << "SecondaryTypes:"; for(auto secondary: signature.secondary_types) { - os << " " << secondary; + ss << ' ' << secondary; } - os << std::endl; + ss << 
"\n]"; - return os; + return ss.str(); } +std::string to_repr(siren::dataclasses::InteractionSignature const & signature) { + using siren::dataclasses::ParticleType; + std::stringstream ss; + ss << "InteractionSignature( "; + ss << signature.primary_type << " "; + if(signature.primary_type == ParticleType::unknown or signature.target_type != ParticleType::unknown) { + ss << signature.target_type << " "; + } + ss << "-> "; + for(auto const & secondary : signature.secondary_types) { + ss << secondary << " "; + } + ss << ")"; + return ss.str(); +} diff --git a/projects/dataclasses/private/pybindings/dataclasses.cxx b/projects/dataclasses/private/pybindings/dataclasses.cxx index e328b280f..3a049edf6 100644 --- a/projects/dataclasses/private/pybindings/dataclasses.cxx +++ b/projects/dataclasses/private/pybindings/dataclasses.cxx @@ -31,9 +31,9 @@ PYBIND11_MODULE(dataclasses,m) { .def_readwrite("position",&Particle::position) .def_readwrite("length",&Particle::length) .def_readwrite("helicity",&Particle::helicity) - .def("GenerateID",&Particle::GenerateID); + .def("generate_id",&Particle::GenerateID); - enum_(particle, "ParticleType", arithmetic()) + enum_(m, "ParticleType", arithmetic()) #define X(a, b) .value( #a , ParticleType:: a ) #include "../../public/SIREN/dataclasses/ParticleTypes.def" #undef X @@ -41,21 +41,8 @@ PYBIND11_MODULE(dataclasses,m) { class_>(m, "InteractionSignature") .def(init<>()) - .def("__str__", [](InteractionSignature const & p) { std::stringstream ss; ss << p; return ss.str(); }) - .def("__repr__", [](InteractionSignature const & s) { - std::stringstream ss; - ss << "InteractionSignature( "; - ss << s.primary_type << " "; - if(s.primary_type == ParticleType::unknown or s.target_type != ParticleType::unknown) { - ss << s.target_type << " "; - } - ss << "-> "; - for(auto const & secondary : s.secondary_types) { - ss << secondary << " "; - } - ss << ")"; - return ss.str(); - }) + .def("__str__", [](InteractionSignature const & s) { return 
to_str(s); }) + .def("__repr__", [](InteractionSignature const & s) { return to_repr(s); }) .def_readwrite("primary_type",&InteractionSignature::primary_type) .def_readwrite("target_type",&InteractionSignature::target_type) .def_readwrite("secondary_types",&InteractionSignature::secondary_types); @@ -66,8 +53,8 @@ PYBIND11_MODULE(dataclasses,m) { [](siren::dataclasses::PrimaryDistributionRecord const & pdr) {siren::dataclasses::ParticleID id = pdr.id; return id;}) .def_property_readonly("type", [](siren::dataclasses::PrimaryDistributionRecord const & pdr) {siren::dataclasses::ParticleType pt = pdr.type; return pt;}) - .def("GetParticle", &PrimaryDistributionRecord::GetParticle) - .def("SetParticle", &PrimaryDistributionRecord::SetParticle) + //.def("GetParticle", &PrimaryDistributionRecord::GetParticle) + //.def("SetParticle", &PrimaryDistributionRecord::SetParticle) .def_property("mass", ((double const & (PrimaryDistributionRecord::*)())(&PrimaryDistributionRecord::GetMass)), &PrimaryDistributionRecord::SetMass) .def_property("energy", ((double const & (PrimaryDistributionRecord::*)())(&PrimaryDistributionRecord::GetEnergy)), &PrimaryDistributionRecord::SetEnergy) .def_property("kinetic_energy", ((double const & (PrimaryDistributionRecord::*)())(&PrimaryDistributionRecord::GetKineticEnergy)), &PrimaryDistributionRecord::SetKineticEnergy) @@ -78,7 +65,7 @@ PYBIND11_MODULE(dataclasses,m) { .def_property("initial_position", ((std::array const & (PrimaryDistributionRecord::*)())(&PrimaryDistributionRecord::GetInitialPosition)), &PrimaryDistributionRecord::SetInitialPosition) .def_property("interaction_vertex", ((std::array const & (PrimaryDistributionRecord::*)())(&PrimaryDistributionRecord::GetInteractionVertex)), &PrimaryDistributionRecord::SetInteractionVertex) .def_property("helicity", ((double const & (PrimaryDistributionRecord::*)())(&PrimaryDistributionRecord::GetHelicity)), &PrimaryDistributionRecord::SetHelicity) - .def("Finalize", 
&PrimaryDistributionRecord::Finalize); + .def("finalize", &PrimaryDistributionRecord::Finalize); class_>(m, "SecondaryParticleRecord") .def(init()) @@ -88,8 +75,8 @@ PYBIND11_MODULE(dataclasses,m) { [](siren::dataclasses::SecondaryParticleRecord const & spr) {siren::dataclasses::ParticleType pt = spr.type; return pt;}) .def_property_readonly("initial_position", [](siren::dataclasses::SecondaryParticleRecord const & spr) {std::array ip = spr.initial_position; return ip;}) - .def("GetParticle", &SecondaryParticleRecord::GetParticle) - .def("SetParticle", &SecondaryParticleRecord::SetParticle) + //.def("GetParticle", &SecondaryParticleRecord::GetParticle) + //.def("SetParticle", &SecondaryParticleRecord::SetParticle) .def_property("mass", ((double const & (SecondaryParticleRecord::*)())(&SecondaryParticleRecord::GetMass)), &SecondaryParticleRecord::SetMass) .def_property("energy", ((double const & (SecondaryParticleRecord::*)())(&SecondaryParticleRecord::GetEnergy)), &SecondaryParticleRecord::SetEnergy) .def_property("kinetic_energy", ((double const & (SecondaryParticleRecord::*)())(&SecondaryParticleRecord::GetKineticEnergy)), &SecondaryParticleRecord::SetKineticEnergy) @@ -97,7 +84,7 @@ PYBIND11_MODULE(dataclasses,m) { .def_property("three_momentum", ((std::array const & (SecondaryParticleRecord::*)())(&SecondaryParticleRecord::GetThreeMomentum)), &SecondaryParticleRecord::SetThreeMomentum) .def_property("four_momentum", ((std::array (SecondaryParticleRecord::*)())(&SecondaryParticleRecord::GetFourMomentum)), &SecondaryParticleRecord::SetFourMomentum) .def_property("helicity", ((double const & (SecondaryParticleRecord::*)())(&SecondaryParticleRecord::GetHelicity)), &SecondaryParticleRecord::SetHelicity) - .def("Finalize", &SecondaryParticleRecord::Finalize); + .def("finalize", &SecondaryParticleRecord::Finalize); class_>(m, "CrossSectionDistributionRecord") .def(init()) @@ -126,13 +113,16 @@ PYBIND11_MODULE(dataclasses,m) { .def_property("target_mass", ((double 
const & (siren::dataclasses::CrossSectionDistributionRecord::*)() const)(&siren::dataclasses::CrossSectionDistributionRecord::GetTargetMass)), &siren::dataclasses::CrossSectionDistributionRecord::SetTargetMass) .def_property("target_helicity", ((double const & (siren::dataclasses::CrossSectionDistributionRecord::*)() const)(&siren::dataclasses::CrossSectionDistributionRecord::GetTargetHelicity)), &siren::dataclasses::CrossSectionDistributionRecord::SetTargetHelicity) .def_property("interaction_parameters", ((std::map const & (siren::dataclasses::CrossSectionDistributionRecord::*)())(&siren::dataclasses::CrossSectionDistributionRecord::GetInteractionParameters)), &siren::dataclasses::CrossSectionDistributionRecord::SetInteractionParameters) - .def("GetSecondaryParticleRecord", + .def_property_readonly("secondary_particle_records", + [](siren::dataclasses::CrossSectionDistributionRecord & cdr) -> std::vector & {return cdr.GetSecondaryParticleRecords();}, + return_value_policy::reference_internal) + .def("get_econdary_particle_record", [](siren::dataclasses::CrossSectionDistributionRecord & cdr, size_t i) -> siren::dataclasses::SecondaryParticleRecord & {return cdr.GetSecondaryParticleRecord(i);}, return_value_policy::reference_internal) - .def("GetSecondaryParticleRecords", + .def("get_econdary_particle_records", [](siren::dataclasses::CrossSectionDistributionRecord & cdr) -> std::vector & {return cdr.GetSecondaryParticleRecords();}, return_value_policy::reference_internal) - .def("Finalize", &CrossSectionDistributionRecord::Finalize); + .def("finalize", &CrossSectionDistributionRecord::Finalize); class_>(m, "InteractionRecord") diff --git a/projects/dataclasses/public/SIREN/dataclasses/InteractionSignature.h b/projects/dataclasses/public/SIREN/dataclasses/InteractionSignature.h index f3325d5b6..52f66b0f1 100644 --- a/projects/dataclasses/public/SIREN/dataclasses/InteractionSignature.h +++ b/projects/dataclasses/public/SIREN/dataclasses/InteractionSignature.h @@ 
-4,16 +4,18 @@ #include // for ostream #include // for vector +#include // for string #include // for uint32_t #include // for runtime_error #include // for make_nvp, CEREAL_CL... #include "SIREN/dataclasses/Particle.h" // for Particle - // namespace siren { namespace dataclasses { struct InteractionSignature; } } -std::ostream& operator<<(std::ostream& os, siren::dataclasses::InteractionSignature const& signature); +std::ostream& operator<<(std::ostream& os, siren::dataclasses::InteractionSignature const & signature); +std::string to_str(siren::dataclasses::InteractionSignature const & signature); +std::string to_repr(siren::dataclasses::InteractionSignature const & signature); namespace siren { namespace dataclasses { @@ -25,7 +27,8 @@ struct InteractionSignature { bool operator==(InteractionSignature const & other) const; bool operator<(InteractionSignature const & other) const; - friend std::ostream& ::operator<<(std::ostream& os, InteractionSignature const& signature); + friend std::string (::to_str)(siren::dataclasses::InteractionSignature const & signature); + friend std::string (::to_repr)(siren::dataclasses::InteractionSignature const & signature); template void serialize(Archive & archive, std::uint32_t const version) { if(version == 0) { diff --git a/python/Injector.py b/python/Injector.py index 017d7c644..8ca27cbff 100644 --- a/python/Injector.py +++ b/python/Injector.py @@ -18,7 +18,7 @@ _Injector = _injection.Injector -ParticleType = _dataclasses.Particle.ParticleType +ParticleType = _dataclasses.ParticleType CrossSection = _interactions.CrossSection Decay = _interactions.Decay DetectorModel = _detector.DetectorModel @@ -34,10 +34,10 @@ def __init__( number_of_events: Optional[int] = None, detector_model: Optional[_detector.DetectorModel] = None, seed: Optional[int] = None, - primary_interactions: Dict[_dataclasses.Particle.ParticleType, List[Union[_interactions.CrossSection, _interactions.Decay]]] = None, + primary_interactions: 
Dict[_dataclasses.ParticleType, List[Union[_interactions.CrossSection, _interactions.Decay]]] = None, primary_injection_distributions: List[_distributions.PrimaryInjectionDistribution] = None, - secondary_interactions: Optional[Dict[_dataclasses.Particle.ParticleType, List[Union[_interactions.CrossSection, _interactions.Decay]]]] = None, - secondary_injection_distributions: Optional[Dict[_dataclasses.Particle.ParticleType, List[_distributions.SecondaryInjectionDistribution]]] = None, + secondary_interactions: Optional[Dict[_dataclasses.ParticleType, List[Union[_interactions.CrossSection, _interactions.Decay]]]] = None, + secondary_injection_distributions: Optional[Dict[_dataclasses.ParticleType, List[_distributions.SecondaryInjectionDistribution]]] = None, stopping_condition: Optional[Callable[[_dataclasses.InteractionTreeDatum, int], bool]] = None, ): self.__seed = None diff --git a/python/__init__.py b/python/__init__.py index b6b0fb225..587cce09a 100644 --- a/python/__init__.py +++ b/python/__init__.py @@ -35,6 +35,8 @@ injection.Injector = Injector.Injector del Injector +dataclasses.Particle.ParticleType = dataclasses.ParticleType + def darknews_version(): try: import DarkNews From d02b3b73404fae5f81c4a8dd9d467bb04d03c6cf Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Fri, 13 Sep 2024 20:47:16 -0600 Subject: [PATCH 53/85] Add bindings for ParticleID --- .../private/pybindings/dataclasses.cxx | 159 +++++++++++------- 1 file changed, 97 insertions(+), 62 deletions(-) diff --git a/projects/dataclasses/private/pybindings/dataclasses.cxx b/projects/dataclasses/private/pybindings/dataclasses.cxx index 3a049edf6..f2f1166b0 100644 --- a/projects/dataclasses/private/pybindings/dataclasses.cxx +++ b/projects/dataclasses/private/pybindings/dataclasses.cxx @@ -10,45 +10,80 @@ #include "../../public/SIREN/dataclasses/InteractionTree.h" #include +#include #include -using namespace pybind11; -PYBIND11_MODULE(dataclasses,m) { - using namespace siren::dataclasses; - - 
class_> particle(m, "Particle"); - - particle.def(init<>()) - .def(init()) - .def(init, std::array, double, double>()) - .def(init, std::array, double, double>()) - .def("__str__", [](Particle const & p) { std::stringstream ss; ss << p; return ss.str(); }) - .def_readwrite("id",&Particle::id) - .def_readwrite("type",&Particle::type) - .def_readwrite("mass",&Particle::mass) - .def_readwrite("momentum",&Particle::momentum) - .def_readwrite("position",&Particle::position) - .def_readwrite("length",&Particle::length) - .def_readwrite("helicity",&Particle::helicity) - .def("generate_id",&Particle::GenerateID); - - enum_(m, "ParticleType", arithmetic()) +PYBIND11_MODULE(dataclasses, m) { + namespace py = pybind11; + using namespace siren::dataclasses; + + // Create a Python class binding for siren::dataclasses::ParticleID + py::class_ particle_id(m, "ParticleID"); + + particle_id + .def(py::init<>()) + .def(py::init(), py::arg("major"), py::arg("minor")) + .def(py::init(), py::arg("other")) + .def(py::init(), py::arg("other")) + .def_static("generate_id", &siren::dataclasses::ParticleID::GenerateID) + .def("is_set", &siren::dataclasses::ParticleID::IsSet) + //.def_property("major_id", &siren::dataclasses::ParticleID::GetMajorID, &siren::dataclasses::ParticleID::SetMajorID) + // Getters for major and minor IDs + .def_property_readonly("major_id", &siren::dataclasses::ParticleID::GetMajorID) + .def_property_readonly("minor_id", &siren::dataclasses::ParticleID::GetMinorID) + // Method to set the ID + .def("set", &siren::dataclasses::ParticleID::SetID, py::arg("major"), py::arg("minor")) + // Overload the bool operator + .def("__bool__", &siren::dataclasses::ParticleID::operator bool) + // Comparison operators + .def(py::self == py::self) + .def(py::self != py::self) + .def(py::self < py::self) + // String representation + .def("__repr__", + [](const siren::dataclasses::ParticleID &id) { + std::ostringstream oss; + oss << id; + return oss.str(); + } + ) + // Optional: 
Serialize method if needed in Python + // .def("serialize", &siren::dataclasses::ParticleID::serialize) + ; + + py::class_> particle(m, "Particle"); + + particle.def(py::init<>()) + .def(py::init()) + .def(py::init, std::array, double, double>()) + .def(py::init, std::array, double, double>()) + .def("__str__", [](Particle const & p) { std::stringstream ss; ss << p; return ss.str(); }) + .def_readwrite("id",&Particle::id) + .def_readwrite("type",&Particle::type) + .def_readwrite("mass",&Particle::mass) + .def_readwrite("momentum",&Particle::momentum) + .def_readwrite("position",&Particle::position) + .def_readwrite("length",&Particle::length) + .def_readwrite("helicity",&Particle::helicity) + .def("generate_id",&Particle::GenerateID); + + py::enum_(m, "ParticleType", py::arithmetic()) #define X(a, b) .value( #a , ParticleType:: a ) #include "../../public/SIREN/dataclasses/ParticleTypes.def" #undef X .export_values(); - class_>(m, "InteractionSignature") - .def(init<>()) + py::class_>(m, "InteractionSignature") + .def(py::init<>()) .def("__str__", [](InteractionSignature const & s) { return to_str(s); }) .def("__repr__", [](InteractionSignature const & s) { return to_repr(s); }) .def_readwrite("primary_type",&InteractionSignature::primary_type) .def_readwrite("target_type",&InteractionSignature::target_type) .def_readwrite("secondary_types",&InteractionSignature::secondary_types); - class_>(m, "PrimaryDistributionRecord") - .def(init()) + py::class_>(m, "PrimaryDistributionRecord") + .def(py::init()) .def_property_readonly("id", [](siren::dataclasses::PrimaryDistributionRecord const & pdr) {siren::dataclasses::ParticleID id = pdr.id; return id;}) .def_property_readonly("type", @@ -67,8 +102,8 @@ PYBIND11_MODULE(dataclasses,m) { .def_property("helicity", ((double const & (PrimaryDistributionRecord::*)())(&PrimaryDistributionRecord::GetHelicity)), &PrimaryDistributionRecord::SetHelicity) .def("finalize", &PrimaryDistributionRecord::Finalize); - class_>(m, 
"SecondaryParticleRecord") - .def(init()) + py::class_>(m, "SecondaryParticleRecord") + .def(py::init()) .def_property_readonly("id", [](siren::dataclasses::SecondaryParticleRecord const & spr) {siren::dataclasses::ParticleID id = spr.id; return id;}) .def_property_readonly("type", @@ -86,8 +121,8 @@ PYBIND11_MODULE(dataclasses,m) { .def_property("helicity", ((double const & (SecondaryParticleRecord::*)())(&SecondaryParticleRecord::GetHelicity)), &SecondaryParticleRecord::SetHelicity) .def("finalize", &SecondaryParticleRecord::Finalize); - class_>(m, "CrossSectionDistributionRecord") - .def(init()) + py::class_>(m, "CrossSectionDistributionRecord") + .def(py::init()) .def_property_readonly("record", [](siren::dataclasses::CrossSectionDistributionRecord const & cdr) {siren::dataclasses::InteractionRecord ir = cdr.record; return ir;}) .def_property_readonly("signature", @@ -115,45 +150,45 @@ PYBIND11_MODULE(dataclasses,m) { .def_property("interaction_parameters", ((std::map const & (siren::dataclasses::CrossSectionDistributionRecord::*)())(&siren::dataclasses::CrossSectionDistributionRecord::GetInteractionParameters)), &siren::dataclasses::CrossSectionDistributionRecord::SetInteractionParameters) .def_property_readonly("secondary_particle_records", [](siren::dataclasses::CrossSectionDistributionRecord & cdr) -> std::vector & {return cdr.GetSecondaryParticleRecords();}, - return_value_policy::reference_internal) + py::return_value_policy::reference_internal) .def("get_econdary_particle_record", [](siren::dataclasses::CrossSectionDistributionRecord & cdr, size_t i) -> siren::dataclasses::SecondaryParticleRecord & {return cdr.GetSecondaryParticleRecord(i);}, - return_value_policy::reference_internal) + py::return_value_policy::reference_internal) .def("get_econdary_particle_records", [](siren::dataclasses::CrossSectionDistributionRecord & cdr) -> std::vector & {return cdr.GetSecondaryParticleRecords();}, - return_value_policy::reference_internal) + 
py::return_value_policy::reference_internal) .def("finalize", &CrossSectionDistributionRecord::Finalize); - class_>(m, "InteractionRecord") - .def(init<>()) - .def("__str__", [](InteractionRecord const & r) { std::stringstream ss; ss << r; return ss.str(); }) - .def_readwrite("signature",&InteractionRecord::signature) - .def_readwrite("primary_mass",&InteractionRecord::primary_mass) - .def_readwrite("primary_momentum",&InteractionRecord::primary_momentum) - .def_readwrite("primary_helicity",&InteractionRecord::primary_helicity) - .def_readwrite("target_mass",&InteractionRecord::target_mass) - .def_readwrite("target_helicity",&InteractionRecord::target_helicity) - .def_readwrite("interaction_vertex",&InteractionRecord::interaction_vertex) - .def_readwrite("secondary_masses",&InteractionRecord::secondary_masses) - .def_readwrite("secondary_momenta",&InteractionRecord::secondary_momenta) - .def_readwrite("secondary_helicities",&InteractionRecord::secondary_helicities) - .def_readwrite("interaction_parameters",&InteractionRecord::interaction_parameters); - - class_>(m, "InteractionTreeDatum") - .def(init()) - .def_readwrite("record",&InteractionTreeDatum::record) - .def_readwrite("parent",&InteractionTreeDatum::parent) - .def_readwrite("daughters",&InteractionTreeDatum::daughters) - .def("depth",&InteractionTreeDatum::depth); - - class_>(m, "InteractionTree") - .def(init<>()) - .def_readwrite("tree",&InteractionTree::tree) - .def("add_entry",static_cast (InteractionTree::*)(InteractionTreeDatum&,std::shared_ptr)>(&InteractionTree::add_entry)) - .def("add_entry",static_cast (InteractionTree::*)(InteractionRecord&,std::shared_ptr)>(&InteractionTree::add_entry)); - - m.def("SaveInteractionTrees",&SaveInteractionTrees); - m.def("LoadInteractionTrees",&LoadInteractionTrees,pybind11::return_value_policy::reference); + py::class_>(m, "InteractionRecord") + .def(py::init<>()) + .def("__str__", [](InteractionRecord const & r) { std::stringstream ss; ss << r; return ss.str(); }) 
+ .def_readwrite("signature",&InteractionRecord::signature) + .def_readwrite("primary_mass",&InteractionRecord::primary_mass) + .def_readwrite("primary_momentum",&InteractionRecord::primary_momentum) + .def_readwrite("primary_helicity",&InteractionRecord::primary_helicity) + .def_readwrite("target_mass",&InteractionRecord::target_mass) + .def_readwrite("target_helicity",&InteractionRecord::target_helicity) + .def_readwrite("interaction_vertex",&InteractionRecord::interaction_vertex) + .def_readwrite("secondary_masses",&InteractionRecord::secondary_masses) + .def_readwrite("secondary_momenta",&InteractionRecord::secondary_momenta) + .def_readwrite("secondary_helicities",&InteractionRecord::secondary_helicities) + .def_readwrite("interaction_parameters",&InteractionRecord::interaction_parameters); + + py::class_>(m, "InteractionTreeDatum") + .def(py::init()) + .def_readwrite("record",&InteractionTreeDatum::record) + .def_readwrite("parent",&InteractionTreeDatum::parent) + .def_readwrite("daughters",&InteractionTreeDatum::daughters) + .def("depth",&InteractionTreeDatum::depth); + + py::class_>(m, "InteractionTree") + .def(py::init<>()) + .def_readwrite("tree",&InteractionTree::tree) + .def("add_entry",static_cast (InteractionTree::*)(InteractionTreeDatum&,std::shared_ptr)>(&InteractionTree::add_entry)) + .def("add_entry",static_cast (InteractionTree::*)(InteractionRecord&,std::shared_ptr)>(&InteractionTree::add_entry)); + + m.def("SaveInteractionTrees",&SaveInteractionTrees); + m.def("LoadInteractionTrees",&LoadInteractionTrees, py::return_value_policy::reference); } From 41768b9f1bf4e0281af408efe85196e1478e0900 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Fri, 13 Sep 2024 21:03:07 -0600 Subject: [PATCH 54/85] Clean up ParticleID __str__ and __repr__. 
Implement != operator --- projects/dataclasses/private/ParticleID.cxx | 34 ++++++++++++++++--- .../private/pybindings/dataclasses.cxx | 23 +++---------- .../public/SIREN/dataclasses/ParticleID.h | 5 +++ 3 files changed, 39 insertions(+), 23 deletions(-) diff --git a/projects/dataclasses/private/ParticleID.cxx b/projects/dataclasses/private/ParticleID.cxx index 2074ddae8..666abb78d 100644 --- a/projects/dataclasses/private/ParticleID.cxx +++ b/projects/dataclasses/private/ParticleID.cxx @@ -6,15 +6,35 @@ #include #include -std::ostream& operator<<(std::ostream& os, siren::dataclasses::ParticleID const& record) { - os << "ParticleID (" << &record << ")\n"; - os << "IDSet: " << record.id_set << "\n"; - os << "MajorID: " << record.major_id << "\n"; - os << "MinorID: " << record.minor_id; +#include "SIREN/utilities/StringManipulation.h" +std::ostream& operator<<(std::ostream& os, siren::dataclasses::ParticleID const& id) { + os << to_repr(id); return os; } +std::string to_str(siren::dataclasses::ParticleID const & id) { + using siren::utilities::tab; + std::stringstream ss; + ss << "[ ParticleID (" << &id << ")\n"; + ss << tab << "IDSet: " << id.IsSet() << '\n'; + ss << tab << "MajorID: " << id.GetMajorID() << '\n'; + ss << tab << "MinorID: " << id.GetMinorID() << '\n'; + ss << ']'; + return ss.str(); +} + +std::string to_repr(siren::dataclasses::ParticleID const & id) { + std::stringstream ss; + ss << "ParticleID("; + if(id.IsSet()) + ss << id.GetMajorID() << ", " << id.GetMinorID(); + else + ss << "unset"; + ss << ")"; + return ss.str(); +} + namespace siren { namespace dataclasses { @@ -60,6 +80,10 @@ bool ParticleID::operator==(ParticleID const & other) const { return std::tie(id_set, major_id, minor_id) == std::tie(id_set, other.major_id, other.minor_id); } +bool ParticleID::operator!=(ParticleID const & other) const { + return not (*this == other); +} + // Adapted from 
https://github.com/icecube/icetray-public/blob/4436c3e10c23f95a8965c98fecccb7775a361fab/dataclasses/private/dataclasses/physics/I3Particle.cxx#L42-L93 ParticleID ParticleID::GenerateID() { int this_pid = getpid(); diff --git a/projects/dataclasses/private/pybindings/dataclasses.cxx b/projects/dataclasses/private/pybindings/dataclasses.cxx index f2f1166b0..b80199ba8 100644 --- a/projects/dataclasses/private/pybindings/dataclasses.cxx +++ b/projects/dataclasses/private/pybindings/dataclasses.cxx @@ -26,30 +26,17 @@ PYBIND11_MODULE(dataclasses, m) { .def(py::init(), py::arg("major"), py::arg("minor")) .def(py::init(), py::arg("other")) .def(py::init(), py::arg("other")) - .def_static("generate_id", &siren::dataclasses::ParticleID::GenerateID) - .def("is_set", &siren::dataclasses::ParticleID::IsSet) - //.def_property("major_id", &siren::dataclasses::ParticleID::GetMajorID, &siren::dataclasses::ParticleID::SetMajorID) - // Getters for major and minor IDs .def_property_readonly("major_id", &siren::dataclasses::ParticleID::GetMajorID) .def_property_readonly("minor_id", &siren::dataclasses::ParticleID::GetMinorID) - // Method to set the ID - .def("set", &siren::dataclasses::ParticleID::SetID, py::arg("major"), py::arg("minor")) - // Overload the bool operator .def("__bool__", &siren::dataclasses::ParticleID::operator bool) - // Comparison operators + .def("__repr__", [](siren::dataclasses::ParticleID const & id) { return to_repr(id); }) + .def("__str__", [](siren::dataclasses::ParticleID const & id) { return to_str(id); }) + .def("is_set", &siren::dataclasses::ParticleID::IsSet) + .def("set", &siren::dataclasses::ParticleID::SetID, py::arg("major"), py::arg("minor")) .def(py::self == py::self) .def(py::self != py::self) .def(py::self < py::self) - // String representation - .def("__repr__", - [](const siren::dataclasses::ParticleID &id) { - std::ostringstream oss; - oss << id; - return oss.str(); - } - ) - // Optional: Serialize method if needed in Python - // 
.def("serialize", &siren::dataclasses::ParticleID::serialize) + .def_static("generate_id", &siren::dataclasses::ParticleID::GenerateID) ; py::class_> particle(m, "Particle"); diff --git a/projects/dataclasses/public/SIREN/dataclasses/ParticleID.h b/projects/dataclasses/public/SIREN/dataclasses/ParticleID.h index c01b8a841..e2d6d9321 100644 --- a/projects/dataclasses/public/SIREN/dataclasses/ParticleID.h +++ b/projects/dataclasses/public/SIREN/dataclasses/ParticleID.h @@ -16,6 +16,8 @@ namespace siren { namespace dataclasses { class ParticleID; } } std::ostream& operator<<(std::ostream& os, siren::dataclasses::ParticleID const& record); +std::string to_str(siren::dataclasses::ParticleID const & record); +std::string to_repr(siren::dataclasses::ParticleID const & record); namespace siren { namespace dataclasses { @@ -45,7 +47,10 @@ class ParticleID { bool operator<(ParticleID const & other) const; bool operator==(ParticleID const & other) const; + bool operator!=(ParticleID const & other) const; friend std::ostream& ::operator<<(std::ostream& os, ParticleID const& record); + friend std::string (::to_str)(ParticleID const & record); + friend std::string (::to_repr)(ParticleID const & record); template void serialize(Archive & archive, std::uint32_t const version) { if(version == 0) { From ef147b693138df42b163b535960f9d94b549d7bd Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Fri, 13 Sep 2024 21:03:50 -0600 Subject: [PATCH 55/85] Use lambda in favor of separate function pointer --- .../detector/private/pybindings/DetectorSector.h | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/projects/detector/private/pybindings/DetectorSector.h b/projects/detector/private/pybindings/DetectorSector.h index f54349c9a..9285ee662 100644 --- a/projects/detector/private/pybindings/DetectorSector.h +++ b/projects/detector/private/pybindings/DetectorSector.h @@ -6,19 +6,17 @@ #include "../../public/SIREN/detector/DetectorModel.h" -std::string 
to_str(siren::detector::DetectorSector const & sector) { - std::stringstream ss; - sector.Print(ss); - return ss.str(); -} - void register_DetectorSector(pybind11::module_ & m) { using namespace pybind11; using namespace siren::detector; class_>(m, "DetectorSector") .def(init<>()) - .def("__str__", &to_str) + .def("__str__", [](const DetectorSector & sector) { + std::stringstream ss; + sector.Print(ss); + return ss.str(); + }) .def_readwrite("name",&DetectorSector::name) .def_readwrite("material_id",&DetectorSector::material_id) .def_readwrite("level", &DetectorSector::level) From 5d8339a95363b65e2a63d9f17e38ca3b10d5f778 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Fri, 13 Sep 2024 22:10:28 -0600 Subject: [PATCH 56/85] __str__ and __repr__ for InteractionRecord --- .../dataclasses/private/InteractionRecord.cxx | 134 ++++++++++++------ projects/dataclasses/private/ParticleID.cxx | 2 +- .../private/pybindings/dataclasses.cxx | 33 +++-- .../SIREN/dataclasses/InteractionRecord.h | 14 ++ .../utilities/private/StringManipulation.cxx | 20 ++- .../SIREN/utilities/StringManipulation.h | 1 + 6 files changed, 136 insertions(+), 68 deletions(-) diff --git a/projects/dataclasses/private/InteractionRecord.cxx b/projects/dataclasses/private/InteractionRecord.cxx index 48c52530b..1a166db10 100644 --- a/projects/dataclasses/private/InteractionRecord.cxx +++ b/projects/dataclasses/private/InteractionRecord.cxx @@ -4,6 +4,8 @@ #include // for tie, operator==, tuple #include #include // for operator<<, basic_ostream, char_traits, endl, ost... 
+ // +#include "SIREN/utilities/StringManipulation.h" // for tab std::ostream& operator<<(std::ostream& os, siren::dataclasses::InteractionRecord const& record); std::ostream& operator<<(std::ostream& os, siren::dataclasses::PrimaryDistributionRecord const& record); @@ -1099,61 +1101,101 @@ std::ostream& operator<<(std::ostream& os, siren::dataclasses::SecondaryDistribu return os; } -std::ostream& operator<<(std::ostream& os, siren::dataclasses::InteractionRecord const& record) { - std::stringstream ss; - ss << "InteractionRecord (" << &record << ") "; - os << ss.str() << '\n'; - os << "Signature(" << &record.signature << "): " << record.signature.primary_type << " + " << record.signature.target_type << " ->"; - for(auto secondary: record.signature.secondary_types) { - os << " " << secondary; - } - os << "\n"; +std::ostream& operator<<(std::ostream& os, siren::dataclasses::InteractionRecord const & record) { + os << to_repr(record); + return os; +} - ss.str(std::string()); - std::string id_str; - ss << record.primary_id; - id_str = ss.str(); - std::string from = "\n"; - std::string to = "\n "; - size_t start_pos = 0; - while((start_pos = id_str.find(from, start_pos)) != std::string::npos) { - id_str.replace(start_pos, from.length(), to); - start_pos += to.length(); // Handles case where 'to' is a substring of 'from' - } - ss << "PrimaryID: " << id_str << "\n"; - os << "PrimaryInitialPosition: " << record.primary_initial_position.at(0) << " " << record.primary_initial_position.at(1) << " " << record.primary_initial_position.at(2) << "\n"; - os << "InteractionVertex: " << record.interaction_vertex.at(0) << " " << record.interaction_vertex.at(1) << " " << record.interaction_vertex.at(2) << "\n"; - os << "PrimaryMass: " << record.primary_mass << "\n"; - os << "PrimaryMomentum: " << record.primary_momentum.at(0) << " " << record.primary_momentum.at(1) << " " << record.primary_momentum.at(2) << " " << record.primary_momentum.at(3) << "\n"; - os << "TargetID: " << 
record.target_id << "\n"; - os << "TargetMass: " << record.target_mass << "\n"; - os << "SecondaryIDs:\n"; +std::string to_str(siren::dataclasses::InteractionRecord const & record) { + using siren::utilities::tab; + std::stringstream ss; + ss << "[ InteractionRecord (" << &record << "):\n"; + ss << tab << "InteractionSignature: " << record.signature.primary_type << " + " << record.signature.target_type << " ->"; + if(record.signature.secondary_types.size() > 3) { + ss << '\n'; + ss << tab << tab; + } + for(auto secondary: record.signature.secondary_types) + ss << " " << secondary; + ss << '\n'; + + ss << tab << "PrimaryID: " << to_repr(record.primary_id) << '\n'; + ss << tab << "PrimaryInitialPosition: " << record.primary_initial_position.at(0) << " " << record.primary_initial_position.at(1) << " " << record.primary_initial_position.at(2) << '\n'; + ss << tab << "InteractionVertex: " << record.interaction_vertex.at(0) << " " << record.interaction_vertex.at(1) << " " << record.interaction_vertex.at(2) << '\n'; + ss << tab << "PrimaryMass: " << record.primary_mass << '\n'; + ss << tab << "PrimaryMomentum: " << record.primary_momentum.at(0) << " " << record.primary_momentum.at(1) << " " << record.primary_momentum.at(2) << " " << record.primary_momentum.at(3) << '\n'; + ss << tab << "TargetID: " << to_repr(record.target_id) << '\n'; + ss << tab << "TargetMass: " << record.target_mass << '\n'; + ss << tab << "SecondaryIDs:\n"; for(auto const & secondary: record.secondary_ids) { - ss.str(std::string()); - id_str.clear(); - ss << secondary; - id_str = ss.str(); - start_pos = 0; - while((start_pos = id_str.find(from, start_pos)) != std::string::npos) { - id_str.replace(start_pos, from.length(), to); - start_pos += to.length(); // Handles case where 'to' is a substring of 'from' - } - os << "\t" << id_str << "\n"; + ss << tab << tab << to_repr(secondary) << '\n'; } - os << "SecondaryMomenta:\n"; + ss << tab << "SecondaryMomenta:\n"; for(auto const & secondary: 
record.secondary_momenta) { - os << "\t" << secondary.at(0) << " " << secondary.at(1) << " " << secondary.at(2) << " " << secondary.at(3) << "\n"; + ss << tab << tab << secondary.at(0) << " " << secondary.at(1) << " " << secondary.at(2) << " " << secondary.at(3) << '\n'; } - os << "SecondaryMasses:\n"; + ss << tab << "SecondaryMasses:\n"; for(auto const & secondary: record.secondary_masses) { - os << "\t" << secondary << "\n"; + ss << tab << tab << secondary << '\n'; } - os << "InteractionParameters:\n"; + ss << tab << "InteractionParameters:\n"; for(std::pair const & param : record.interaction_parameters) { - os << "\t\"" << param.first << "\": " << param.second << "\n"; + ss << tab << tab << '\"' << param.first << "\": " << param.second << '\n'; } - os << std::endl; + ss << ']'; - return os; + return ss.str(); +} + +std::string to_repr(siren::dataclasses::InteractionRecord const & record) { + using siren::utilities::tab; + std::stringstream ss; + ss << "InteractionRecord("; + ss << record.signature.primary_type << " + " << record.signature.target_type << " ->"; + for(auto secondary: record.signature.secondary_types) + ss << " " << secondary; + ss << ", "; + ss << "primary_id=" << to_repr(record.primary_id) << ", "; + ss << "primary_initial_position=(" << record.primary_initial_position.at(0) << ", " << record.primary_initial_position.at(1) << ", " << record.primary_initial_position.at(2) << "), "; + ss << "interaction_vertex=(" << record.interaction_vertex.at(0) << ", " << record.interaction_vertex.at(1) << ", " << record.interaction_vertex.at(2) << "), "; + ss << "primary_mass=" << record.primary_mass << ", "; + ss << "primary_momentum=(" << record.primary_momentum.at(0) << ", " << record.primary_momentum.at(1) << ", " << record.primary_momentum.at(2) << ", " << record.primary_momentum.at(3) << "), "; + ss << "target_id=" << to_repr(record.target_id) << ", "; + ss << "target_mass=" << record.target_mass << ", "; + ss << "secondary_ids=["; + 
if(record.secondary_ids.size() > 0) { + ss << to_repr(record.secondary_ids.at(0)); + for(size_t i=1; i 0) { + ss << "(" << record.secondary_momenta.at(0).at(0) << ", " << record.secondary_momenta.at(0).at(1) << ", " << record.secondary_momenta.at(0).at(2) << ", " << record.secondary_momenta.at(0).at(3) << ")"; + for(size_t i=1; i 0) { + ss << record.secondary_masses.at(0); + for(size_t i=1; i 0) { + auto it = record.interaction_parameters.begin(); + ss << '\"' << it->first << "\": " << it->second; + for(++it; it != record.interaction_parameters.end(); ++it) { + ss << ", \"" << it->first << "\": " << it->second; + } + } + ss << "}"; + ss << ")"; + return ss.str(); } diff --git a/projects/dataclasses/private/ParticleID.cxx b/projects/dataclasses/private/ParticleID.cxx index 666abb78d..e6e3268af 100644 --- a/projects/dataclasses/private/ParticleID.cxx +++ b/projects/dataclasses/private/ParticleID.cxx @@ -8,7 +8,7 @@ #include "SIREN/utilities/StringManipulation.h" -std::ostream& operator<<(std::ostream& os, siren::dataclasses::ParticleID const& id) { +std::ostream& operator<<(std::ostream& os, siren::dataclasses::ParticleID const & id) { os << to_repr(id); return os; } diff --git a/projects/dataclasses/private/pybindings/dataclasses.cxx b/projects/dataclasses/private/pybindings/dataclasses.cxx index b80199ba8..04709bfab 100644 --- a/projects/dataclasses/private/pybindings/dataclasses.cxx +++ b/projects/dataclasses/private/pybindings/dataclasses.cxx @@ -19,9 +19,7 @@ PYBIND11_MODULE(dataclasses, m) { using namespace siren::dataclasses; // Create a Python class binding for siren::dataclasses::ParticleID - py::class_ particle_id(m, "ParticleID"); - - particle_id + py::class_(m, "ParticleID") .def(py::init<>()) .def(py::init(), py::arg("major"), py::arg("minor")) .def(py::init(), py::arg("other")) @@ -39,9 +37,8 @@ PYBIND11_MODULE(dataclasses, m) { .def_static("generate_id", &siren::dataclasses::ParticleID::GenerateID) ; - py::class_> particle(m, "Particle"); - - 
particle.def(py::init<>()) + py::class_>(m, "Particle") + .def(py::init<>()) .def(py::init()) .def(py::init, std::array, double, double>()) .def(py::init, std::array, double, double>()) @@ -53,7 +50,8 @@ PYBIND11_MODULE(dataclasses, m) { .def_readwrite("position",&Particle::position) .def_readwrite("length",&Particle::length) .def_readwrite("helicity",&Particle::helicity) - .def("generate_id",&Particle::GenerateID); + .def("generate_id",&Particle::GenerateID) + ; py::enum_(m, "ParticleType", py::arithmetic()) #define X(a, b) .value( #a , ParticleType:: a ) @@ -67,7 +65,8 @@ PYBIND11_MODULE(dataclasses, m) { .def("__repr__", [](InteractionSignature const & s) { return to_repr(s); }) .def_readwrite("primary_type",&InteractionSignature::primary_type) .def_readwrite("target_type",&InteractionSignature::target_type) - .def_readwrite("secondary_types",&InteractionSignature::secondary_types); + .def_readwrite("secondary_types",&InteractionSignature::secondary_types) + ; py::class_>(m, "PrimaryDistributionRecord") .def(py::init()) @@ -106,7 +105,8 @@ PYBIND11_MODULE(dataclasses, m) { .def_property("three_momentum", ((std::array const & (SecondaryParticleRecord::*)())(&SecondaryParticleRecord::GetThreeMomentum)), &SecondaryParticleRecord::SetThreeMomentum) .def_property("four_momentum", ((std::array (SecondaryParticleRecord::*)())(&SecondaryParticleRecord::GetFourMomentum)), &SecondaryParticleRecord::SetFourMomentum) .def_property("helicity", ((double const & (SecondaryParticleRecord::*)())(&SecondaryParticleRecord::GetHelicity)), &SecondaryParticleRecord::SetHelicity) - .def("finalize", &SecondaryParticleRecord::Finalize); + .def("finalize", &SecondaryParticleRecord::Finalize) + ; py::class_>(m, "CrossSectionDistributionRecord") .def(py::init()) @@ -144,12 +144,14 @@ PYBIND11_MODULE(dataclasses, m) { .def("get_econdary_particle_records", [](siren::dataclasses::CrossSectionDistributionRecord & cdr) -> std::vector & {return cdr.GetSecondaryParticleRecords();}, 
py::return_value_policy::reference_internal) - .def("finalize", &CrossSectionDistributionRecord::Finalize); + .def("finalize", &CrossSectionDistributionRecord::Finalize) + ; py::class_>(m, "InteractionRecord") .def(py::init<>()) - .def("__str__", [](InteractionRecord const & r) { std::stringstream ss; ss << r; return ss.str(); }) + .def("__str__", [](InteractionRecord const & r) { return to_str(r); }) + .def("__repr__", [](InteractionRecord const & r) { return to_str(r); }) .def_readwrite("signature",&InteractionRecord::signature) .def_readwrite("primary_mass",&InteractionRecord::primary_mass) .def_readwrite("primary_momentum",&InteractionRecord::primary_momentum) @@ -160,20 +162,23 @@ PYBIND11_MODULE(dataclasses, m) { .def_readwrite("secondary_masses",&InteractionRecord::secondary_masses) .def_readwrite("secondary_momenta",&InteractionRecord::secondary_momenta) .def_readwrite("secondary_helicities",&InteractionRecord::secondary_helicities) - .def_readwrite("interaction_parameters",&InteractionRecord::interaction_parameters); + .def_readwrite("interaction_parameters",&InteractionRecord::interaction_parameters) + ; py::class_>(m, "InteractionTreeDatum") .def(py::init()) .def_readwrite("record",&InteractionTreeDatum::record) .def_readwrite("parent",&InteractionTreeDatum::parent) .def_readwrite("daughters",&InteractionTreeDatum::daughters) - .def("depth",&InteractionTreeDatum::depth); + .def("depth",&InteractionTreeDatum::depth) + ; py::class_>(m, "InteractionTree") .def(py::init<>()) .def_readwrite("tree",&InteractionTree::tree) .def("add_entry",static_cast (InteractionTree::*)(InteractionTreeDatum&,std::shared_ptr)>(&InteractionTree::add_entry)) - .def("add_entry",static_cast (InteractionTree::*)(InteractionRecord&,std::shared_ptr)>(&InteractionTree::add_entry)); + .def("add_entry",static_cast (InteractionTree::*)(InteractionRecord&,std::shared_ptr)>(&InteractionTree::add_entry)) + ; m.def("SaveInteractionTrees",&SaveInteractionTrees); 
m.def("LoadInteractionTrees",&LoadInteractionTrees, py::return_value_policy::reference); diff --git a/projects/dataclasses/public/SIREN/dataclasses/InteractionRecord.h b/projects/dataclasses/public/SIREN/dataclasses/InteractionRecord.h index 061ee0118..8cf47a89b 100644 --- a/projects/dataclasses/public/SIREN/dataclasses/InteractionRecord.h +++ b/projects/dataclasses/public/SIREN/dataclasses/InteractionRecord.h @@ -37,6 +37,18 @@ std::ostream& operator<<(std::ostream& os, siren::dataclasses::SecondaryParticle std::ostream& operator<<(std::ostream& os, siren::dataclasses::CrossSectionDistributionRecord const & record); std::ostream& operator<<(std::ostream& os, siren::dataclasses::SecondaryDistributionRecord const & record); +std::string to_str(siren::dataclasses::InteractionRecord const & record); +std::string to_str(siren::dataclasses::PrimaryDistributionRecord const & record); +std::string to_str(siren::dataclasses::SecondaryParticleRecord const & record); +std::string to_str(siren::dataclasses::CrossSectionDistributionRecord const & record); +std::string to_str(siren::dataclasses::SecondaryDistributionRecord const & record); + +std::string to_repr(siren::dataclasses::InteractionRecord const & record); +std::string to_repr(siren::dataclasses::PrimaryDistributionRecord const & record); +std::string to_repr(siren::dataclasses::SecondaryParticleRecord const & record); +std::string to_repr(siren::dataclasses::CrossSectionDistributionRecord const & record); +std::string to_repr(siren::dataclasses::SecondaryDistributionRecord const & record); + namespace siren { namespace dataclasses { @@ -265,6 +277,8 @@ class InteractionRecord { bool operator==(InteractionRecord const & other) const; bool operator<(InteractionRecord const & other) const; friend std::ostream& ::operator<<(std::ostream& os, InteractionRecord const& record); + friend std::string (::to_str)(InteractionRecord const & record); + friend std::string (::to_repr)(InteractionRecord const & record); template void 
save(Archive & archive, std::uint32_t const version) const { diff --git a/projects/utilities/private/StringManipulation.cxx b/projects/utilities/private/StringManipulation.cxx index 96f009f65..8d4e4dd38 100644 --- a/projects/utilities/private/StringManipulation.cxx +++ b/projects/utilities/private/StringManipulation.cxx @@ -15,27 +15,33 @@ std::string add_prefix(std::string const & input, std::string const & prefix) { size_t line_number = 0; // Read each line and track the last non-empty line - while (std::getline(iss, line)) { + while(std::getline(iss, line)) { lines.push_back(line); - if (!line.empty()) { + if (not line.empty()) { last_non_empty_line = line_number; } - line_number++; + ++line_number; } std::ostringstream oss; // Add prefix to each line up to the last non-empty line - if (last_non_empty_line >= 0) { - for (size_t i = 0; i <= static_cast(last_non_empty_line); ++i) { + if(last_non_empty_line >= 0) { + for (size_t i = 0; i < static_cast(last_non_empty_line); ++i) { oss << prefix << lines[i] << '\n'; } - // Ensure the string ends with an empty newline - oss << '\n'; + oss << prefix << lines[last_non_empty_line]; } return oss.str(); } +std::string indent(std::string const & input, size_t n_indent) { + std::stringstream ss; + for(size_t i = 0; i < n_indent; ++i) + ss << tab; + return add_prefix(input, ss.str()); +} + } // namespace utilities } // namespace siren diff --git a/projects/utilities/public/SIREN/utilities/StringManipulation.h b/projects/utilities/public/SIREN/utilities/StringManipulation.h index ef3d90a76..e5902f737 100644 --- a/projects/utilities/public/SIREN/utilities/StringManipulation.h +++ b/projects/utilities/public/SIREN/utilities/StringManipulation.h @@ -10,6 +10,7 @@ namespace utilities { constexpr char const * tab = " "; std::string add_prefix(std::string const & input, std::string const & prefix); +std::string indent(std::string const & input, size_t n_indent = 1); } // namespace utilities } // namespace siren From 
e7aeb09180a708840cb38e84be13f41fad1954b2 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Fri, 13 Sep 2024 22:15:48 -0600 Subject: [PATCH 57/85] to_str --> to_repr --- projects/dataclasses/private/pybindings/dataclasses.cxx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/projects/dataclasses/private/pybindings/dataclasses.cxx b/projects/dataclasses/private/pybindings/dataclasses.cxx index 04709bfab..fe082afff 100644 --- a/projects/dataclasses/private/pybindings/dataclasses.cxx +++ b/projects/dataclasses/private/pybindings/dataclasses.cxx @@ -151,7 +151,7 @@ PYBIND11_MODULE(dataclasses, m) { py::class_>(m, "InteractionRecord") .def(py::init<>()) .def("__str__", [](InteractionRecord const & r) { return to_str(r); }) - .def("__repr__", [](InteractionRecord const & r) { return to_str(r); }) + .def("__repr__", [](InteractionRecord const & r) { return to_repr(r); }) .def_readwrite("signature",&InteractionRecord::signature) .def_readwrite("primary_mass",&InteractionRecord::primary_mass) .def_readwrite("primary_momentum",&InteractionRecord::primary_momentum) From 83e609b8e1877fcd9fc43583eaa224f9d462805c Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Fri, 13 Sep 2024 23:20:21 -0600 Subject: [PATCH 58/85] __str__ and __repr__ for CrossSectionDistributionRecord and SecondaryParticleRecord --- .../dataclasses/private/InteractionRecord.cxx | 300 +++++++++++------- .../private/pybindings/dataclasses.cxx | 4 + .../SIREN/dataclasses/InteractionRecord.h | 6 + 3 files changed, 200 insertions(+), 110 deletions(-) diff --git a/projects/dataclasses/private/InteractionRecord.cxx b/projects/dataclasses/private/InteractionRecord.cxx index 1a166db10..c8ccef439 100644 --- a/projects/dataclasses/private/InteractionRecord.cxx +++ b/projects/dataclasses/private/InteractionRecord.cxx @@ -929,137 +929,217 @@ std::ostream & operator<<(std::ostream & os, siren::dataclasses::PrimaryDistribu return os; } -std::ostream & operator<<(std::ostream & os, 
siren::dataclasses::CrossSectionDistributionRecord const & record) { - std::stringstream ss; - ss << "CrossSectionDistributionRecord (" << &record << ") "; - os << ss.str() << '\n'; - - ss.str(std::string()); - std::string id_str; - ss << record.GetPrimaryID(); - id_str = ss.str(); - std::string from = "\n"; - std::string to = "\n "; - size_t start_pos = 0; - while((start_pos = id_str.find(from, start_pos)) != std::string::npos) { - id_str.replace(start_pos, from.length(), to); - start_pos += to.length(); // Handles case where 'to' is a substring of 'from' - } - os << "PrimaryID: " << id_str << "\n"; - - os << "PrimaryType: " << record.primary_type << "\n"; - - os << "PrimaryInitialPosition: " << record.primary_initial_position.at(0) << " " << record.primary_initial_position.at(1) << " " << record.primary_initial_position.at(2) << "\n"; - - os << "PrimaryMass: " << record.primary_mass << "\n"; - - os << "PrimaryMomentum: " << record.primary_momentum.at(0) << " " << record.primary_momentum.at(1) << " " << record.primary_momentum.at(2) << " " << record.primary_momentum.at(3) << "\n"; - - os << "PrimaryHelicity: " << record.primary_helicity << "\n"; - - os << "InteractionVertex: " << record.interaction_vertex.at(0) << " " << record.interaction_vertex.at(1) << " " << record.interaction_vertex.at(2) << "\n"; - - ss.str(std::string()); - ss << record.GetTargetID(); - id_str = ss.str(); - start_pos = 0; - while((start_pos = id_str.find(from, start_pos)) != std::string::npos) { - id_str.replace(start_pos, from.length(), to); - start_pos += to.length(); // Handles case where 'to' is a substring of 'from' - } - os << "TargetID: " << id_str << "\n"; - - os << "TargetType: " << record.target_type << "\n"; +std::ostream& operator<<(std::ostream& os, siren::dataclasses::CrossSectionDistributionRecord const& record) { + os << to_repr(record); + return os; +} - os << "TargetMass: " << record.target_mass << "\n"; +std::string 
to_str(siren::dataclasses::CrossSectionDistributionRecord const & record) { + using siren::utilities::tab; + using siren::utilities::indent; + std::stringstream ss; - os << "TargetHelicity: " << record.target_helicity << "\n"; + ss << "[ CrossSectionDistributionRecord (" << &record << "):\n"; + ss << tab << "PrimaryID: " << to_repr(record.GetPrimaryID()) << '\n'; + ss << tab << "PrimaryType: " << record.primary_type << '\n'; + ss << tab << "PrimaryInitialPosition: " + << record.primary_initial_position.at(0) << " " + << record.primary_initial_position.at(1) << " " + << record.primary_initial_position.at(2) << '\n'; + ss << tab << "PrimaryMass: " << record.primary_mass << '\n'; + ss << tab << "PrimaryMomentum: " + << record.primary_momentum.at(0) << " " + << record.primary_momentum.at(1) << " " + << record.primary_momentum.at(2) << " " + << record.primary_momentum.at(3) << '\n'; + ss << tab << "PrimaryHelicity: " << record.primary_helicity << '\n'; + ss << tab << "InteractionVertex: " + << record.interaction_vertex.at(0) << " " + << record.interaction_vertex.at(1) << " " + << record.interaction_vertex.at(2) << '\n'; + ss << tab << "TargetID: " << to_repr(record.GetTargetID()) << '\n'; + ss << tab << "TargetType: " << record.target_type << '\n'; + ss << tab << "TargetMass: " << record.target_mass << '\n'; + ss << tab << "TargetHelicity: " << record.target_helicity << '\n'; - if(record.interaction_parameters.size() > 0) { - os << "InteractionParameters:\n"; - for(auto const & parameter: record.interaction_parameters) { - os << "\t" << parameter.first << ": " << parameter.second << "\n"; + ss << tab << "InteractionParameters:\n"; + if (!record.interaction_parameters.empty()) { + for (const auto& parameter : record.interaction_parameters) { + ss << tab << tab << parameter.first << ": " << parameter.second << '\n'; } - } else { - os << "InteractionParameters: " << "None" << "\n"; } - os << "SecondaryParticles:\n"; - std::string secondary_str; - for(size_t i = 0; i < 
record.signature.secondary_types.size(); ++i) { - ss.str(std::string()); - ss << record.GetSecondaryParticleRecord(i); - secondary_str = ss.str(); - start_pos = 0; - while((start_pos = secondary_str.find(from, start_pos)) != std::string::npos) { - secondary_str.replace(start_pos, from.length(), to); - start_pos += to.length(); // Handles case where 'to' is a substring of 'from' - } - os << secondary_str << "\n"; + ss << tab << "SecondaryParticles:\n"; + for(size_t i=0; ifirst << "\": " << it->second; + for (++it; it != record.interaction_parameters.end(); ++it) { + ss << ", \"" << it->first << "\": " << it->second; + } } + ss << "}, "; + + // Secondary Particles + ss << "secondary_particles=["; + for (size_t i = 0; i < record.signature.secondary_types.size(); ++i) { + siren::dataclasses::SecondaryParticleRecord const & secondary = record.secondary_particles[i]; + if (i > 0) ss << ", "; + ss << "{"; + ss << "index=" << secondary.secondary_index << ", "; + ss << "id=" << to_repr(secondary.id) << ", "; + ss << "type=" << secondary.type << ", "; + ss << "initial_position=(" + << secondary.initial_position.at(0) << ", " + << secondary.initial_position.at(1) << ", " + << secondary.initial_position.at(2) << ")"; + if (secondary.mass_set) + ss << ", mass=" << secondary.mass; + if (secondary.energy_set) + ss << ", energy=" << secondary.energy; + if (secondary.kinetic_energy_set) + ss << ", kinetic_energy=" << secondary.kinetic_energy; + if (secondary.direction_set) + ss << ", direction=(" + << secondary.direction.at(0) << ", " + << secondary.direction.at(1) << ", " + << secondary.direction.at(2) << ")"; + if (secondary.momentum_set) + ss << ", momentum=(" + << secondary.momentum.at(0) << ", " + << secondary.momentum.at(1) << ", " + << secondary.momentum.at(2) << ")"; + if (secondary.helicity_set) + ss << ", helicity=" << secondary.helicity; + ss << "}"; + } + ss << "]"; - if(record.energy_set) { - os << "Energy: " << record.energy << "\n"; - } else { - os << "Energy: " << 
"None" << "\n"; - } + ss << ")"; - if(record.kinetic_energy_set) { - os << "KineticEnergy: " << record.kinetic_energy << "\n"; - } else { - os << "KineticEnergy: " << "None" << "\n"; - } + return ss.str(); +} - if(record.direction_set) { - os << "Direction: " << record.direction.at(0) << " " << record.direction.at(1) << " " << record.direction.at(2) << "\n"; - } else { - os << "Direction: " << "None" << "\n"; - } - if(record.momentum_set) { - os << "Momentum: " << record.momentum.at(0) << " " << record.momentum.at(1) << " " << record.momentum.at(2) << "\n"; - } else { - os << "Momentum: " << "None" << "\n"; - } +std::ostream & operator<<(std::ostream & os, siren::dataclasses::SecondaryParticleRecord const & record) { + os << to_repr(record); + return os; +} - os << "InitialPosition: " << record.initial_position.at(0) << " " << record.initial_position.at(1) << " " << record.initial_position.at(2) << "\n"; +std::string to_str(siren::dataclasses::SecondaryParticleRecord const& record) { + using siren::utilities::tab; + using siren::utilities::indent; + std::stringstream ss; + ss << "[ SecondaryParticleRecord (" << &record << "):\n"; + ss << tab << "Index: " << record.secondary_index << '\n'; + ss << tab << "ID: " << to_repr(record.id) << '\n'; + ss << tab << "Type: " << record.type << '\n'; + ss << tab << "InitialPosition: " + << record.initial_position.at(0) << " " + << record.initial_position.at(1) << " " + << record.initial_position.at(2) << '\n'; + ss << tab << "Mass: "; + if (record.mass_set) + ss << record.mass << '\n'; + else + ss << "unset\n"; + ss << tab << "Energy: "; + if (record.energy_set) + ss << record.energy << '\n'; + else + ss << "unset\n"; + ss << tab << "KineticEnergy: "; + if (record.kinetic_energy_set) + ss << record.kinetic_energy << '\n'; + else + ss << "unset\n"; + ss << tab << "Direction: "; + if (record.direction_set) + ss << record.direction.at(0) << " " << record.direction.at(1) << " " << record.direction.at(2) << '\n'; + else + ss << 
"unset\n"; + ss << tab << "Momentum: "; + if (record.momentum_set) + ss << record.momentum.at(0) << " " << record.momentum.at(1) << " " << record.momentum.at(2) << '\n'; + else + ss << "unset\n"; + ss << tab << "Helicity: "; + if (record.helicity_set) + ss << record.helicity << '\n'; + else + ss << "unset\n"; + ss << ']'; - if(record.helicity_set) { - os << "Helicity: " << record.helicity << "\n"; - } else { - os << "Helicity: " << "None" << "\n"; - } + return ss.str(); +} - return os; +std::string to_repr(siren::dataclasses::SecondaryParticleRecord const& record) { + std::stringstream ss; + ss << "SecondaryParticleRecord("; + ss << "index=" << record.secondary_index << ", "; + ss << "id=" << to_repr(record.id) << ", "; + ss << "type=" << record.type << ", "; + ss << "initial_position=(" + << record.initial_position.at(0) << ", " + << record.initial_position.at(1) << ", " + << record.initial_position.at(2) << ")"; + if (record.mass_set) + ss << ", mass=" << record.mass; + if (record.energy_set) + ss << ", energy=" << record.energy; + if (record.kinetic_energy_set) + ss << ", kinetic_energy=" << record.kinetic_energy; + if (record.direction_set) + ss << ", direction=(" + << record.direction.at(0) << ", " + << record.direction.at(1) << ", " + << record.direction.at(2) << ")"; + if (record.momentum_set) + ss << ", momentum=(" + << record.momentum.at(0) << ", " + << record.momentum.at(1) << ", " + << record.momentum.at(2) << ")"; + if (record.helicity_set) + ss << ", helicity=" << record.helicity; + ss << ")"; + return ss.str(); } std::ostream& operator<<(std::ostream& os, siren::dataclasses::SecondaryDistributionRecord const& record) { diff --git a/projects/dataclasses/private/pybindings/dataclasses.cxx b/projects/dataclasses/private/pybindings/dataclasses.cxx index fe082afff..7cf9e2990 100644 --- a/projects/dataclasses/private/pybindings/dataclasses.cxx +++ b/projects/dataclasses/private/pybindings/dataclasses.cxx @@ -90,6 +90,8 @@ PYBIND11_MODULE(dataclasses, m) { 
py::class_>(m, "SecondaryParticleRecord") .def(py::init()) + .def("__str__", [](SecondaryParticleRecord const & spr) { return to_str(spr); }) + .def("__repr__", [](SecondaryParticleRecord const & spr) { return to_repr(spr); }) .def_property_readonly("id", [](siren::dataclasses::SecondaryParticleRecord const & spr) {siren::dataclasses::ParticleID id = spr.id; return id;}) .def_property_readonly("type", @@ -110,6 +112,8 @@ PYBIND11_MODULE(dataclasses, m) { py::class_>(m, "CrossSectionDistributionRecord") .def(py::init()) + .def("__str__", [](CrossSectionDistributionRecord const & cdr) { return to_str(cdr); }) + .def("__repr__", [](CrossSectionDistributionRecord const & cdr) { return to_repr(cdr); }) .def_property_readonly("record", [](siren::dataclasses::CrossSectionDistributionRecord const & cdr) {siren::dataclasses::InteractionRecord ir = cdr.record; return ir;}) .def_property_readonly("signature", diff --git a/projects/dataclasses/public/SIREN/dataclasses/InteractionRecord.h b/projects/dataclasses/public/SIREN/dataclasses/InteractionRecord.h index 8cf47a89b..9388d25e1 100644 --- a/projects/dataclasses/public/SIREN/dataclasses/InteractionRecord.h +++ b/projects/dataclasses/public/SIREN/dataclasses/InteractionRecord.h @@ -153,6 +153,10 @@ class SecondaryParticleRecord { mutable double helicity = 0; public: friend std::ostream& ::operator<<(std::ostream& os, SecondaryParticleRecord const& record); + friend std::string (::to_str)(SecondaryParticleRecord const & record); + friend std::string (::to_repr)(SecondaryParticleRecord const & record); + friend std::string (::to_str)(CrossSectionDistributionRecord const & record); + friend std::string (::to_repr)(CrossSectionDistributionRecord const & record); SecondaryParticleRecord(SecondaryParticleRecord const & other) = delete; SecondaryParticleRecord(SecondaryParticleRecord && other) = default; @@ -217,6 +221,8 @@ class CrossSectionDistributionRecord { std::vector secondary_particles; public: friend std::ostream& 
::operator<<(std::ostream& os, CrossSectionDistributionRecord const& record); + friend std::string (::to_str)(CrossSectionDistributionRecord const & record); + friend std::string (::to_repr)(CrossSectionDistributionRecord const & record); CrossSectionDistributionRecord(CrossSectionDistributionRecord const & other) = delete; CrossSectionDistributionRecord(CrossSectionDistributionRecord && other) = default; From 19778c4c8deeef8a81d492298867449f34019e26 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Sat, 14 Sep 2024 11:18:40 -0600 Subject: [PATCH 59/85] Specify namespace for args of forward declared output functions. --- .../public/SIREN/dataclasses/InteractionRecord.h | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/projects/dataclasses/public/SIREN/dataclasses/InteractionRecord.h b/projects/dataclasses/public/SIREN/dataclasses/InteractionRecord.h index 9388d25e1..c5c7da68f 100644 --- a/projects/dataclasses/public/SIREN/dataclasses/InteractionRecord.h +++ b/projects/dataclasses/public/SIREN/dataclasses/InteractionRecord.h @@ -153,10 +153,10 @@ class SecondaryParticleRecord { mutable double helicity = 0; public: friend std::ostream& ::operator<<(std::ostream& os, SecondaryParticleRecord const& record); - friend std::string (::to_str)(SecondaryParticleRecord const & record); - friend std::string (::to_repr)(SecondaryParticleRecord const & record); - friend std::string (::to_str)(CrossSectionDistributionRecord const & record); - friend std::string (::to_repr)(CrossSectionDistributionRecord const & record); + friend std::string (::to_str)(siren::dataclasses::SecondaryParticleRecord const & record); + friend std::string (::to_repr)(siren::dataclasses::SecondaryParticleRecord const & record); + friend std::string (::to_str)(siren::dataclasses::CrossSectionDistributionRecord const & record); + friend std::string (::to_repr)(siren::dataclasses::CrossSectionDistributionRecord const & record); SecondaryParticleRecord(SecondaryParticleRecord 
const & other) = delete; SecondaryParticleRecord(SecondaryParticleRecord && other) = default; From 764f63d04542c0a4f64afa45ffe24c76ac577a91 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Sat, 14 Sep 2024 11:38:45 -0600 Subject: [PATCH 60/85] Add output functions as friends. More specific namespaces. --- .../SIREN/dataclasses/InteractionRecord.h | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/projects/dataclasses/public/SIREN/dataclasses/InteractionRecord.h b/projects/dataclasses/public/SIREN/dataclasses/InteractionRecord.h index c5c7da68f..5ee85fd48 100644 --- a/projects/dataclasses/public/SIREN/dataclasses/InteractionRecord.h +++ b/projects/dataclasses/public/SIREN/dataclasses/InteractionRecord.h @@ -80,7 +80,9 @@ class PrimaryDistributionRecord { mutable std::array interaction_vertex; mutable double helicity = 0; public: - friend std::ostream& ::operator<<(std::ostream& os, PrimaryDistributionRecord const& record); + friend std::ostream& ::operator<<(std::ostream& os, siren::dataclasses::PrimaryDistributionRecord const& record); + friend std::string (::to_str)(siren::dataclasses::PrimaryDistributionRecord const & record); + friend std::string (::to_repr)(siren::dataclasses::PrimaryDistributionRecord const & record); PrimaryDistributionRecord(PrimaryDistributionRecord const & other) = delete; PrimaryDistributionRecord(PrimaryDistributionRecord && other) = default; @@ -152,7 +154,7 @@ class SecondaryParticleRecord { mutable std::array momentum = {0, 0, 0}; mutable double helicity = 0; public: - friend std::ostream& ::operator<<(std::ostream& os, SecondaryParticleRecord const& record); + friend std::ostream& ::operator<<(std::ostream& os, siren::dataclasses::SecondaryParticleRecord const& record); friend std::string (::to_str)(siren::dataclasses::SecondaryParticleRecord const & record); friend std::string (::to_repr)(siren::dataclasses::SecondaryParticleRecord const & record); friend std::string 
(::to_str)(siren::dataclasses::CrossSectionDistributionRecord const & record); @@ -220,9 +222,9 @@ class CrossSectionDistributionRecord { private: std::vector secondary_particles; public: - friend std::ostream& ::operator<<(std::ostream& os, CrossSectionDistributionRecord const& record); - friend std::string (::to_str)(CrossSectionDistributionRecord const & record); - friend std::string (::to_repr)(CrossSectionDistributionRecord const & record); + friend std::ostream& ::operator<<(std::ostream& os, siren::dataclasses::CrossSectionDistributionRecord const& record); + friend std::string (::to_str)(siren::dataclasses::CrossSectionDistributionRecord const & record); + friend std::string (::to_repr)(siren::dataclasses::CrossSectionDistributionRecord const & record); CrossSectionDistributionRecord(CrossSectionDistributionRecord const & other) = delete; CrossSectionDistributionRecord(CrossSectionDistributionRecord && other) = default; @@ -282,9 +284,9 @@ class InteractionRecord { bool operator==(InteractionRecord const & other) const; bool operator<(InteractionRecord const & other) const; - friend std::ostream& ::operator<<(std::ostream& os, InteractionRecord const& record); - friend std::string (::to_str)(InteractionRecord const & record); - friend std::string (::to_repr)(InteractionRecord const & record); + friend std::ostream& ::operator<<(std::ostream& os, siren::dataclasses::InteractionRecord const& record); + friend std::string (::to_str)(siren::dataclasses::InteractionRecord const & record); + friend std::string (::to_repr)(siren::dataclasses::InteractionRecord const & record); template void save(Archive & archive, std::uint32_t const version) const { From 4c298f1abdef660cbc977db4fe89ce312d46d9e6 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Sat, 14 Sep 2024 11:42:39 -0600 Subject: [PATCH 61/85] __str__ and __repr__ for PrimaryDistributionRecord --- .../dataclasses/private/InteractionRecord.cxx | 148 ++++++++++-------- .../private/pybindings/dataclasses.cxx 
| 2 + 2 files changed, 89 insertions(+), 61 deletions(-) diff --git a/projects/dataclasses/private/InteractionRecord.cxx b/projects/dataclasses/private/InteractionRecord.cxx index c8ccef439..bb0b6aab6 100644 --- a/projects/dataclasses/private/InteractionRecord.cxx +++ b/projects/dataclasses/private/InteractionRecord.cxx @@ -853,80 +853,106 @@ bool InteractionRecord::operator<(InteractionRecord const & other) const { } // namespace siren std::ostream & operator<<(std::ostream & os, siren::dataclasses::PrimaryDistributionRecord const & record) { + os << to_repr(record); + return os; +} + +std::string to_str(siren::dataclasses::PrimaryDistributionRecord const & record) { + using siren::utilities::tab; std::stringstream ss; - ss << "PrimaryDistributionRecord (" << &record << ") "; - os << ss.str() << '\n'; + ss << "[ PrimaryDistributionRecord (" << &record << ")\n"; - ss.str(std::string()); - std::string id_str; - ss << record.GetID(); - id_str = ss.str(); - std::string from = "\n"; - std::string to = "\n "; - size_t start_pos = 0; - while((start_pos = id_str.find(from, start_pos)) != std::string::npos) { - id_str.replace(start_pos, from.length(), to); - start_pos += to.length(); // Handles case where 'to' is a substring of 'from' - } - os << "ID: " << id_str << "\n"; + ss << tab << "ID: " << to_repr(record.id) << "\n"; + ss << tab << "Type: " << record.type << "\n"; - os << "Type: " << record.GetType() << "\n"; + ss << tab << "Mass: "; + if(record.mass_set) + ss << record.mass << '\n'; + else + ss << "unset\n"; - if(record.mass_set) { - os << "Mass: " << record.GetMass() << "\n"; - } else { - os << "Mass: " << "None" << "\n"; - } + ss << tab << "Energy: "; + if(record.energy_set) + ss << record.energy << '\n'; + else + ss << "unset\n"; - if(record.energy_set) { - os << "Energy: " << record.GetEnergy() << "\n"; - } else { - os << "Energy: " << "None" << "\n"; - } + ss << tab << "KineticEnergy: "; + if(record.kinetic_energy_set) + ss << record.kinetic_energy << '\n'; + 
else + ss << "unset\n"; - if(record.kinetic_energy_set) { - os << "KineticEnergy: " << record.GetKineticEnergy() << "\n"; - } else { - os << "KineticEnergy: " << "None" << "\n"; - } + ss << tab << "Direction: "; + if(record.direction_set) + ss << record.direction.at(0) << " " << record.direction.at(1) << " " << record.direction.at(2) << '\n'; + else + ss << "unset\n"; - if(record.direction_set) { - os << "Direction: " << record.GetDirection().at(0) << " " << record.GetDirection().at(1) << " " << record.GetDirection().at(2) << "\n"; - } else { - os << "Direction: " << "None" << "\n"; - } + ss << tab << "Momentum: "; + if(record.momentum_set) + ss << record.momentum.at(0) << " " << record.momentum.at(1) << " " << record.momentum.at(2) << '\n'; + else + ss << "unset\n"; - if(record.momentum_set) { - os << "Momentum: " << record.GetThreeMomentum().at(0) << " " << record.GetThreeMomentum().at(1) << " " << record.GetThreeMomentum().at(2) << "\n"; - } else { - os << "Momentum: " << "None" << "\n"; - } + ss << tab << "Length: "; + if(record.length_set) + ss << record.length << '\n'; + else + ss << "unset\n"; - if(record.length_set) { - os << "Length: " << record.GetLength() << "\n"; - } else { - os << "Length: " << "None" << "\n"; - } + ss << tab << "InitialPosition: "; + if(record.initial_position_set) + ss << record.initial_position.at(0) << " " << record.initial_position.at(1) << " " << record.initial_position.at(2) << '\n'; + else + ss << "unset\n"; - if(record.initial_position_set) { - os << "InitialPosition: " << record.GetInitialPosition().at(0) << " " << record.GetInitialPosition().at(1) << " " << record.GetInitialPosition().at(2) << "\n"; - } else { - os << "InitialPosition: " << "None" << "\n"; - } + ss << tab << "InteractionVertex: "; + if(record.interaction_vertex_set) + ss << record.interaction_vertex.at(0) << " " << record.interaction_vertex.at(1) << " " << record.interaction_vertex.at(2) << '\n'; + else + ss << "unset\n"; - if(record.interaction_vertex_set) 
{ - os << "InteractionVertex: " << record.GetInteractionVertex().at(0) << " " << record.GetInteractionVertex().at(1) << " " << record.GetInteractionVertex().at(2) << "\n"; - } else { - os << "InteractionVertex: " << "None" << "\n"; - } + ss << tab << "Helicity: "; + if(record.helicity_set) + ss << record.helicity << '\n'; + else + ss << "unset\n"; - if(record.helicity_set) { - os << "Helicity: " << record.GetHelicity() << "\n"; - } else { - os << "Helicity: " << "None" << "\n"; - } + ss << "]"; - return os; + return ss.str(); +} + +std::string to_repr(siren::dataclasses::PrimaryDistributionRecord const & record) { + std::stringstream ss; + ss << "PrimaryDistributionRecord("; + + ss << "id=" << to_repr(record.GetID()) << ", "; + ss << "type=" << record.GetType(); + + if(record.mass_set) + ss << ", mass=" << record.mass; + if(record.energy_set) + ss << ", energy=" << record.energy; + if(record.kinetic_energy_set) + ss << ", kinetic_energy=" << record.kinetic_energy; + if(record.direction_set) + ss << ", direction=(" << record.direction.at(0) << ", " << record.direction.at(1) << ", " << record.direction.at(2) << ")"; + if(record.momentum_set) + ss << ", momentum=(" << record.momentum.at(0) << ", " << record.momentum.at(1) << ", " << record.momentum.at(2) << ")"; + if(record.length_set) + ss << ", length=" << record.length; + if(record.initial_position_set) + ss << ", initial_position=(" << record.initial_position.at(0) << ", " << record.initial_position.at(1) << ", " << record.initial_position.at(2) << ")"; + if(record.interaction_vertex_set) + ss << ", interaction_vertex=(" << record.interaction_vertex.at(0) << ", " << record.interaction_vertex.at(1) << ", " << record.interaction_vertex.at(2) << ")"; + if(record.helicity_set) + ss << ", helicity=" << record.helicity; + + ss << ")"; + + return ss.str(); } std::ostream& operator<<(std::ostream& os, siren::dataclasses::CrossSectionDistributionRecord const& record) { diff --git 
a/projects/dataclasses/private/pybindings/dataclasses.cxx b/projects/dataclasses/private/pybindings/dataclasses.cxx index 7cf9e2990..81c2820b0 100644 --- a/projects/dataclasses/private/pybindings/dataclasses.cxx +++ b/projects/dataclasses/private/pybindings/dataclasses.cxx @@ -70,6 +70,8 @@ PYBIND11_MODULE(dataclasses, m) { py::class_>(m, "PrimaryDistributionRecord") .def(py::init()) + .def("__str__", [](PrimaryDistributionRecord const & pdr) { return to_str(pdr); }) + .def("__repr__", [](PrimaryDistributionRecord const & pdr) { return to_repr(pdr); }) .def_property_readonly("id", [](siren::dataclasses::PrimaryDistributionRecord const & pdr) {siren::dataclasses::ParticleID id = pdr.id; return id;}) .def_property_readonly("type", From aee543146a7f4ea6304a1d33f2336ce9d16642d5 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Sat, 14 Sep 2024 11:57:57 -0600 Subject: [PATCH 62/85] __str__ and __repr__ for Particle --- .../dataclasses/private/InteractionRecord.cxx | 2 +- projects/dataclasses/private/Particle.cxx | 51 ++++++++++++------- .../private/pybindings/dataclasses.cxx | 3 +- .../public/SIREN/dataclasses/Particle.h | 4 ++ 4 files changed, 39 insertions(+), 21 deletions(-) diff --git a/projects/dataclasses/private/InteractionRecord.cxx b/projects/dataclasses/private/InteractionRecord.cxx index bb0b6aab6..4724e9be7 100644 --- a/projects/dataclasses/private/InteractionRecord.cxx +++ b/projects/dataclasses/private/InteractionRecord.cxx @@ -860,7 +860,7 @@ std::ostream & operator<<(std::ostream & os, siren::dataclasses::PrimaryDistribu std::string to_str(siren::dataclasses::PrimaryDistributionRecord const & record) { using siren::utilities::tab; std::stringstream ss; - ss << "[ PrimaryDistributionRecord (" << &record << ")\n"; + ss << "[ PrimaryDistributionRecord (" << &record << "):\n"; ss << tab << "ID: " << to_repr(record.id) << "\n"; ss << tab << "Type: " << record.type << "\n"; diff --git a/projects/dataclasses/private/Particle.cxx 
b/projects/dataclasses/private/Particle.cxx index 60154e49d..e970ccb84 100644 --- a/projects/dataclasses/private/Particle.cxx +++ b/projects/dataclasses/private/Particle.cxx @@ -4,6 +4,7 @@ #include #include #include +#include #include #include #include @@ -11,30 +12,42 @@ #include #include "SIREN/utilities/Constants.h" +#include "SIREN/utilities/StringManipulation.h" std::ostream& operator<<(std::ostream& os, siren::dataclasses::Particle const& p) { - os << "Particle (" << &p << ")\n"; + os << to_repr(p); + return os; +} +std::string to_str(siren::dataclasses::Particle const& p) { + using siren::utilities::tab; std::stringstream ss; - ss << p.id; - std::string id_str = ss.str(); - std::string from = "\n"; - std::string to = "\n "; - size_t start_pos = 0; - while((start_pos = id_str.find(from, start_pos)) != std::string::npos) { - id_str.replace(start_pos, from.length(), to); - start_pos += to.length(); // Handles case where 'to' is a substring of 'from' - } - - os << "ID: " << id_str << "\n"; - os << "Type: " << p.type << "\n"; - os << "Mass: " << p.mass << "\n"; - os << "Momentum: " << p.momentum.at(0) << " " << p.momentum.at(1) << " " << p.momentum.at(2) << " " << p.momentum.at(3) << "\n"; - os << "Position: " << p.position.at(0) << " " << p.position.at(1) << " " << p.position.at(2) << "\n"; - os << "Length: " << p.length << "\n"; - os << "Helicity: " << p.helicity; + ss << "[ Particle (" << &p << "):\n"; + ss << tab << "ID: " << to_repr(p.id) << '\n'; + ss << tab << "Type: " << p.type << '\n'; + ss << tab << "Mass: " << p.mass << '\n'; + ss << tab << "Momentum: " << p.momentum.at(0) << ' ' << p.momentum.at(1) << ' ' << p.momentum.at(2) << ' ' << p.momentum.at(3) << '\n'; + ss << tab << "Position: " << p.position.at(0) << ' ' << p.position.at(1) << ' ' << p.position.at(2) << '\n'; + ss << tab << "Length: " << p.length << '\n'; + ss << tab << "Helicity: " << p.helicity << '\n'; + ss << ']'; + + return ss.str(); +} - return os; +std::string 
to_repr(siren::dataclasses::Particle const& p) { + std::stringstream ss; + ss << "Particle("; + ss << "id=" << to_repr(p.id) << ", "; + ss << "type=" << p.type << ", "; + ss << "mass=" << p.mass << ", "; + ss << "momentum=(" << p.momentum.at(0) << ", " << p.momentum.at(1) << ", " << p.momentum.at(2) << ", " << p.momentum.at(3) << "), "; + ss << "position=(" << p.position.at(0) << ", " << p.position.at(1) << ", " << p.position.at(2) << "), "; + ss << "length=" << p.length << ", "; + ss << "helicity=" << p.helicity; + ss << ')'; + + return ss.str(); } namespace siren { diff --git a/projects/dataclasses/private/pybindings/dataclasses.cxx b/projects/dataclasses/private/pybindings/dataclasses.cxx index 81c2820b0..ec1196e69 100644 --- a/projects/dataclasses/private/pybindings/dataclasses.cxx +++ b/projects/dataclasses/private/pybindings/dataclasses.cxx @@ -42,7 +42,8 @@ PYBIND11_MODULE(dataclasses, m) { .def(py::init()) .def(py::init, std::array, double, double>()) .def(py::init, std::array, double, double>()) - .def("__str__", [](Particle const & p) { std::stringstream ss; ss << p; return ss.str(); }) + .def("__str__", [](Particle const & p) { return to_str(p); }) + .def("__repr__", [](Particle const & p) { return to_repr(p); }) .def_readwrite("id",&Particle::id) .def_readwrite("type",&Particle::type) .def_readwrite("mass",&Particle::mass) diff --git a/projects/dataclasses/public/SIREN/dataclasses/Particle.h b/projects/dataclasses/public/SIREN/dataclasses/Particle.h index 94a61693f..6373a730d 100644 --- a/projects/dataclasses/public/SIREN/dataclasses/Particle.h +++ b/projects/dataclasses/public/SIREN/dataclasses/Particle.h @@ -27,6 +27,8 @@ namespace siren { namespace dataclasses { class Particle; } } std::ostream & operator<<(std::ostream & os, siren::dataclasses::Particle const & p); +std::string to_str(siren::dataclasses::Particle const & p); +std::string to_repr(siren::dataclasses::Particle const & p); namespace siren { namespace dataclasses { @@ -52,6 +54,8 @@ 
class Particle { ParticleID & GenerateID(); friend std::ostream & ::operator<<(std::ostream & os, siren::dataclasses::Particle const & p); + friend std::string (::to_str)(siren::dataclasses::Particle const & p); + friend std::string (::to_repr)(siren::dataclasses::Particle const & p); template void serialize(Archive & archive, std::uint32_t const version) { From a597bcb1280e7718610b499707cca708af8586ce Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Sun, 15 Sep 2024 14:53:34 -0600 Subject: [PATCH 63/85] Move detector files --- .../detectors/CCM/{densities_CCM-v2.dat => CCM-v2/densities.dat} | 0 .../detectors/CCM/{materials_CCM-v2.dat => CCM-v2/materials.dat} | 0 .../ND280UPGRD/ND280UPGRD-v1/densities.dat} | 0 .../ND280UPGRD/ND280UPGRD-v1/materials.dat} | 0 resources/{Detectors => detectors}/visuals/Det_Visual_Examples.nb | 0 resources/{Detectors => detectors}/visuals/DetectorVisuals.wl | 0 resources/{Detectors => detectors}/visuals/README.md | 0 .../{Fluxes => fluxes}/T2K_NEAR/T2K_NEAR-v1.0/FluxCalculator.py | 0 .../{Fluxes => fluxes}/T2K_NEAR/T2K_NEAR-v1.0/T2KOUT_PLUS_nue.dat | 0 .../T2K_NEAR/T2K_NEAR-v1.0/T2KOUT_PLUS_numu.dat | 0 .../{Fluxes => fluxes}/T2K_NEAR/T2K_NEAR-v1.0/T2K_MINUS_250kA.dat | 0 .../{Fluxes => fluxes}/T2K_NEAR/T2K_NEAR-v1.0/T2K_PLUS_250kA.dat | 0 12 files changed, 0 insertions(+), 0 deletions(-) rename resources/detectors/CCM/{densities_CCM-v2.dat => CCM-v2/densities.dat} (100%) rename resources/detectors/CCM/{materials_CCM-v2.dat => CCM-v2/materials.dat} (100%) rename resources/{Detectors/ND280UPGRD/densities_ND280UPGRD-v1.dat => detectors/ND280UPGRD/ND280UPGRD-v1/densities.dat} (100%) rename resources/{Detectors/ND280UPGRD/materials_ND280UPGRD-v1.dat => detectors/ND280UPGRD/ND280UPGRD-v1/materials.dat} (100%) rename resources/{Detectors => detectors}/visuals/Det_Visual_Examples.nb (100%) rename resources/{Detectors => detectors}/visuals/DetectorVisuals.wl (100%) rename resources/{Detectors => detectors}/visuals/README.md (100%) rename 
resources/{Fluxes => fluxes}/T2K_NEAR/T2K_NEAR-v1.0/FluxCalculator.py (100%) rename resources/{Fluxes => fluxes}/T2K_NEAR/T2K_NEAR-v1.0/T2KOUT_PLUS_nue.dat (100%) rename resources/{Fluxes => fluxes}/T2K_NEAR/T2K_NEAR-v1.0/T2KOUT_PLUS_numu.dat (100%) rename resources/{Fluxes => fluxes}/T2K_NEAR/T2K_NEAR-v1.0/T2K_MINUS_250kA.dat (100%) rename resources/{Fluxes => fluxes}/T2K_NEAR/T2K_NEAR-v1.0/T2K_PLUS_250kA.dat (100%) diff --git a/resources/detectors/CCM/densities_CCM-v2.dat b/resources/detectors/CCM/CCM-v2/densities.dat similarity index 100% rename from resources/detectors/CCM/densities_CCM-v2.dat rename to resources/detectors/CCM/CCM-v2/densities.dat diff --git a/resources/detectors/CCM/materials_CCM-v2.dat b/resources/detectors/CCM/CCM-v2/materials.dat similarity index 100% rename from resources/detectors/CCM/materials_CCM-v2.dat rename to resources/detectors/CCM/CCM-v2/materials.dat diff --git a/resources/Detectors/ND280UPGRD/densities_ND280UPGRD-v1.dat b/resources/detectors/ND280UPGRD/ND280UPGRD-v1/densities.dat similarity index 100% rename from resources/Detectors/ND280UPGRD/densities_ND280UPGRD-v1.dat rename to resources/detectors/ND280UPGRD/ND280UPGRD-v1/densities.dat diff --git a/resources/Detectors/ND280UPGRD/materials_ND280UPGRD-v1.dat b/resources/detectors/ND280UPGRD/ND280UPGRD-v1/materials.dat similarity index 100% rename from resources/Detectors/ND280UPGRD/materials_ND280UPGRD-v1.dat rename to resources/detectors/ND280UPGRD/ND280UPGRD-v1/materials.dat diff --git a/resources/Detectors/visuals/Det_Visual_Examples.nb b/resources/detectors/visuals/Det_Visual_Examples.nb similarity index 100% rename from resources/Detectors/visuals/Det_Visual_Examples.nb rename to resources/detectors/visuals/Det_Visual_Examples.nb diff --git a/resources/Detectors/visuals/DetectorVisuals.wl b/resources/detectors/visuals/DetectorVisuals.wl similarity index 100% rename from resources/Detectors/visuals/DetectorVisuals.wl rename to resources/detectors/visuals/DetectorVisuals.wl 
diff --git a/resources/Detectors/visuals/README.md b/resources/detectors/visuals/README.md similarity index 100% rename from resources/Detectors/visuals/README.md rename to resources/detectors/visuals/README.md diff --git a/resources/Fluxes/T2K_NEAR/T2K_NEAR-v1.0/FluxCalculator.py b/resources/fluxes/T2K_NEAR/T2K_NEAR-v1.0/FluxCalculator.py similarity index 100% rename from resources/Fluxes/T2K_NEAR/T2K_NEAR-v1.0/FluxCalculator.py rename to resources/fluxes/T2K_NEAR/T2K_NEAR-v1.0/FluxCalculator.py diff --git a/resources/Fluxes/T2K_NEAR/T2K_NEAR-v1.0/T2KOUT_PLUS_nue.dat b/resources/fluxes/T2K_NEAR/T2K_NEAR-v1.0/T2KOUT_PLUS_nue.dat similarity index 100% rename from resources/Fluxes/T2K_NEAR/T2K_NEAR-v1.0/T2KOUT_PLUS_nue.dat rename to resources/fluxes/T2K_NEAR/T2K_NEAR-v1.0/T2KOUT_PLUS_nue.dat diff --git a/resources/Fluxes/T2K_NEAR/T2K_NEAR-v1.0/T2KOUT_PLUS_numu.dat b/resources/fluxes/T2K_NEAR/T2K_NEAR-v1.0/T2KOUT_PLUS_numu.dat similarity index 100% rename from resources/Fluxes/T2K_NEAR/T2K_NEAR-v1.0/T2KOUT_PLUS_numu.dat rename to resources/fluxes/T2K_NEAR/T2K_NEAR-v1.0/T2KOUT_PLUS_numu.dat diff --git a/resources/Fluxes/T2K_NEAR/T2K_NEAR-v1.0/T2K_MINUS_250kA.dat b/resources/fluxes/T2K_NEAR/T2K_NEAR-v1.0/T2K_MINUS_250kA.dat similarity index 100% rename from resources/Fluxes/T2K_NEAR/T2K_NEAR-v1.0/T2K_MINUS_250kA.dat rename to resources/fluxes/T2K_NEAR/T2K_NEAR-v1.0/T2K_MINUS_250kA.dat diff --git a/resources/Fluxes/T2K_NEAR/T2K_NEAR-v1.0/T2K_PLUS_250kA.dat b/resources/fluxes/T2K_NEAR/T2K_NEAR-v1.0/T2K_PLUS_250kA.dat similarity index 100% rename from resources/Fluxes/T2K_NEAR/T2K_NEAR-v1.0/T2K_PLUS_250kA.dat rename to resources/fluxes/T2K_NEAR/T2K_NEAR-v1.0/T2K_PLUS_250kA.dat From 4cc6e93dee1fd44e4dbe5368590c0238eb6a9c84 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Sun, 15 Sep 2024 15:19:22 -0600 Subject: [PATCH 64/85] Fix paths in visualization to match updated directory structure --- .../detectors/visuals/Det_Visual_Examples.nb | 575 +++++++----------- 
.../detectors/visuals/DetectorVisuals.wl | 6 +- resources/detectors/visuals/README.md | 8 +- 3 files changed, 224 insertions(+), 365 deletions(-) diff --git a/resources/detectors/visuals/Det_Visual_Examples.nb b/resources/detectors/visuals/Det_Visual_Examples.nb index 6710d4051..f4b823f38 100644 --- a/resources/detectors/visuals/Det_Visual_Examples.nb +++ b/resources/detectors/visuals/Det_Visual_Examples.nb @@ -10,10 +10,10 @@ NotebookFileLineBreakTest NotebookFileLineBreakTest NotebookDataPosition[ 158, 7] -NotebookDataLength[ 41346, 848] -NotebookOptionsPosition[ 38267, 784] -NotebookOutlinePosition[ 38662, 800] -CellTagsIndexPosition[ 38619, 797] +NotebookDataLength[ 32117, 707] +NotebookOptionsPosition[ 29036, 643] +NotebookOutlinePosition[ 29434, 659] +CellTagsIndexPosition[ 29391, 656] WindowFrame->Normal*) (* Beginning of Notebook Content *) @@ -58,14 +58,14 @@ Cell[BoxData[ CellChangeTimes->{{3.930845768512031*^9, 3.930845780337503*^9}, { 3.93084581693408*^9, 3.930845853905582*^9}, {3.93091362763332*^9, 3.9309136464460287`*^9}}, - CellLabel->"In[2]:=",ExpressionUUID->"01061750-99ce-4ee2-ba52-6ccdbfbf6f3b"], + CellLabel->"In[1]:=",ExpressionUUID->"01061750-99ce-4ee2-ba52-6ccdbfbf6f3b"], Cell[BoxData[ RowBox[{"<<", "DetectorVisuals`"}]], "Input", CellChangeTimes->{{3.930845856938705*^9, 3.930845892210829*^9}, { 3.9308460624868727`*^9, 3.930846074171977*^9}, {3.9309136516632137`*^9, 3.930913666071559*^9}}, - CellLabel->"In[35]:=",ExpressionUUID->"a6dc3d65-b39c-4f5d-8e40-9ac1c63e10c6"] + CellLabel->"In[2]:=",ExpressionUUID->"a6dc3d65-b39c-4f5d-8e40-9ac1c63e10c6"] }, Open ]], Cell[CellGroupData[{ @@ -87,104 +87,72 @@ Cell[CellGroupData[{ Cell[BoxData[ RowBox[{"Visuals", "[", - RowBox[{"\"\\"", ",", + RowBox[{"\"\\"", ",", "\"\\"", ",", RowBox[{"{", RowBox[{"\"\\"", ",", "\"\\""}], "}"}]}], "]"}]], "Input", CellChangeTimes->{{3.9308461111501207`*^9, 3.930846132819652*^9}, { 3.9308462468135443`*^9, 3.930846247147883*^9}, {3.930846995520549*^9, - 
3.9308470458198357`*^9}}, - CellLabel->"In[4]:=",ExpressionUUID->"758f5770-9701-4c2e-9c28-f1655b0a6965"], + 3.9308470458198357`*^9}, {3.935423845589867*^9, 3.935423846508115*^9}}, + CellLabel->"In[3]:=",ExpressionUUID->"758f5770-9701-4c2e-9c28-f1655b0a6965"], Cell[BoxData[ Graphics3DBox[{ - {RGBColor[0.35093217511999275`, 0.3525201340616513, 0.8276327252716125], - Opacity[0.1], CuboidBox[{-2.8, -3.05, -3.8}, {2.8, 3.05, 3.8}]}, - {RGBColor[0.8477780541436477, 0.3160906000834609, 0.219018325039134], - Opacity[0.1], CuboidBox[{-1.75, -2., -3.8}, {1.75, 2., 3.8}]}, - {RGBColor[0.22330177948027363`, 0.31121718977849944`, 0.08734595030416648], - Opacity[0.1], CuboidBox[{-1.75, -2., -3.7}, {1.75, 2., 3.7}]}, - {RGBColor[0.8477780541436477, 0.3160906000834609, 0.219018325039134], - Opacity[0.1], CuboidBox[{-1.75, -1.8, -3.5}, {1.75, 1.8, 3.5}]}, - {RGBColor[0.5014698837210463, 0.8279721286735073, 0.25805733064978975`], - Opacity[0.1], + {Opacity[0.1], CuboidBox[{-2.8, -3.05, -3.8}, {2.8, 3.05, 3.8}]}, + {Opacity[0.1], CuboidBox[{-1.75, -2., -3.8}, {1.75, 2., 3.8}]}, + {Opacity[0.1], CuboidBox[{-1.75, -2., -3.7}, {1.75, 2., 3.7}]}, + {Opacity[0.1], CuboidBox[{-1.75, -1.8, -3.5}, {1.75, 1.8, 3.5}]}, + {Opacity[0.1], CuboidBox[{-0.8975, 0.34249999999999997`, -3.205}, {0.8975, 1.1375, -1.205}]}, - {RGBColor[0.6907727880028507, 0.11798104164182699`, 0.5416072831768857], - Opacity[0.1], + {Opacity[0.1], CuboidBox[{-0.85, 0.39, -3.205}, {0.85, 1.0899999999999999`, -1.205}]}, - {RGBColor[0.5014698837210463, 0.8279721286735073, 0.25805733064978975`], - Opacity[0.1], + {Opacity[0.1], CuboidBox[{-0.8975, -1.1375, -3.205}, { 0.8975, -0.34249999999999997`, -1.205}]}, - {RGBColor[0.6907727880028507, 0.11798104164182699`, 0.5416072831768857], - Opacity[0.1], + {Opacity[0.1], CuboidBox[{-0.85, -1.0899999999999999`, -3.205}, { 0.85, -0.39, -1.205}]}, - {RGBColor[0.20715118379631448`, 0.30858200457649687`, 0.3717365440678644], - Opacity[0.1], + {Opacity[0.1], CuboidBox[{-1.009, -0.32, 
-3.214}, {1.009, 0.32, -1.1960000000000002`}]}, - {RGBColor[0.20715118379631448`, 0.30858200457649687`, 0.3717365440678644], - Opacity[0.1], CuboidBox[{-0.96, -0.28, -3.165}, {0.96, 0.28, -1.245}]}, - {RGBColor[0.25586105262190606`, 0.2633788353773121, 0.2901412060568902], - Opacity[0.1], + {Opacity[0.1], CuboidBox[{-0.96, -0.28, -3.165}, {0.96, 0.28, -1.245}]}, + {Opacity[0.1], CuboidBox[{-1.15, 1.1600000000000001`, -3.355}, {1.15, 1.17, -1.0550000000000002`}]}, - {RGBColor[0.25586105262190606`, 0.2633788353773121, 0.2901412060568902], - Opacity[0.1], + {Opacity[0.1], CuboidBox[{-1.15, -1.17, -3.355}, { 1.15, -1.1600000000000001`, -1.0550000000000002`}]}, - {RGBColor[0.25586105262190606`, 0.2633788353773121, 0.2901412060568902], - Opacity[0.1], + {Opacity[0.1], CuboidBox[{1.1900000000000002`, -1.15, -3.355}, {1.2, 1.15, -1.0550000000000002`}]}, - {RGBColor[0.25586105262190606`, 0.2633788353773121, 0.2901412060568902], - Opacity[0.1], + {Opacity[0.1], CuboidBox[{-1.2, -1.15, -3.355}, {-1.1900000000000002`, 1.15, -1.0550000000000002`}]}, - {RGBColor[0.25586105262190606`, 0.2633788353773121, 0.2901412060568902], - Opacity[0.1], + {Opacity[0.1], CuboidBox[{-1.15, -1.15, -3.2649999999999997`}, {1.15, 1.15, -3.255}]}, - {RGBColor[0.25586105262190606`, 0.2633788353773121, 0.2901412060568902], - Opacity[0.1], + {Opacity[0.1], CuboidBox[{-1.15, -1.15, -1.1749999999999998`}, {1.15, 1.15, -1.165}]}, - {RGBColor[0.7458525322351903, 0.7847146789223423, 0.9181634201906055], - Opacity[0.1], CuboidBox[{-1.25, -1.25, -0.875}, {1.25, 1.25, 0.125}]}, - {RGBColor[0.6907727880028507, 0.11798104164182699`, 0.5416072831768857], - Opacity[0.1], + {Opacity[0.1], CuboidBox[{-1.25, -1.25, -0.875}, {1.25, 1.25, 0.125}]}, + {Opacity[0.1], CuboidBox[{-1.235, -1.235, -0.86}, {1.235, 1.235, 0.10999999999999999`}]}, - {RGBColor[0.7458525322351903, 0.7847146789223423, 0.9181634201906055], - Opacity[0.1], CuboidBox[{-1.25, -1.25, 0.49}, {1.25, 1.25, 1.49}]}, - {RGBColor[0.6907727880028507, 
0.11798104164182699`, 0.5416072831768857], - Opacity[0.1], + {Opacity[0.1], CuboidBox[{-1.25, -1.25, 0.49}, {1.25, 1.25, 1.49}]}, + {Opacity[0.1], CuboidBox[{-1.235, -1.235, 0.505}, {1.235, 1.235, 1.475}]}, - {RGBColor[0.7458525322351903, 0.7847146789223423, 0.9181634201906055], - Opacity[0.1], CuboidBox[{-1.25, -1.25, 1.855}, {1.25, 1.25, 2.855}]}, - {RGBColor[0.6907727880028507, 0.11798104164182699`, 0.5416072831768857], - Opacity[0.1], CuboidBox[{-1.235, -1.235, 1.87}, {1.235, 1.235, 2.84}]}, - {RGBColor[0.20715118379631448`, 0.30858200457649687`, 0.3717365440678644], - Opacity[0.1], CuboidBox[{-1.02, -1.02, -3.75}, {1.02, 1.02, -3.25}]}, - {RGBColor[0.20715118379631448`, 0.30858200457649687`, 0.3717365440678644], - Opacity[0.1], CuboidBox[{-1.02, -1.02, 3.}, {1.02, 1.02, 3.5}]}, - {RGBColor[0.20715118379631448`, 0.30858200457649687`, 0.3717365440678644], - Opacity[0.1], CuboidBox[{1.25, -1.18, -0.875}, {1.75, 1.18, 2.965}]}, - {RGBColor[0.20715118379631448`, 0.30858200457649687`, 0.3717365440678644], - Opacity[0.1], CuboidBox[{-1.75, -1.18, -0.875}, {-1.25, 1.18, 2.965}]}, - {RGBColor[0.20715118379631448`, 0.30858200457649687`, 0.3717365440678644], - Opacity[0.1], CuboidBox[{-0.76, 1.25, -0.875}, {0.76, 1.75, 2.965}]}, - {RGBColor[0.20715118379631448`, 0.30858200457649687`, 0.3717365440678644], - Opacity[0.1], CuboidBox[{-0.76, -1.75, -0.875}, {0.76, -1.25, 2.965}]}, - {RGBColor[0.7212050307872955, 0.3081619831157636, 0.9100551807755652], - Opacity[0.1], CuboidBox[{-1.15, -1.2, 0.125}, {1.15, 1.2, 0.49}]}, - {RGBColor[0.20715118379631448`, 0.30858200457649687`, 0.3717365440678644], - Opacity[0.1], + {Opacity[0.1], CuboidBox[{-1.25, -1.25, 1.855}, {1.25, 1.25, 2.855}]}, + {Opacity[0.1], CuboidBox[{-1.235, -1.235, 1.87}, {1.235, 1.235, 2.84}]}, + {Opacity[0.1], CuboidBox[{-1.02, -1.02, -3.75}, {1.02, 1.02, -3.25}]}, + {Opacity[0.1], CuboidBox[{-1.02, -1.02, 3.}, {1.02, 1.02, 3.5}]}, + {Opacity[0.1], CuboidBox[{1.25, -1.18, -0.875}, {1.75, 1.18, 2.965}]}, + 
{Opacity[0.1], CuboidBox[{-1.75, -1.18, -0.875}, {-1.25, 1.18, 2.965}]}, + {Opacity[0.1], CuboidBox[{-0.76, 1.25, -0.875}, {0.76, 1.75, 2.965}]}, + {Opacity[0.1], CuboidBox[{-0.76, -1.75, -0.875}, {0.76, -1.25, 2.965}]}, + {Opacity[0.1], CuboidBox[{-1.15, -1.2, 0.125}, {1.15, 1.2, 0.49}]}, + {Opacity[0.1], CuboidBox[{-0.9215, -0.9215, 0.1635}, {0.9215, 0.9215, 0.4515}]}, - {RGBColor[0.7212050307872955, 0.3081619831157636, 0.9100551807755652], - Opacity[0.1], + {Opacity[0.1], CuboidBox[{-1.15, -1.2, 1.4900000000000002`}, {1.15, 1.2, 1.855}]}, - {RGBColor[0.20715118379631448`, 0.30858200457649687`, 0.3717365440678644], - Opacity[0.1], + {Opacity[0.1], CuboidBox[{-0.9215, -0.9215, 1.564}, {0.9215, 0.9215, 1.7810000000000001`}]}}, ImageSize->{326.05528778058215`, 276.80632953762915`}, @@ -201,133 +169,109 @@ Cell[BoxData[ 3.930995881446875*^9, 3.930995996055388*^9, 3.930996573196425*^9, 3.9309969008185797`*^9, 3.930997477608238*^9, 3.93100477589196*^9, 3.931005380832015*^9, {3.931005967116878*^9, 3.931005978085596*^9}, - 3.931008220329236*^9, 3.931008284734171*^9}, - CellLabel->"Out[4]=",ExpressionUUID->"8a3d7fe4-05dd-4061-b3db-44a8b527c5fa"] + 3.931008220329236*^9, 3.931008284734171*^9, {3.93542387743606*^9, + 3.93542389950345*^9}}, + CellLabel->"Out[3]=",ExpressionUUID->"5609e820-8728-41eb-804e-82e36e1de646"] }, Open ]], Cell[CellGroupData[{ Cell[BoxData[ RowBox[{"Visuals", "[", - RowBox[{"\"\\"", ",", + RowBox[{"\"\\"", ",", "\"\\"", ",", RowBox[{"{", RowBox[{"\"\\"", ",", "\"\\""}], "}"}]}], "]"}]], "Input", CellChangeTimes->{{3.9308464581373568`*^9, 3.9308464652613783`*^9}, { - 3.930847248876378*^9, 3.9308472858524933`*^9}}, - CellLabel->"In[39]:=",ExpressionUUID->"85f2f25e-059b-47e1-b7b7-1cd9a7fb936b"], + 3.930847248876378*^9, 3.9308472858524933`*^9}, {3.935423851296529*^9, + 3.9354238540901823`*^9}}, + CellLabel->"In[4]:=",ExpressionUUID->"85f2f25e-059b-47e1-b7b7-1cd9a7fb936b"], Cell[BoxData[ Graphics3DBox[{ - {RGBColor[0.41718218051431455`, 0.6517933847257689, 
0.5388337863559993], - Opacity[0.1], + {Opacity[0.1], CuboidBox[{5.0001999999999995`, -2.9465, -2.}, {11.1978, 2.9465, -1.3900000000000001`}]}, - {RGBColor[0.41718218051431455`, 0.6517933847257689, 0.5388337863559993], - Opacity[0.1], + {Opacity[0.1], CuboidBox[{5., -0.9144, -1.3900000000000001`}, {5.660399999999999, 0.9144, 0.464}]}, - {RGBColor[0.41718218051431455`, 0.6517933847257689, 0.5388337863559993], - Opacity[0.1], + {Opacity[0.1], CuboidBox[{5.6604, -0.9398, -1.3900000000000001`}, {6.574800000000001, 0.9398, 0.464}]}, - {RGBColor[0.32506606707200136`, 0.23990300858724734`, 0.486881531810764], - Opacity[0.1], + {Opacity[0.1], CuboidBox[{6.5748, -0.9398, -1.3900000000000001`}, {6.7780000000000005`, 0.9398, 0.464}]}, - {RGBColor[0.41718218051431455`, 0.6517933847257689, 0.5388337863559993], - Opacity[0.1], + {Opacity[0.1], CuboidBox[{6.778, -1.2446, -1.3900000000000001`}, {7.2352, 1.2446, 0.464}]}, - {RGBColor[0.521412842051586, 0.4962344456641059, 0.6541153328529785], - Opacity[0.1], + {Opacity[0.1], CuboidBox[{7.2352, -1.8288, -1.3900000000000001`}, {7.3114, 1.8288, 0.464}]}, - {RGBColor[0.41718218051431455`, 0.6517933847257689, 0.5388337863559993], - Opacity[0.1], + {Opacity[0.1], CuboidBox[{8.311399999999999, -1.8288, -1.3900000000000001`}, {8.7686, 1.8288, 0.464}]}, - {RGBColor[0.32506606707200136`, 0.23990300858724734`, 0.486881531810764], - Opacity[0.1], + {Opacity[0.1], CuboidBox[NCache[{18., Rational[-5, 2], -2.}, {18., -2.5, -2.}], NCache[{19., Rational[5, 2], 1.}, {19., 2.5, 1.}]]}, - {RGBColor[0.41718218051431455`, 0.6517933847257689, 0.5388337863559993], - Opacity[0.1], + {Opacity[0.1], CuboidBox[NCache[{19., Rational[-5, 2], -2.}, {19., -2.5, -2.}], NCache[{21., Rational[5, 2], 1.}, {21., 2.5, 1.}]]}, - {RGBColor[0.41718218051431455`, 0.6517933847257689, 0.5388337863559993], - Opacity[0.1], CuboidBox[{21., 4., -2.}, {25., 5., 1.}]}, - {RGBColor[0.41718218051431455`, 0.6517933847257689, 0.5388337863559993], - Opacity[0.1], CuboidBox[{21., -5., 
-2.}, {25., -4., 1.}]}, - {RGBColor[0.32506606707200136`, 0.23990300858724734`, 0.486881531810764], - Opacity[0.1], CylinderBox[{{0, 0, -2}, {0, 0, 2}}, 5]}, - {RGBColor[0.32506606707200136`, 0.23990300858724734`, 0.486881531810764], - Opacity[0.1], + {Opacity[0.1], CuboidBox[{21., 4., -2.}, {25., 5., 1.}]}, + {Opacity[0.1], CuboidBox[{21., -5., -2.}, {25., -4., 1.}]}, + {Opacity[0.1], CylinderBox[{{0, 0, -2}, {0, 0, 2}}, 5]}, + {Opacity[0.1], CylinderBox[{{0, 0, -0.6299999999999999}, {0, 0, 1.}}, 0.83]}, - {RGBColor[0.0848019125038495, 0.8968379325252431, 0.05613755639699236], - Opacity[0.1], + {Opacity[0.1], CylinderBox[{{0, 0, -0.44999999999999996`}, {0, 0, 0.79}}, 0.55]}, - {RGBColor[0.7911505744190059, 0.18559734430518615`, 0.2453924328324959], - Opacity[0.1], CylinderBox[{{0, 0, -0.391}, {0, 0, -0.241}}, 0.3]}, - {RGBColor[0.7911505744190059, 0.18559734430518615`, 0.2453924328324959], - Opacity[0.1], + {Opacity[0.1], CylinderBox[{{0, 0, -0.391}, {0, 0, -0.241}}, 0.3]}, + {Opacity[0.1], CylinderBox[{{0, 0, -0.241}, {0, 0, -0.09100000000000001}}, 0.3]}, - {RGBColor[0.7911505744190059, 0.18559734430518615`, 0.2453924328324959], - Opacity[0.1], + {Opacity[0.1], CylinderBox[{{0, 0, 0.07999999999999999}, {0, 0, 0.5}}, 0.3]}, - {RGBColor[0.3351807133351079, 0.2763153120327406, 0.7227930518683081], - Opacity[0.1], + {Opacity[0.1], CylinderBox[{{0, 0, 0.09200000000000001}, {0, 0, 0.183}}, 0.05]}, - {RGBColor[0.3351807133351079, 0.2763153120327406, 0.7227930518683081], - Opacity[0.1], CylinderBox[{{0, 0, -0.39}, {0, 0, -0.092}}, 0.05]}, - {RGBColor[0.32506606707200136`, 0.23990300858724734`, 0.486881531810764], - Opacity[0.1], CylinderBox[{{23, 0, -1.96}, {23, 0, 0.66}}, 1.38]}, - {RGBColor[0.009276515470650892, 0.6652402538800535, 0.2557510553491851], - Opacity[0.1], + {Opacity[0.1], CylinderBox[{{0, 0, -0.39}, {0, 0, -0.092}}, 0.05]}, + {Opacity[0.1], CylinderBox[{{23, 0, -1.96}, {23, 0, 0.66}}, 1.38]}, + {Opacity[0.1], CylinderBox[{{23, 0, -1.9100000000000001`}, 
{23, 0, 0.61}}, 1.35]}, - {RGBColor[0.32506606707200136`, 0.23990300858724734`, 0.486881531810764], - Opacity[0.1], + {Opacity[0.1], CylinderBox[{{23, 0, -1.85}, {23, 0, 0.5499999999999999}}, 1.25]}, - {RGBColor[0.6493869773926908, 0.16552659229722266`, 0.1134446897889001], - Opacity[0.1], + {Opacity[0.1], CylinderBox[{{23, 0, -1.7999999999999998`}, {23, 0, 0.4999999999999999}}, 1.2]}, - {RGBColor[0.3086780992491793, 0.10261299404488455`, 0.5061987340813581], - Opacity[0.1], - CylinderBox[{{23, 0, -1.4}, {23, 0, 0.09999999999999998}}, 1.06]}, - {RGBColor[0.6493869773926908, 0.16552659229722266`, 0.1134446897889001], - Opacity[0.1], - CylinderBox[{{23, 0, -1.266}, {23, 0, -0.03400000000000003}}, - 0.96]}}]], "Output", + {Opacity[0.1], + CylinderBox[{{23, 0, -1.271305}, {23, 0, -0.028695000000000026`}}, + 1.130076]}, + {Opacity[0.1], + CylinderBox[{{23, 0, -1.2698}, {23, 0, -0.030200000000000005`}}, + 1.12776]}}]], "Output", CellChangeTimes->{ 3.930847286401828*^9, 3.9309138898128967`*^9, {3.930995702475547*^9, 3.9309957057441807`*^9}, 3.930996904682377*^9, 3.9309974796076202`*^9, 3.931005982557057*^9, 3.93100621941457*^9, 3.9310087892293873`*^9, 3.9310089841360407`*^9, 3.93100907290653*^9, 3.931009196389038*^9, - 3.9310093752491703`*^9}, - CellLabel->"Out[39]=",ExpressionUUID->"5a905629-0c22-4620-b59c-02655db9bf79"] + 3.9310093752491703`*^9, {3.935423877536092*^9, 3.935423899603882*^9}}, + CellLabel->"Out[4]=",ExpressionUUID->"22bf564d-0ceb-4fae-ab91-6dd16c0c0d80"] }, Open ]], Cell[CellGroupData[{ Cell[BoxData[ RowBox[{"Visuals", "[", - RowBox[{"\"\\"", ",", + RowBox[{"\"\\"", ",", "\"\\"", ",", RowBox[{"{", RowBox[{"\"\\"", ",", "\"\\""}], "}"}]}], "]"}]], "Input", CellChangeTimes->{{3.9309951152387*^9, 3.930995153174532*^9}, 3.930997063585266*^9, 3.931005817453622*^9, {3.9310092389209433`*^9, - 3.9310092757902718`*^9}, {3.9310093189732447`*^9, - 3.931009348185121*^9}},ExpressionUUID->"45186205-4e3c-43da-8cf6-\ -942bd6843b62"], + 3.9310092757902718`*^9}, 
{3.9310093189732447`*^9, 3.931009348185121*^9}, + 3.9354238524438887`*^9}, + CellLabel->"In[5]:=",ExpressionUUID->"45186205-4e3c-43da-8cf6-942bd6843b62"], Cell[BoxData[ Graphics3DBox[{ - {RGBColor[0.2298911743818579, 0.31209133471148953`, 0.7146612930358287], - Opacity[0.1], CuboidBox[{-1, -1, -1.0375}, {1, 1, -0.9625}]}, - {RGBColor[0.16452635096365742`, 0.23517942424424865`, 0.9345244778129824], - Opacity[0.1], + {Opacity[0.1], CuboidBox[{-1, -1, -1.0375}, {1, 1, -0.9625}]}, + {Opacity[0.1], PolyhedronBox[{{{0., 1.23553, -0.0027999999999996916`}, {1.07, 0.61776, -0.0027999999999996916`}, { 1.07, -0.61776, -0.0027999999999996916`}, { @@ -336,60 +280,51 @@ Cell[BoxData[ 1.23553, 4.1372}, {1.07, 0.61776, 4.1372}, {1.07, -0.61776, 4.1372}, { 0., -1.23553, 4.1372}, {-1.07, -0.61776, 4.1372}, {-1.07, 0.61776, 4.1372}}}]}, - {RGBColor[0.3275193083067711, 0.5033666810361557, 0.1674300296182314], - Opacity[0.1], + {Opacity[0.1], PolyhedronBox[{{{0., 1.06232, 0.}, {0.92, 0.53116, 0.}, {0.92, -0.53116, 0.}, {0., -1.06232, 0.}, {-0.92, -0.53116, 0.}, {-0.92, 0.53116, 0.}}, {{ 0., 1.06232, 4.1344}, {0.92, 0.53116, 4.1344}, {0.92, -0.53116, 4.1344}, {0., -1.06232, 4.1344}, {-0.92, -0.53116, 4.1344}, {-0.92, 0.53116, 4.1344}}}]}, - {RGBColor[0.2298911743818579, 0.31209133471148953`, 0.7146612930358287], - Opacity[0.1], + {Opacity[0.1], PolyhedronBox[{{{-0.2825, 0.8992, 0.136}, {0.6375, -0.6942, 0.136}, { 0., -1.06232, 0.136}, {-0.92, -0.53116, 0.136}, {-0.92, 0.53116, 0.136}}, {{-0.2825, 0.8992, 0.16167}, {0.6375, -0.6942, 0.16167}, { 0., -1.06232, 0.16167}, {-0.92, -0.53116, 0.16167}, {-0.92, 0.53116, 0.16167}}}]}, - {RGBColor[0.8775095682131331, 0.5860897175157826, 0.24202862693045568`], - Opacity[0.1], + {Opacity[0.1], PolyhedronBox[{{{0.6375, -0.6942, 0.136}, {-0.2825, 0.8992, 0.136}, {0., 1.06232, 0.136}, {0.92, 0.53116, 0.136}, {0.92, -0.53116, 0.136}}, {{ 0.6375, -0.6942, 0.16178}, {-0.2825, 0.8992, 0.16178}, {0., 1.06232, 0.16178}, {0.92, 0.53116, 0.16178}, {0.92, 
-0.53116, 0.16178}}}]}, - {RGBColor[0.2298911743818579, 0.31209133471148953`, 0.7146612930358287], - Opacity[0.1], + {Opacity[0.1], PolyhedronBox[{{{-0.6375, -0.6942, 0.313}, {0.2825, 0.8992, 0.313}, {0.92, 0.53116, 0.313}, {0.92, -0.53116, 0.313}, {0., -1.06232, 0.313}}, {{-0.6375, -0.6942, 0.33863}, {0.2825, 0.8992, 0.33863}, {0.92, 0.53116, 0.33863}, {0.92, -0.53116, 0.33863}, {0., -1.06232, 0.33863}}}]}, - {RGBColor[0.8775095682131331, 0.5860897175157826, 0.24202862693045568`], - Opacity[0.1], + {Opacity[0.1], PolyhedronBox[{{{0.2825, 0.8992, 0.313}, {-0.6375, -0.6942, 0.313}, {-0.92, -0.53116, 0.313}, {-0.92, 0.53116, 0.313}, {0., 1.06232, 0.313}}, {{0.2825, 0.8992, 0.33881}, {-0.6375, -0.6942, 0.33881}, {-0.92, -0.53116, 0.33881}, {-0.92, 0.53116, 0.33881}, {0., 1.06232, 0.33881}}}]}, - {RGBColor[0.2654164649210198, 0.39657426393543016`, 0.2847295993909016], - Opacity[0.1], + {Opacity[0.1], PolyhedronBox[{{{0., 1.06232, 0.534}, {0.92, 0.53116, 0.534}, {-0.92, -0.53116, 0.534}, {-0.92, 0.53116, 0.534}}, {{0., 1.06232, 0.6102000000000001}, {0.92, 0.53116, 0.6102000000000001}, {-0.92, -0.53116, 0.6102000000000001}, {-0.92, 0.53116, 0.6102000000000001}}}]}, - {RGBColor[0.2298911743818579, 0.31209133471148953`, 0.7146612930358287], - Opacity[0.1], + {Opacity[0.1], PolyhedronBox[{{{0., 0., 0.534}, {0.92, 0.53116, 0.534}, {0.92, -0.53116, 0.534}, {0., -1.06232, 0.534}}, {{0., 0., 0.5597300000000001}, {0.92, 0.53116, 0.5597300000000001}, {0.92, -0.53116, 0.5597300000000001}, { 0., -1.06232, 0.5597300000000001}}}]}, - {RGBColor[0.8775095682131331, 0.5860897175157826, 0.24202862693045568`], - Opacity[0.1], + {Opacity[0.1], PolyhedronBox[{{{0., 0., 0.534}, {0., -1.06232, 0.534}, {-0.92, -0.53116, 0.534}}, {{0., 0., 0.5596300000000001}, {0., -1.06232, 0.5596300000000001}, {-0.92, -0.53116, 0.5596300000000001}}}]}, - {RGBColor[0.8332620257988352, 0.5659513355920436, 0.30336281249235686`], - Opacity[0.1], + {Opacity[0.1], PolyhedronBox[{{{0., 1.06232, 0.895}, {0.92, 
0.53116, 0.895}, { 0.92, -0.53116, 0.895}, {0., -1.06232, 0.895}, {-0.92, -0.53116, 0.895}, {-0.92, 0.53116, 0.895}}, {{0., 1.06232, 1.0756000000000001`}, { @@ -397,22 +332,19 @@ Cell[BoxData[ 1.0756000000000001`}, {0., -1.06232, 1.0756000000000001`}, {-0.92, -0.53116, 1.0756000000000001`}, {-0.92, 0.53116, 1.0756000000000001`}}}]}, - {RGBColor[0.8775095682131331, 0.5860897175157826, 0.24202862693045568`], - Opacity[0.1], + {Opacity[0.1], PolyhedronBox[{{{0., 1.06232, 1.256}, {0.92, 0.53116, 1.256}, { 0.92, -0.53116, 1.256}, {0., -1.06232, 1.256}, {-0.92, -0.53116, 1.256}, {-0.92, 0.53116, 1.256}}, {{0., 1.06232, 1.26395}, {0.92, 0.53116, 1.26395}, {0.92, -0.53116, 1.26395}, {0., -1.06232, 1.26395}, {-0.92, -0.53116, 1.26395}, {-0.92, 0.53116, 1.26395}}}]}, - {RGBColor[0.2298911743818579, 0.31209133471148953`, 0.7146612930358287], - Opacity[0.1], + {Opacity[0.1], PolyhedronBox[{{{-0.2825, 0.8992, 1.389}, {0.6375, -0.6942, 1.389}, { 0., -1.06232, 1.389}, {-0.92, -0.53116, 1.389}, {-0.92, 0.53116, 1.389}}, {{-0.2825, 0.8992, 1.40189}, {0.6375, -0.6942, 1.40189}, { 0., -1.06232, 1.40189}, {-0.92, -0.53116, 1.40189}, {-0.92, 0.53116, 1.40189}}}]}, - {RGBColor[0.8775095682131331, 0.5860897175157826, 0.24202862693045568`], - Opacity[0.1], + {Opacity[0.1], PolyhedronBox[{{{0.6375, -0.6942, 1.389}, {-0.2825, 0.8992, 1.389}, {0., 1.06232, 1.389}, {0.92, 0.53116, 1.389}, {0.92, -0.53116, 1.389}}, {{ 0.6375, -0.6942, 1.40217}, {-0.2825, 0.8992, 1.40217}, {0., 1.06232, @@ -434,33 +366,33 @@ Cell[BoxData[ 3.931008289070985*^9, {3.931008607676939*^9, 3.931008627241765*^9}, 3.931008698778192*^9, 3.93100874878646*^9, 3.931008989014098*^9, 3.93100908020753*^9, 3.9310091171045313`*^9, 3.931009199983571*^9, { - 3.931009253670957*^9, 3.931009276283464*^9}}, - CellLabel->"Out[38]=",ExpressionUUID->"7aa043b8-c09a-4a90-8f3b-914c780b39b6"] + 3.931009253670957*^9, 3.931009276283464*^9}, {3.935423877628962*^9, + 3.935423899702532*^9}}, + 
CellLabel->"Out[5]=",ExpressionUUID->"e0160564-d0ed-4637-bf6a-db4a523dd2d7"] }, Open ]], Cell[CellGroupData[{ Cell[BoxData[ RowBox[{"Visuals", "[", - RowBox[{"\"\\"", ",", + RowBox[{"\"\\"", ",", "\"\\"", ",", RowBox[{"{", "\"\\"", "}"}]}], "]"}]], "Input", - CellChangeTimes->{{3.9309213638291893`*^9, 3.930921408318699*^9}}, - CellLabel->"In[40]:=",ExpressionUUID->"623517c9-0b0a-43b7-941c-607589cb47ec"], + CellChangeTimes->{{3.9309213638291893`*^9, 3.930921408318699*^9}, + 3.9354238562198133`*^9}, + CellLabel->"In[6]:=",ExpressionUUID->"623517c9-0b0a-43b7-941c-607589cb47ec"], Cell[BoxData[ Graphics3DBox[{ - {RGBColor[0.9999669336121517, 0.957806585512984, 0.28830420784733946`], - Opacity[0.1], SphereBox[{0, 0, 0}, 9.1]}, - {RGBColor[0.3248443069364404, 0.6150261091426228, 0.006485477421444896], - Opacity[0.1], SphereBox[{0, 0, 0}, 6.1]}}]], "Output", + {Opacity[0.1], SphereBox[{0, 0, 0}, 9.1]}, + {Opacity[0.1], SphereBox[{0, 0, 0}, 6.1]}}]], "Output", CellChangeTimes->{ 3.93092140928104*^9, 3.930995708508224*^9, 3.930995999878187*^9, 3.930996576802916*^9, 3.930996907051785*^9, 3.930997612674156*^9, 3.931005382662628*^9, {3.931008610552994*^9, 3.931008629581517*^9}, 3.9310086961250677`*^9, 3.931008747296633*^9, 3.931008986563098*^9, 3.9310090761618843`*^9, 3.931009114241016*^9, 3.93100919847633*^9, - 3.931009377767623*^9}, - CellLabel->"Out[40]=",ExpressionUUID->"928e908c-d246-44d0-8e3f-99106d65c374"] + 3.931009377767623*^9, {3.935423877714003*^9, 3.935423899792832*^9}}, + CellLabel->"Out[6]=",ExpressionUUID->"457c7840-21be-418e-b2d7-20c799d31837"] }, Open ]] }, Open ]], @@ -478,10 +410,10 @@ Cell[BoxData[ RowBox[{"{", RowBox[{ RowBox[{"Visuals", "[", - RowBox[{"\"\\"", ",", + RowBox[{"\"\\"", ",", "\"\\"", ",", RowBox[{"{", "}"}]}], "]"}], ",", "\[IndentingNewLine]", RowBox[{"Visuals", "[", - RowBox[{"\"\\"", ",", + RowBox[{"\"\\"", ",", "\"\\"", ",", RowBox[{"{", RowBox[{ "\"\\"", ",", "\"\\"", ",", @@ -492,56 +424,32 @@ Cell[BoxData[ "}"}]}], "]"}]}], "}"}], 
"]"}]], "Input", CellChangeTimes->{{3.9308473708407297`*^9, 3.9308473846505938`*^9}, { 3.9308474813020153`*^9, 3.930847490639382*^9}, {3.930847780142088*^9, - 3.93084785043423*^9}, {3.9308478814387417`*^9, 3.9308479120262003`*^9}}, + 3.93084785043423*^9}, {3.9308478814387417`*^9, 3.9308479120262003`*^9}, { + 3.935423858727695*^9, 3.935423860187827*^9}}, CellLabel->"In[7]:=",ExpressionUUID->"d7a1ae22-5013-434c-9f40-0ccdc86a3ce7"], Cell[BoxData[ GraphicsBox[{{}, {InsetBox[ Graphics3DBox[{ - {RGBColor[ - 0.33604086192392857`, 0.9653758014770579, 0.8082011998173695], - Opacity[0.1], SphereBox[{0, 0, 0}, 6478000]}, - {RGBColor[ - 0.7454100290394681, 0.7665909595605325, 0.16392086034407471`], - Opacity[0.1], SphereBox[{0, 0, 0}, 6371324]}, - {RGBColor[ - 0.7454100290394681, 0.7665909595605325, 0.16392086034407471`], - Opacity[0.1], SphereBox[{0, 0, 0}, 6356000]}, - {RGBColor[ - 0.17011058031535153`, 0.08842194355669797, 0.22208792588679205`], - Opacity[0.1], SphereBox[{0, 0, 0}, 6346600]}, - {RGBColor[ - 0.17011058031535153`, 0.08842194355669797, 0.22208792588679205`], - Opacity[0.1], SphereBox[{0, 0, 0}, 6151000]}, - {RGBColor[ - 0.17011058031535153`, 0.08842194355669797, 0.22208792588679205`], - Opacity[0.1], SphereBox[{0, 0, 0}, 5971000]}, - {RGBColor[ - 0.17011058031535153`, 0.08842194355669797, 0.22208792588679205`], - Opacity[0.1], SphereBox[{0, 0, 0}, 5771000]}, - {RGBColor[ - 0.17011058031535153`, 0.08842194355669797, 0.22208792588679205`], - Opacity[0.1], SphereBox[{0, 0, 0}, 5701000]}, - {RGBColor[0.7125743490533816, 0.6497192986538929, 0.3566876197914912], - Opacity[0.1], SphereBox[{0, 0, 0}, 3480000]}, - {RGBColor[0.805721607371412, 0.9209617322598633, 0.8867540239707707], - Opacity[0.1], SphereBox[{0, 0, 0}, 1221500]}, - {RGBColor[ - 0.33604086192392857`, 0.9653758014770579, 0.8082011998173695], - Opacity[0.1], CylinderBox[{{0, 0, 6371223}, {0, 0, 6371245}}, 11]}, - {RGBColor[ - 0.3987342262275555, 0.45800648525877863`, 0.0818749710267983], - Opacity[0.1], + 
{Opacity[0.1], SphereBox[{0, 0, 0}, 6478000]}, + {Opacity[0.1], SphereBox[{0, 0, 0}, 6371324]}, + {Opacity[0.1], SphereBox[{0, 0, 0}, 6356000]}, + {Opacity[0.1], SphereBox[{0, 0, 0}, 6346600]}, + {Opacity[0.1], SphereBox[{0, 0, 0}, 6151000]}, + {Opacity[0.1], SphereBox[{0, 0, 0}, 5971000]}, + {Opacity[0.1], SphereBox[{0, 0, 0}, 5771000]}, + {Opacity[0.1], SphereBox[{0, 0, 0}, 5701000]}, + {Opacity[0.1], SphereBox[{0, 0, 0}, 3480000]}, + {Opacity[0.1], SphereBox[{0, 0, 0}, 1221500]}, + {Opacity[0.1], CylinderBox[{{0, 0, 6371223}, {0, 0, 6371245}}, 11]}, + {Opacity[0.1], CylinderBox[{{0, 0, 6.37122785*^6}, {0, 0, 6.37124015*^6}}, 3.82]}}], {192., -205.275}, ImageScaled[{0.5, 0.5}], {360, 391}, BaseStyle->{Graphics3DBoxOptions -> {SphericalRegion -> False}}, ContentSelectable->True], InsetBox[ Graphics3DBox[{ - {RGBColor[0.5589070573347059, 0.5978054217068245, 0.7989344230742892], - Opacity[0.1], CylinderBox[{{0, 0, 6371223}, {0, 0, 6371245}}, 11]}, - {RGBColor[ - 0.48018618880404773`, 0.0759986548288627, 0.5876392247182236], - Opacity[0.1], + {Opacity[0.1], CylinderBox[{{0, 0, 6371223}, {0, 0, 6371245}}, 11]}, + {Opacity[0.1], CylinderBox[{{0, 0, 6.37122785*^6}, {0, 0, 6.37124015*^6}}, 3.82]}}], {576., -205.275}, ImageScaled[{0.5, 0.5}], {360, 391}, BaseStyle->{Graphics3DBoxOptions -> {SphericalRegion -> False}}, @@ -552,8 +460,9 @@ Cell[BoxData[ PlotRangePadding->{6, 5}]], "Output", CellChangeTimes->{{3.930847372845573*^9, 3.930847385329747*^9}, 3.9308474913439817`*^9, 3.93084785162274*^9, {3.930847888148061*^9, - 3.9308479131871243`*^9}, 3.930913893848425*^9}, - CellLabel->"Out[7]=",ExpressionUUID->"eb22797f-f400-4d3d-8fbd-b073c6b6fe6c"] + 3.9308479131871243`*^9}, 3.930913893848425*^9, {3.935423878748316*^9, + 3.9354239008174686`*^9}}, + CellLabel->"Out[7]=",ExpressionUUID->"2dbd9e98-8740-42be-9dc3-422765729334"] }, Open ]], Cell[CellGroupData[{ @@ -563,10 +472,10 @@ Cell[BoxData[ RowBox[{"{", RowBox[{ RowBox[{"Visuals", "[", - RowBox[{"\"\\"", ",", + 
RowBox[{"\"\\"", ",", "\"\\"", ",", RowBox[{"{", "}"}]}], "]"}], ",", "\[IndentingNewLine]", RowBox[{"Visuals", "[", - RowBox[{"\"\\"", ",", + RowBox[{"\"\\"", ",", "\"\\"", ",", RowBox[{"{", RowBox[{ "\"\\"", ",", "\"\\"", ",", @@ -575,43 +484,29 @@ Cell[BoxData[ "\"\\"", ",", "\"\\"", ",", "\"\\"", ",", "\"\\""}], "}"}]}], "]"}]}], "}"}], "]"}]], "Input", - CellChangeTimes->{{3.93084793508619*^9, 3.9308479993874903`*^9}}, + CellChangeTimes->{{3.93084793508619*^9, 3.9308479993874903`*^9}, { + 3.935423862154442*^9, 3.935423863098934*^9}}, CellLabel->"In[8]:=",ExpressionUUID->"c2600694-ff87-45e8-aa45-7e0762877b49"], Cell[BoxData[ GraphicsBox[{{}, {InsetBox[ Graphics3DBox[{ - {RGBColor[0.3854333319682095, 0.2892742647377069, 0.8094932070709475], - Opacity[0.1], CuboidBox[{-7, -29.1, 6369838}, {7, 29.1, 6369850}]}, - {RGBColor[0.4929121197143007, 0.8219087232339506, 0.05781061434027657], - Opacity[0.1], SphereBox[{0, 0, 0}, 6478000]}, - {RGBColor[ - 0.7307987869233605, 0.8513437211581723, 0.31005230536151496`], - Opacity[0.1], SphereBox[{0, 0, 0}, 6371324]}, - {RGBColor[ - 0.7307987869233605, 0.8513437211581723, 0.31005230536151496`], - Opacity[0.1], SphereBox[{0, 0, 0}, 6356000]}, - {RGBColor[0.1964479760083393, 0.8963638500949567, 0.5827067954797789], - Opacity[0.1], SphereBox[{0, 0, 0}, 6346600]}, - {RGBColor[0.1964479760083393, 0.8963638500949567, 0.5827067954797789], - Opacity[0.1], SphereBox[{0, 0, 0}, 6151000]}, - {RGBColor[0.1964479760083393, 0.8963638500949567, 0.5827067954797789], - Opacity[0.1], SphereBox[{0, 0, 0}, 5971000]}, - {RGBColor[0.1964479760083393, 0.8963638500949567, 0.5827067954797789], - Opacity[0.1], SphereBox[{0, 0, 0}, 5771000]}, - {RGBColor[0.1964479760083393, 0.8963638500949567, 0.5827067954797789], - Opacity[0.1], SphereBox[{0, 0, 0}, 5701000]}, - {RGBColor[ - 0.7506626862180743, 0.5253325729662548, 0.44264896991070013`], - Opacity[0.1], SphereBox[{0, 0, 0}, 3480000]}, - {RGBColor[0.786628620543359, 0.6894960380159745, 
0.5808208645108668], - Opacity[0.1], SphereBox[{0, 0, 0}, 1221500]}}], {192., -205.275}, + {Opacity[0.1], CuboidBox[{-7, -29.1, 6369838}, {7, 29.1, 6369850}]}, + {Opacity[0.1], SphereBox[{0, 0, 0}, 6478000]}, + {Opacity[0.1], SphereBox[{0, 0, 0}, 6371324]}, + {Opacity[0.1], SphereBox[{0, 0, 0}, 6356000]}, + {Opacity[0.1], SphereBox[{0, 0, 0}, 6346600]}, + {Opacity[0.1], SphereBox[{0, 0, 0}, 6151000]}, + {Opacity[0.1], SphereBox[{0, 0, 0}, 5971000]}, + {Opacity[0.1], SphereBox[{0, 0, 0}, 5771000]}, + {Opacity[0.1], SphereBox[{0, 0, 0}, 5701000]}, + {Opacity[0.1], SphereBox[{0, 0, 0}, 3480000]}, + {Opacity[0.1], SphereBox[{0, 0, 0}, 1221500]}}], {192., -205.275}, ImageScaled[{0.5, 0.5}], {360, 391}, BaseStyle->{Graphics3DBoxOptions -> {SphericalRegion -> False}}, ContentSelectable->True], InsetBox[ Graphics3DBox[ - {RGBColor[0.7680744425973096, 0.829550618397942, 0.35629067827826955`], - Opacity[0.1], CuboidBox[{-7, -29.1, 6369838}, {7, 29.1, 6369850}]}], {576., -205.275}, + {Opacity[0.1], CuboidBox[{-7, -29.1, 6369838}, {7, 29.1, 6369850}]}], {576., -205.275}, ImageScaled[{0.5, 0.5}], {360, 391}, BaseStyle->{Graphics3DBoxOptions -> {SphericalRegion -> False}}, ContentSelectable->True]}, {}}, @@ -620,8 +515,9 @@ Cell[BoxData[ PlotRange->{{0, 768.}, {-410.55, 0}}, PlotRangePadding->{6, 5}]], "Output", CellChangeTimes->{{3.930847943143111*^9, 3.930847949745967*^9}, { - 3.930847986070044*^9, 3.930848000974144*^9}, 3.930913896769115*^9}, - CellLabel->"Out[8]=",ExpressionUUID->"f81cbe95-f330-439e-bb2a-5afd2a6189ed"] + 3.930847986070044*^9, 3.930848000974144*^9}, 3.930913896769115*^9, { + 3.935423879951643*^9, 3.935423902016954*^9}}, + CellLabel->"Out[8]=",ExpressionUUID->"2cf3f729-3dc6-4032-b2a2-e3db2c56bfd4"] }, Open ]], Cell[CellGroupData[{ @@ -631,10 +527,10 @@ Cell[BoxData[ RowBox[{"{", RowBox[{ RowBox[{"Visuals", "[", - RowBox[{"\"\\"", ",", + RowBox[{"\"\\"", ",", "\"\\"", ",", RowBox[{"{", "}"}]}], "]"}], ",", "\[IndentingNewLine]", RowBox[{"Visuals", "[", - 
RowBox[{"\"\\"", ",", + RowBox[{"\"\\"", ",", "\"\\"", ",", RowBox[{"{", RowBox[{ "\"\\"", ",", "\"\\"", ",", @@ -643,48 +539,30 @@ Cell[BoxData[ "\"\\"", ",", "\"\\"", ",", "\"\\"", ",", "\"\\""}], "}"}]}], "]"}]}], "}"}], "]"}]], "Input", - CellChangeTimes->{{3.930848012050405*^9, 3.930848018088457*^9}}, + CellChangeTimes->{{3.930848012050405*^9, 3.930848018088457*^9}, { + 3.935423864989311*^9, 3.935423865685445*^9}}, CellLabel->"In[9]:=",ExpressionUUID->"bed39234-fab0-4316-b468-3b5240c92626"], Cell[BoxData[ GraphicsBox[{{}, {InsetBox[ Graphics3DBox[{ - {RGBColor[ - 0.7265281738369762, 0.23654605801110473`, 0.4513669482600129], - Opacity[0.1], SphereBox[{0, 0, 0}, 6478000]}, - {RGBColor[0.8285668626756055, 0.6123076208047464, 0.3756755558784768], - Opacity[0.1], SphereBox[{0, 0, 0}, 6371324]}, - {RGBColor[0.8285668626756055, 0.6123076208047464, 0.3756755558784768], - Opacity[0.1], SphereBox[{0, 0, 0}, 6356000]}, - {RGBColor[ - 0.9161148893717073, 0.27921300626842793`, 0.6143387394926221], - Opacity[0.1], SphereBox[{0, 0, 0}, 6346600]}, - {RGBColor[ - 0.9161148893717073, 0.27921300626842793`, 0.6143387394926221], - Opacity[0.1], SphereBox[{0, 0, 0}, 6151000]}, - {RGBColor[ - 0.9161148893717073, 0.27921300626842793`, 0.6143387394926221], - Opacity[0.1], SphereBox[{0, 0, 0}, 5971000]}, - {RGBColor[ - 0.9161148893717073, 0.27921300626842793`, 0.6143387394926221], - Opacity[0.1], SphereBox[{0, 0, 0}, 5771000]}, - {RGBColor[ - 0.9161148893717073, 0.27921300626842793`, 0.6143387394926221], - Opacity[0.1], SphereBox[{0, 0, 0}, 5701000]}, - {RGBColor[0.05484206767198696, 0.5458487029750789, 0.4295754281321804], - Opacity[0.1], SphereBox[{0, 0, 0}, 3480000]}, - {RGBColor[ - 0.5357009384874412, 0.9447267464809403, 0.19597814354442122`], - Opacity[0.1], SphereBox[{0, 0, 0}, 1221500]}, - {RGBColor[0.2516965757577794, 0.619361342638632, 0.9908314833376688], - Opacity[0.1], + {Opacity[0.1], SphereBox[{0, 0, 0}, 6478000]}, + {Opacity[0.1], SphereBox[{0, 0, 0}, 6371324]}, + 
{Opacity[0.1], SphereBox[{0, 0, 0}, 6356000]}, + {Opacity[0.1], SphereBox[{0, 0, 0}, 6346600]}, + {Opacity[0.1], SphereBox[{0, 0, 0}, 6151000]}, + {Opacity[0.1], SphereBox[{0, 0, 0}, 5971000]}, + {Opacity[0.1], SphereBox[{0, 0, 0}, 5771000]}, + {Opacity[0.1], SphereBox[{0, 0, 0}, 5701000]}, + {Opacity[0.1], SphereBox[{0, 0, 0}, 3480000]}, + {Opacity[0.1], SphereBox[{0, 0, 0}, 1221500]}, + {Opacity[0.1], CylinderBox[{{0, 0, 6370644}, {0, 0, 6370704}}, 37]}}], {192., -205.275}, ImageScaled[{0.5, 0.5}], {360, 391}, BaseStyle->{Graphics3DBoxOptions -> {SphericalRegion -> False}}, ContentSelectable->True], InsetBox[ Graphics3DBox[ - {RGBColor[0.3817965102765275, 0.560240567502444, 0.9641154785852963], - Opacity[0.1], CylinderBox[{{0, 0, 6370644}, {0, 0, 6370704}}, 37]}], {576., -205.275}, + {Opacity[0.1], CylinderBox[{{0, 0, 6370644}, {0, 0, 6370704}}, 37]}], {576., -205.275}, ImageScaled[{0.5, 0.5}], {360, 391}, BaseStyle->{Graphics3DBoxOptions -> {SphericalRegion -> False}}, ContentSelectable->True]}, {}}, @@ -692,8 +570,10 @@ Cell[BoxData[ UpTo[600], Automatic}, PlotRange->{{0, 768.}, {-410.55, 0}}, PlotRangePadding->{6, 5}]], "Output", - CellChangeTimes->{3.930848021055759*^9, 3.930913899366457*^9}, - CellLabel->"Out[9]=",ExpressionUUID->"b92d23cd-00b8-4bec-934c-3505cc26fb17"] + CellChangeTimes->{ + 3.930848021055759*^9, 3.930913899366457*^9, {3.935423881025281*^9, + 3.935423903319018*^9}}, + CellLabel->"Out[9]=",ExpressionUUID->"cc5e32c9-e802-4f29-bdca-7e0da8312b3d"] }, Open ]], Cell[CellGroupData[{ @@ -703,10 +583,10 @@ Cell[BoxData[ RowBox[{"{", RowBox[{ RowBox[{"Visuals", "[", - RowBox[{"\"\\"", ",", + RowBox[{"\"\\"", ",", "\"\\"", ",", RowBox[{"{", "}"}]}], "]"}], ",", "\[IndentingNewLine]", RowBox[{"Visuals", "[", - RowBox[{"\"\\"", ",", + RowBox[{"\"\\"", ",", "\"\\"", ",", RowBox[{"{", RowBox[{ "\"\\"", ",", "\"\\"", ",", @@ -717,66 +597,45 @@ Cell[BoxData[ "\"\\"", ",", "\"\\""}], "}"}]}], "]"}]}], "}"}], "]"}]], "Input", 
CellChangeTimes->{{3.930848034137143*^9, 3.930848038040841*^9}, { - 3.930848069237928*^9, 3.930848095894021*^9}}, + 3.930848069237928*^9, 3.930848095894021*^9}, {3.935423867541347*^9, + 3.935423870895969*^9}}, CellLabel->"In[10]:=",ExpressionUUID->"8d947ccf-7f8c-4f79-90f3-3e4c0822da4d"], Cell[BoxData[ GraphicsBox[{{}, {InsetBox[ Graphics3DBox[{ - {RGBColor[0.1452511204960758, 0.1857823034997803, 0.8210871966901028], - Opacity[0.1], SphereBox[{0, 0, 0}, 6478000]}, - {RGBColor[ - 0.009264329959300932, 0.6010366846363033, 0.9777323081774445], - Opacity[0.1], SphereBox[{0, 0, 0}, 6374134]}, - {RGBColor[ - 0.009264329959300932, 0.6010366846363033, 0.9777323081774445], - Opacity[0.1], SphereBox[{0, 0, 0}, 6373934]}, - {RGBColor[0.8885868418790182, 0.0606888172852702, 0.1352350531160138], - Opacity[0.1], SphereBox[{0, 0, 0}, 6371324]}, - {RGBColor[0.8885868418790182, 0.0606888172852702, 0.1352350531160138], - Opacity[0.1], SphereBox[{0, 0, 0}, 6356000]}, - {RGBColor[ - 0.24378268391346758`, 0.9872974256150269, 0.9000805642525402], - Opacity[0.1], SphereBox[{0, 0, 0}, 6346600]}, - {RGBColor[ - 0.24378268391346758`, 0.9872974256150269, 0.9000805642525402], - Opacity[0.1], SphereBox[{0, 0, 0}, 6151000]}, - {RGBColor[ - 0.24378268391346758`, 0.9872974256150269, 0.9000805642525402], - Opacity[0.1], SphereBox[{0, 0, 0}, 5971000]}, - {RGBColor[ - 0.24378268391346758`, 0.9872974256150269, 0.9000805642525402], - Opacity[0.1], SphereBox[{0, 0, 0}, 5771000]}, - {RGBColor[ - 0.24378268391346758`, 0.9872974256150269, 0.9000805642525402], - Opacity[0.1], SphereBox[{0, 0, 0}, 5701000]}, - {RGBColor[ - 0.14910337100201443`, 0.12436833265249714`, 0.4709167434095338], - Opacity[0.1], SphereBox[{0, 0, 0}, 3480000]}, - {RGBColor[ - 0.14682213790113297`, 0.8224387401991233, 0.15218462186818482`], - Opacity[0.1], SphereBox[{0, 0, 0}, 1221500]}, - {RGBColor[ - 0.009264329959300932, 0.6010366846363033, 0.9777323081774445], - Opacity[0.1], CylinderBox[{{0, 0, 6371684}, {0, 0, 6372684}}, 564.19], 
- {Opacity[0.1], CylinderBox[{{0, 0, -500}, {0, 0, 500}}, 564.19]}}}], {186.03333333333333, -226.8}, - ImageScaled[{0.5, 0.5}], {360, 432}, + {Opacity[0.1], SphereBox[{0, 0, 0}, 6478000]}, + {Opacity[0.1], SphereBox[{0, 0, 0}, 6374134]}, + {Opacity[0.1], SphereBox[{0, 0, 0}, 6373934]}, + {Opacity[0.1], SphereBox[{0, 0, 0}, 6371324]}, + {Opacity[0.1], SphereBox[{0, 0, 0}, 6356000]}, + {Opacity[0.1], SphereBox[{0, 0, 0}, 6346600]}, + {Opacity[0.1], SphereBox[{0, 0, 0}, 6151000]}, + {Opacity[0.1], SphereBox[{0, 0, 0}, 5971000]}, + {Opacity[0.1], SphereBox[{0, 0, 0}, 5771000]}, + {Opacity[0.1], SphereBox[{0, 0, 0}, 5701000]}, + {Opacity[0.1], SphereBox[{0, 0, 0}, 3480000]}, + {Opacity[0.1], SphereBox[{0, 0, 0}, 1221500]}, + {Opacity[0.1], + CylinderBox[{{0, 0, 6371684}, {0, 0, 6372684}}, 564.19]}}], {192., -205.275}, + ImageScaled[{0.5, 0.5}], {360, 391}, BaseStyle->{Graphics3DBoxOptions -> {SphericalRegion -> False}}, ContentSelectable->True], InsetBox[ Graphics3DBox[ - {RGBColor[0.3678945716866675, 0.40177799905618894`, 0.4900159272427156], - Opacity[0.1], CylinderBox[{{0, 0, 6371684}, {0, 0, 6372684}}, 564.19], - {Opacity[0.1], CylinderBox[{{0, 0, -500}, {0, 0, 500}}, 564.19]}}], {379.1, -226.8}, - ImageScaled[{0.5, 0.5}], {2, 432}, + {Opacity[0.1], + CylinderBox[{{0, 0, 6371684}, {0, 0, 6372684}}, 564.19]}], {576., -205.275}, + ImageScaled[{0.5, 0.5}], {360, 391}, BaseStyle->{Graphics3DBoxOptions -> {SphericalRegion -> False}}, ContentSelectable->True]}, {}}, ImageSize->{ UpTo[600], Automatic}, - PlotRange->{{0, 386.1333333333333}, {-453.6, 0}}, + PlotRange->{{0, 768.}, {-410.55, 0}}, PlotRangePadding->{6, 5}]], "Output", - CellChangeTimes->{3.9308480391473513`*^9, 3.930848097045261*^9, - 3.930848147809525*^9, 3.9308483503991203`*^9, 3.930913901638027*^9}, - CellLabel->"Out[10]=",ExpressionUUID->"83fa702f-558a-482f-8040-ab82e433ee6c"] + CellChangeTimes->{ + 3.9308480391473513`*^9, 3.930848097045261*^9, 3.930848147809525*^9, + 3.9308483503991203`*^9, 
3.930913901638027*^9, {3.9354238821424294`*^9, + 3.935423904408852*^9}}, + CellLabel->"Out[10]=",ExpressionUUID->"f8fb5630-ccc4-485a-9544-6a2b09e4f261"] }, Open ]] }, Open ]] }, Open ]] @@ -784,7 +643,7 @@ Cell[BoxData[ }, WindowSize->{849, 1027}, WindowMargins->{{0, Automatic}, {Automatic, -124}}, -FrontEndVersion->"13.3 for Mac OS X ARM (64-bit) (July 24, 2023)", +FrontEndVersion->"13.2 for Mac OS X ARM (64-bit) (January 30, 2023)", StyleDefinitions->"Default.nb", ExpressionUUID->"180ddf86-c472-42f5-b29b-4c869749c105" ] @@ -806,46 +665,46 @@ Cell[CellGroupData[{ Cell[1136, 40, 255, 4, 45, "Subsubsection",ExpressionUUID->"1828e000-2b55-4658-a39d-d3ec0e65319c"], Cell[1394, 46, 237, 4, 35, "Text",ExpressionUUID->"b85c7e71-3fe5-4770-92d2-3b8310f025c5"], Cell[1634, 52, 390, 8, 30, "Input",ExpressionUUID->"01061750-99ce-4ee2-ba52-6ccdbfbf6f3b"], -Cell[2027, 62, 307, 5, 30, "Input",ExpressionUUID->"a6dc3d65-b39c-4f5d-8e40-9ac1c63e10c6"] +Cell[2027, 62, 306, 5, 30, "Input",ExpressionUUID->"a6dc3d65-b39c-4f5d-8e40-9ac1c63e10c6"] }, Open ]], Cell[CellGroupData[{ -Cell[2371, 72, 266, 4, 53, "Subsection",ExpressionUUID->"5b67d770-d2b6-44d9-871c-110d0d66dc83"], +Cell[2370, 72, 266, 4, 53, "Subsection",ExpressionUUID->"5b67d770-d2b6-44d9-871c-110d0d66dc83"], Cell[CellGroupData[{ -Cell[2662, 80, 169, 3, 45, "Subsubsection",ExpressionUUID->"b4bb631f-eef9-40d0-aa86-e66af2411a5f"], +Cell[2661, 80, 169, 3, 45, "Subsubsection",ExpressionUUID->"b4bb631f-eef9-40d0-aa86-e66af2411a5f"], Cell[CellGroupData[{ -Cell[2856, 87, 432, 9, 30, "Input",ExpressionUUID->"758f5770-9701-4c2e-9c28-f1655b0a6965"], -Cell[3291, 98, 6350, 106, 294, "Output",ExpressionUUID->"8a3d7fe4-05dd-4061-b3db-44a8b527c5fa"] +Cell[2855, 87, 497, 9, 30, "Input",ExpressionUUID->"758f5770-9701-4c2e-9c28-f1655b0a6965"], +Cell[3355, 98, 3927, 75, 294, "Output",ExpressionUUID->"5609e820-8728-41eb-804e-82e36e1de646"] }, Open ]], Cell[CellGroupData[{ -Cell[9678, 209, 372, 8, 30, 
"Input",ExpressionUUID->"85f2f25e-059b-47e1-b7b7-1cd9a7fb936b"], -Cell[10053, 219, 4820, 88, 212, "Output",ExpressionUUID->"5a905629-0c22-4620-b59c-02655db9bf79"] +Cell[7319, 178, 441, 9, 30, "Input",ExpressionUUID->"85f2f25e-059b-47e1-b7b7-1cd9a7fb936b"], +Cell[7763, 189, 2966, 64, 211, "Output",ExpressionUUID->"22bf564d-0ceb-4fae-ab91-6dd16c0c0d80"] }, Open ]], Cell[CellGroupData[{ -Cell[14910, 312, 456, 10, 30, "Input",ExpressionUUID->"45186205-4e3c-43da-8cf6-942bd6843b62"], -Cell[15369, 324, 6834, 113, 226, "Output",ExpressionUUID->"7aa043b8-c09a-4a90-8f3b-914c780b39b6"] +Cell[10766, 258, 520, 10, 30, "Input",ExpressionUUID->"45186205-4e3c-43da-8cf6-942bd6843b62"], +Cell[11289, 270, 5805, 100, 226, "Output",ExpressionUUID->"e0160564-d0ed-4637-bf6a-db4a523dd2d7"] }, Open ]], Cell[CellGroupData[{ -Cell[22240, 442, 287, 5, 30, "Input",ExpressionUUID->"623517c9-0b0a-43b7-941c-607589cb47ec"], -Cell[22530, 449, 767, 13, 408, "Output",ExpressionUUID->"928e908c-d246-44d0-8e3f-99106d65c374"] +Cell[17131, 375, 333, 6, 30, "Input",ExpressionUUID->"623517c9-0b0a-43b7-941c-607589cb47ec"], +Cell[17467, 383, 659, 11, 408, "Output",ExpressionUUID->"457c7840-21be-418e-b2d7-20c799d31837"] }, Open ]] }, Open ]], Cell[CellGroupData[{ -Cell[23346, 468, 175, 3, 45, "Subsubsection",ExpressionUUID->"7e1653cb-c490-4521-8bec-792810d94dea"], +Cell[18175, 400, 175, 3, 45, "Subsubsection",ExpressionUUID->"7e1653cb-c490-4521-8bec-792810d94dea"], Cell[CellGroupData[{ -Cell[23546, 475, 1015, 20, 94, "Input",ExpressionUUID->"d7a1ae22-5013-434c-9f40-0ccdc86a3ce7"], -Cell[24564, 497, 2968, 58, 341, "Output",ExpressionUUID->"eb22797f-f400-4d3d-8fbd-b073c6b6fe6c"] +Cell[18375, 407, 1102, 21, 94, "Input",ExpressionUUID->"d7a1ae22-5013-434c-9f40-0ccdc86a3ce7"], +Cell[19480, 430, 1779, 34, 341, "Output",ExpressionUUID->"2dbd9e98-8740-42be-9dc3-422765729334"] }, Open ]], Cell[CellGroupData[{ -Cell[27569, 560, 865, 18, 94, "Input",ExpressionUUID->"c2600694-ff87-45e8-aa45-7e0762877b49"], -Cell[28437, 
580, 2446, 43, 341, "Output",ExpressionUUID->"f81cbe95-f330-439e-bb2a-5afd2a6189ed"] +Cell[21296, 469, 952, 19, 94, "Input",ExpressionUUID->"c2600694-ff87-45e8-aa45-7e0762877b49"], +Cell[22251, 490, 1515, 29, 341, "Output",ExpressionUUID->"2cf3f729-3dc6-4032-b2a2-e3db2c56bfd4"] }, Open ]], Cell[CellGroupData[{ -Cell[30920, 628, 864, 18, 94, "Input",ExpressionUUID->"bed39234-fab0-4316-b468-3b5240c92626"], -Cell[31787, 648, 2423, 47, 341, "Output",ExpressionUUID->"b92d23cd-00b8-4bec-934c-3505cc26fb17"] +Cell[23803, 524, 951, 19, 94, "Input",ExpressionUUID->"bed39234-fab0-4316-b468-3b5240c92626"], +Cell[24757, 545, 1453, 30, 341, "Output",ExpressionUUID->"cc5e32c9-e802-4f29-bdca-7e0da8312b3d"] }, Open ]], Cell[CellGroupData[{ -Cell[34247, 700, 991, 20, 115, "Input",ExpressionUUID->"8d947ccf-7f8c-4f79-90f3-3e4c0822da4d"], -Cell[35241, 722, 2974, 56, 449, "Output",ExpressionUUID->"83fa702f-558a-482f-8040-ab82e433ee6c"] +Cell[26247, 580, 1078, 21, 115, "Input",ExpressionUUID->"8d947ccf-7f8c-4f79-90f3-3e4c0822da4d"], +Cell[27328, 603, 1656, 34, 341, "Output",ExpressionUUID->"f8fb5630-ccc4-485a-9544-6a2b09e4f261"] }, Open ]] }, Open ]] }, Open ]] diff --git a/resources/detectors/visuals/DetectorVisuals.wl b/resources/detectors/visuals/DetectorVisuals.wl index e27833df8..f0f1b006e 100644 --- a/resources/detectors/visuals/DetectorVisuals.wl +++ b/resources/detectors/visuals/DetectorVisuals.wl @@ -6,10 +6,10 @@ Visuals::usage = "Visuals[Experiment, Modules] displays the experiment as a 3D m Begin["`Private`"]; -Visuals[Experiment_, Modules_] := +Visuals[Experiment_, Version_, Modules_] := Module[{exp = Experiment, mods = Modules, dimensionFile, materialFile, keysDim, materials, color, boxes, polygons, cylinders, spheres,numd,visual}, - dimensionFile = Import[FileNames[All, FileNameJoin[{NotebookDirectory[], "/../densities", exp}]][[-1]]]; - materialFile = Import[FileNames[All, FileNameJoin[{NotebookDirectory[], "/../materials", exp}]][[-1]]]; + dimensionFile = 
Import[FileNames[FileNameJoin[{NotebookDirectory[], "../", exp, StringJoin[exp,"-",Version], StringJoin["/*densities*.dat"]}]][[-1]]]; + materialFile = Import[FileNames[FileNameJoin[{NotebookDirectory[], "../", exp, StringJoin[exp,"-",Version], StringJoin["/*materials_*.dat"]}]][[-1]]]; dimensionFile = Delete[dimensionFile, Position[dimensionFile, _?(#[[1]] == "#" &)]]; dimensionFile = Delete[dimensionFile, Position[dimensionFile, {}]]; diff --git a/resources/detectors/visuals/README.md b/resources/detectors/visuals/README.md index 94b38be0d..79e3f8794 100644 --- a/resources/detectors/visuals/README.md +++ b/resources/detectors/visuals/README.md @@ -2,21 +2,21 @@ `DetectorVisuals.wl` is a Wolfram Language package that takes the detector dimension files in `/densities/` and material file in `/materials/` to create a 3D model in `MATHEMATICA`. -To call functions in the package, save the working notebook in `\visuals\` and add `\path\to\visuals` to `$Path` so `MATHEMATICA` can find the package file. +To call functions in the package, save the working notebook in `\visuals\` and add `\path\to\visuals` to `$Path` so `MATHEMATICA` can find the package file. -If the notebook directory is correctly saved in `\visuals\`, run +If the notebook directory is correctly saved in `\visuals\`, run ``` AppendTo[$Path, NotebookDirectory[]]; ``` -to append `\path\to\visuals\` to `MATHEMATICA` so it has access to the package. Next, run +to append `\path\to\visuals\` to `MATHEMATICA` so it has access to the package. Next, run ``` << DetectorVisuals` ``` -to import the package. To create visualisation for an experiment, run +to import the package. 
To create visualisation for an experiment, run ``` Visuals["YourExperiment", {Excluded Modules}] From 1a35e9deb7f091e49b03bde111238236f716417a Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Sun, 15 Sep 2024 15:23:54 -0600 Subject: [PATCH 65/85] Moving paper plots --- .../PaperPlots.ipynb | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename resources/examples/{AdditionalPaperPlots => additional_paper_plots}/PaperPlots.ipynb (100%) diff --git a/resources/examples/AdditionalPaperPlots/PaperPlots.ipynb b/resources/examples/additional_paper_plots/PaperPlots.ipynb similarity index 100% rename from resources/examples/AdditionalPaperPlots/PaperPlots.ipynb rename to resources/examples/additional_paper_plots/PaperPlots.ipynb From fb0bdab9d614ef8c10bede7854524c81bc6e9c6d Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Sun, 15 Sep 2024 16:34:35 -0600 Subject: [PATCH 66/85] primary_type should be an argument --- python/Injector.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/python/Injector.py b/python/Injector.py index 8ca27cbff..d7096b594 100644 --- a/python/Injector.py +++ b/python/Injector.py @@ -34,6 +34,7 @@ def __init__( number_of_events: Optional[int] = None, detector_model: Optional[_detector.DetectorModel] = None, seed: Optional[int] = None, + primary_type: Optional[_dataclasses.ParticleType] = None, primary_interactions: Dict[_dataclasses.ParticleType, List[Union[_interactions.CrossSection, _interactions.Decay]]] = None, primary_injection_distributions: List[_distributions.PrimaryInjectionDistribution] = None, secondary_interactions: Optional[Dict[_dataclasses.ParticleType, List[Union[_interactions.CrossSection, _interactions.Decay]]]] = None, @@ -60,6 +61,8 @@ def __init__( self.__number_of_events = number_of_events if detector_model is not None: self.__detector_model = detector_model + if primary_type is not None: + self.__primary_type = primary_type if primary_interactions is not None: self.__primary_interactions = primary_interactions 
if primary_injection_distributions is not None: From 253954a1603a3530cfa87a992eb0eae45bc88c79 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Sun, 15 Sep 2024 16:35:05 -0600 Subject: [PATCH 67/85] Add wrapper for Weighter --- python/Weighter.py | 310 +++++++++++++++++++++++++++++++++++++++++++++ python/__init__.py | 6 + 2 files changed, 316 insertions(+) create mode 100644 python/Weighter.py diff --git a/python/Weighter.py b/python/Weighter.py new file mode 100644 index 000000000..145080bda --- /dev/null +++ b/python/Weighter.py @@ -0,0 +1,310 @@ +from . import utilities as _utilities +from . import math as _math +from . import dataclasses as _dataclasses +from . import geometry as _geometry +from . import detector as _detector +from . import interactions as _interactions +from . import distributions as _distributions +from . import injection as _injection +from . import Injector as _Injector_module + +from typing import Tuple, List, Dict, Optional, Union, Callable +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + import siren + +_Injector = _injection.Injector +_Weighter = _injection.Weighter + +_PyInjector = _Injector_module.Injector + +ParticleType = _dataclasses.ParticleType +CrossSection = _interactions.CrossSection +Decay = _interactions.Decay +DetectorModel = _detector.DetectorModel +InteractionTree = _dataclasses.InteractionTree + +class Weighter: + """ + A wrapper for the C++ Weighter class, handling event weight calculations. 
+ """ + + def __init__(self, + injectors: Optional[List[_Injector]] = None, + detector_model: Optional[DetectorModel] = None, + primary_type: Optional[_dataclasses.ParticleType] = None, + primary_interactions: Optional[Dict[_dataclasses.ParticleType, List[Union[_interactions.CrossSection, _interactions.Decay]]]] = None, + primary_physical_distributions: Optional[List[_distributions.WeightableDistribution]] = None, + secondary_interactions: Optional[Dict[_dataclasses.ParticleType, List[Union[_interactions.CrossSection, _interactions.Decay]]]] = None, + secondary_physical_distributions: Optional[Dict[_dataclasses.ParticleType, List[_distributions.WeightableDistribution]]] = None, + ): + """ + Initialize the Weighter with interactions and physical processes. + + Args: + injectors: List of injector objects. + detector_model: The detector model. + primary_type: The primary particle type. + primary_interactions: Dictionary of primary particle interactions. + primary_physical_distributions: List of primary physical distributions. + secondary_interactions: Dictionary of secondary particle interactions. + secondary_physical_distributions: Dictionary of secondary physical distributions. + + Note: + All parameters are optional and can be set later using property setters. 
+ """ + + self.__injectors = None + self.__detector_model = None + + self.__primary_type = None + self.__primary_interactions = [] + self.__primary_physical_distributions = [] + + self.__secondary_interactions = {} + self.__secondary_physical_distributions = {} + + self.__weighter = None + + if injectors is not None: + self.injectors = injectors + if detector_model is not None: + self.__detector_model = detector_model + if primary_type is not None: + self.__primary_type = primary_type + if primary_interactions is not None: + self.__primary_interactions = primary_interactions + if primary_physical_distributions is not None: + self.__primary_physical_distributions = primary_physical_distributions + if secondary_interactions is not None: + self.__secondary_interactions = secondary_interactions + if secondary_physical_distributions is not None: + self.__secondary_physical_distributions = secondary_physical_distributions + + def __initialize_weighter(self): + """ + Initialize the internal C++ Weighter object. + + This method creates the C++ Weighter object using the configured parameters. + It is called automatically when needed and should not be called directly. + + Raises: + ValueError: If any required attributes are not set. 
+ """ + + if self.__injectors is None: + raise ValueError("Injectors have not been set.") + if self.__detector_model is None: + raise ValueError("Detector model has not been set.") + if self.__primary_type is None: + raise ValueError("Primary type has not been set.") + if len(self.__primary_interactions) == 0: + raise ValueError("Primary interactions have not been set.") + if len(self.__primary_physical_distributions) == 0: + raise ValueError("Primary physical distributions have not been set.") + + injectors = [injector._Injector__injector if isinstance(injector, _PyInjector) else injector for injector in self.__injectors] + + primary_type = self.primary_type + primary_interaction_collection = _interactions.InteractionCollection( + primary_type, self.primary_interactions + ) + primary_process = _injection.PhysicalProcess( + primary_type, primary_interaction_collection + ) + primary_process.distributions = self.primary_physical_distributions + + secondary_interactions = self.secondary_interactions + secondary_physical_distributions = self.secondary_physical_distributions + + secondary_processes = [] + for secondary_type, secondary_interactions in secondary_interactions.items(): + secondary_interaction_collection = _interactions.InteractionCollection( + secondary_type, secondary_interactions + ) + secondary_process = _injection.PhysicalProcess( + secondary_type, secondary_interaction_collection + ) + if secondary_type in secondary_physical_distributions: + secondary_process.distributions = secondary_physical_distributions[secondary_type] + else: + secondary_process.distributions = [] + secondary_processes.append(secondary_process) + + self.__weighter = _Weighter( + injectors, + self.detector_model, + primary_process, + secondary_processes, + ) + + @property + def injectors(self) -> List[_Injector]: + """ + Get the list of injectors. + + Returns: + List[_Injector]: The current list of injector objects. 
+ """ + return self.__injectors + + @injectors.setter + def injectors(self, injectors: List[_Injector]): + """ + Set the list of injectors. + + Args: + injectors: A list of Injector objects. + + Raises: + ValueError: If the weighter has already been initialized. + TypeError: If the input is not a list of Injector objects. + ValueError: If any of the injectors are not initialized. + """ + + if self.__weighter is not None: + raise ValueError("Cannot set injectors after weighter has been initialized.") + if not isinstance(injectors, list): + raise TypeError("Injectors must be a list.") + if not all(isinstance(injector, (_Injector, _PyInjector)) for injector in injectors): + raise TypeError("All injectors must be of type Injector.") + if not all(injector._Injector__injector is not None for injector in injectors if isinstance(injector, _PyInjector)): + raise ValueError("All injectors must be initialized.") + self.__injectors = injectors + + @property + def detector_model(self) -> DetectorModel: + """ + Get the detector model. + + Returns: + DetectorModel: The current detector model. + """ + return self.__detector_model + + @detector_model.setter + def detector_model(self, detector_model: DetectorModel): + """ + Set the detector model. + + Args: + detector_model: The DetectorModel object to set. + + Raises: + ValueError: If the weighter has already been initialized. + TypeError: If the input is not a DetectorModel object. 
+ """ + + if self.__weighter is not None: + raise ValueError("Cannot set detector model after weighter has been initialized.") + if not isinstance(detector_model, DetectorModel): + raise TypeError("Detector model must be of type DetectorModel.") + self.__detector_model = detector_model + + @property + def primary_type(self) -> ParticleType: + return self.__primary_type + + @primary_type.setter + def primary_type(self, primary_type: ParticleType): + if self.__weighter is not None: + raise ValueError("Cannot set primary type after weighter has been initialized.") + if not isinstance(primary_type, ParticleType): + raise TypeError("Primary type must be of type ParticleType.") + self.__primary_type = primary_type + + @property + def primary_interactions(self) -> Dict[ParticleType, List[Union[CrossSection, Decay]]]: + return self.__primary_interactions + + @primary_interactions.setter + def primary_interactions(self, primary_interactions: List[Union[CrossSection, Decay]]): + if self.__weighter is not None: + raise ValueError("Cannot set primary interactions after weighter has been initialized.") + if not isinstance(primary_interactions, list): + raise TypeError("Primary interactions must be a list.") + if not all(isinstance(interaction, (CrossSection, Decay)) for interaction in primary_interactions): + raise TypeError("All interactions in primary interactions must be of type CrossSection or Decay.") + self.__primary_interactions = primary_interactions + + @property + def primary_physical_distributions(self) -> List[_distributions.WeightableDistribution]: + return self.__primary_physical_distributions + + @primary_physical_distributions.setter + def primary_physical_distributions(self, primary_physical_distributions: List[_distributions.WeightableDistribution]): + if self.__weighter is not None: + raise ValueError("Cannot set primary physical distributions after weighter has been initialized.") + if not isinstance(primary_physical_distributions, list): + raise 
TypeError("Primary physical distributions must be a list.") + if not all(isinstance(distribution, _distributions.WeightableDistribution) for distribution in primary_physical_distributions): + raise TypeError("All distributions in primary physical distributions must be of type WeightableDistribution.") + self.__primary_physical_distributions = primary_physical_distributions + + @property + def secondary_interactions(self) -> Dict[ParticleType, List[Union[CrossSection, Decay]]]: + return self.__secondary_interactions + + @secondary_interactions.setter + def secondary_interactions(self, secondary_interactions: Dict[ParticleType, List[Union[CrossSection, Decay]]]): + if self.__weighter is not None: + raise ValueError("Cannot set secondary interactions after weighter has been initialized.") + if not isinstance(secondary_interactions, dict): + raise TypeError("Secondary interactions must be a dictionary.") + if not all(isinstance(particle_type, ParticleType) for particle_type in secondary_interactions.keys()): + raise TypeError("All keys in secondary interactions must be of type ParticleType.") + if not all(isinstance(interactions, list) for interactions in secondary_interactions.values()): + raise TypeError("All values in secondary interactions must be lists.") + if not all(isinstance(interaction, (CrossSection, Decay)) for interactions in secondary_interactions.values() for interaction in interactions): + raise TypeError("All interactions in secondary interactions must be of type CrossSection or Decay.") + self.__secondary_interactions = secondary_interactions + + @property + def secondary_physical_distributions(self) -> Dict[ParticleType, List[_distributions.WeightableDistribution]]: + return self.__secondary_physical_distributions + + @secondary_physical_distributions.setter + def secondary_physical_distributions(self, secondary_physical_distributions: Dict[ParticleType, List[_distributions.WeightableDistribution]]): + if self.__weighter is not None: + raise 
ValueError("Cannot set secondary physical distributions after weighter has been initialized.") + if not isinstance(secondary_physical_distributions, dict): + raise TypeError("Secondary physical distributions must be a dictionary.") + if not all(isinstance(particle_type, ParticleType) for particle_type in secondary_physical_distributions.keys()): + raise TypeError("All keys in secondary physical distributions must be of type ParticleType.") + if not all(isinstance(distributions, list) for distributions in secondary_physical_distributions.values()): + raise TypeError("All values in secondary physical distributions must be lists.") + if not all(isinstance(distribution, _distributions.WeightableDistribution) for distributions in secondary_physical_distributions.values() for distribution in distributions): + raise TypeError("All distributions in secondary physical distributions must be of type WeightableDistribution.") + self.__secondary_physical_distributions = secondary_physical_distributions + + def __call__(self, interaction_tree: InteractionTree) -> float: + """ + Calculate the event weight for a given interaction tree. + + This method initializes the weighter if necessary and then calculates the event weight. + + Args: + interaction_tree: The interaction tree to weight. + + Returns: + float: The calculated event weight. + """ + + if self.__weighter is None: + self.__initialize_weighter() + return self.__weighter.EventWeight(interaction_tree) + + def event_weight(self, interaction_tree: InteractionTree) -> float: + """ + Calculate the event weight for a given interaction tree. + + This method is an alias for __call__ and provides the same functionality. + + Args: + interaction_tree: The interaction tree to weight. + + Returns: + float: The calculated event weight. + """ + return self(interaction_tree) diff --git a/python/__init__.py b/python/__init__.py index 587cce09a..0dd24eeac 100644 --- a/python/__init__.py +++ b/python/__init__.py @@ -9,6 +9,7 @@ from . 
import _util from . import Injector +from . import Weighter # Intropspect package version import sys @@ -35,6 +36,11 @@ injection.Injector = Injector.Injector del Injector +# Override the Weighter with the python wrapper +injection._Weighter = injection.Weighter +injection.Weighter = Weighter.Weighter +del Weighter + dataclasses.Particle.ParticleType = dataclasses.ParticleType def darknews_version(): From ceae573db1bc38ac0acc2ee2c6c9948a07269d6e Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Sun, 15 Sep 2024 16:58:12 -0600 Subject: [PATCH 68/85] Re-write the DIS_ATLAS example with the new interface. Still need to save events --- resources/examples/example1/DIS_ATLAS.py | 126 +++++++++++++++-------- 1 file changed, 83 insertions(+), 43 deletions(-) diff --git a/resources/examples/example1/DIS_ATLAS.py b/resources/examples/example1/DIS_ATLAS.py index a64e06791..add646ed4 100644 --- a/resources/examples/example1/DIS_ATLAS.py +++ b/resources/examples/example1/DIS_ATLAS.py @@ -1,7 +1,12 @@ import os - import siren -from siren.SIREN_Controller import SIREN_Controller +try: + from tqdm import tqdm as tqdm +except ImportError: + print("Importing tqdm failed, using default range") + tqdm = lambda x: x + +seed = 99 # Number of events to inject events_to_inject = int(1e5) @@ -9,62 +14,97 @@ # Expeirment to run experiment = "ATLAS" -# Define the controller -controller = SIREN_Controller(events_to_inject, experiment, seed=99) +# Load the detector model +detector_model = siren.utilities.load_detector(experiment) # Particle to inject primary_type = siren.dataclasses.Particle.ParticleType.NuMu cross_section_model = "CSMSDISSplines" -xsfiledir = siren.utilities.get_cross_section_model_path(cross_section_model) - # Cross Section Model target_type = siren.dataclasses.Particle.ParticleType.Nucleon -DIS_xs = siren.interactions.DISFromSpline( - os.path.join(xsfiledir, "dsdxdy_nu_CC_iso.fits"), - os.path.join(xsfiledir, "sigma_nu_CC_iso.fits"), - [primary_type], - [target_type], 
"m" +primary_processes, secondary_processes = siren.utilities.load_processes( + cross_section_model, + primary_types = [primary_type], + target_types = [target_type], + isoscalar = True, + process_types = ["CC"], ) -primary_xs = siren.interactions.InteractionCollection(primary_type, [DIS_xs]) -controller.SetInteractions(primary_xs) - -# Primary distributions -primary_injection_distributions = {} -primary_physical_distributions = {} - -# energy distribution -# HE SN flux from ATLAS paper -edist = siren.utilities.load_flux("HE_SN", tag="numu", min_energy=100, max_energy=1e6, physically_normalized=True) -edist_gen = siren.utilities.load_flux("HE_SN", tag="numu", min_energy=100, max_energy=1e6, physically_normalized=False) - -primary_injection_distributions["energy"] = edist_gen -primary_physical_distributions["energy"] = edist - -# direction distribution +# Choose the direction we will inject from # let's just inject upwards injection_dir = siren.math.Vector3D(0, 0, 1) injection_dir.normalize() -direction_distribution = siren.distributions.FixedDirection(injection_dir) -primary_injection_distributions["direction"] = direction_distribution -primary_physical_distributions["direction"] = direction_distribution - -# position distribution -position_distribution = controller.GetCylinderVolumePositionDistributionFromSector("tilecal") -primary_injection_distributions["position"] = position_distribution - -# SetProcesses -controller.SetProcesses( - primary_type, primary_injection_distributions, primary_physical_distributions -) - -controller.Initialize() -events = controller.GenerateEvents() +# Build the position distribution using properties of the geometry +fiducial_volume_name = "tilecal" +geo = None +for sector in detector_model.Sectors: + if sector.name == fiducial_volume_name: + geo = sector.geo + break + +det_position = detector_model.GeoPositionToDetPosition(siren.detector.GeometryPosition(geo.placement.Position)) +det_rotation = geo.placement.Quaternion +det_placement 
= siren.geometry.Placement(det_position.get(), det_rotation) +cylinder = siren.geometry.Cylinder(det_placement, geo.Radius, geo.InnerRadius, geo.Z) + +position_distribution = siren.distributions.CylinderVolumePositionDistribution(cylinder) + +primary_injection_distributions = [ + siren.distributions.PrimaryMass(0), + # energy distribution + # HE SN flux from ATLAS paper + siren.utilities.load_flux( + "HE_SN", + tag="numu", + min_energy=100, + max_energy=1e6, + physically_normalized=True), + siren.distributions.FixedDirection(injection_dir), + position_distribution, +] + +primary_physical_distributions = [ + # energy distribution + # HE SN flux from ATLAS paper + siren.utilities.load_flux( + "HE_SN", + tag="numu", + min_energy=100, + max_energy=1e6, + physically_normalized=False), + siren.distributions.FixedDirection(injection_dir), +] + +injector = siren.injection.Injector() +injector.seed = seed +injector.number_of_events = events_to_inject +injector.detector_model = detector_model +injector.primary_type = primary_type +injector.primary_interactions = primary_processes[primary_type] +injector.primary_injection_distributions = primary_injection_distributions +injector.secondary_interactions = {} +injector.secondary_injection_distributions = {} + +print("Generating events") +events = [injector.generate_event() for _ in tqdm(range(events_to_inject))] + +weighter = siren.injection.Weighter() +weighter.injectors = [injector] +weighter.detector_model = detector_model +weighter.primary_type = primary_type +weighter.primary_interactions = primary_processes[primary_type] +weighter.primary_physical_distributions = primary_physical_distributions +weighter.secondary_interactions = {} +weighter.secondary_physical_distributions = {} + +print("Weighting events") +weights = [weighter(event) for event in tqdm(events)] os.makedirs("output", exist_ok=True) -controller.SaveEvents("output/ATLAS_DIS") +#TODO save the events and weights + From 1622a4a583f76baad27361de9edc6352f7079f4c 
Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Sun, 15 Sep 2024 17:41:40 -0600 Subject: [PATCH 69/85] Utilities for exploring and getting docs --- python/_util.py | 138 ++++++++++++++++++++++++++++++++++++++++-------- 1 file changed, 116 insertions(+), 22 deletions(-) diff --git a/python/_util.py b/python/_util.py index abbea95ae..cb49bda22 100644 --- a/python/_util.py +++ b/python/_util.py @@ -2,6 +2,7 @@ import re import sys import uuid +import pydoc import pathlib import importlib @@ -259,6 +260,11 @@ def load_module(name, path, persist=True): + r"))?" ) +_model_regex = re.compile( + r"^\s*" + _MODEL_PATTERN + r"\s*$", + re.VERBOSE | re.IGNORECASE, +) + def decompose_version(version): # Break the version string into its components matches = _version_regex.match(version) @@ -521,7 +527,7 @@ def _get_model_file_name(version, model_versions, model_files, model_name, suffi return f"{model_name}-v{version}{suffix}" def _get_model_path(model_name, prefix=None, suffix=None, is_file=True, must_exist=True, specific_file=None): - _model_regex = re.compile( + model_regex = re.compile( r"^\s*" + _MODEL_PATTERN + ("" if suffix is None else r"(?:" + suffix + r")?") + r"\s*$", re.VERBOSE | re.IGNORECASE, ) @@ -530,7 +536,7 @@ def _get_model_path(model_name, prefix=None, suffix=None, is_file=True, must_exi resources_dir = resource_package_dir() base_dir = _get_base_directory(resources_dir, prefix) - d = _model_regex.match(model_name) + d = model_regex.match(model_name) if d is None: raise ValueError(f"Invalid model name: {model_name}") d = d.groupdict() @@ -542,7 +548,7 @@ def _get_model_path(model_name, prefix=None, suffix=None, is_file=True, must_exi return os.path.dirname(specific_file_path) model_files = _get_model_files(base_dir, model_name, is_file, folder_exists, version) - model_versions = _extract_model_versions(model_files, _model_regex, model_name) + model_versions = _extract_model_versions(model_files, model_regex, model_name) if len(model_versions) == 0 and 
must_exist: if specific_file_path: @@ -591,7 +597,7 @@ def _get_model_subfolders(base_dir, model_regex): def _get_model_path(model_name, prefix=None, suffix=None, is_file=True, must_exist=True, specific_file=None): - _model_regex = re.compile( + model_regex = re.compile( r"^\s*" + _MODEL_PATTERN + ("" if suffix is None else r"(?:" + suffix + r")?") + r"\s*$", re.VERBOSE | re.IGNORECASE, ) @@ -600,7 +606,7 @@ def _get_model_path(model_name, prefix=None, suffix=None, is_file=True, must_exi resources_dir = resource_package_dir() base_dir = _get_base_directory(resources_dir, prefix) - d = _model_regex.match(model_name) + d = model_regex.match(model_name) if d is None: raise ValueError(f"Invalid model name: {model_name}") d = d.groupdict() @@ -621,7 +627,7 @@ def _get_model_path(model_name, prefix=None, suffix=None, is_file=True, must_exi return model_dir - model_subfolders = _get_model_subfolders(model_dir, _model_regex) + model_subfolders = _get_model_subfolders(model_dir, model_regex) if len(model_subfolders) == 0: if must_exist: @@ -635,7 +641,7 @@ def _get_model_path(model_name, prefix=None, suffix=None, is_file=True, must_exi models_and_versions = [] for f in model_subfolders: - d = _model_regex.match(f).groupdict() + d = model_regex.match(f).groupdict() if d["version"] is not None: models_and_versions.append((f, normalize_version(d["version"]))) @@ -687,18 +693,29 @@ def get_detector_model_path(model_name, must_exist=True): def get_processes_model_path(model_name, must_exist=True): return _get_model_path(model_name, prefix=_resource_folder_by_name["processes"], is_file=False, must_exist=must_exist, specific_file="processes.py") - -def load_resource(resource_type, resource_name, *args, **kwargs): +def import_resource(resource_type, resource_name): folder = _resource_folder_by_name[resource_type] specific_file = f"{resource_type}.py" abs_dir = _get_model_path(resource_name, prefix=folder, is_file=False, must_exist=True, specific_file=specific_file) fname = 
os.path.join(abs_dir, f"{resource_type}.py") - print(fname) - assert(os.path.isfile(fname)) - resource_module = load_module(f"siren-{resource_type}-{resource_name}", fname, persist=False) - loader = getattr(resource_module, f"load_{resource_type}") + if not os.path.isfile(fname): + return None + return load_module(f"siren-{resource_type}-{resource_name}", fname, persist=False) + + +def get_resource_loader(resource_type, resource_name): + resource_module = import_resource(resource_type, resource_name) + if resource_module is None: + return None + return getattr(resource_module, f"load_{resource_type}") + + +def load_resource(resource_type, resource_name, *args, **kwargs): + loader = get_resource_loader(resource_type, resource_name) + if loader is None: + return None resource = loader(*args, **kwargs) return resource @@ -708,19 +725,15 @@ def load_flux(model_name, *args, **kwargs): def load_detector(model_name, *args, **kwargs): + resource = load_resource("flux", model_name, *args, **kwargs) + if resource is not None: + return resource + resource_type = "detector" resource_name = model_name folder = _resource_folder_by_name[resource_type] - specific_file = f"{resource_type}.py" - abs_dir = _get_model_path(resource_name, prefix=folder, is_file=False, must_exist=True, specific_file=specific_file) - - script_fname = os.path.join(abs_dir, f"{resource_type}.py") - if os.path.isfile(script_fname): - resource_module = load_module(f"siren-{resource_type}-{resource_name}", script_fname, persist=False) - loader = getattr(resource_module, f"load_{resource_type}") - resource = loader(*args, **kwargs) - return resource + abs_dir = _get_model_path(resource_name, prefix=folder, is_file=False, must_exist=True, specific_file=None) densities_fname = os.path.join(abs_dir, "densities.dat") materials_fname = os.path.join(abs_dir, "materials.dat") @@ -759,3 +772,84 @@ def get_fiducial_volume(experiment): from . 
import detector as _detector return _detector.DetectorModel.ParseFiducialVolume(fiducial_line, detector_line) return None + +def list_fluxes(): + return sorted(_get_model_subfolders(_get_base_directory(resource_package_dir(), "fluxes"), _model_regex)) + +def list_detectors(): + dirs = sorted(_get_model_subfolders(_get_base_directory(resource_package_dir(), "detectors"), _model_regex)) + dirs = [d for d in dirs if d != "visuals"] + return dirs + +def list_processes(): + return sorted(_get_model_subfolders(_get_base_directory(resource_package_dir(), "processes"), _model_regex)) + +def flux_docs(flux_name): + loader = get_resource_loader("flux", flux_name) + if loader is None: + raise ValueError(f"Could not find documentation for flux {flux_name}") + return loader.__doc__ + +def detector_docs(detector_name): + loader = get_resource_loader("detector", detector_name) + if loader is not None: + return loader.__doc__ + + resource_name = detector_name + folder = _resource_folder_by_name["detector"] + + abs_dir = _get_model_path(resource_name, prefix=folder, is_file=False, must_exist=True, specific_file=None) + + densities_fname = os.path.join(abs_dir, "densities.dat") + materials_fname = os.path.join(abs_dir, "materials.dat") + + lines = [] + if os.path.isfile(densities_fname): + with open(densities_fname) as file: + new_lines = [] + for l in file.readlines(): + l = l.strip() + if l.startswith("#"): + new_lines.append(l) + else: + break + lines.extend(new_lines) + + if os.path.isfile(materials_fname): + with open(materials_fname) as file: + new_lines = [] + for l in file.readlines(): + l = l.strip() + if l.startswith("#"): + new_lines.append(l) + else: + break + if len(lines) > 0 and len(new_lines) > 0: + lines.append("") + lines.extend(new_lines) + + doc = "\n".join(lines) + + if len(doc) == 0: + raise ValueError(f"Could not find documentation for detector {detector_name}") + + return doc + + +def process_docs(process_name): + loader = get_resource_loader("processes", 
process_name) + if loader is None: + raise ValueError(f"Could not find documentation for process {process_name}") + return loader.__doc__ + +def flux_help(flux_name): + doc = flux_docs(flux_name) + pydoc.pager(doc) + +def detector_help(detector_name): + doc = detector_docs(detector_name) + pydoc.pager(doc) + +def process_help(process_name): + doc = process_docs(process_name) + pydoc.pager(doc) From 0d41fb18574a9d3f5ddc887edf075fb5b57d4a1b Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Sun, 15 Sep 2024 17:44:58 -0600 Subject: [PATCH 70/85] Rename and fix T2K_NEAR flux script to work with new directory structure. --- .../T2K_NEAR-v1.0/{FluxCalculator.py => flux.py} | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) rename resources/fluxes/T2K_NEAR/T2K_NEAR-v1.0/{FluxCalculator.py => flux.py} (92%) diff --git a/resources/fluxes/T2K_NEAR/T2K_NEAR-v1.0/FluxCalculator.py b/resources/fluxes/T2K_NEAR/T2K_NEAR-v1.0/flux.py similarity index 92% rename from resources/fluxes/T2K_NEAR/T2K_NEAR-v1.0/FluxCalculator.py rename to resources/fluxes/T2K_NEAR/T2K_NEAR-v1.0/flux.py index b9a040c51..4dccc2cc5 100644 --- a/resources/fluxes/T2K_NEAR/T2K_NEAR-v1.0/FluxCalculator.py +++ b/resources/fluxes/T2K_NEAR/T2K_NEAR-v1.0/flux.py @@ -1,24 +1,26 @@ import os -def MakeFluxFile(tag, abs_flux_dir): - +def load_flux(tag): + ''' Accepts the following tags: {PLUS, MINUS}_{nue,nuebar,numu,numubar} ''' enhance, particle = tag.split("_") - + if enhance not in ["MINUS", "PLUS"]: print("%s 250kA enhancement specified in tag %s is not valid"%(enhance)) exit(0) if particle not in ["numu", "numubar", "nue", "nuebar"]: print("%s particle specified in tag %s is not valid"%(particle)) exit(0) - + + abs_flux_dir = os.path.dirname(__file__) + input_flux_file = os.path.join(abs_flux_dir, "T2K_%s_250kA.dat"%(enhance)) - + output_flux_file = os.path.join(abs_flux_dir, "T2KOUT_%s.dat"%(tag)) @@ -32,4 +34,4 @@ def MakeFluxFile(tag, abs_flux_dir): E, flux = 
(float(row[1])+float(row[3]))/2, float(row[pid+2]) flux*=2e-16 # put flux in units of nu/m^2/GeV/POT print(E, flux, file=fout) - return output_flux_file \ No newline at end of file + return output_flux_file From 87a76bcffe85b8ed5d8ece6b44ac8c0febc2b39e Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Sun, 15 Sep 2024 18:22:30 -0600 Subject: [PATCH 71/85] Enable access to resources through the module / tab-complete --- python/__init__.py | 1 + python/_util.py | 34 +++++++++++++++++++++++++++------- python/resources.py | 27 +++++++++++++++++++++++++++ 3 files changed, 55 insertions(+), 7 deletions(-) create mode 100644 python/resources.py diff --git a/python/__init__.py b/python/__init__.py index 0dd24eeac..950ddd621 100644 --- a/python/__init__.py +++ b/python/__init__.py @@ -10,6 +10,7 @@ from . import _util from . import Injector from . import Weighter +from . import resources # Intropspect package version import sys diff --git a/python/_util.py b/python/_util.py index cb49bda22..74cc7b86b 100644 --- a/python/_util.py +++ b/python/_util.py @@ -724,14 +724,9 @@ def load_flux(model_name, *args, **kwargs): return load_resource("flux", model_name, *args, **kwargs) -def load_detector(model_name, *args, **kwargs): - resource = load_resource("flux", model_name, *args, **kwargs) - if resource is not None: - return resource - - resource_type = "detector" +def _detector_file_loader(model_name): resource_name = model_name - folder = _resource_folder_by_name[resource_type] + folder = _resource_folder_by_name["detector"] abs_dir = _get_model_path(resource_name, prefix=folder, is_file=False, must_exist=True, specific_file=None) @@ -748,6 +743,13 @@ def load_detector(model_name, *args, **kwargs): raise ValueError("Could not find detector loading script \"{script_fname}\" or densities and materials files \"{densities_fname}\", \"materials_fname\"") +def load_detector(model_name, *args, **kwargs): + resource = load_resource("flux", model_name, *args, **kwargs) + if resource 
is not None: + return resource + return _detector_file_loader + + def load_processes(model_name, *args, **kwargs): return load_resource("processes", model_name, *args, **kwargs) @@ -853,3 +855,21 @@ def detector_help(detector_name): def process_help(process_name): doc = process_docs(process_name) pydoc.pager(doc) + +def _get_process_loader(process_name): + return get_resource_loader("processes", process_name) + +def _get_flux_loader(flux_name): + return get_resource_loader("flux", flux_name) + +def _get_detector_loader(detector_name): + loader = get_resource_loader("detector", detector_name) + if loader is not None: + return loader + + def load_detector(): + return _detector_file_loader(detector_name) + + load_detector.__doc__ = detector_docs(detector_name) + + return load_detector diff --git a/python/resources.py b/python/resources.py new file mode 100644 index 000000000..260e33eaf --- /dev/null +++ b/python/resources.py @@ -0,0 +1,27 @@ +from . import _util + +class ResourceList: + def __init__(self, resource_type, list_method, load_method): + self.__resource_type = resource_type + self.__list_method = list_method + self.__load_method = load_method + + def __getattr__(self, name): + resources = self.__list_method() + if name in resources: + return self.__load_method(name) + else: + # Default behaviour + return object.__getattribute__(self, name) + + def __dir__(self): + dirs = dir(self.__class__) + dirs += list(self.__dict__.keys()) + dirs += self.__list_method() + return sorted(dirs) + +fluxes = ResourceList('fluxes', _util.list_fluxes, _util._get_flux_loader) +detectors = ResourceList('detectors', _util.list_detectors, _util._get_detector_loader) +processes = ResourceList('processes', _util.list_processes, _util._get_process_loader) + +del ResourceList From b2eda5e341a907add9d800f457864a311e3af389 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Sun, 15 Sep 2024 18:41:19 -0600 Subject: [PATCH 72/85] Add file paths to detector docs --- python/_util.py | 13 
++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) diff --git a/python/_util.py b/python/_util.py index 74cc7b86b..94c658301 100644 --- a/python/_util.py +++ b/python/_util.py @@ -667,14 +667,6 @@ def _get_model_path(model_name, prefix=None, suffix=None, is_file=True, must_exi return os.path.join(model_dir, found_model_subfolder) -def get_detector_model_file_path(model_name, must_exist=True): - return _get_model_path(model_name, prefix="detectors/densities", suffix=".dat", is_file=True, must_exist=must_exist) - - -def get_material_model_file_path(model_name, must_exist=True): - return _get_model_path(model_name, prefix="detectors/materials", suffix=".dat", is_file=True, must_exist=must_exist) - - _resource_folder_by_name = { "flux": "fluxes", "detector": "detectors", @@ -815,6 +807,8 @@ def detector_docs(detector_name): new_lines.append(l) else: break + if len(new_lines) > 0: + lines.append(f"Detector definition: {densities_fname}") lines.extend(new_lines) if os.path.isfile(materials_fname): @@ -828,11 +822,12 @@ def detector_docs(detector_name): break if len(lines) > 0 and len(new_lines) > 0: lines.append("") + lines.append(f"Material definitions: {materials_fname}") lines.extend(new_lines) doc = "\n".join(lines) - if len(doc) == 0: + if len(lines) == 0: raise ValueError(f"Could not find documentation for detector {detector_name}") return doc From 254eb2d8855e5369f617e873dafbf32fa6efa747 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Sun, 15 Sep 2024 18:57:01 -0600 Subject: [PATCH 73/85] Expose resource loader functions in the resources submodule --- python/resources.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/python/resources.py b/python/resources.py index 260e33eaf..d64218c29 100644 --- a/python/resources.py +++ b/python/resources.py @@ -1,5 +1,11 @@ +__all__ = ["load_flux", "load_detector", "load_processes", "fluxes", "detectors", "processes"] + from . 
import _util +load_flux = _util.load_flux +load_detector = _util.load_detector +load_processes = _util.load_processes + class ResourceList: def __init__(self, resource_type, list_method, load_method): self.__resource_type = resource_type @@ -24,4 +30,5 @@ def __dir__(self): detectors = ResourceList('detectors', _util.list_detectors, _util._get_detector_loader) processes = ResourceList('processes', _util.list_processes, _util._get_process_loader) +del _util del ResourceList From bc0a31c26f03982743a128b6178d33434042f757 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Wed, 18 Sep 2024 12:50:50 -0600 Subject: [PATCH 74/85] Don't require a version folder if there is a top level loader --- python/_util.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/python/_util.py b/python/_util.py index 94c658301..1724c2140 100644 --- a/python/_util.py +++ b/python/_util.py @@ -621,15 +621,22 @@ def _get_model_path(model_name, prefix=None, suffix=None, is_file=True, must_exi if not must_exist and not folder_exists: if version is None: - version = "v1" + model_dir = os.path.join(model_dir, f"{found_model_name}-v1") + else: + model_dir = os.path.join(model_dir, f"{found_model_name}-v{version}") - model_dir = os.path.join(model_dir, f"{found_model_name}-v{version}") return model_dir + top_level_has_specific_file = specific_file is not None and os.path.isfile(os.path.join(model_dir, specific_file)) + + if version is None and top_level_has_specific_file: + return model_dir model_subfolders = _get_model_subfolders(model_dir, model_regex) if len(model_subfolders) == 0: + if top_level_has_specific_file: + return model_dir if must_exist: raise ValueError(f"No model folders found for {model_search_name}\nSearched in {model_dir}") else: @@ -653,8 +660,6 @@ def _get_model_path(model_name, prefix=None, suffix=None, is_file=True, must_exi elif len(matching_models) > 1: raise ValueError(f"Multiple directories found for {model_search_name} with version 
{version}\nSearched in {model_dir}") - top_level_has_specific_file = specific_file is not None and os.path.isfile(os.path.join(model_dir, specific_file)) - if top_level_has_specific_file: return model_dir From 24774394b627ea73525b5151d230aa6aa3aa7c38 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Wed, 18 Sep 2024 18:36:46 -0600 Subject: [PATCH 75/85] Update DIS_DUNE --- resources/examples/example1/DIS_DUNE.py | 107 +++++++++++++----------- 1 file changed, 60 insertions(+), 47 deletions(-) diff --git a/resources/examples/example1/DIS_DUNE.py b/resources/examples/example1/DIS_DUNE.py index 7bf411915..de72b9be7 100644 --- a/resources/examples/example1/DIS_DUNE.py +++ b/resources/examples/example1/DIS_DUNE.py @@ -1,67 +1,80 @@ import os - import siren -from siren.SIREN_Controller import SIREN_Controller +from siren import utilities # Number of events to inject events_to_inject = int(1e5) -# Expeirment to run +# Experiment to run experiment = "DUNEFD" - -# Define the controller -controller = SIREN_Controller(events_to_inject, experiment) - -# Particle to inject +detector_model = utilities.load_detector(experiment) primary_type = siren.dataclasses.Particle.ParticleType.NuMu - -cross_section_model = "CSMSDISSplines" - -xsfiledir = siren.utilities.get_cross_section_model_path(cross_section_model) - -# Cross Section Model target_type = siren.dataclasses.Particle.ParticleType.Nucleon -DIS_xs = siren.interactions.DISFromSpline( - os.path.join(xsfiledir, "dsdxdy_nu_CC_iso.fits"), - os.path.join(xsfiledir, "sigma_nu_CC_iso.fits"), - [primary_type], - [target_type], "m" +# Primary interactions and distributions +primary_processes, _ = utilities.load_processes( + "CSMSDISSplines", # model_name + primary_types=[primary_type], + target_types=[target_type], + isoscalar=True, # for isoscalar splines + process_types=["CC"] # specify the process type, e.g., "CC" for charged current ) -primary_xs = siren.interactions.InteractionCollection(primary_type, [DIS_xs]) 
-controller.SetInteractions(primary_xs) - -# Primary distributions -primary_injection_distributions = {} -primary_physical_distributions = {} - -# energy distribution +# Energy distribution edist = siren.distributions.PowerLaw(1, 1e3, 1e6) -primary_injection_distributions["energy"] = edist -primary_physical_distributions["energy"] = edist -# direction distribution +# Direction distribution direction_distribution = siren.distributions.IsotropicDirection() -primary_injection_distributions["direction"] = direction_distribution -primary_physical_distributions["direction"] = direction_distribution -# position distribution +# Position distribution muon_range_func = siren.distributions.LeptonDepthFunction() position_distribution = siren.distributions.ColumnDepthPositionDistribution( - 60, 60.0, muon_range_func, set(controller.GetDetectorModelTargets()[0]) -) -primary_injection_distributions["position"] = position_distribution - -# SetProcesses -controller.SetProcesses( - primary_type, primary_injection_distributions, primary_physical_distributions -) - -controller.Initialize() - -events = controller.GenerateEvents() - + 60, 60.0, muon_range_func) + + +# Define injection distributions +primary_injection_distributions = { + "energy": edist, + "direction": direction_distribution, + "position": position_distribution +} + +# Set up the Injector +injector = siren.injection.Injector() +injector.number_of_events = events_to_inject +injector.detector_model = detector_model +injector.primary_type = primary_type +injector.primary_interactions = primary_processes[primary_type] +injector.primary_injection_distributions = [ + siren.distributions.PrimaryMass(0), + edist, + direction_distribution, + position_distribution +] + +# Generate events +event = injector.generate_event() + +# Set up the Weighter for event weighting +weighter = siren.injection.Weighter() +weighter.injectors = [injector] +weighter.detector_model = detector_model +weighter.primary_type = primary_type 
+weighter.primary_interactions = primary_processes[primary_type] +weighter.primary_physical_distributions = [ + edist, + direction_distribution, + position_distribution +] + +# Compute weight +weight = weighter(event) + +# Output events and weights os.makedirs("output", exist_ok=True) +print(str(event)) +print(f"Event weight: {weight}") + +# Save events +# TODO -controller.SaveEvents("output/DUNE_DIS") \ No newline at end of file From a5cb6dfb8128d4c9db3985a38c449be34c6f6981 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Wed, 18 Sep 2024 18:46:04 -0600 Subject: [PATCH 76/85] Remove position dist from physical dists --- resources/examples/example1/DIS_DUNE.py | 1 - 1 file changed, 1 deletion(-) diff --git a/resources/examples/example1/DIS_DUNE.py b/resources/examples/example1/DIS_DUNE.py index de72b9be7..4f094479b 100644 --- a/resources/examples/example1/DIS_DUNE.py +++ b/resources/examples/example1/DIS_DUNE.py @@ -64,7 +64,6 @@ weighter.primary_physical_distributions = [ edist, direction_distribution, - position_distribution ] # Compute weight From 35c05c54fd3a25ebec452ca0d0e138eaa21dd9f3 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Wed, 18 Sep 2024 18:52:52 -0600 Subject: [PATCH 77/85] DIS_IceCube --- resources/examples/example1/DIS_IceCube.py | 106 ++++++++++----------- 1 file changed, 52 insertions(+), 54 deletions(-) diff --git a/resources/examples/example1/DIS_IceCube.py b/resources/examples/example1/DIS_IceCube.py index db9913512..f4c2bb2d3 100644 --- a/resources/examples/example1/DIS_IceCube.py +++ b/resources/examples/example1/DIS_IceCube.py @@ -1,71 +1,69 @@ import os - import siren -from siren.SIREN_Controller import SIREN_Controller +from siren import utilities # Number of events to inject events_to_inject = int(1e5) -# Expeirment to run +# Experiment to run experiment = "IceCube" - -# Define the controller -controller = SIREN_Controller(events_to_inject, experiment) +detector_model = utilities.load_detector(experiment) # Particle to 
inject primary_type = siren.dataclasses.Particle.ParticleType.NuMu +# Cross-section model to use cross_section_model = "CSMSDISSplines" -xsfiledir = siren.utilities.get_cross_section_model_path(cross_section_model) - -# Cross Section Model -target_type = siren.dataclasses.Particle.ParticleType.Nucleon - -DIS_xs = siren.interactions.DISFromSpline( - os.path.join(xsfiledir, "dsdxdy_nu_CC_iso.fits"), - os.path.join(xsfiledir, "sigma_nu_CC_iso.fits"), - [primary_type], - [target_type], "m" -) - -primary_xs = siren.interactions.InteractionCollection(primary_type, [DIS_xs]) -controller.SetInteractions(primary_xs) - -# Primary distributions -primary_injection_distributions = {} -primary_physical_distributions = {} - -mass_dist = siren.distributions.PrimaryMass(0) -primary_injection_distributions["mass"] = mass_dist -primary_physical_distributions["mass"] = mass_dist - -# energy distribution -edist = siren.distributions.PowerLaw(2, 1e3, 1e6) -primary_injection_distributions["energy"] = edist -primary_physical_distributions["energy"] = edist - -# direction distribution -direction_distribution = siren.distributions.IsotropicDirection() -primary_injection_distributions["direction"] = direction_distribution -primary_physical_distributions["direction"] = direction_distribution - -# position distribution -muon_range_func = siren.distributions.LeptonDepthFunction() -position_distribution = siren.distributions.ColumnDepthPositionDistribution( - 600, 600.0, muon_range_func, set(controller.GetDetectorModelTargets()[0]) -) -primary_injection_distributions["position"] = position_distribution - -# SetProcesses -controller.SetProcesses( - primary_type, primary_injection_distributions, primary_physical_distributions +# Load the cross-section model +primary_processes, _ = utilities.load_processes( + cross_section_model, + primary_types=[primary_type], + target_types=[siren.dataclasses.Particle.ParticleType.Nucleon], + isoscalar=True, + process_types=["CC"] ) -controller.Initialize() - 
-events = controller.GenerateEvents() - +# Extract the primary cross-sections for the primary type +primary_cross_sections = primary_processes[primary_type] + +# Set up the Injector +injector = siren.injection.Injector() +injector.number_of_events = events_to_inject +injector.detector_model = detector_model +injector.primary_type = primary_type +injector.primary_interactions = primary_cross_sections + +# Directly set the distributions +injector.primary_injection_distributions = [ + siren.distributions.PrimaryMass(0), # Mass distribution + siren.distributions.PowerLaw(2, 1e3, 1e6), # Energy distribution + siren.distributions.IsotropicDirection(), # Direction distribution + siren.distributions.ColumnDepthPositionDistribution(600, 600.0, siren.distributions.LeptonDepthFunction()) # Position distribution +] + +# Generate events +event = injector.generate_event() + +# Set up the Weighter for event weighting (without position distribution) +weighter = siren.injection.Weighter() +weighter.injectors = [injector] +weighter.detector_model = detector_model +weighter.primary_type = primary_type +weighter.primary_interactions = primary_cross_sections +weighter.primary_physical_distributions = [ + siren.distributions.PowerLaw(2, 1e3, 1e6), # Energy distribution + siren.distributions.IsotropicDirection() # Direction distribution +] + +# Compute weight +weight = weighter(event) + +# Output events and weights os.makedirs("output", exist_ok=True) +print(str(event)) +print(f"Event weight: {weight}") + +# Save events +# injector.SaveEvents("output/IceCube_DIS") -controller.SaveEvents("output/IceCube_DIS") From f48fdb3baf29851194ddcecb766b8c51b86a1358 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Wed, 18 Sep 2024 21:28:27 -0600 Subject: [PATCH 78/85] Fix method name --- projects/dataclasses/private/pybindings/dataclasses.cxx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/projects/dataclasses/private/pybindings/dataclasses.cxx 
b/projects/dataclasses/private/pybindings/dataclasses.cxx index ec1196e69..42b9f071d 100644 --- a/projects/dataclasses/private/pybindings/dataclasses.cxx +++ b/projects/dataclasses/private/pybindings/dataclasses.cxx @@ -145,10 +145,10 @@ PYBIND11_MODULE(dataclasses, m) { .def_property_readonly("secondary_particle_records", [](siren::dataclasses::CrossSectionDistributionRecord & cdr) -> std::vector & {return cdr.GetSecondaryParticleRecords();}, py::return_value_policy::reference_internal) - .def("get_econdary_particle_record", + .def("get_secondary_particle_record", [](siren::dataclasses::CrossSectionDistributionRecord & cdr, size_t i) -> siren::dataclasses::SecondaryParticleRecord & {return cdr.GetSecondaryParticleRecord(i);}, py::return_value_policy::reference_internal) - .def("get_econdary_particle_records", + .def("get_secondary_particle_records", [](siren::dataclasses::CrossSectionDistributionRecord & cdr) -> std::vector & {return cdr.GetSecondaryParticleRecords();}, py::return_value_policy::reference_internal) .def("finalize", &CrossSectionDistributionRecord::Finalize) From 8bf11d75f935de039ec1b32fae65a7c2988210fb Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Wed, 18 Sep 2024 21:47:25 -0600 Subject: [PATCH 79/85] Remove old function def. Load detector models correctly. 
--- python/_util.py | 42 ++---------------------------------------- 1 file changed, 2 insertions(+), 40 deletions(-) diff --git a/python/_util.py b/python/_util.py index 1724c2140..35572ea89 100644 --- a/python/_util.py +++ b/python/_util.py @@ -526,44 +526,6 @@ def _get_model_file_name(version, model_versions, model_files, model_name, suffi else: return f"{model_name}-v{version}{suffix}" -def _get_model_path(model_name, prefix=None, suffix=None, is_file=True, must_exist=True, specific_file=None): - model_regex = re.compile( - r"^\s*" + _MODEL_PATTERN + ("" if suffix is None else r"(?:" + suffix + r")?") + r"\s*$", - re.VERBOSE | re.IGNORECASE, - ) - suffix = "" if suffix is None else suffix - - resources_dir = resource_package_dir() - base_dir = _get_base_directory(resources_dir, prefix) - - d = model_regex.match(model_name) - if d is None: - raise ValueError(f"Invalid model name: {model_name}") - d = d.groupdict() - model_name, version = d["model_name"], d["version"] - - model_name, folder_exists, specific_file_path = _find_model_folder_and_file(base_dir, model_name, must_exist, specific_file) - - if specific_file_path and not version: - return os.path.dirname(specific_file_path) - - model_files = _get_model_files(base_dir, model_name, is_file, folder_exists, version) - model_versions = _extract_model_versions(model_files, model_regex, model_name) - - if len(model_versions) == 0 and must_exist: - if specific_file_path: - return os.path.dirname(specific_file_path) - raise ValueError(f"No model found for {model_name}\nSearched in {os.path.join(base_dir, model_name)}") - - model_file_name = _get_model_file_name(version, model_versions, model_files, model_name, suffix, must_exist) - - if version: - version_dir = os.path.join(base_dir, model_name, f"v{version}") - if os.path.isdir(version_dir): - return os.path.join(version_dir, model_file_name) - - return os.path.join(base_dir, model_name, model_file_name) - def _get_model_folder(base_dir, model_name, must_exist): 
model_names = [ @@ -741,10 +703,10 @@ def _detector_file_loader(model_name): def load_detector(model_name, *args, **kwargs): - resource = load_resource("flux", model_name, *args, **kwargs) + resource = load_resource("detector", model_name, *args, **kwargs) if resource is not None: return resource - return _detector_file_loader + return _detector_file_loader(model_name) def load_processes(model_name, *args, **kwargs): From 5ebb90149bd3f0970aeac1c17bd12b9815a173c1 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Wed, 18 Sep 2024 22:11:48 -0600 Subject: [PATCH 80/85] Fix sampling --- .../processes/DarkNewsTables/DarkNewsDecay.py | 155 +++++++++++++++++- 1 file changed, 154 insertions(+), 1 deletion(-) diff --git a/resources/processes/DarkNewsTables/DarkNewsDecay.py b/resources/processes/DarkNewsTables/DarkNewsDecay.py index 9c076b3d4..0b400c8cd 100644 --- a/resources/processes/DarkNewsTables/DarkNewsDecay.py +++ b/resources/processes/DarkNewsTables/DarkNewsDecay.py @@ -13,6 +13,158 @@ from siren import dataclasses from siren.dataclasses import Particle +# DarkNews methods +import DarkNews +from DarkNews.processes import FermionDileptonDecay, FermionSinglePhotonDecay +from DarkNews import processes as proc +from DarkNews import Cfourvec as Cfv +from DarkNews import phase_space + +def get_decay_momenta_from_vegas_samples(vsamples, MC_case, decay_case, PN_LAB): + """ + Construct the four momenta of all final state particles in the decay process from the + vegas weights. + + Args: + vsamples (np.ndarray): integration samples obtained from vegas + as hypercube coordinates. Always in the interval [0,1]. 
+ + MC_case (DarkNews.process.dec_case): the decay class of DarkNews + + PN_LAB (np.ndarray): four-momentum of the upscattered N in the lab frame: [E, pX, pY, pZ] + + Returns: + dict: each key corresponds to a set of four momenta for a given particle involved, + so the values are 2D np.ndarrays with each row a different event and each column a different + four momentum component. Contains also the weights. + """ + + four_momenta = {} + + # N boost parameters + boost_scattered_N = { + "EP_LAB": PN_LAB.T[0], + "costP_LAB": Cfv.get_cosTheta(PN_LAB), + "phiP_LAB": np.arctan2(PN_LAB.T[2], PN_LAB.T[1]), + } + + ####################### + # DECAY PROCESSES + + if type(decay_case) == proc.FermionDileptonDecay: + + mh = decay_case.m_parent + mf = decay_case.m_daughter + mm = decay_case.mm + mp = decay_case.mm + + if decay_case.vector_on_shell or decay_case.scalar_on_shell: + + if decay_case.vector_on_shell and decay_case.scalar_off_shell: + m_mediator = decay_case.mzprime + elif decay_case.vector_off_shell and decay_case.scalar_on_shell: + m_mediator = decay_case.mhprime + else: + raise NotImplementedError("Both mediators on-shell is not yet implemented.") + + ######################## + ### HNL decay + N_decay_samples = {"unit_cost": np.array(vsamples[0])} + # Ni (k1) --> Nj (k2) Z' (k3) + masses_decay = { + "m1": mh, # Ni + "m2": mf, # Nj + "m3": m_mediator, # Z' + } + # Phnl, Phnl_daughter, Pz' + P1LAB_decay, P2LAB_decay, P3LAB_decay = phase_space.two_body_decay(N_decay_samples, boost=boost_scattered_N, **masses_decay, rng=MC_case.rng) + + # Z' boost parameters + boost_Z = { + "EP_LAB": P3LAB_decay.T[0], + "costP_LAB": Cfv.get_cosTheta(P3LAB_decay), + "phiP_LAB": np.arctan2(P3LAB_decay.T[2], P3LAB_decay.T[1]), + } + + ######################## + ### Z' decay + Z_decay_samples = {} # all uniform + # Z'(k1) --> ell- (k2) ell+ (k3) + masses_decay = { + "m1": m_mediator, # Ni + "m2": mp, # \ell+ + "m3": mm, # \ell- + } + # PZ', pe-, pe+ + P1LAB_decayZ, P2LAB_decayZ, 
P3LAB_decayZ = phase_space.two_body_decay(Z_decay_samples, boost=boost_Z, **masses_decay, rng=MC_case.rng) + + four_momenta["P_decay_N_parent"] = P1LAB_decay + four_momenta["P_decay_N_daughter"] = P2LAB_decay + four_momenta["P_decay_ell_minus"] = P2LAB_decayZ + four_momenta["P_decay_ell_plus"] = P3LAB_decayZ + + elif decay_case.vector_off_shell and decay_case.scalar_off_shell: + + ######################## + # HNL decay + N_decay_samples = { + "unit_t": vsamples[0], + "unit_u": vsamples[1], + "unit_c3": vsamples[2], + "unit_phi34": vsamples[3], + } + + # Ni (k1) --> ell-(k2) ell+(k3) Nj(k4) + masses_decay = { + "m1": mh, # Ni + "m2": mm, # ell- + "m3": mp, # ell+ + "m4": mf, + } # Nj + # Phnl, pe-, pe+, pnu + ( + P1LAB_decay, + P2LAB_decay, + P3LAB_decay, + P4LAB_decay, + ) = phase_space.three_body_decay(N_decay_samples, boost=boost_scattered_N, **masses_decay, rng=MC_case.rng) + + four_momenta["P_decay_N_parent"] = P1LAB_decay + four_momenta["P_decay_ell_minus"] = P2LAB_decay + four_momenta["P_decay_ell_plus"] = P3LAB_decay + four_momenta["P_decay_N_daughter"] = P4LAB_decay + + elif type(decay_case) == proc.FermionSinglePhotonDecay: + + mh = decay_case.m_parent + mf = decay_case.m_daughter + + ######################## + ### HNL decay + N_decay_samples = {"unit_cost": np.array(vsamples[0])} + # Ni (k1) --> Nj (k2) gamma (k3) + masses_decay = { + "m1": mh, # Ni + "m2": mf, # Nj + "m3": 0.0, # gamma + } + # Phnl, Phnl', Pgamma + P1LAB_decay, P2LAB_decay, P3LAB_decay = phase_space.two_body_decay(N_decay_samples, boost=boost_scattered_N, **masses_decay, rng=MC_case.rng) + + four_momenta["P_decay_N_parent"] = P1LAB_decay + four_momenta["P_decay_N_daughter"] = P2LAB_decay + four_momenta["P_decay_photon"] = P3LAB_decay + + return four_momenta + + +class _FakeMCInterface: + def __init__(self, random): + self.random = random + self.rng_func = np.frompyfunc(lambda x: self.random.Uniform(0, 1), 1, 1) + self.rng = lambda x: self.rng_func(np.empty(x)).astype(float) + + # A class 
representing a single decay_case DarkNews class # Only handles methods concerning the decay part class PyDarkNewsDecay(DarkNewsDecay): @@ -267,11 +419,12 @@ def SampleRecordFromDarkNews(self, record, random): # Expand dims required to call DarkNews function on signle sample four_momenta = get_decay_momenta_from_vegas_samples( np.expand_dims(PS, 0), + _FakeMCInterface(random), self.dec_case, np.expand_dims(np.array(record.primary_momentum), 0), ) - secondaries = record.GetSecondaryParticleRecords() + secondaries = record.get_secondary_particle_records() if isinstance(self.dec_case, FermionSinglePhotonDecay): gamma_idx = 0 From 1f0cd47832e96f31ce9980fa96f73052980e83ae Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Wed, 18 Sep 2024 22:12:11 -0600 Subject: [PATCH 81/85] DipolePortal_CCM --- .../examples/example2/DipolePortal_CCM.py | 111 ++++++++++-------- 1 file changed, 61 insertions(+), 50 deletions(-) diff --git a/resources/examples/example2/DipolePortal_CCM.py b/resources/examples/example2/DipolePortal_CCM.py index 76c6d8d9d..6bd5fa653 100644 --- a/resources/examples/example2/DipolePortal_CCM.py +++ b/resources/examples/example2/DipolePortal_CCM.py @@ -1,13 +1,12 @@ import os import numpy as np - import siren -from siren.SIREN_Controller import SIREN_Controller +from siren import utilities # Define a DarkNews model model_kwargs = { "m4": 0.0235, - "mu_tr_mu4": 6e-7, # GeV^-1 + "mu_tr_mu4": 6e-7, # GeV^-1 "UD4": 0, "Umu4": 0, "epsilon": 0.0, @@ -18,82 +17,94 @@ } # Number of events to inject -events_to_inject = 100000 +events_to_inject = 1 -# Expeirment to run +# Experiment to run experiment = "CCM" - -# Define the controller -controller = SIREN_Controller(events_to_inject, experiment) +detector_model = utilities.load_detector(experiment) # Particle to inject primary_type = siren.dataclasses.Particle.ParticleType.NuMu -xs_path = siren.utilities.get_cross_section_model_path(f"DarkNewsTables-v{siren.utilities.darknews_version()}", must_exist=False) -# Define 
DarkNews Model -table_dir = os.path.join( - xs_path, - "Dipole_M%2.2e_mu%2.2e" % (model_kwargs["m4"], model_kwargs["mu_tr_mu4"]), +# Load DarkNews processes +primary_processes, secondary_processes = utilities.load_processes( + f"DarkNewsTables-v{siren.utilities.darknews_version()}", + primary_type=primary_type, + detector_model = detector_model, + **model_kwargs, ) -controller.InputDarkNewsModel(primary_type, table_dir, **model_kwargs) -# Primary distributions -primary_injection_distributions = {} -primary_physical_distributions = {} +# Mass distribution +mass_ddist = siren.distributions.PrimaryMass(0) -# energy distribution +# Primary distributions nu_energy = 0.02965 # from pi+ DAR edist = siren.distributions.Monoenergetic(nu_energy) -primary_injection_distributions["energy"] = edist -primary_physical_distributions["energy"] = edist -# fill cross section tables at this energy -controller.DN_processes.FillCrossSectionTablesAtEnergy(nu_energy) - -# Flux normalization: -# using the number quoted in 2105.14020, 4.74e9 nu/m^2/s / (6.2e14 POT/s) * 4*pi*20m^2 to get nu/POT flux_units = siren.distributions.NormalizationConstant(3.76e-2) -primary_physical_distributions["flux_units"] = flux_units -# direction distribution: cone from lower W target +# Cone direction distribution opening_angle = np.arctan(5 / 23.0) -# slightly larger than CCM lower_target_origin = siren.math.Vector3D(0, 0, -0.241) detector_origin = siren.math.Vector3D(23, 0, -0.65) lower_dir = detector_origin - lower_target_origin lower_dir.normalize() lower_inj_ddist = siren.distributions.Cone(lower_dir, opening_angle) -phys_ddist = ( - siren.distributions.IsotropicDirection() -) # truly we are isotropic -primary_injection_distributions["direction"] = lower_inj_ddist -primary_physical_distributions["direction"] = phys_ddist +phys_ddist = siren.distributions.IsotropicDirection() -# Position distribution: consider neutrinos from a point source +# Position distribution max_dist = 25 lower_pos_dist = 
siren.distributions.PointSourcePositionDistribution( - lower_target_origin - detector_origin, max_dist, set(controller.GetDetectorModelTargets()[0]) + lower_target_origin - detector_origin, max_dist, ) -primary_injection_distributions["position"] = lower_pos_dist - -# SetProcesses -controller.SetProcesses( - primary_type, primary_injection_distributions, primary_physical_distributions -) - -controller.Initialize() +primary_injection_distributions = [ + mass_ddist, # Mass distribution + edist, # Energy distribution + lower_inj_ddist, # Direction distribution + lower_pos_dist # Position distribution +] + +primary_physical_distributions = [ + edist, # Energy distribution + phys_ddist, # Direction distribution +] + +fiducial_volume = siren.utilities.get_fiducial_volume(experiment) +secondary_injection_distributions = {} +for secondary_type in secondary_processes.keys(): + secondary_injection_distributions[secondary_type] = [ + siren.distributions.SecondaryBoundedVertexDistribution(fiducial_volume, max_dist) + ] + +# Define stopping condition for the injector def stop(datum, i): secondary_type = datum.record.signature.secondary_types[i] return secondary_type != siren.dataclasses.Particle.ParticleType.N4 -controller.injector.SetStoppingCondition(stop) +injector = siren.injection.Injector() +injector.number_of_events = 1 +injector.detector_model = detector_model +injector.primary_type = primary_type +injector.primary_interactions = primary_processes[primary_type] +injector.primary_injection_distributions = primary_injection_distributions +injector.secondary_interactions = secondary_processes +injector.secondary_injection_distributions = secondary_injection_distributions +injector.stopping_condition = stop + -events = controller.GenerateEvents(fill_tables_at_exit=False) +# Generate events +events = [injector.generate_event() for _ in range(events_to_inject)] +# Output the events os.makedirs("output", exist_ok=True) -controller.SaveEvents( - 
"output/CCM_Dipole_M%2.2e_mu%2.2e_example" - % (model_kwargs["m4"], model_kwargs["mu_tr_mu4"]), - fill_tables_at_exit=False -) +weighter = siren.injection.Weighter() +weighter.injectors = [injector] +weighter.detector_model = detector_model +weighter.primary_type = primary_type +weighter.primary_interactions = primary_processes[primary_type] +weighter.secondary_interactions = secondary_processes +weighter.primary_physical_distributions = primary_physical_distributions +weighter.secondary_physical_distributions = {} + +weights = [weighter(event) for event in events] From 7a34462b105cd07d919dc15c92766f40544eadf5 Mon Sep 17 00:00:00 2001 From: Marisol Chavez Estrada Date: Fri, 11 Oct 2024 18:30:32 -0400 Subject: [PATCH 82/85] Add ParticleIDs in InteractionRecord to pybindings --- projects/dataclasses/private/pybindings/dataclasses.cxx | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/projects/dataclasses/private/pybindings/dataclasses.cxx b/projects/dataclasses/private/pybindings/dataclasses.cxx index 42b9f071d..8687aead6 100644 --- a/projects/dataclasses/private/pybindings/dataclasses.cxx +++ b/projects/dataclasses/private/pybindings/dataclasses.cxx @@ -160,12 +160,16 @@ PYBIND11_MODULE(dataclasses, m) { .def("__str__", [](InteractionRecord const & r) { return to_str(r); }) .def("__repr__", [](InteractionRecord const & r) { return to_repr(r); }) .def_readwrite("signature",&InteractionRecord::signature) + .def_readwrite("primary_id",&InteractionRecord::primary_id) + .def_readwrite("primary_initial_position",&InteractionRecord::primary_initial_position) .def_readwrite("primary_mass",&InteractionRecord::primary_mass) .def_readwrite("primary_momentum",&InteractionRecord::primary_momentum) .def_readwrite("primary_helicity",&InteractionRecord::primary_helicity) + .def_readwrite("target_id",&InteractionRecord::target_id) .def_readwrite("target_mass",&InteractionRecord::target_mass) .def_readwrite("target_helicity",&InteractionRecord::target_helicity) 
.def_readwrite("interaction_vertex",&InteractionRecord::interaction_vertex) + .def_readwrite("secondary_ids",&InteractionRecord::secondary_ids) .def_readwrite("secondary_masses",&InteractionRecord::secondary_masses) .def_readwrite("secondary_momenta",&InteractionRecord::secondary_momenta) .def_readwrite("secondary_helicities",&InteractionRecord::secondary_helicities) From 00164a1568ee0105b500b592c8d5e089ec215e51 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Fri, 11 Oct 2024 21:03:13 -0600 Subject: [PATCH 83/85] Updates to cmake --- CMakeLists.txt | 94 ++++++++++++++++++++------- projects/dataclasses/CMakeLists.txt | 5 +- projects/detector/CMakeLists.txt | 2 + projects/distributions/CMakeLists.txt | 2 + projects/geometry/CMakeLists.txt | 2 + projects/injection/CMakeLists.txt | 2 + projects/interactions/CMakeLists.txt | 2 + projects/math/CMakeLists.txt | 4 ++ projects/serialization/CMakeLists.txt | 5 ++ projects/utilities/CMakeLists.txt | 5 ++ 10 files changed, 99 insertions(+), 24 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 8defbc1b4..6dcf03d54 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,5 +1,5 @@ -cmake_minimum_required(VERSION 3.3.2 FATAL_ERROR) -cmake_policy(VERSION 3.3.2) +cmake_minimum_required(VERSION 3.20 FATAL_ERROR) +cmake_policy(VERSION 3.20) if(${CMAKE_HOST_SYSTEM_NAME} MATCHES "Darwin") set(MACOSX TRUE) @@ -11,13 +11,14 @@ SET(CMAKE_MODULE_PATH ${CMAKE_CURRENT_LIST_DIR}/cmake ${CMAKE_CURRENT_LIST_DIR}/ message(STATUS "CMAKE_PREFIX_PATH: ${CMAKE_PREFIX_PATH}") message(STATUS "CMAKE_INSTALL_PREFIX: ${CMAKE_INSTALL_PREFIX}") -set(CIBUILDWHEEL $ENV{CIBUILDWHEEL}) +set(CIBUILDWHEEL "$ENV{CIBUILDWHEEL}") message(STATUS "CIBUILDWHEEL: ${CIBUILDWHEEL}") -if(${CIBUILDWHEEL}) - set(CI_INSTALL_PREFIX $ENV{CI_INSTALL_PREFIX}) +if(CIBUILDWHEEL STREQUAL "1") + set(CI_INSTALL_PREFIX "$ENV{CI_INSTALL_PREFIX}") message(STATUS "CI_INSTALL_PREFIX: ${CI_INSTALL_PREFIX}") endif() + # parse pyproject.toml for the version 
include(pyproject) @@ -31,21 +32,31 @@ SET(CMAKE_CXX_STANDARD 14) SET(CMAKE_C_STANDARD 99) # set the build type and appropriate flags -option(CMAKE_BUILD_TYPE "" "Release") -set(_FLAGS "-O2 -Wall -fPIC") -set(_FLAGS_DEBUG "-g -O0 -Wall -fPIC") -set(_FLAGS_RELEASE "-O2 -Wall -fPIC -s") +if(NOT CMAKE_BUILD_TYPE) + set(CMAKE_BUILD_TYPE "Release") +endif() + +# Create an interface library for SIREN compile options +add_library(siren_compile_options INTERFACE) + +# Specify the compile options +target_compile_options(siren_compile_options INTERFACE + -O2 + -Wall + -fPIC + $<$:-g> + $<$:-O0> + $<$:-O2> + $<$:-s> +) + +# Conditionally add -stdlib=libc++ for Clang if (CMAKE_CXX_COMPILER_ID STREQUAL "Clang") - set(_FLAGS "${_FLAGS} -stdlib=libc++") - set(_FLAGS_DEBUG "${_FLAGS_DEBUG} -stdlib=libc++") - set(_FLAGS_RELEASE "${_FLAGS_RELEASE} -stdlib=libc++") + target_compile_options(siren_compile_options INTERFACE -stdlib=libc++) endif() -set(CMAKE_CXX_FLAGS ${_FLAGS}) -set(CMAKE_CXX_FLAGS_DEBUG ${_FLAGS_DEBUG}) -set(CMAKE_CXX_FLAGS_RELEASE ${_FLAGS_RELEASE}) # override install locations when building python extensions -if(DEFINED SKBUILD) +if(DEFINED SKBUILD_PLATLIB_DIR) cmake_path(RELATIVE_PATH SKBUILD_HEADERS_DIR BASE_DIRECTORY ${SKBUILD_PLATLIB_DIR} OUTPUT_VARIABLE CMAKE_INSTALL_INCLUDEDIR) cmake_path(RELATIVE_PATH SKBUILD_PLATLIB_DIR BASE_DIRECTORY ${SKBUILD_PLATLIB_DIR} OUTPUT_VARIABLE CMAKE_INSTALL_LIBDIR) message(STATUS "Setting include dir to: ${CMAKE_INSTALL_INCLUDEDIR}") @@ -63,12 +74,42 @@ include(pybind11) # load project dependencies include(rk) +if(TARGET rk_static) + target_link_libraries(rk_static INTERFACE siren_compile_options) +endif() +if(TARGET rk_shared) + target_link_libraries(rk_shared INTERFACE siren_compile_options) +endif() include(cereal) +if(TARGET cereal) + target_link_libraries(cereal INTERFACE siren_compile_options) +endif() include(delabella) +if(TARGET delabella_static) + target_link_libraries(delabella_static INTERFACE siren_compile_options) 
+endif() +if(TARGET delabella_shared) + target_link_libraries(delabella_shared INTERFACE siren_compile_options) +endif() include(CFITSIO) include(photospline) +if(TARGET photospline) + target_link_libraries(photospline INTERFACE siren_compile_options) +endif() include(googletest) +if(TARGET gtest) + target_link_libraries(gtest INTERFACE siren_compile_options) +endif() +if(TARGET gtest_main) + target_link_libraries(gtest_main INTERFACE siren_compile_options) +endif() +if(TARGET gmock) + target_link_libraries(gmock INTERFACE siren_compile_options) +endif() include(NamedType) +if(TARGET NamedType) + target_link_libraries(NamedType INTERFACE siren_compile_options) +endif() # load macros for googletest include(testing) @@ -87,8 +128,9 @@ add_subdirectory(projects/injection) # define the target library add_library(SIREN SHARED) set_property(TARGET SIREN PROPERTY POSITION_INDEPENDENT_CODE ON) +target_link_libraries(SIREN INTERFACE siren_compile_options) -if(${MACOSX}) +if(DEFINED MACOSX AND MACOSX) if(CMAKE_VERSION VERSION_LESS 3.13) target_link_libraries(SIREN PUBLIC "$<$:LINKER:-undefined,dynamic_lookup>") else() @@ -127,12 +169,15 @@ target_link_libraries(SIREN ) endif() +# Export siren_compile_options +install(TARGETS siren_compile_options EXPORT ${PROJECT_NAME}Config) + # define the install path normally or for python package -if(DEFINED SKBUILD) +if(DEFINED SKBUILD_PLATLIB_DIR) set_target_properties(SIREN PROPERTIES BUILD_WITH_INSTALL_RPATH FALSE LINK_FLAGS "-Wl,-rpath,\\\$ORIGIN") - if(${CIBUILDWHEEL}) + if(DEFINED CIBUILDWHEEL AND CIBUILDWHEEL) message(STATUS "Setting SIREN install lib dir to: ${CI_INSTALL_PREFIX}/lib") message(STATUS "Setting SIREN install include dir to: ${CI_INSTALL_PREFIX}/include") install(TARGETS SIREN @@ -183,7 +228,7 @@ else() endif() # optionally package runtime dependencies -if((DEFINED SKBUILD) AND (PACKAGE_SHARED_DEPS)) +if((DEFINED SKBUILD_PLATLIB_DIR) AND (PACKAGE_SHARED_DEPS)) install(CODE "set(SIREN_LIB_FILE 
\"${PROJECT_BINARY_DIR}/${CMAKE_SHARED_MODULE_PREFIX}SIREN${CMAKE_SHARED_MODULE_SUFFIX}\")") install(CODE "set(PYTHON_DEP_LIB_DESTINATION \"${SKBUILD_PLATLIB_DIR}/siren.libs/\")") install(CODE [[ @@ -209,7 +254,7 @@ if((DEFINED SKBUILD) AND (PACKAGE_SHARED_DEPS)) endif() # install the python extensions -if(DEFINED SKBUILD) +if(DEFINED SKBUILD_PLATLIB_DIR) install(TARGETS utilities LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}/siren) install(TARGETS math @@ -241,7 +286,8 @@ write_basic_package_version_file( VERSION ${PROJECT_VERSION} COMPATIBILITY AnyNewerVersion ) -export(EXPORT ${PROJECT_NAME}Config FILE ${PROJECT_NAME}Config.cmake) +export(EXPORT ${PROJECT_NAME}Config FILE "${PROJECT_NAME}Config.cmake" + NAMESPACE ${PROJECT_NAME}::) # Make importable from install location set(_config_dir share/${PROJECT_NAME}/cmake) @@ -249,7 +295,9 @@ install(FILES "${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}ConfigVersion.cmake" DESTINATION ${_config_dir} ) install(EXPORT ${PROJECT_NAME}Config - DESTINATION ${_config_dir} + FILE "${PROJECT_NAME}Config.cmake" + NAMESPACE ${PROJECT_NAME}:: + DESTINATION "${_config_dir}" ) MESSAGE("") diff --git a/projects/dataclasses/CMakeLists.txt b/projects/dataclasses/CMakeLists.txt index 2a7cda1f9..9a5448565 100644 --- a/projects/dataclasses/CMakeLists.txt +++ b/projects/dataclasses/CMakeLists.txt @@ -16,7 +16,10 @@ target_include_directories(SIREN_dataclasses PUBLIC $ ) -target_link_libraries(SIREN_dataclasses PUBLIC +target_link_libraries(SIREN_dataclasses + INTERFACE + siren_compile_options + PUBLIC photospline SIREN_serialization SIREN_utilities diff --git a/projects/detector/CMakeLists.txt b/projects/detector/CMakeLists.txt index 5942a65e4..4a132bd7c 100644 --- a/projects/detector/CMakeLists.txt +++ b/projects/detector/CMakeLists.txt @@ -25,6 +25,8 @@ target_include_directories(SIREN_detector PUBLIC ) target_link_libraries(SIREN_detector + INTERFACE + siren_compile_options PRIVATE $ PUBLIC diff --git a/projects/distributions/CMakeLists.txt 
b/projects/distributions/CMakeLists.txt index 71faec669..897821a6f 100644 --- a/projects/distributions/CMakeLists.txt +++ b/projects/distributions/CMakeLists.txt @@ -42,6 +42,8 @@ target_include_directories(SIREN_distributions PUBLIC ) target_link_libraries(SIREN_distributions + INTERFACE + siren_compile_options PRIVATE $ pybind11::embed diff --git a/projects/geometry/CMakeLists.txt b/projects/geometry/CMakeLists.txt index fc37d3e00..62b4c0dff 100644 --- a/projects/geometry/CMakeLists.txt +++ b/projects/geometry/CMakeLists.txt @@ -18,6 +18,8 @@ target_include_directories(SIREN_geometry PUBLIC ) target_link_libraries(SIREN_geometry + INTERFACE + siren_compile_options PRIVATE $ PUBLIC diff --git a/projects/injection/CMakeLists.txt b/projects/injection/CMakeLists.txt index e0956aee2..d8e91a2b7 100644 --- a/projects/injection/CMakeLists.txt +++ b/projects/injection/CMakeLists.txt @@ -14,6 +14,8 @@ target_include_directories(SIREN_injection PUBLIC ) target_link_libraries(SIREN_injection + INTERFACE + siren_compile_options PRIVATE $ pybind11::embed diff --git a/projects/interactions/CMakeLists.txt b/projects/interactions/CMakeLists.txt index c6ee384d2..4c0b6505b 100644 --- a/projects/interactions/CMakeLists.txt +++ b/projects/interactions/CMakeLists.txt @@ -26,6 +26,8 @@ target_include_directories(SIREN_interactions PUBLIC ) target_link_libraries(SIREN_interactions + INTERFACE + siren_compile_options PRIVATE $ pybind11::embed diff --git a/projects/math/CMakeLists.txt b/projects/math/CMakeLists.txt index db2aa36c4..1bde95e3a 100644 --- a/projects/math/CMakeLists.txt +++ b/projects/math/CMakeLists.txt @@ -17,6 +17,8 @@ target_include_directories(SIREN_math PUBLIC if(${MACOSX}) target_link_libraries(SIREN_math + INTERFACE + siren_compile_options PUBLIC photospline delabella_shared @@ -26,6 +28,8 @@ target_link_libraries(SIREN_math ) else() target_link_libraries(SIREN_math + INTERFACE + siren_compile_options PUBLIC photospline delabella_shared diff --git 
a/projects/serialization/CMakeLists.txt b/projects/serialization/CMakeLists.txt index f5e385c9f..c7983051b 100644 --- a/projects/serialization/CMakeLists.txt +++ b/projects/serialization/CMakeLists.txt @@ -4,6 +4,11 @@ target_include_directories(SIREN_serialization INTERFACE $ ) +target_link_libraries(SIREN_serialization + INTERFACE + siren_compile_options +) + install(DIRECTORY "${PROJECT_SOURCE_DIR}/projects/serialization/public/" EXPORT ${PROJECT_NAME}Config DESTINATION ${CMAKE_INSTALL_INCLUDEDIR} diff --git a/projects/utilities/CMakeLists.txt b/projects/utilities/CMakeLists.txt index 5ad24dfaa..a8c9c11af 100644 --- a/projects/utilities/CMakeLists.txt +++ b/projects/utilities/CMakeLists.txt @@ -12,6 +12,11 @@ target_include_directories(SIREN_utilities PUBLIC $ ) +target_link_libraries(SIREN_utilities + INTERFACE + siren_compile_options +) + install(DIRECTORY "${PROJECT_SOURCE_DIR}/projects/utilities/public/" DESTINATION ${CMAKE_INSTALL_INCLUDEDIR} FILES_MATCHING From 6e56833ce3194ac8cf5d1253da8cbb39d9fb77a0 Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Fri, 11 Oct 2024 21:18:08 -0600 Subject: [PATCH 84/85] FPIC for rk_static --- vendor/rk/CMakeLists.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/vendor/rk/CMakeLists.txt b/vendor/rk/CMakeLists.txt index cc15f75cd..f28dd9353 100644 --- a/vendor/rk/CMakeLists.txt +++ b/vendor/rk/CMakeLists.txt @@ -30,6 +30,7 @@ LIST(APPEND rk_HEADERS ) add_library(rk_static STATIC ${rk_SOURCES}) +set_property(TARGET rk_static PROPERTY POSITION_INDEPENDENT_CODE ON) add_library(rk_shared SHARED ${rk_SOURCES}) set_target_properties(rk_static PROPERTIES EXPORT_NAME rk) set_target_properties(rk_shared PROPERTIES EXPORT_NAME rk) From 404562cf8a81d99dae7b18a6a165f9ca45cdfadc Mon Sep 17 00:00:00 2001 From: Austin Schneider Date: Fri, 11 Oct 2024 21:24:49 -0600 Subject: [PATCH 85/85] Update pyproject.toml --- pyproject.toml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml 
index 567f40948..41912cfce 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -11,7 +11,6 @@ build-backend = "scikit_build_core.build" [tool.scikit-build] wheel.packages = [] cmake.build-type = "Release" -#wheel.install-dir = "siren" [tool.scikit-build.cmake.define] CMAKE_PREFIX_PATH="/tmp/downloads/local" @@ -44,7 +43,7 @@ description = "Sampling and Injection for Rare EveNts: A neutrino and rare-proce readme = "README.md" requires-python = ">=3.8" license = {file = "LICENSE"} -keywords = ["physics", "hep", "netrino", "bsm", "simulation", "injection", "weighting"] +keywords = ["physics", "hep", "neutrino", "bsm", "simulation", "injection", "weighting"] authors = [ {name = "Austin Schneider", email = "aschn@mit.edu"}, {name = "Nicholas Kamp", email = "nkamp@fas.harvard.edu"} @@ -85,7 +84,7 @@ DarkNews = ["DarkNews>=0.4.2"] Homepage = "https://github.com/Harvard-Neutrino/SIREN" Documentation = "https://readthedocs.org" Repository = "https://github.com/Harvard-Neutrino/SIREN.git" -Issues = "https://github.com/Harvard-Neutrino/LeptonInjector/issues" +Issues = "https://github.com/Harvard-Neutrino/SIREN/issues" [wheel] no-clean = true