From 273bafee7361e557e035bc9a6f45cee862f21a8f Mon Sep 17 00:00:00 2001 From: Gareth S Cabourn Davies Date: Fri, 12 Jul 2024 18:13:52 +0100 Subject: [PATCH 01/35] Add a standard --version option to common pycbc options (#4811) * Start moving to a common --version argument * Fix version importing * Continue moving to a common --version argument * Continue moving to a common --version argument * Moving minifollowups to a common --version argument * Moving plotting to a common --version argument * Move workflows to use standard versioning * Moving pygrb to a common --version argument * Moving executables in /bin to a common --version argument * Allow --version to provide a level of information set by the user * Move help into the class definition * Minor wording clarification * Fixes to the pygrb_initialize_plot_parser * Fix missed imports * CC * Fix missed imports * Fix some places where --version fails * Allow pycbc_pygrb_exclusion_dist_table to use standard logging and version * Add --version testing to the CI, add version modifier test * Re-add failing codes * Do the exclusion of certain codes properly * Remove unused variable, move test_results into its own function in pycbc_test_suite.sh * Remove some code information that is no longer needed * Tabs-->spaces, allow log file not to be set in test_results --- bin/all_sky_search/pycbc_add_statmap | 2 - bin/all_sky_search/pycbc_apply_rerank | 2 - bin/all_sky_search/pycbc_average_psd | 2 - bin/all_sky_search/pycbc_bin_templates | 2 - bin/all_sky_search/pycbc_bin_trigger_rates_dq | 2 - bin/all_sky_search/pycbc_calculate_psd | 2 - bin/all_sky_search/pycbc_coinc_findtrigs | 3 +- bin/all_sky_search/pycbc_coinc_hdfinjfind | 4 +- bin/all_sky_search/pycbc_coinc_mergetrigs | 7 +- bin/all_sky_search/pycbc_coinc_statmap | 4 +- bin/all_sky_search/pycbc_coinc_statmap_inj | 3 - .../pycbc_combine_coincident_events | 2 - bin/all_sky_search/pycbc_combine_statmap | 2 - .../pycbc_cut_merge_triggers_to_tmpltbank | 3 - .../pycbc_distribute_background_bins | 2 - bin/all_sky_search/pycbc_exclude_zerolag | 3 - bin/all_sky_search/pycbc_fit_sngls_binned | 3 +- .../pycbc_fit_sngls_by_template | 3 +- .../pycbc_fit_sngls_over_multiparam | 5 +- bin/all_sky_search/pycbc_fit_sngls_over_param | 3 +- bin/all_sky_search/pycbc_followup_file | 2 - bin/all_sky_search/pycbc_foreground_censor | 3 +- bin/all_sky_search/pycbc_make_bayestar_skymap | 4 +- bin/all_sky_search/pycbc_merge_psds | 2 - bin/all_sky_search/pycbc_reduce_template_bank | 3 - bin/all_sky_search/pycbc_rerank_passthrough | 2 - bin/all_sky_search/pycbc_sngls_findtrigs | 5 +- bin/all_sky_search/pycbc_sngls_pastro | 3 - bin/all_sky_search/pycbc_sngls_statmap | 4 +- bin/all_sky_search/pycbc_sngls_statmap_inj | 4 +- bin/all_sky_search/pycbc_strip_injections | 3 +- bin/bank/pycbc_aligned_bank_cat | 2 - bin/bank/pycbc_aligned_stoch_bank | 6 - bin/bank/pycbc_bank_verification | 1 - bin/bank/pycbc_coinc_bank2hdf | 2 - bin/bank/pycbc_geom_aligned_2dstack | 1 - bin/bank/pycbc_geom_aligned_bank | 1 - bin/bank/pycbc_geom_nonspinbank | 1 - bin/bank/pycbc_tmpltbank_to_chi_params | 1 - bin/inference/pycbc_inference | 3 - .../pycbc_inference_plot_acceptance_rate | 4 - bin/inference/pycbc_inference_plot_acf | 3 - bin/inference/pycbc_inference_plot_acl | 3 - .../pycbc_inference_plot_dynesty_run | 3 - .../pycbc_inference_plot_dynesty_traceplot | 3 - .../pycbc_inference_plot_gelman_rubin | 4 +- bin/inference/pycbc_inference_plot_geweke | 4 - .../pycbc_inference_plot_inj_recovery | 4 - bin/inference/pycbc_inference_plot_movie | 3 -
bin/inference/pycbc_inference_plot_posterior | 4 - bin/inference/pycbc_inference_plot_pp | 4 - bin/inference/pycbc_inference_plot_prior | 3 - bin/inference/pycbc_inference_plot_samples | 3 - ...bc_live_supervise_single_significance_fits | 8 +- .../pycbc_foreground_minifollowup | 2 - .../pycbc_injection_minifollowup | 2 - bin/minifollowups/pycbc_page_coincinfo | 3 - bin/minifollowups/pycbc_page_injinfo | 3 - bin/minifollowups/pycbc_page_snglinfo | 4 +- .../pycbc_plot_trigger_timeseries | 3 - bin/minifollowups/pycbc_single_template_plot | 2 - bin/minifollowups/pycbc_sngl_minifollowup | 2 - .../pycbc_upload_prep_minifollowup | 2 - .../pycbc_banksim_plot_eff_fitting_factor | 1 - .../pycbc_banksim_plot_fitting_factors | 1 - bin/plotting/pycbc_banksim_table_point_injs | 1 - bin/plotting/pycbc_create_html_snippet | 1 - bin/plotting/pycbc_ifar_catalog | 3 - bin/plotting/pycbc_page_coinc_snrchi | 3 - bin/plotting/pycbc_page_dq_table | 2 - bin/plotting/pycbc_page_foreground | 3 - bin/plotting/pycbc_page_foundmissed | 4 +- bin/plotting/pycbc_page_ifar | 3 - bin/plotting/pycbc_page_injtable | 3 - bin/plotting/pycbc_page_recovery | 4 +- bin/plotting/pycbc_page_segments | 3 - bin/plotting/pycbc_page_segplot | 4 +- bin/plotting/pycbc_page_segtable | 3 - bin/plotting/pycbc_page_sensitivity | 3 - bin/plotting/pycbc_page_snrchi | 3 - bin/plotting/pycbc_page_snrifar | 3 - bin/plotting/pycbc_page_snrratehist | 3 - bin/plotting/pycbc_page_template_bin_table | 2 - bin/plotting/pycbc_page_versioning | 4 +- bin/plotting/pycbc_page_vetotable | 2 - bin/plotting/pycbc_plot_bank_bins | 3 - bin/plotting/pycbc_plot_bank_corner | 6 - bin/plotting/pycbc_plot_dq_flag_likelihood | 2 - bin/plotting/pycbc_plot_dq_likelihood_vs_time | 2 - bin/plotting/pycbc_plot_dq_percentiles | 2 - bin/plotting/pycbc_plot_gating | 4 +- bin/plotting/pycbc_plot_hist | 2 - bin/plotting/pycbc_plot_multiifo_dtphase | 4 +- bin/plotting/pycbc_plot_psd_file | 3 - bin/plotting/pycbc_plot_qscan | 3 - bin/plotting/pycbc_plot_range | 3 - bin/plotting/pycbc_plot_range_vs_mtot | 3 - bin/plotting/pycbc_plot_singles_timefreq | 3 - bin/plotting/pycbc_plot_singles_vs_params | 3 - bin/plotting/pycbc_plot_throughput | 3 - bin/plotting/pycbc_plot_trigrate | 2 - bin/plotting/pycbc_plot_waveform | 3 - bin/pycbc_banksim | 4 +- bin/pycbc_banksim_combine_banks | 2 - bin/pycbc_banksim_match_combine | 1 - bin/pycbc_banksim_skymax | 4 +- bin/pycbc_coinc_time | 4 +- bin/pycbc_condition_strain | 3 - bin/pycbc_convertinjfiletohdf | 2 - bin/pycbc_create_injections | 4 - bin/pycbc_data_store | 2 - bin/pycbc_faithsim | 3 - bin/pycbc_fit_sngl_trigs | 3 +- bin/pycbc_hdf5_splitbank | 4 +- bin/pycbc_hdf_splitinj | 4 +- bin/pycbc_inj_cut | 2 - bin/pycbc_inspiral | 2 - bin/pycbc_make_html_page | 3 - bin/pycbc_make_skymap | 1 - bin/pycbc_merge_inj_hdf | 2 - bin/pycbc_multi_inspiral | 2 - bin/pycbc_optimal_snr | 2 - bin/pycbc_optimize_snr | 4 +- bin/pycbc_single_template | 2 - bin/pycbc_source_probability_offline | 2 - bin/pycbc_split_inspinj | 5 +- bin/pycbc_splitbank | 3 - bin/pygrb/pycbc_grb_inj_finder | 10 +- bin/pygrb/pycbc_grb_trig_cluster | 8 - bin/pygrb/pycbc_grb_trig_combiner | 9 +- bin/pygrb/pycbc_make_offline_grb_workflow | 8 - bin/pygrb/pycbc_pygrb_efficiency | 3 +- bin/pygrb/pycbc_pygrb_exclusion_dist_table | 4 +- bin/pygrb/pycbc_pygrb_grb_info_table | 1 - bin/pygrb/pycbc_pygrb_minifollowups | 2 - bin/pygrb/pycbc_pygrb_page_tables | 3 +- bin/pygrb/pycbc_pygrb_plot_chisq_veto | 3 +- bin/pygrb/pycbc_pygrb_plot_coh_ifosnr | 4 +- bin/pygrb/pycbc_pygrb_plot_injs_results | 3 
+- bin/pygrb/pycbc_pygrb_plot_null_stats | 3 +- bin/pygrb/pycbc_pygrb_plot_skygrid | 3 +- bin/pygrb/pycbc_pygrb_plot_snr_timeseries | 3 +- bin/pygrb/pycbc_pygrb_plot_stats_distribution | 3 +- bin/pygrb/pycbc_pygrb_pp_workflow | 1 - .../pycbc_make_bank_verifier_workflow | 1 - bin/workflows/pycbc_make_faithsim_workflow | 4 - .../pycbc_make_inference_inj_workflow | 5 - .../pycbc_make_inference_plots_workflow | 3 - bin/workflows/pycbc_make_inference_workflow | 4 - .../pycbc_make_offline_search_workflow | 5 - .../pycbc_make_psd_estimation_workflow | 3 - bin/workflows/pycbc_make_sbank_workflow | 8 - bin/workflows/pycbc_make_uberbank_workflow | 1 - pycbc/__init__.py | 25 ++- pycbc/_version.py | 96 +++++++--- pycbc/results/pygrb_postprocessing_utils.py | 3 +- tools/pycbc_test_suite.sh | 173 +++++------------- 157 files changed, 190 insertions(+), 580 deletions(-) diff --git a/bin/all_sky_search/pycbc_add_statmap b/bin/all_sky_search/pycbc_add_statmap index 066fd12d1d7..65b8cff843a 100755 --- a/bin/all_sky_search/pycbc_add_statmap +++ b/bin/all_sky_search/pycbc_add_statmap @@ -26,8 +26,6 @@ def get_ifo_string(fi): parser = argparse.ArgumentParser() pycbc.add_common_pycbc_options(parser) -parser.add_argument('--version', action="version", - version=pycbc.version.git_verbose_msg) parser.add_argument('--statmap-files', nargs='+', help="List of coinc files to be combined") parser.add_argument('--background-files', nargs='+', default=None, diff --git a/bin/all_sky_search/pycbc_apply_rerank b/bin/all_sky_search/pycbc_apply_rerank index 68e1126c967..f90ed7f7b8e 100644 --- a/bin/all_sky_search/pycbc_apply_rerank +++ b/bin/all_sky_search/pycbc_apply_rerank @@ -10,8 +10,6 @@ from shutil import copyfile parser = argparse.ArgumentParser() pycbc.add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', - version=pycbc.version.git_verbose_msg) parser.add_argument('--stat-files', nargs='+', help="Statistic files produced by candidate followup codes") parser.add_argument('--followup-file', diff --git a/bin/all_sky_search/pycbc_average_psd b/bin/all_sky_search/pycbc_average_psd index b89aaff7876..0f34ae2d456 100644 --- a/bin/all_sky_search/pycbc_average_psd +++ b/bin/all_sky_search/pycbc_average_psd @@ -26,13 +26,11 @@ import argparse import numpy as np import pycbc from pycbc.io import HFile -from pycbc.version import git_verbose_msg as version from pycbc.types import MultiDetOptionAction, FrequencySeries parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', version=version) parser.add_argument('--input-files', nargs='+', required=True, metavar='PATH', help='HDF5 files from pycbc_calculate_psd (one per ' 'detector) containing the input PSDs to average.') diff --git a/bin/all_sky_search/pycbc_bin_templates b/bin/all_sky_search/pycbc_bin_templates index 4cc9558ec1a..807c234adbe 100755 --- a/bin/all_sky_search/pycbc_bin_templates +++ b/bin/all_sky_search/pycbc_bin_templates @@ -8,12 +8,10 @@ import numpy as np import pycbc import pycbc.pnutils -from pycbc.version import git_verbose_msg as version from pycbc.events import background_bin_from_string parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', version=version) parser.add_argument("--ifo", type=str, required=True) parser.add_argument("--f-lower", type=float, default=15., help='Enforce a uniform low frequency cutoff to ' diff --git 
a/bin/all_sky_search/pycbc_bin_trigger_rates_dq b/bin/all_sky_search/pycbc_bin_trigger_rates_dq index ef069d9a6b1..f1c6f0ed7fb 100644 --- a/bin/all_sky_search/pycbc_bin_trigger_rates_dq +++ b/bin/all_sky_search/pycbc_bin_trigger_rates_dq @@ -16,11 +16,9 @@ from pycbc.events.veto import (select_segments_by_definer, segments_to_start_end) from pycbc.types.optparse import MultiDetOptionAction from pycbc.io.hdf import SingleDetTriggers -from pycbc.version import git_verbose_msg as version parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', version=version) parser.add_argument("--template-bins-file", required=True) parser.add_argument("--trig-file", required=True) parser.add_argument("--flag-file", required=True) diff --git a/bin/all_sky_search/pycbc_calculate_psd b/bin/all_sky_search/pycbc_calculate_psd index 38ea79fddeb..9764250fc9f 100755 --- a/bin/all_sky_search/pycbc_calculate_psd +++ b/bin/all_sky_search/pycbc_calculate_psd @@ -5,7 +5,6 @@ import logging, argparse, numpy, multiprocessing, time, copy from six.moves import zip_longest import pycbc, pycbc.psd, pycbc.strain, pycbc.events from pycbc.io import HFile -from pycbc.version import git_verbose_msg as version from pycbc.fft.fftw import set_measure_level from pycbc.workflow import resolve_td_option from ligo.segments import segmentlist, segment @@ -13,7 +12,6 @@ set_measure_level(0) parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', version=version) parser.add_argument("--low-frequency-cutoff", type=float, required=True, help="The low frequency cutoff to use for filtering (Hz)") parser.add_argument("--analysis-segment-file", required=True, diff --git a/bin/all_sky_search/pycbc_coinc_findtrigs b/bin/all_sky_search/pycbc_coinc_findtrigs index 3042df7cb65..35628648db0 100644 --- a/bin/all_sky_search/pycbc_coinc_findtrigs +++ b/bin/all_sky_search/pycbc_coinc_findtrigs @@ -2,8 +2,8 @@ import copy, argparse, logging, numpy, numpy.random import shutil, uuid, os.path, atexit from ligo.segments import infinity +import pycbc from pycbc.events import veto, coinc, stat, ranking, cuts -import pycbc.version from pycbc.io import HFile from pycbc import pool, init_logging from numpy.random import seed, shuffle @@ -12,7 +12,6 @@ from pycbc.types.optparse import MultiDetOptionAction parser = argparse.ArgumentParser() pycbc.add_common_pycbc_options(parser) -parser.add_argument("--version", action="version", version=pycbc.version.git_verbose_msg) parser.add_argument("--veto-files", nargs='*', action='append', default=[], help="Optional veto file. Triggers within veto segments " "contained in the file are ignored") diff --git a/bin/all_sky_search/pycbc_coinc_hdfinjfind b/bin/all_sky_search/pycbc_coinc_hdfinjfind index 9fc1bd3b858..4a4f3229e8f 100755 --- a/bin/all_sky_search/pycbc_coinc_hdfinjfind +++ b/bin/all_sky_search/pycbc_coinc_hdfinjfind @@ -6,12 +6,12 @@ files. 
import argparse, logging, types, numpy, os.path from ligo.lw import lsctables, utils as ligolw_utils from ligo import segments +import pycbc from pycbc import events, init_logging from pycbc.events import indices_within_segments from pycbc.types import MultiDetOptionAction from pycbc.inject import CBCHDFInjectionSet from pycbc.io import HFile -import pycbc.version from pycbc.io.ligolw import LIGOLWContentHandler @@ -57,8 +57,6 @@ def xml_to_hdf(table, hdf_file, hdf_key, columns): parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', - version=pycbc.version.git_verbose_msg) parser.add_argument('--trigger-files', nargs='+', required=True) parser.add_argument('--injection-files', nargs='+', required=True) parser.add_argument('--veto-file') diff --git a/bin/all_sky_search/pycbc_coinc_mergetrigs b/bin/all_sky_search/pycbc_coinc_mergetrigs index cf56a23f7c9..77637b85b6d 100755 --- a/bin/all_sky_search/pycbc_coinc_mergetrigs +++ b/bin/all_sky_search/pycbc_coinc_mergetrigs @@ -4,9 +4,8 @@ """ import numpy, argparse, h5py, logging -import pycbc.version from pycbc.io import HFile -from pycbc import init_logging +from pycbc import add_common_pycbc_options, init_logging def changes(arr): l = numpy.where(arr[:-1] != arr[1:])[0] @@ -31,9 +30,7 @@ def region(f, key, boundaries, ids): dtype=h5py.special_dtype(ref=h5py.RegionReference)) parser = argparse.ArgumentParser() -pycbc.add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', - version=pycbc.version.git_verbose_msg) +add_common_pycbc_options(parser) parser.add_argument('--trigger-files', nargs='+') parser.add_argument('--output-file', required=True) parser.add_argument('--bank-file', required=True) diff --git a/bin/all_sky_search/pycbc_coinc_statmap b/bin/all_sky_search/pycbc_coinc_statmap index e4915391750..8bdbca8bd0f 100755 --- a/bin/all_sky_search/pycbc_coinc_statmap +++ b/bin/all_sky_search/pycbc_coinc_statmap @@ -10,7 +10,7 @@ the FANs of any other gravitational waves in the dataset. 
import argparse, itertools import lal, logging, numpy from pycbc.events import veto, coinc, significance -import pycbc.version, pycbc.pnutils, pycbc.io +import pycbc.pnutils, pycbc.io import sys import pycbc.conversions as conv @@ -35,8 +35,6 @@ class fw(object): parser = argparse.ArgumentParser() # General required options pycbc.add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', - version=pycbc.version.git_verbose_msg) parser.add_argument('--coinc-files', nargs='+', help='List of coincidence files used to calculate the ' 'FAP, FAR, etc.') diff --git a/bin/all_sky_search/pycbc_coinc_statmap_inj b/bin/all_sky_search/pycbc_coinc_statmap_inj index 0c4d33f9409..357ccef068a 100644 --- a/bin/all_sky_search/pycbc_coinc_statmap_inj +++ b/bin/all_sky_search/pycbc_coinc_statmap_inj @@ -6,15 +6,12 @@ with producing the combined foreground and background triggers """ import argparse, logging, itertools, copy, pycbc.io, numpy, lal from pycbc.events import veto, coinc, significance -import pycbc.version import pycbc.conversions as conv from pycbc import init_logging parser = argparse.ArgumentParser() # General required options pycbc.add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', - version=pycbc.version.git_verbose_msg) parser.add_argument('--cluster-window', type=float, default=10, help='Length of time window in seconds to cluster coinc ' 'events [default=10s]') diff --git a/bin/all_sky_search/pycbc_combine_coincident_events b/bin/all_sky_search/pycbc_combine_coincident_events index 86d6be76828..55d928bf66e 100644 --- a/bin/all_sky_search/pycbc_combine_coincident_events +++ b/bin/all_sky_search/pycbc_combine_coincident_events @@ -9,7 +9,6 @@ import logging import pycbc from pycbc.io import HFile -import pycbc.version def com(f, files, group): """ Combine the same column from multiple files into another file f""" @@ -56,7 +55,6 @@ def com_with_detector_key(f, files, group): parser = argparse.ArgumentParser() pycbc.add_common_pycbc_options(parser) -parser.add_argument("--version", action="version", version=pycbc.version.git_verbose_msg) parser.add_argument('--statmap-files', nargs='+', help="List of coinc files to be redistributed") parser.add_argument('--output-file', help="name of output file") diff --git a/bin/all_sky_search/pycbc_combine_statmap b/bin/all_sky_search/pycbc_combine_statmap index 2587e624510..825b164273d 100755 --- a/bin/all_sky_search/pycbc_combine_statmap +++ b/bin/all_sky_search/pycbc_combine_statmap @@ -5,12 +5,10 @@ significant foreground, but leaves the background triggers alone. 
""" import numpy, argparse, logging, pycbc, pycbc.events, pycbc.io, lal -import pycbc.version from ligo import segments parser = argparse.ArgumentParser() pycbc.add_common_pycbc_options(parser) -parser.add_argument("--version", action="version", version=pycbc.version.git_verbose_msg) parser.add_argument('--statmap-files', nargs='+', help="List of coinc files to be redistributed") parser.add_argument('--cluster-window', type=float) diff --git a/bin/all_sky_search/pycbc_cut_merge_triggers_to_tmpltbank b/bin/all_sky_search/pycbc_cut_merge_triggers_to_tmpltbank index 71aaa7e6ea8..0848aa949c2 100644 --- a/bin/all_sky_search/pycbc_cut_merge_triggers_to_tmpltbank +++ b/bin/all_sky_search/pycbc_cut_merge_triggers_to_tmpltbank @@ -26,13 +26,10 @@ import argparse import numpy import h5py import pycbc -import pycbc.version from pycbc.io import HFile parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) -parser.add_argument("--version", action="version", - version=pycbc.version.git_verbose_msg) parser.add_argument("--input-file", required=True, help="Input merge triggers HDF file.") parser.add_argument("--output-file", required=True, diff --git a/bin/all_sky_search/pycbc_distribute_background_bins b/bin/all_sky_search/pycbc_distribute_background_bins index 66125f8d5ba..039624618b1 100644 --- a/bin/all_sky_search/pycbc_distribute_background_bins +++ b/bin/all_sky_search/pycbc_distribute_background_bins @@ -1,9 +1,7 @@ #!/bin/env python import argparse, numpy, pycbc.events, logging, pycbc.events, pycbc.io -import pycbc.version parser = argparse.ArgumentParser() -parser.add_argument("--version", action=pycbc.version.Version) pycbc.add_common_pycbc_options(parser) parser.add_argument('--coinc-files', nargs='+', help="List of coinc files to be redistributed") diff --git a/bin/all_sky_search/pycbc_exclude_zerolag b/bin/all_sky_search/pycbc_exclude_zerolag index f79cfc55dc9..82dd103d45b 100644 --- a/bin/all_sky_search/pycbc_exclude_zerolag +++ b/bin/all_sky_search/pycbc_exclude_zerolag @@ -5,14 +5,11 @@ coincidences from *any* coincidence type with ifar above a certain threshold """ import numpy as np, argparse, logging, pycbc, pycbc.io -import pycbc.version from pycbc.events import veto, coinc, significance import pycbc.conversions as conv parser = argparse.ArgumentParser() pycbc.add_common_pycbc_options(parser) -parser.add_argument("--version", action="version", - version=pycbc.version.git_verbose_msg) parser.add_argument('--statmap-file', type=str, help="Coinc statmap file to be recalculated based on foreground removal") parser.add_argument('--other-statmap-files', nargs='+', diff --git a/bin/all_sky_search/pycbc_fit_sngls_binned b/bin/all_sky_search/pycbc_fit_sngls_binned index 7e51c4005a4..126ed493fc3 100644 --- a/bin/all_sky_search/pycbc_fit_sngls_binned +++ b/bin/all_sky_search/pycbc_fit_sngls_binned @@ -22,12 +22,12 @@ from matplotlib import pyplot as plt import copy, numpy as np +import pycbc from pycbc import events, bin_utils, results from pycbc.events import triggers from pycbc.events import trigger_fits as trstats from pycbc.events import stat as pystat from pycbc.io import HFile -import pycbc.version #### MAIN #### @@ -35,7 +35,6 @@ parser = argparse.ArgumentParser(usage="", description="Perform maximum-likelihood fits of single inspiral trigger" " distributions to various functions") pycbc.add_common_pycbc_options(parser) -parser.add_argument("--version", action=pycbc.version.Version) parser.add_argument("--trigger-file", help="Input hdf5 file containing 
single triggers. " "Required") diff --git a/bin/all_sky_search/pycbc_fit_sngls_by_template b/bin/all_sky_search/pycbc_fit_sngls_by_template index f03aede6019..83b55ff5676 100755 --- a/bin/all_sky_search/pycbc_fit_sngls_by_template +++ b/bin/all_sky_search/pycbc_fit_sngls_by_template @@ -18,11 +18,11 @@ import argparse, logging import copy, numpy as np +import pycbc from pycbc import events, init_logging from pycbc.events import triggers, trigger_fits as trstats from pycbc.events import stat as statsmod from pycbc.types.optparse import MultiDetOptionAction -import pycbc.version from pycbc.io import HFile #### DEFINITIONS AND FUNCTIONS #### @@ -61,7 +61,6 @@ parser = argparse.ArgumentParser(usage="", description="Perform maximum-likelihood fits of single inspiral trigger" " distributions to various functions") pycbc.add_common_pycbc_options(parser) -parser.add_argument("--version", action=pycbc.version.Version) parser.add_argument("--trigger-file", help="Input hdf5 file containing single triggers. " "Required") diff --git a/bin/all_sky_search/pycbc_fit_sngls_over_multiparam b/bin/all_sky_search/pycbc_fit_sngls_over_multiparam index e9ab33a94c6..4230efd71ef 100755 --- a/bin/all_sky_search/pycbc_fit_sngls_over_multiparam +++ b/bin/all_sky_search/pycbc_fit_sngls_over_multiparam @@ -13,8 +13,10 @@ # Public License for more details. -import sys, argparse, logging, pycbc.version, numpy +import sys, argparse, logging, numpy from scipy.stats import norm + +import pycbc from pycbc.events import triggers from pycbc.io import HFile from pycbc import init_logging @@ -177,7 +179,6 @@ parser = argparse.ArgumentParser(usage="", "background model.") pycbc.add_common_pycbc_options(parser) -parser.add_argument("--version", action=pycbc.version.Version) parser.add_argument("--template-fit-file", required=True, nargs='+', help="hdf5 file(s) containing fit coefficients for each " "individual template. Can smooth over multiple " diff --git a/bin/all_sky_search/pycbc_fit_sngls_over_param b/bin/all_sky_search/pycbc_fit_sngls_over_param index cd6afcc9123..eb63ec13101 100644 --- a/bin/all_sky_search/pycbc_fit_sngls_over_param +++ b/bin/all_sky_search/pycbc_fit_sngls_over_param @@ -18,10 +18,10 @@ import argparse, logging import numpy as np +import pycbc from pycbc import init_logging from pycbc.io import HFile from pycbc.events import triggers -import pycbc.version parser = argparse.ArgumentParser(usage="", description="Smooth (regress) the dependence of coefficients describing " @@ -30,7 +30,6 @@ parser = argparse.ArgumentParser(usage="", "background model.") pycbc.add_common_pycbc_options(parser) -parser.add_argument("--version", action=pycbc.version.Version) parser.add_argument("--template-fit-file", help="Input hdf5 file containing fit coefficients for each" " individual template. 
Required") diff --git a/bin/all_sky_search/pycbc_followup_file b/bin/all_sky_search/pycbc_followup_file index 22a7bd06f40..a1820ecdaf6 100644 --- a/bin/all_sky_search/pycbc_followup_file +++ b/bin/all_sky_search/pycbc_followup_file @@ -7,8 +7,6 @@ from pycbc.io import HFile parser = argparse.ArgumentParser() pycbc.add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', - version=pycbc.version.git_verbose_msg) parser.add_argument('--statmap-file', help="Statmap file containing the candidates/background to follow up") parser.add_argument('--bank-file', diff --git a/bin/all_sky_search/pycbc_foreground_censor b/bin/all_sky_search/pycbc_foreground_censor index cd3e7a3ef8b..78e20e7d6ae 100755 --- a/bin/all_sky_search/pycbc_foreground_censor +++ b/bin/all_sky_search/pycbc_foreground_censor @@ -1,7 +1,7 @@ #!/usr/bin/env python """Make segment file to blind the results from foreground related triggers """ -import os, argparse, logging, pycbc.version +import os, argparse, logging from urllib.parse import urlunparse import pycbc.events from pycbc.workflow import SegFile @@ -9,7 +9,6 @@ from pycbc.io import HFile parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', version=pycbc.version.git_verbose_msg) parser.add_argument('--foreground-triggers', help="HDF file containing the zerolag foreground triggers " "from the analysis") diff --git a/bin/all_sky_search/pycbc_make_bayestar_skymap b/bin/all_sky_search/pycbc_make_bayestar_skymap index 5d937a2818e..a8ec0b74094 100644 --- a/bin/all_sky_search/pycbc_make_bayestar_skymap +++ b/bin/all_sky_search/pycbc_make_bayestar_skymap @@ -26,7 +26,7 @@ import tempfile from ligo.lw import lsctables, utils as ligolw_utils -import pycbc.version +import pycbc from pycbc import init_logging from pycbc.waveform import bank as wavebank from pycbc.io import WaveformArray @@ -34,8 +34,6 @@ from pycbc.io.ligolw import LIGOLWContentHandler parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) -parser.add_argument('--version', action="version", - version=pycbc.version.git_verbose_msg) parser.add_argument('--bayestar-executable', help="The bayestar-localize-coinc executable to be run. 
" "If not given, will use whatever is available in " diff --git a/bin/all_sky_search/pycbc_merge_psds b/bin/all_sky_search/pycbc_merge_psds index d034e2659c3..dd8bdafde07 100755 --- a/bin/all_sky_search/pycbc_merge_psds +++ b/bin/all_sky_search/pycbc_merge_psds @@ -18,12 +18,10 @@ """ Merge hdf psd files """ import logging, argparse, numpy, pycbc.types -from pycbc.version import git_verbose_msg as version from pycbc.io import HFile parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', version=version) parser.add_argument('--psd-files', nargs='+') parser.add_argument("--output-file", required=True) diff --git a/bin/all_sky_search/pycbc_reduce_template_bank b/bin/all_sky_search/pycbc_reduce_template_bank index e9243ade8f1..9345ac95dc1 100644 --- a/bin/all_sky_search/pycbc_reduce_template_bank +++ b/bin/all_sky_search/pycbc_reduce_template_bank @@ -26,13 +26,10 @@ import logging import imp import argparse import pycbc -import pycbc.version from pycbc.io import HFile parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) -parser.add_argument("--version", action="version", - version=pycbc.version.git_verbose_msg) parser.add_argument("--input-bank", required=True, help="Input template bank HDF file.") parser.add_argument("--output-bank", required=True, diff --git a/bin/all_sky_search/pycbc_rerank_passthrough b/bin/all_sky_search/pycbc_rerank_passthrough index 60662ad3fa3..74c14ea4ac8 100644 --- a/bin/all_sky_search/pycbc_rerank_passthrough +++ b/bin/all_sky_search/pycbc_rerank_passthrough @@ -5,8 +5,6 @@ from pycbc.io import HFile parser = argparse.ArgumentParser() pycbc.add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', - version=pycbc.version.git_verbose_msg) parser.add_argument('--output-file', help="File containing the newly assigned statistic values") diff --git a/bin/all_sky_search/pycbc_sngls_findtrigs b/bin/all_sky_search/pycbc_sngls_findtrigs index 897a379b32c..066cec88d5f 100644 --- a/bin/all_sky_search/pycbc_sngls_findtrigs +++ b/bin/all_sky_search/pycbc_sngls_findtrigs @@ -2,9 +2,10 @@ import argparse, logging, h5py, numpy as np from ligo.segments import infinity from numpy.random import seed, shuffle + +import pycbc from pycbc.events import veto, coinc, stat import pycbc.conversions as conv -import pycbc.version from pycbc import io from pycbc.events import cuts, trigger_fits as trfits from pycbc.events.veto import indices_outside_times @@ -13,8 +14,6 @@ from pycbc import init_logging parser = argparse.ArgumentParser() pycbc.add_common_pycbc_options(parser) -parser.add_argument("--version", action='version', - version=pycbc.version.git_verbose_msg) # Basic file input options parser.add_argument("--trigger-files", type=str, nargs=1, help="File containing single-detector triggers") diff --git a/bin/all_sky_search/pycbc_sngls_pastro b/bin/all_sky_search/pycbc_sngls_pastro index ccaa252ca67..e26a09b0cc7 100644 --- a/bin/all_sky_search/pycbc_sngls_pastro +++ b/bin/all_sky_search/pycbc_sngls_pastro @@ -17,7 +17,6 @@ from pycbc import conversions as conv from pycbc.events import veto, stat, ranking, coinc, single as sngl from pycbc.io.ligolw import LIGOLWContentHandler from ligo.segments import segment, segmentlist -import pycbc.version import matplotlib matplotlib.use('agg') from matplotlib import pyplot as plt @@ -40,8 +39,6 @@ mchirp_power = { parser = argparse.ArgumentParser() pycbc.add_common_pycbc_options(parser) 
-parser.add_argument("--version", action='version', - version=pycbc.version.git_verbose_msg) parser.add_argument("--single-statmap-files", nargs='+', required=True, help="Single statmap files for which p_astro is " "calculated.") diff --git a/bin/all_sky_search/pycbc_sngls_statmap b/bin/all_sky_search/pycbc_sngls_statmap index bd462ca767d..074214dbdda 100755 --- a/bin/all_sky_search/pycbc_sngls_statmap +++ b/bin/all_sky_search/pycbc_sngls_statmap @@ -10,7 +10,7 @@ import logging, numpy, copy from pycbc.events import veto, coinc from pycbc.events import triggers, trigger_fits as trstats from pycbc.events import significance -import pycbc.version, pycbc.pnutils, pycbc.io +import pycbc.pnutils, pycbc.io import sys import pycbc.conversions as conv @@ -35,8 +35,6 @@ class fw(object): parser = argparse.ArgumentParser() # General required options pycbc.add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', - version=pycbc.version.git_verbose_msg) parser.add_argument('--sngls-files', nargs='+', help='List of files containing trigger and statistic ' 'information.') diff --git a/bin/all_sky_search/pycbc_sngls_statmap_inj b/bin/all_sky_search/pycbc_sngls_statmap_inj index 4a9517c6171..177cd7e2fd1 100644 --- a/bin/all_sky_search/pycbc_sngls_statmap_inj +++ b/bin/all_sky_search/pycbc_sngls_statmap_inj @@ -10,7 +10,7 @@ import lal, logging, numpy from pycbc.events import veto, coinc from pycbc.events import triggers, trigger_fits as trstats from pycbc.events import significance -import pycbc.version, pycbc.pnutils, pycbc.io +import pycbc.pnutils, pycbc.io import sys import pycbc.conversions as conv @@ -35,8 +35,6 @@ class fw(object): parser = argparse.ArgumentParser() # General required options pycbc.add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', - version=pycbc.version.git_verbose_msg) parser.add_argument('--sngls-files', nargs='+', help='List of files containign trigger and statistic ' 'information.') diff --git a/bin/all_sky_search/pycbc_strip_injections b/bin/all_sky_search/pycbc_strip_injections index 0ee3840ad97..027ad6b5ac7 100644 --- a/bin/all_sky_search/pycbc_strip_injections +++ b/bin/all_sky_search/pycbc_strip_injections @@ -1,5 +1,5 @@ #!/bin/env python -import numpy, argparse, pycbc.version, pycbc.pnutils, logging +import numpy, argparse, pycbc.pnutils, logging from pycbc.events import veto from pycbc.io.ligolw import LIGOLWContentHandler from ligo.lw import ligolw, table, utils as ligolw_utils @@ -13,7 +13,6 @@ def remove(l, i): parser = argparse.ArgumentParser() pycbc.add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', version=pycbc.version.git_verbose_msg) parser.add_argument('--injection-file') parser.add_argument('--veto-file', help="File containing segments used to veto injections") diff --git a/bin/bank/pycbc_aligned_bank_cat b/bin/bank/pycbc_aligned_bank_cat index e5a2b1441bb..ae6c4be9110 100644 --- a/bin/bank/pycbc_aligned_bank_cat +++ b/bin/bank/pycbc_aligned_bank_cat @@ -25,7 +25,6 @@ import logging import glob import argparse import numpy -import pycbc.version from ligo.lw import utils from pycbc import tmpltbank # Old ligolw output functions no longer imported at package level @@ -49,7 +48,6 @@ parser = argparse.ArgumentParser(description=__doc__, formatter_class=tmpltbank.IndentedHelpFormatterWithNL) pycbc.add_common_pycbc_options(parser) -parser.add_argument("--version", action="version", version=__version__) parser.add_argument("-i", "--input-glob", help="file glob the list 
of paramters") parser.add_argument("-I", "--input-files", nargs='+', diff --git a/bin/bank/pycbc_aligned_stoch_bank b/bin/bank/pycbc_aligned_stoch_bank index e615fd75415..d6d382d28dd 100644 --- a/bin/bank/pycbc_aligned_stoch_bank +++ b/bin/bank/pycbc_aligned_stoch_bank @@ -25,7 +25,6 @@ import numpy import logging import pycbc -import pycbc.version from pycbc import tmpltbank # Old ligolw output functions no longer imported at package level import pycbc.tmpltbank.bank_output_utils as bank_output @@ -35,17 +34,12 @@ import pycbc.strain from pycbc.pnutils import named_frequency_cutoffs -__author__ = "Ian Harry " -__version__ = pycbc.version.git_verbose_msg -__program__ = "pycbc_aligned_stoch_bank" - # Read command line option _desc = __doc__[1:] parser = argparse.ArgumentParser(description=_desc, formatter_class=tmpltbank.IndentedHelpFormatterWithNL) # Begin with code specific options -parser.add_argument("--version", action="version", version=__version__) pycbc.add_common_pycbc_options(parser) parser.add_argument("-V", "--vary-fupper", action="store_true", default=False, help="Use a variable upper frequency cutoff in laying " diff --git a/bin/bank/pycbc_bank_verification b/bin/bank/pycbc_bank_verification index f4707764533..97d65b07be6 100644 --- a/bin/bank/pycbc_bank_verification +++ b/bin/bank/pycbc_bank_verification @@ -49,7 +49,6 @@ parser = argparse.ArgumentParser(description=__doc__, formatter_class=tmpltbank.IndentedHelpFormatterWithNL) # Begin with code specific options -parser.add_argument("--version", action="version", version=__version__) pycbc.add_common_pycbc_options(parser) parser.add_argument("--histogram-output-file", action="store", default=None, help="Output a histogram of fitting factors to the " diff --git a/bin/bank/pycbc_coinc_bank2hdf b/bin/bank/pycbc_coinc_bank2hdf index db35f48d4be..b093b00be3b 100644 --- a/bin/bank/pycbc_coinc_bank2hdf +++ b/bin/bank/pycbc_coinc_bank2hdf @@ -55,8 +55,6 @@ def parse_parameters(parameters): return outnames, columns parser = argparse.ArgumentParser() -parser.add_argument('--version', action='version', - version=pycbc.version.git_verbose_msg) pycbc.add_common_pycbc_options(parser) parser.add_argument('--bank-file', required=True, help="The bank file to load. Must end in '.xml[.gz]' " diff --git a/bin/bank/pycbc_geom_aligned_2dstack b/bin/bank/pycbc_geom_aligned_2dstack index e12a46c44f8..547450844b9 100644 --- a/bin/bank/pycbc_geom_aligned_2dstack +++ b/bin/bank/pycbc_geom_aligned_2dstack @@ -50,7 +50,6 @@ parser = argparse.ArgumentParser(usage, description=_desc, formatter_class=pycbc.tmpltbank.IndentedHelpFormatterWithNL) pycbc.add_common_pycbc_options(parser) # Code specific options -parser.add_argument('--version', action='version', version=__version__) parser.add_argument("--pn-order", action="store", type=str,\ default=None,\ help="Determines the PN order to use. 
Note that if you "+\ diff --git a/bin/bank/pycbc_geom_aligned_bank b/bin/bank/pycbc_geom_aligned_bank index 2d1f6f8c5cd..aed29a79467 100644 --- a/bin/bank/pycbc_geom_aligned_bank +++ b/bin/bank/pycbc_geom_aligned_bank @@ -178,7 +178,6 @@ parser = argparse.ArgumentParser(description=_desc, # Begin with code specific options pycbc.add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', version=__version__) parser.add_argument("-s", "--stack-distance", action="store", type=float,\ default=0.2, help="Minimum metric spacing before we "+\ "stack.") diff --git a/bin/bank/pycbc_geom_nonspinbank b/bin/bank/pycbc_geom_nonspinbank index dcf5aa71890..b778f7fafb0 100644 --- a/bin/bank/pycbc_geom_nonspinbank +++ b/bin/bank/pycbc_geom_nonspinbank @@ -48,7 +48,6 @@ parser = argparse.ArgumentParser( formatter_class=tmpltbank.IndentedHelpFormatterWithNL) # Begin with code specific options -parser.add_argument('--version', action='version', version=__version__) pycbc.add_common_pycbc_options(parser) parser.add_argument("--random-seed", action="store", type=int, default=None, diff --git a/bin/bank/pycbc_tmpltbank_to_chi_params b/bin/bank/pycbc_tmpltbank_to_chi_params index 8f2934d9534..b0314589ccb 100644 --- a/bin/bank/pycbc_tmpltbank_to_chi_params +++ b/bin/bank/pycbc_tmpltbank_to_chi_params @@ -45,7 +45,6 @@ parser = argparse.ArgumentParser(description=__doc__, formatter_class=tmpltbank.IndentedHelpFormatterWithNL) # Begin with code specific options -parser.add_argument("--version", action="version", version=__version__) pycbc.add_common_pycbc_options(parser) parser.add_argument("--input-bank", action="store", required=True, help="The template bank to use an input.") diff --git a/bin/inference/pycbc_inference b/bin/inference/pycbc_inference index d0cfd7b953a..701663a3550 100644 --- a/bin/inference/pycbc_inference +++ b/bin/inference/pycbc_inference @@ -31,7 +31,6 @@ from pycbc import (distributions, transforms, fft, opt, scheme, pool) from pycbc.waveform import generator -from pycbc import __version__ from pycbc import inference from pycbc.inference import (models, burn_in, option_utils) from pycbc.inference.io import loadfile @@ -41,8 +40,6 @@ from pycbc.workflow import configuration parser = argparse.ArgumentParser(usage=__file__ + " [--options]", description=__doc__) pycbc.add_common_pycbc_options(parser) -parser.add_argument("--version", action="version", version=__version__, - help="Prints version information.") # output options parser.add_argument("--output-file", type=str, required=True, help="Output file path.") diff --git a/bin/inference/pycbc_inference_plot_acceptance_rate b/bin/inference/pycbc_inference_plot_acceptance_rate index fe0b81555b4..2fb563c27ae 100644 --- a/bin/inference/pycbc_inference_plot_acceptance_rate +++ b/bin/inference/pycbc_inference_plot_acceptance_rate @@ -23,9 +23,7 @@ use('agg') import matplotlib.pyplot as plt import numpy import pycbc -import pycbc.version from pycbc import results -from pycbc import __version__ from pycbc.inference import io import sys @@ -37,8 +35,6 @@ parser = argparse.ArgumentParser( pycbc.add_common_pycbc_options(parser) parser.add_argument("--input-file", type=str, required=True, help="Path to input HDF file.") -parser.add_argument('--version', action='version', version=__version__, - help='show version number and exit') # output plot options parser.add_argument("--output-file", type=str, required=True, help="Path to output plot.") diff --git a/bin/inference/pycbc_inference_plot_acf 
b/bin/inference/pycbc_inference_plot_acf index 41ab706395a..3d6949e609b 100644 --- a/bin/inference/pycbc_inference_plot_acf +++ b/bin/inference/pycbc_inference_plot_acf @@ -29,7 +29,6 @@ import pycbc from pycbc import results from pycbc.inference import io -from pycbc import __version__ from pycbc.inference import option_utils from pycbc.inference.sampler import samplers @@ -39,8 +38,6 @@ parser = io.ResultsArgumentParser(skip_args='thin-interval', description="Plots autocorrelation function " "from inference samples.") pycbc.add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', version=__version__, - help='show version number and exit') # output plot options parser.add_argument("--output-file", type=str, required=True, help="Path to output plot.") diff --git a/bin/inference/pycbc_inference_plot_acl b/bin/inference/pycbc_inference_plot_acl index 860a9810713..b46cb18c930 100644 --- a/bin/inference/pycbc_inference_plot_acl +++ b/bin/inference/pycbc_inference_plot_acl @@ -30,7 +30,6 @@ import pycbc from pycbc import results from pycbc.filter import autocorrelation -from pycbc import __version__ from pycbc.inference import io # command line usage @@ -40,8 +39,6 @@ parser = io.ResultsArgumentParser(skip_args=['thin-interval', 'temps'], "length per walker from an MCMC " "sampler.") pycbc.add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', version=__version__, - help='show version number and exit') # output plot options parser.add_argument("--output-file", type=str, required=True, help="Path to output plot.") diff --git a/bin/inference/pycbc_inference_plot_dynesty_run b/bin/inference/pycbc_inference_plot_dynesty_run index 6e2c8d03afb..3e1aa6795b2 100644 --- a/bin/inference/pycbc_inference_plot_dynesty_run +++ b/bin/inference/pycbc_inference_plot_dynesty_run @@ -26,7 +26,6 @@ from dynesty import plotting as dyplot from pycbc.inference import io import pycbc -from pycbc import __version__ from pycbc import results import sys @@ -37,8 +36,6 @@ parser = argparse.ArgumentParser( pycbc.add_common_pycbc_options(parser) parser.add_argument("--input-file", type=str, required=True, help="Path to input HDF file.") -parser.add_argument('--version', action='version', version=__version__, - help='show version number and exit') # output plot options parser.add_argument("--output-file", type=str, required=True, help="Path to output plot.") diff --git a/bin/inference/pycbc_inference_plot_dynesty_traceplot b/bin/inference/pycbc_inference_plot_dynesty_traceplot index c7e352f91b7..2d6c9a968b6 100644 --- a/bin/inference/pycbc_inference_plot_dynesty_traceplot +++ b/bin/inference/pycbc_inference_plot_dynesty_traceplot @@ -26,7 +26,6 @@ from dynesty import plotting as dyplot from pycbc.inference import io import pycbc -from pycbc import __version__ from pycbc import results import sys @@ -39,8 +38,6 @@ parser = argparse.ArgumentParser( pycbc.add_common_pycbc_options(parser) parser.add_argument("--input-file", type=str, required=True, help="Path to input HDF file.") -parser.add_argument('--version', action='version', version=__version__, - help='show version number and exit') # output plot options parser.add_argument("--output-file", type=str, required=True, help="Path to output plot.") diff --git a/bin/inference/pycbc_inference_plot_gelman_rubin b/bin/inference/pycbc_inference_plot_gelman_rubin index 9f3be01d50d..2af9fd1fc76 100644 --- a/bin/inference/pycbc_inference_plot_gelman_rubin +++ b/bin/inference/pycbc_inference_plot_gelman_rubin @@ -25,7 +25,7 @@ 
import matplotlib.pyplot as plt import sys from pycbc import ( - __version__, results, init_logging, add_common_pycbc_options + results, init_logging, add_common_pycbc_options ) from pycbc.inference import (gelman_rubin, io, option_utils) @@ -33,8 +33,6 @@ from pycbc.inference import (gelman_rubin, io, option_utils) parser = io.ResultsArgumentParser(skip_args=['walkers']) add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', version=__version__, - help='show version number and exit') # output options parser.add_argument("--output-file", type=str, required=True, diff --git a/bin/inference/pycbc_inference_plot_geweke b/bin/inference/pycbc_inference_plot_geweke index 755b2f3268a..650fe4254bc 100644 --- a/bin/inference/pycbc_inference_plot_geweke +++ b/bin/inference/pycbc_inference_plot_geweke @@ -26,7 +26,6 @@ import pycbc from pycbc import results import sys -from pycbc import __version__ from pycbc.inference import (io, geweke, option_utils) # add options to command line @@ -36,9 +35,6 @@ pycbc.add_common_pycbc_options(parser) # program-specific -parser.add_argument('--version', action='version', version=__version__, - help='show version number and exit') - # output options parser.add_argument("--output-file", type=str, required=True, help="Path to output plot.") diff --git a/bin/inference/pycbc_inference_plot_inj_recovery b/bin/inference/pycbc_inference_plot_inj_recovery index fc51655bdd8..7c9b0f048c9 100644 --- a/bin/inference/pycbc_inference_plot_inj_recovery +++ b/bin/inference/pycbc_inference_plot_inj_recovery @@ -11,10 +11,8 @@ import matplotlib.colorbar as cbar import matplotlib.pyplot as plt import numpy import pycbc -import pycbc.version from matplotlib import cm from pycbc import inject -from pycbc import __version__ from pycbc.inference import (option_utils, io) from pycbc.results import save_fig_with_metadata @@ -22,8 +20,6 @@ from pycbc.results import save_fig_with_metadata parser = io.ResultsArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) -parser.add_argument("--version", action="version", version=__version__, - help="Prints version information.") parser.add_argument("--output-file", required=True, type=str, help="Path to save output plot.") parser.add_argument("--percentiles", nargs=2, type=float, default=[5, 95], diff --git a/bin/inference/pycbc_inference_plot_movie b/bin/inference/pycbc_inference_plot_movie index c25b0d6ed75..7db38b92016 100644 --- a/bin/inference/pycbc_inference_plot_movie +++ b/bin/inference/pycbc_inference_plot_movie @@ -53,7 +53,6 @@ from matplotlib import pyplot import pycbc.results from pycbc import conversions -from pycbc import __version__ from pycbc.inference import (option_utils, io) from pycbc.results.scatter_histograms import (create_multidim_plot, @@ -111,8 +110,6 @@ def integer_logspace(start, end, num): skip_args = ['thin-start', 'thin-interval', 'thin-end', 'iteration'] parser = io.ResultsArgumentParser(description=__doc__, skip_args=skip_args) pycbc.add_common_pycbc_options(parser) -parser.add_argument("--version", action="version", version=__version__, - help="show version number and exit") # make frame number and frame step mutually exclusive group = parser.add_mutually_exclusive_group(required=True) group.add_argument("--frame-number", type=int, diff --git a/bin/inference/pycbc_inference_plot_posterior b/bin/inference/pycbc_inference_plot_posterior index 45357266ad3..66890ced876 100644 --- a/bin/inference/pycbc_inference_plot_posterior +++ 
b/bin/inference/pycbc_inference_plot_posterior @@ -36,11 +36,9 @@ import matplotlib from matplotlib import (patches, use) import pycbc -import pycbc.version from pycbc.results.plot import (add_style_opt_to_parser, set_style_from_cli) from pycbc.results import metadata from pycbc.io import FieldArray -from pycbc import __version__ from pycbc import conversions from pycbc.workflow import WorkflowConfigParser from pycbc.inference import (option_utils, io) @@ -54,8 +52,6 @@ use('agg') parser = io.ResultsArgumentParser() pycbc.add_common_pycbc_options(parser) # program-specific -parser.add_argument("--version", action="version", version=__version__, - help="Prints version information.") parser.add_argument("--output-file", type=str, required=True, help="Output plot path.") parser.add_argument("--plot-prior", nargs="+", type=str, diff --git a/bin/inference/pycbc_inference_plot_pp b/bin/inference/pycbc_inference_plot_pp index 5e6490499c8..d23268b6f49 100644 --- a/bin/inference/pycbc_inference_plot_pp +++ b/bin/inference/pycbc_inference_plot_pp @@ -38,13 +38,9 @@ import pycbc.results.plot from pycbc.results import save_fig_with_metadata from pycbc.inference import (option_utils, io) -from pycbc import __version__ - # parse command line parser = io.ResultsArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', version=__version__, - help='show version number and exit') parser.add_argument("--output-file", required=True, type=str, help="Path to save output plot.") parser.add_argument("--injection-hdf-group", default="injections", diff --git a/bin/inference/pycbc_inference_plot_prior b/bin/inference/pycbc_inference_plot_prior index 436d0c6f0e6..c656dac3955 100644 --- a/bin/inference/pycbc_inference_plot_prior +++ b/bin/inference/pycbc_inference_plot_prior @@ -28,7 +28,6 @@ use('agg') from matplotlib import pyplot as plt import pycbc -from pycbc import __version__ from pycbc import (distributions, results, waveform) from pycbc.inference.option_utils import ParseParametersArg from pycbc.distributions.utils import prior_from_config @@ -76,8 +75,6 @@ parser.add_argument("--nsamples", type=int, default=10000, "plotting. Default is 10000.") parser.add_argument("--output-file", type=str, required=True, help="Path to output plot.") -parser.add_argument("--version", action="version", version=__version__, - help="show version number and exit") # parse the command line opts = parser.parse_args() diff --git a/bin/inference/pycbc_inference_plot_samples b/bin/inference/pycbc_inference_plot_samples index 651c79e5178..f79996dd82d 100644 --- a/bin/inference/pycbc_inference_plot_samples +++ b/bin/inference/pycbc_inference_plot_samples @@ -27,7 +27,6 @@ from matplotlib import rc import numpy import pycbc from pycbc import results -from pycbc import __version__ from pycbc.inference import (option_utils, io) import sys @@ -35,8 +34,6 @@ import sys parser = argparse.parser = io.ResultsArgumentParser( skip_args=['chains', 'iteration']) pycbc.add_common_pycbc_options(parser) -parser.add_argument("--version", action="version", version=__version__, - help="show version number and exit") parser.add_argument("--chains", nargs='+', default=None, help="Chain/walker indices to plot. Options are 'all' or " "one or more chain indices. 
Default is to plot the " diff --git a/bin/live/pycbc_live_supervise_single_significance_fits b/bin/live/pycbc_live_supervise_single_significance_fits index 864a458037f..0c6561e9cea 100755 --- a/bin/live/pycbc_live_supervise_single_significance_fits +++ b/bin/live/pycbc_live_supervise_single_significance_fits @@ -465,7 +465,11 @@ def wait_for_utc_time(target_str): parser = argparse.ArgumentParser(description=__doc__) -parser.add_argument('--config-file', required=True) +pycbc.add_common_pycbc_options(parser) +parser.add_argument( + '--config-file', + required=True +) parser.add_argument( '--date', help='Date to analyse, if not given, will analyse yesterday (UTC). ' @@ -488,7 +492,7 @@ parser.add_argument( ) args = parser.parse_args() -pycbc.init_logging(True) +pycbc.init_logging(args.verbose, default_level=1) if args.run_daily_at is not None and args.date is not None: parser.error('Cannot take --run-daily-at and --date at the same time') diff --git a/bin/minifollowups/pycbc_foreground_minifollowup b/bin/minifollowups/pycbc_foreground_minifollowup index 68d5c392dcc..b1ce86d1a7a 100644 --- a/bin/minifollowups/pycbc_foreground_minifollowup +++ b/bin/minifollowups/pycbc_foreground_minifollowup @@ -30,11 +30,9 @@ from pycbc.events import select_segments_by_definer, coinc from pycbc.io import get_all_subkeys, HFile import pycbc.workflow.minifollowups as mini from pycbc.workflow.core import resolve_url_to_file -import pycbc.version parser = argparse.ArgumentParser(description=__doc__[1:]) add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', version=pycbc.version.git_verbose_msg) parser.add_argument('--bank-file', help="HDF format template bank file") parser.add_argument('--statmap-file', diff --git a/bin/minifollowups/pycbc_injection_minifollowup b/bin/minifollowups/pycbc_injection_minifollowup index e2a1557484b..871b81f51ab 100644 --- a/bin/minifollowups/pycbc_injection_minifollowup +++ b/bin/minifollowups/pycbc_injection_minifollowup @@ -26,7 +26,6 @@ import numpy from pycbc import init_logging, add_common_pycbc_options import pycbc.workflow as wf import pycbc.workflow.minifollowups as mini -import pycbc.version from pycbc.types import MultiDetOptionAction from pycbc.events import select_segments_by_definer, coinc from pycbc.results import layout @@ -122,7 +121,6 @@ def sort_injections(args, inj_group, missed): parser = argparse.ArgumentParser(description=__doc__) add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', version=pycbc.version.git_verbose_msg) parser.add_argument('--bank-file', help="HDF format template bank file") parser.add_argument('--injection-file', diff --git a/bin/minifollowups/pycbc_page_coincinfo b/bin/minifollowups/pycbc_page_coincinfo index cdfbbecf784..10f08bc2a01 100644 --- a/bin/minifollowups/pycbc_page_coincinfo +++ b/bin/minifollowups/pycbc_page_coincinfo @@ -27,7 +27,6 @@ import numpy import lal from pycbc import add_common_pycbc_options -import pycbc.version import pycbc.results import pycbc.pnutils from pycbc.io.hdf import HFile @@ -36,8 +35,6 @@ from pycbc.results import followup parser = argparse.ArgumentParser() add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', - version=pycbc.version.git_verbose_msg) parser.add_argument('--single-trigger-files', nargs='+', help="HDF format single detector trigger files for the full data run") parser.add_argument('--bank-file', diff --git a/bin/minifollowups/pycbc_page_injinfo b/bin/minifollowups/pycbc_page_injinfo index 
f7854e477cb..a09292ecc6e 100644 --- a/bin/minifollowups/pycbc_page_injinfo +++ b/bin/minifollowups/pycbc_page_injinfo @@ -19,7 +19,6 @@ import argparse import sys import numpy -import pycbc.version import pycbc.results import pycbc.pnutils from pycbc import init_logging, add_common_pycbc_options @@ -28,8 +27,6 @@ from pycbc.io.hdf import HFile parser = argparse.ArgumentParser() add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', - version=pycbc.version.git_verbose_msg) parser.add_argument('--output-file') parser.add_argument('--injection-file', required=True, help="The HDF format injection file. Required") diff --git a/bin/minifollowups/pycbc_page_snglinfo b/bin/minifollowups/pycbc_page_snglinfo index ff5eed58428..99f3a1629d2 100644 --- a/bin/minifollowups/pycbc_page_snglinfo +++ b/bin/minifollowups/pycbc_page_snglinfo @@ -26,7 +26,7 @@ matplotlib.use('Agg') import lal -import pycbc.version, pycbc.events, pycbc.results, pycbc.pnutils +import pycbc.events, pycbc.results, pycbc.pnutils from pycbc.results import followup from pycbc.events import stat as pystat from pycbc.io import hdf @@ -35,8 +35,6 @@ from pycbc import init_logging, add_common_pycbc_options parser = argparse.ArgumentParser() add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', - version=pycbc.version.git_verbose_msg) parser.add_argument('--single-trigger-file', required=True, help="HDF format single detector trigger files for the full " "data run") diff --git a/bin/minifollowups/pycbc_plot_trigger_timeseries b/bin/minifollowups/pycbc_plot_trigger_timeseries index b0506797c65..e70bad54865 100644 --- a/bin/minifollowups/pycbc_plot_trigger_timeseries +++ b/bin/minifollowups/pycbc_plot_trigger_timeseries @@ -26,7 +26,6 @@ import pylab import numpy from pycbc import init_logging, add_common_pycbc_options -import pycbc.version import pycbc.results from pycbc.types import MultiDetOptionAction from pycbc.events import ranking @@ -34,8 +33,6 @@ from pycbc.io import HFile, SingleDetTriggers parser = argparse.ArgumentParser() add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', - version=pycbc.version.git_verbose_msg) parser.add_argument('--single-trigger-files', nargs='+', action=MultiDetOptionAction, metavar="IFO:FILE", help="The HDF format single detector merged trigger files, in " diff --git a/bin/minifollowups/pycbc_single_template_plot b/bin/minifollowups/pycbc_single_template_plot index 0673fc1b2a7..f983368ae6c 100644 --- a/bin/minifollowups/pycbc_single_template_plot +++ b/bin/minifollowups/pycbc_single_template_plot @@ -29,8 +29,6 @@ from pycbc.io.hdf import HFile parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', - version=pycbc.version.git_verbose_msg) parser.add_argument('--single-template-file', required=True, help="HDF file containing the SNR and CHISQ timeseries. 
" " The output of pycbc_single_template") diff --git a/bin/minifollowups/pycbc_sngl_minifollowup b/bin/minifollowups/pycbc_sngl_minifollowup index 206066288ea..a73a7f1cd68 100644 --- a/bin/minifollowups/pycbc_sngl_minifollowup +++ b/bin/minifollowups/pycbc_sngl_minifollowup @@ -30,7 +30,6 @@ from pycbc.results import layout from pycbc.types.optparse import MultiDetOptionAction from pycbc.events import select_segments_by_definer import pycbc.workflow.minifollowups as mini -import pycbc.version import pycbc.workflow as wf import pycbc.events from pycbc.workflow.core import resolve_url_to_file @@ -39,7 +38,6 @@ from pycbc.io import hdf parser = argparse.ArgumentParser(description=__doc__[1:]) add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', version=pycbc.version.git_verbose_msg) parser.add_argument('--bank-file', help="HDF format template bank file") parser.add_argument('--single-detector-file', diff --git a/bin/minifollowups/pycbc_upload_prep_minifollowup b/bin/minifollowups/pycbc_upload_prep_minifollowup index 08fc60839bf..737cd6e5130 100644 --- a/bin/minifollowups/pycbc_upload_prep_minifollowup +++ b/bin/minifollowups/pycbc_upload_prep_minifollowup @@ -33,11 +33,9 @@ from pycbc.events import select_segments_by_definer, coinc from pycbc.io import get_all_subkeys, HFile import pycbc.workflow.minifollowups as mini from pycbc.workflow.core import resolve_url_to_file, resolve_td_option -import pycbc.version parser = argparse.ArgumentParser(description=__doc__[1:]) add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', version=pycbc.version.git_verbose_msg) parser.add_argument('--bank-file', help="HDF format template bank file") parser.add_argument('--statmap-file', diff --git a/bin/plotting/pycbc_banksim_plot_eff_fitting_factor b/bin/plotting/pycbc_banksim_plot_eff_fitting_factor index 41230b463fc..4e1d5abc5c2 100644 --- a/bin/plotting/pycbc_banksim_plot_eff_fitting_factor +++ b/bin/plotting/pycbc_banksim_plot_eff_fitting_factor @@ -40,7 +40,6 @@ __program__ = "pycbc_banksim_plot_eff_fitting_factor" parser = argparse.ArgumentParser(usage='', description=__doc__) add_common_pycbc_options(parser) -parser.add_argument("--version", action="version", version=__version__) parser.add_argument('--input-files', nargs='+', default=None, required=True, help="List of input files.") parser.add_argument('--output-file', default=None, required=True, diff --git a/bin/plotting/pycbc_banksim_plot_fitting_factors b/bin/plotting/pycbc_banksim_plot_fitting_factors index 32d748c15e4..49d78839d2f 100644 --- a/bin/plotting/pycbc_banksim_plot_fitting_factors +++ b/bin/plotting/pycbc_banksim_plot_fitting_factors @@ -38,7 +38,6 @@ __program__ = "pycbc_banksim_plot_fitting_factors" parser = argparse.ArgumentParser(usage='', description="Plot fitting factor distribution.") pycbc.add_common_pycbc_options(parser) -parser.add_argument("--version", action="version", version=__version__) parser.add_argument('--input-file', default=None, required=True, help="List of input files.") parser.add_argument('--output-file', default=None, required=True, diff --git a/bin/plotting/pycbc_banksim_table_point_injs b/bin/plotting/pycbc_banksim_table_point_injs index 8aef36e645d..d91957aac7a 100644 --- a/bin/plotting/pycbc_banksim_table_point_injs +++ b/bin/plotting/pycbc_banksim_table_point_injs @@ -34,7 +34,6 @@ __program__ = "pycbc_banksim_table_point_injs" parser = argparse.ArgumentParser(usage='', description="Plot effective fitting factor vs mass1 and mass2.") 
pycbc.add_common_pycbc_options(parser) -parser.add_argument("--version", action="version", version=__version__) parser.add_argument('--input-files', nargs='+', default=None, required=True, help="List of input files.") parser.add_argument('--directory-links', nargs='+', default=None, diff --git a/bin/plotting/pycbc_create_html_snippet b/bin/plotting/pycbc_create_html_snippet index f7b86dfb9ed..47b046eb90a 100644 --- a/bin/plotting/pycbc_create_html_snippet +++ b/bin/plotting/pycbc_create_html_snippet @@ -26,7 +26,6 @@ import pycbc.results # parse command line parser = argparse.ArgumentParser() add_common_pycbc_options(parser) -parser.add_argument("--version", action="version", version=pycbc.version.git_verbose_msg) parser.add_argument('--output-file', type=str, help='Path of the output HTML file.') parser.add_argument('--html-text', type=str, diff --git a/bin/plotting/pycbc_ifar_catalog b/bin/plotting/pycbc_ifar_catalog index a6e055b9eca..9ca1e597f22 100644 --- a/bin/plotting/pycbc_ifar_catalog +++ b/bin/plotting/pycbc_ifar_catalog @@ -25,7 +25,6 @@ import pylab from scipy.stats import norm, poisson import pycbc.results -import pycbc.version from pycbc import conversions from pycbc import init_logging, add_common_pycbc_options from pycbc.io.hdf import HFile @@ -34,8 +33,6 @@ parser = argparse.ArgumentParser(usage='pycbc_ifar_catalog [--options]', description='Plots cumulative IFAR vs count for' ' foreground triggers') add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', - version=pycbc.version.git_verbose_msg) parser.add_argument('--trigger-files', nargs='+', help='Path to coincident trigger HDF file(s)') parser.add_argument('--output-file', required=True, diff --git a/bin/plotting/pycbc_page_coinc_snrchi b/bin/plotting/pycbc_page_coinc_snrchi index 1f87e354612..6df693c3539 100644 --- a/bin/plotting/pycbc_page_coinc_snrchi +++ b/bin/plotting/pycbc_page_coinc_snrchi @@ -10,7 +10,6 @@ from pycbc.io import ( ) from pycbc import conversions, init_logging, add_common_pycbc_options from pycbc.detector import Detector -import pycbc.version def snr_from_chisq(chisq, newsnr, q=6.): snr = numpy.zeros(len(chisq)) + float(newsnr) @@ -20,8 +19,6 @@ def snr_from_chisq(chisq, newsnr, q=6.): parser = argparse.ArgumentParser() add_common_pycbc_options(parser) -parser.add_argument("--version", action="version", - version=pycbc.version.git_verbose_msg) parser.add_argument('--found-injection-file', required=True, help='HDF format found injection file. 
Required') parser.add_argument('--single-injection-file', required=True, diff --git a/bin/plotting/pycbc_page_dq_table b/bin/plotting/pycbc_page_dq_table index db9fba5200d..e500ab9e038 100644 --- a/bin/plotting/pycbc_page_dq_table +++ b/bin/plotting/pycbc_page_dq_table @@ -8,11 +8,9 @@ import numpy as np import pycbc import pycbc.results -from pycbc.version import git_verbose_msg as version parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) -parser.add_argument("--version", action="version", version=version) parser.add_argument('--ifo', required=True) parser.add_argument('--dq-file', required=True) parser.add_argument('--output-file') diff --git a/bin/plotting/pycbc_page_foreground b/bin/plotting/pycbc_page_foreground index dce5799ae01..0144db45e8f 100755 --- a/bin/plotting/pycbc_page_foreground +++ b/bin/plotting/pycbc_page_foreground @@ -11,15 +11,12 @@ import numpy import pycbc import pycbc.results -import pycbc.version from pycbc.io import hdf from pycbc.pnutils import mass1_mass2_to_mchirp_eta parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) -parser.add_argument("--version", action="version", - version=pycbc.version.git_verbose_msg) parser.add_argument('--trigger-file', required=True) parser.add_argument('--bank-file', required=True) parser.add_argument('--single-detector-triggers', nargs='+') diff --git a/bin/plotting/pycbc_page_foundmissed b/bin/plotting/pycbc_page_foundmissed index 5e3e58e464a..f45c959553e 100644 --- a/bin/plotting/pycbc_page_foundmissed +++ b/bin/plotting/pycbc_page_foundmissed @@ -10,7 +10,7 @@ import matplotlib matplotlib.use('Agg') import matplotlib.pyplot as plot -import pycbc.results.followup, pycbc.pnutils, pycbc.results, pycbc.version +import pycbc.results.followup, pycbc.pnutils, pycbc.results import pycbc.pnutils from pycbc import init_logging, add_common_pycbc_options from pycbc.detector import Detector @@ -64,8 +64,6 @@ parser.add_argument('--far-type', choices=('inclusive', 'exclusive'), parser.add_argument('--missed-on-top', action='store_true', help="Plot missed injections on top of found ones and " "high FAR on top of low FAR") -parser.add_argument('--version', action='version', - version=pycbc.version.git_verbose_msg) args = parser.parse_args() init_logging(args.verbose) diff --git a/bin/plotting/pycbc_page_ifar b/bin/plotting/pycbc_page_ifar index fa092159e41..851447507f4 100644 --- a/bin/plotting/pycbc_page_ifar +++ b/bin/plotting/pycbc_page_ifar @@ -27,7 +27,6 @@ from ligo import segments from pycbc import init_logging, add_common_pycbc_options import pycbc.results -import pycbc.version from pycbc.events import veto from pycbc import conversions as conv from pycbc.io import HFile @@ -52,8 +51,6 @@ parser = argparse.ArgumentParser(usage='pycbc_page_ifar [--options]', 'coincident foreground triggers and a subset of' 'the coincident time slide triggers.') add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', - version=pycbc.version.git_verbose_msg) parser.add_argument('--trigger-file', type=str, required=True, help='Path to coincident trigger HDF file.') parser.add_argument('--output-file', type=str, required=True, diff --git a/bin/plotting/pycbc_page_injtable b/bin/plotting/pycbc_page_injtable index ecfd4b98733..5cafa75009c 100644 --- a/bin/plotting/pycbc_page_injtable +++ b/bin/plotting/pycbc_page_injtable @@ -10,7 +10,6 @@ import pycbc.results import pycbc.detector import pycbc.pnutils import pycbc.events -import pycbc.version 
from pycbc.io.hdf import HFile from pycbc import add_common_pycbc_options, init_logging from pycbc.types import MultiDetOptionAction @@ -18,8 +17,6 @@ from pycbc.types import MultiDetOptionAction parser = argparse.ArgumentParser(description=__doc__) add_common_pycbc_options(parser) -parser.add_argument("--version", action="version", - version=pycbc.version.git_verbose_msg) parser.add_argument('--injection-file', help='HDF File containing the matched injections') parser.add_argument('--single-trigger-files', nargs='*', diff --git a/bin/plotting/pycbc_page_recovery b/bin/plotting/pycbc_page_recovery index a482a946fbf..2bc5ed05ace 100644 --- a/bin/plotting/pycbc_page_recovery +++ b/bin/plotting/pycbc_page_recovery @@ -5,15 +5,13 @@ import numpy, logging, argparse, sys, matplotlib matplotlib.use("Agg") import matplotlib.pyplot as plot -import pycbc.version, pycbc.detector +import pycbc.detector from pycbc import pnutils, results from pycbc.events import triggers from pycbc.io.hdf import HFile parser = argparse.ArgumentParser() pycbc.add_common_pycbc_options(parser) -parser.add_argument("--version", action="version", - version=pycbc.version.git_verbose_msg) parser.add_argument("--injection-file", required=True, help="hdf injection file containing found injections. " "Required") diff --git a/bin/plotting/pycbc_page_segments b/bin/plotting/pycbc_page_segments index 447812b9713..9a3f92ad8a2 100644 --- a/bin/plotting/pycbc_page_segments +++ b/bin/plotting/pycbc_page_segments @@ -11,15 +11,12 @@ import mpld3 import mpld3.plugins from matplotlib.patches import Rectangle -import pycbc.version import pycbc.events from pycbc.results.mpld3_utils import MPLSlide, Tooltip parser = argparse.ArgumentParser() pycbc.add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', - version=pycbc.version.git_verbose_msg) parser.add_argument('--segment-files', nargs='+', help="List of segment files to plot") parser.add_argument('--output-file', help="output html file") diff --git a/bin/plotting/pycbc_page_segplot b/bin/plotting/pycbc_page_segplot index 17172be0ba8..dd595a558b8 100644 --- a/bin/plotting/pycbc_page_segplot +++ b/bin/plotting/pycbc_page_segplot @@ -16,7 +16,7 @@ # with this program; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
-import argparse, pycbc.version +import argparse import matplotlib; matplotlib.use('Agg') import matplotlib.pyplot as plt import numpy, pylab, pycbc.events, mpld3, mpld3.plugins @@ -30,12 +30,10 @@ from pycbc.events.veto import get_segment_definer_comments from pycbc.results.color import ifo_color from pycbc.results.mpld3_utils import MPLSlide, LineTooltip from pycbc.workflow import SegFile -import pycbc.version # parse command line parser = argparse.ArgumentParser() add_common_pycbc_options(parser) -parser.add_argument("--version", action="version", version=pycbc.version.git_verbose_msg) parser.add_argument('--segment-files', type=str, nargs="+", help='XML files with a segment definer table to read.') parser.add_argument('--segment-names', type=str, nargs="+", required=False, diff --git a/bin/plotting/pycbc_page_segtable b/bin/plotting/pycbc_page_segtable index 08b0597d657..0d7b05b5cb3 100644 --- a/bin/plotting/pycbc_page_segtable +++ b/bin/plotting/pycbc_page_segtable @@ -28,7 +28,6 @@ from ligo import segments from pycbc.events.veto import get_segment_definer_comments from pycbc.results import save_fig_with_metadata from pycbc.workflow import SegFile -import pycbc.version def powerset_ifos(ifo_set): combo_set = [] @@ -39,8 +38,6 @@ def powerset_ifos(ifo_set): # parse command line parser = argparse.ArgumentParser() pycbc.add_common_pycbc_options(parser) -parser.add_argument("--version", action="version", - version=pycbc.version.git_verbose_msg) parser.add_argument('--segment-files', type=str, nargs="+", help='XML files with a segment definer table to read.') parser.add_argument('--segment-names', type=str, nargs="+", required=False, default="", diff --git a/bin/plotting/pycbc_page_sensitivity b/bin/plotting/pycbc_page_sensitivity index f210bed016b..cad5146ec67 100755 --- a/bin/plotting/pycbc_page_sensitivity +++ b/bin/plotting/pycbc_page_sensitivity @@ -13,15 +13,12 @@ import pylab import pycbc.pnutils import pycbc.results import pycbc -import pycbc.version from pycbc import sensitivity from pycbc import conversions as conv from pycbc.io.hdf import HFile parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', - version=pycbc.version.git_verbose_msg) parser.add_argument('--injection-file', nargs='+', help="Required. 
HDF format injection result file or space " "separated list of files") diff --git a/bin/plotting/pycbc_page_snrchi b/bin/plotting/pycbc_page_snrchi index 9bd0f0aab5c..af7956ac1fb 100644 --- a/bin/plotting/pycbc_page_snrchi +++ b/bin/plotting/pycbc_page_snrchi @@ -9,7 +9,6 @@ matplotlib.use('Agg') import pylab import pycbc.results -import pycbc.version from pycbc.events import veto from pycbc.io import ( get_chisq_from_file_choice, chisq_choices, SingleDetTriggers, HFile @@ -18,8 +17,6 @@ from pycbc.io import ( parser = argparse.ArgumentParser() pycbc.add_common_pycbc_options(parser) parser.add_argument('--trigger-file', help='Single ifo trigger file') -parser.add_argument('--version', action='version', - version=pycbc.version.git_verbose_msg) parser.add_argument('--veto-file', help='Optional, file of veto segments to remove triggers') parser.add_argument('--segment-name', default=None, type=str, diff --git a/bin/plotting/pycbc_page_snrifar b/bin/plotting/pycbc_page_snrifar index a3d3c3ada23..26689724300 100644 --- a/bin/plotting/pycbc_page_snrifar +++ b/bin/plotting/pycbc_page_snrifar @@ -12,7 +12,6 @@ from scipy.special import erfc, erfinv from pycbc.io.hdf import HFile import pycbc.results -import pycbc.version from pycbc import conversions as conv def sigma_from_p(p): @@ -41,8 +40,6 @@ far_from_p = numpy.vectorize(_far_from_p) parser = argparse.ArgumentParser() # General required options pycbc.add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', - version=pycbc.version.git_verbose_msg) parser.add_argument('--trigger-file') parser.add_argument('--output-file') parser.add_argument('--not-cumulative', action='store_true') diff --git a/bin/plotting/pycbc_page_snrratehist b/bin/plotting/pycbc_page_snrratehist index 59860a237d4..5078698311e 100755 --- a/bin/plotting/pycbc_page_snrratehist +++ b/bin/plotting/pycbc_page_snrratehist @@ -15,7 +15,6 @@ from scipy.special import erf, erfinv from pycbc.io.hdf import HFile import pycbc.results -import pycbc.version from pycbc import conversions as conv def sigma_from_p(p): @@ -28,8 +27,6 @@ def p_from_sigma(sig): parser = argparse.ArgumentParser() # General required options pycbc.add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', - version=pycbc.version.git_verbose_msg) parser.add_argument('--trigger-file') parser.add_argument('--output-file') parser.add_argument('--bin-size', type=float) diff --git a/bin/plotting/pycbc_page_template_bin_table b/bin/plotting/pycbc_page_template_bin_table index 68fec14ec6a..7e4c3f662b8 100644 --- a/bin/plotting/pycbc_page_template_bin_table +++ b/bin/plotting/pycbc_page_template_bin_table @@ -8,11 +8,9 @@ import numpy as np import pycbc import pycbc.results -from pycbc.version import git_verbose_msg as version parser = argparse.ArgumentParser() pycbc.add_common_pycbc_options(parser) -parser.add_argument("--version", action="version", version=version) parser.add_argument('--ifo', required=True) parser.add_argument('--dq-file', required=True) parser.add_argument('--output-file') diff --git a/bin/plotting/pycbc_page_versioning b/bin/plotting/pycbc_page_versioning index 74fb09d048a..b05ef9ef37e 100755 --- a/bin/plotting/pycbc_page_versioning +++ b/bin/plotting/pycbc_page_versioning @@ -9,15 +9,13 @@ pycbc results pages import argparse import logging -import pycbc.version +import pycbc from pycbc import init_logging, add_common_pycbc_options from pycbc.results import (save_fig_with_metadata, html_escape, get_library_version_info, get_code_version_numbers) 
parser = argparse.ArgumentParser() pycbc.add_common_pycbc_options(parser) -parser.add_argument('--version', action="version", - version=pycbc.version.git_verbose_msg) parser.add_argument('--executables', nargs='+', required=True, help="List of executables to provide version " "information for") diff --git a/bin/plotting/pycbc_page_vetotable b/bin/plotting/pycbc_page_vetotable index 8ae26f8d062..8e7a69a952f 100644 --- a/bin/plotting/pycbc_page_vetotable +++ b/bin/plotting/pycbc_page_vetotable @@ -28,7 +28,6 @@ from ligo.lw import utils import pycbc.results from pycbc.results import save_fig_with_metadata -import pycbc.version from pycbc.io.ligolw import LIGOLWContentHandler @@ -36,7 +35,6 @@ parser = argparse.ArgumentParser(description=__doc__) # add command line options pycbc.add_common_pycbc_options(parser) -parser.add_argument("--version", action="version", version=pycbc.version.git_verbose_msg) parser.add_argument('--veto-definer-file', type=str, help='XML files with a veto_definer table to read.') parser.add_argument('--output-file', type=str, diff --git a/bin/plotting/pycbc_plot_bank_bins b/bin/plotting/pycbc_plot_bank_bins index 740fbfdf40c..ad19734c17e 100644 --- a/bin/plotting/pycbc_plot_bank_bins +++ b/bin/plotting/pycbc_plot_bank_bins @@ -12,7 +12,6 @@ import inspect from itertools import cycle import pycbc.events, pycbc.pnutils, pycbc.conversions, pycbc.results -import pycbc.version class H5BankFile(h5py.File): @@ -79,8 +78,6 @@ class H5BankFile(h5py.File): parser = argparse.ArgumentParser() pycbc.add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', - version=pycbc.version.git_verbose_msg) parser.add_argument('--bank-file', help='hdf format template bank file', required=True) parser.add_argument('--background-bins', nargs='+', diff --git a/bin/plotting/pycbc_plot_bank_corner b/bin/plotting/pycbc_plot_bank_corner index 52ef6eea29e..4494ab56eb8 100644 --- a/bin/plotting/pycbc_plot_bank_corner +++ b/bin/plotting/pycbc_plot_bank_corner @@ -29,8 +29,6 @@ import logging from textwrap import wrap import pycbc -import pycbc.version -from pycbc import __version__ from pycbc.results.plot import (add_style_opt_to_parser, set_style_from_cli) from pycbc.io import FieldArray, HFile from pycbc.inference import option_utils @@ -52,10 +50,6 @@ parameter_options = conversion_options + _fit_parameters parser = argparse.ArgumentParser(usage='pycbc_plot_bank_corner [--options]', description=__doc__) pycbc.add_common_pycbc_options(parser) -parser.add_argument("--version", - action="version", - version=__version__, - help="Prints version information.") parser.add_argument("--bank-file", required=True, help="The bank file to read in and plot") diff --git a/bin/plotting/pycbc_plot_dq_flag_likelihood b/bin/plotting/pycbc_plot_dq_flag_likelihood index 5ac749dad78..2e8a483d4fd 100644 --- a/bin/plotting/pycbc_plot_dq_flag_likelihood +++ b/bin/plotting/pycbc_plot_dq_flag_likelihood @@ -9,13 +9,11 @@ from matplotlib import use as matplotlib_use from matplotlib import pyplot matplotlib_use('Agg') -from pycbc.version import git_verbose_msg as version import pycbc.results from pycbc.io.hdf import HFile parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', version=version) parser.add_argument("--dq-file", required=True) parser.add_argument("--dq-label", required=True) parser.add_argument("--ifo", type=str, required=True) diff --git a/bin/plotting/pycbc_plot_dq_likelihood_vs_time 
b/bin/plotting/pycbc_plot_dq_likelihood_vs_time index b4740bdebe8..aa0c52f4022 100644 --- a/bin/plotting/pycbc_plot_dq_likelihood_vs_time +++ b/bin/plotting/pycbc_plot_dq_likelihood_vs_time @@ -10,13 +10,11 @@ from matplotlib import use use('Agg') from matplotlib import pyplot -from pycbc.version import git_verbose_msg as version import pycbc.results from pycbc.io.hdf import HFile parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', version=version) parser.add_argument("--ifo", type=str, required=True) parser.add_argument("--dq-file", required=True) parser.add_argument('--background-bin', default='all_bin') diff --git a/bin/plotting/pycbc_plot_dq_percentiles b/bin/plotting/pycbc_plot_dq_percentiles index 6e2915a46f1..e8c6c29f40e 100644 --- a/bin/plotting/pycbc_plot_dq_percentiles +++ b/bin/plotting/pycbc_plot_dq_percentiles @@ -10,13 +10,11 @@ from matplotlib import use use('Agg') from matplotlib import pyplot -from pycbc.version import git_verbose_msg as version import pycbc.results from pycbc.io.hdf import HFile parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', version=version) parser.add_argument("--ifo", type=str,required=True) parser.add_argument("--dq-file", required=True) parser.add_argument('--background-bin', default='all_bin') diff --git a/bin/plotting/pycbc_plot_gating b/bin/plotting/pycbc_plot_gating index 573c6fb42f6..d75e877b4fc 100644 --- a/bin/plotting/pycbc_plot_gating +++ b/bin/plotting/pycbc_plot_gating @@ -12,16 +12,14 @@ from matplotlib.patches import Rectangle import mpld3 import mpld3.plugins +import pycbc from pycbc.results.color import ifo_color from pycbc.results.mpld3_utils import MPLSlide -import pycbc.version from pycbc.io.hdf import HFile parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) -parser.add_argument("--version", action="version", - version=pycbc.version.git_verbose_msg) parser.add_argument('--input-file', nargs='+', required=True, help='Single-detector inspiral HDF5 files to take gating ' 'data from.') diff --git a/bin/plotting/pycbc_plot_hist b/bin/plotting/pycbc_plot_hist index 58899ddeed5..04ae7eba99e 100644 --- a/bin/plotting/pycbc_plot_hist +++ b/bin/plotting/pycbc_plot_hist @@ -12,14 +12,12 @@ use('Agg') from matplotlib import pyplot import pycbc -import pycbc.version import pycbc.results import pycbc.io from pycbc.events import background_bin_from_string, veto, ranking parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', version=pycbc.version.git_verbose_msg) parser.add_argument('--trigger-file', required=True, help="Combined single detector hdf trigger file") parser.add_argument('--veto-file', diff --git a/bin/plotting/pycbc_plot_multiifo_dtphase b/bin/plotting/pycbc_plot_multiifo_dtphase index 923c643b2fc..90833f966be 100755 --- a/bin/plotting/pycbc_plot_multiifo_dtphase +++ b/bin/plotting/pycbc_plot_multiifo_dtphase @@ -26,7 +26,7 @@ matplotlib.use('agg') from matplotlib import pyplot as plt from pycbc.events import coinc_rate -from pycbc import init_logging, version, add_common_pycbc_options +from pycbc import init_logging, add_common_pycbc_options from pycbc.results import save_fig_with_metadata from pycbc.io.hdf import HFile @@ -43,8 +43,6 @@ def marginalise_pdf(pdf, dimensions_to_keep): parser = argparse.ArgumentParser() 
add_common_pycbc_options(parser) -parser.add_argument('--version', action="version", - version=version.git_verbose_msg) parser.add_argument('--input-file', required=True, help="Input phasetd histogram file, made using " "pycbc_multiifo_dtphase") diff --git a/bin/plotting/pycbc_plot_psd_file b/bin/plotting/pycbc_plot_psd_file index 127db15785d..9a612aa4576 100644 --- a/bin/plotting/pycbc_plot_psd_file +++ b/bin/plotting/pycbc_plot_psd_file @@ -11,14 +11,11 @@ import sys import pycbc import pycbc.results import pycbc.psd -import pycbc.version from pycbc.io.hdf import HFile parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) -parser.add_argument("--version", action="version", - version=pycbc.version.git_verbose_msg) parser.add_argument("--psd-files", nargs='+', required=True, help='HDF file(s) containing the PSDs to plot') parser.add_argument('--hdf-group', default=None, diff --git a/bin/plotting/pycbc_plot_qscan b/bin/plotting/pycbc_plot_qscan index ba4ba9b3cdf..e8bfae143d0 100644 --- a/bin/plotting/pycbc_plot_qscan +++ b/bin/plotting/pycbc_plot_qscan @@ -34,7 +34,6 @@ from matplotlib import pyplot as plt from matplotlib.colors import LogNorm import pycbc.strain -import pycbc.version import pycbc.results # https://stackoverflow.com/questions/9978880/python-argument-parser-list-of-list-or-tuple-of-tuples @@ -47,8 +46,6 @@ def t_window(s): parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) -parser.add_argument("--version", action="version", - version=pycbc.version.git_verbose_msg) parser.add_argument('--output-file', required=True, help='Output plot') parser.add_argument('--center-time', type=float, help='Center plot on the given GPS time. If omitted, use ' diff --git a/bin/plotting/pycbc_plot_range b/bin/plotting/pycbc_plot_range index 5d90c484bb4..f107d202b62 100644 --- a/bin/plotting/pycbc_plot_range +++ b/bin/plotting/pycbc_plot_range @@ -11,7 +11,6 @@ import sys import pycbc.results import pycbc.types -import pycbc.version import pycbc.waveform import pycbc.filter from pycbc.io.hdf import HFile @@ -21,8 +20,6 @@ set_measure_level(0) parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) -parser.add_argument("--version", action='version', - version=pycbc.version.git_verbose_msg) parser.add_argument("--psd-files", nargs='+', help='HDF file of psds') parser.add_argument("--output-file", help='output file name') parser.add_argument("--mass1", nargs="+", type=float, diff --git a/bin/plotting/pycbc_plot_range_vs_mtot b/bin/plotting/pycbc_plot_range_vs_mtot index a8333e94178..f03c7a90b72 100644 --- a/bin/plotting/pycbc_plot_range_vs_mtot +++ b/bin/plotting/pycbc_plot_range_vs_mtot @@ -11,7 +11,6 @@ import math import pycbc.results import pycbc.types -import pycbc.version import pycbc.waveform import pycbc.filter from pycbc.io.hdf import HFile @@ -21,8 +20,6 @@ set_measure_level(0) parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) -parser.add_argument("--version", action='version', - version=pycbc.version.git_verbose_msg) parser.add_argument("--psd-files", nargs='+', help='HDF file of psds') parser.add_argument("--output-file", help='output file name') diff --git a/bin/plotting/pycbc_plot_singles_timefreq b/bin/plotting/pycbc_plot_singles_timefreq index a5222c90283..69d90587765 100644 --- a/bin/plotting/pycbc_plot_singles_timefreq +++ b/bin/plotting/pycbc_plot_singles_timefreq @@ -38,14 +38,11 @@ import pycbc.events import 
pycbc.pnutils import pycbc.strain import pycbc.results -import pycbc.version import pycbc.waveform parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) -parser.add_argument("--version", action="version", - version=pycbc.version.git_verbose_msg) parser.add_argument('--trig-file', required=True, help='HDF5 file containing single triggers') parser.add_argument('--output-file', required=True, help='Output plot') diff --git a/bin/plotting/pycbc_plot_singles_vs_params b/bin/plotting/pycbc_plot_singles_vs_params index 0fa5620b68c..3654c2ab6a5 100644 --- a/bin/plotting/pycbc_plot_singles_vs_params +++ b/bin/plotting/pycbc_plot_singles_vs_params @@ -36,13 +36,10 @@ import pycbc.pnutils import pycbc.events import pycbc.results import pycbc.io -import pycbc.version from pycbc.events import ranking parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) -parser.add_argument("--version", action="version", - version=pycbc.version.git_verbose_msg) parser.add_argument('--single-trig-file', required=True, help='Path to file containing single-detector triggers in ' 'HDF5 format. Required') diff --git a/bin/plotting/pycbc_plot_throughput b/bin/plotting/pycbc_plot_throughput index ead55dcff29..252b09ef9bd 100755 --- a/bin/plotting/pycbc_plot_throughput +++ b/bin/plotting/pycbc_plot_throughput @@ -11,13 +11,10 @@ from scipy.stats import hmean import pycbc from pycbc.results.color import ifo_color -import pycbc.version from pycbc.io.hdf import HFile parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) -parser.add_argument("--version", action="version", - version=pycbc.version.git_verbose_msg) parser.add_argument('--input-file', nargs='+', required=True, help='Single-detector inspiral HDF5 files to get ' 'templates per core.') diff --git a/bin/plotting/pycbc_plot_trigrate b/bin/plotting/pycbc_plot_trigrate index 0e324b1ec81..a0c538f2bae 100644 --- a/bin/plotting/pycbc_plot_trigrate +++ b/bin/plotting/pycbc_plot_trigrate @@ -27,7 +27,6 @@ import pycbc from pycbc import io, events, bin_utils, results from pycbc.events import triggers from pycbc.events import ranking -import pycbc.version #### DEFINITIONS AND FUNCTIONS #### @@ -41,7 +40,6 @@ def get_stat(statchoice, trigs): parser = argparse.ArgumentParser(usage="", description="Plot trigger rates") pycbc.add_common_pycbc_options(parser) -parser.add_argument("--version", action=pycbc.version.Version) parser.add_argument("--trigger-file", help="Input hdf5 file containing single triggers. 
" "Required") diff --git a/bin/plotting/pycbc_plot_waveform b/bin/plotting/pycbc_plot_waveform index 9f94d1aac9d..ff6aed5d507 100644 --- a/bin/plotting/pycbc_plot_waveform +++ b/bin/plotting/pycbc_plot_waveform @@ -25,7 +25,6 @@ from mpl_toolkits.axes_grid1.inset_locator import zoomed_inset_axes from mpl_toolkits.axes_grid1.inset_locator import mark_inset from pycbc import waveform, io -from pycbc import version from pycbc import results from pycbc import init_logging, add_common_pycbc_options from pycbc.fft import ifft @@ -34,8 +33,6 @@ from pycbc.types import TimeSeries, zeros, complex64 parser = argparse.ArgumentParser(usage='', description=__doc__) add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', - version=version.git_verbose_msg) parser.add_argument('--output-file', required=True) parser.add_argument("--low-frequency-cutoff", type=float, help="The low frequency cutoff to use for generation.") diff --git a/bin/pycbc_banksim b/bin/pycbc_banksim index a5d70dea773..0af218957ec 100644 --- a/bin/pycbc_banksim +++ b/bin/pycbc_banksim @@ -35,7 +35,7 @@ from pycbc import DYN_RANGE_FAC from pycbc.types import FrequencySeries, TimeSeries, zeros, complex_same_precision_as from pycbc.filter import match, sigmasq from pycbc.io.ligolw import LIGOLWContentHandler -import pycbc.psd, pycbc.scheme, pycbc.fft, pycbc.strain, pycbc.version +import pycbc.psd, pycbc.scheme, pycbc.fft, pycbc.strain from pycbc.detector import overhead_antenna_pattern as generate_fplus_fcross from pycbc.waveform import TemplateBank @@ -147,8 +147,6 @@ parser = ArgumentParser(description=__doc__) parser.add_argument("--match-file", dest="out_file", metavar="FILE", required=True, help="File to output match results") pycbc.add_common_pycbc_options(parser) -parser.add_argument("--version", action="version", - version=pycbc.version.git_verbose_msg) #Template Settings parser.add_argument("--template-file", dest="bank_file", metavar="FILE", diff --git a/bin/pycbc_banksim_combine_banks b/bin/pycbc_banksim_combine_banks index 56f382f19ee..3a547d71aa4 100644 --- a/bin/pycbc_banksim_combine_banks +++ b/bin/pycbc_banksim_combine_banks @@ -28,7 +28,6 @@ import logging from numpy import * import pycbc -import pycbc.version __author__ = "Ian Harry " __program__ = "pycbc_banksim_combine_banks" @@ -37,7 +36,6 @@ __program__ = "pycbc_banksim_combine_banks" _desc = __doc__[1:] parser = argparse.ArgumentParser(description=_desc) -parser.add_argument('--version', action=pycbc.version.Version) pycbc.add_common_pycbc_options(parser) parser.add_argument("-I", "--input-files", nargs='+', help="Explicit list of input files.") diff --git a/bin/pycbc_banksim_match_combine b/bin/pycbc_banksim_match_combine index 1d140c3b6bc..111329e8e7e 100644 --- a/bin/pycbc_banksim_match_combine +++ b/bin/pycbc_banksim_match_combine @@ -44,7 +44,6 @@ __program__ = "pycbc_banksim_match_combine" # Read command line options parser = argparse.ArgumentParser(description=__doc__) -parser.add_argument("--version", action="version", version=__version__) pycbc.add_common_pycbc_options(parser) parser.add_argument("--match-files", nargs='+', help="Explicit list of match files.") diff --git a/bin/pycbc_banksim_skymax b/bin/pycbc_banksim_skymax index a32bf175cde..f93fdfa006b 100644 --- a/bin/pycbc_banksim_skymax +++ b/bin/pycbc_banksim_skymax @@ -39,7 +39,7 @@ from pycbc.filter import overlap_cplx, matched_filter from pycbc.filter import compute_max_snr_over_sky_loc_stat from pycbc.filter import compute_max_snr_over_sky_loc_stat_no_phase from 
pycbc.io.ligolw import LIGOLWContentHandler -import pycbc.psd, pycbc.scheme, pycbc.fft, pycbc.strain, pycbc.version +import pycbc.psd, pycbc.scheme, pycbc.fft, pycbc.strain from pycbc.detector import overhead_antenna_pattern as generate_fplus_fcross from pycbc.waveform import TemplateBank @@ -153,8 +153,6 @@ parser = ArgumentParser(description=__doc__) parser.add_argument("--match-file", dest="out_file", metavar="FILE", required=True, help="File to output match results") pycbc.add_common_pycbc_options(parser) -parser.add_argument("--version", action="version", - version=pycbc.version.git_verbose_msg) #Template Settings parser.add_argument("--template-file", dest="bank_file", metavar="FILE", diff --git a/bin/pycbc_coinc_time b/bin/pycbc_coinc_time index 34e4687ea63..c4476deb1d3 100644 --- a/bin/pycbc_coinc_time +++ b/bin/pycbc_coinc_time @@ -6,8 +6,7 @@ from dqsegdb.apicalls import dqsegdbQueryTimes as query import ligo.segments -#from pycbc.workflow.segment import cat_to_veto_def_cat as convert_cat -import pycbc.version +import pycbc def sane(seg_list): """ Convert list of len two lists containing strs to segment list """ @@ -90,7 +89,6 @@ def get_vetoes(veto_def, ifo, server, veto_name, start_default, end_default): parser = argparse.ArgumentParser() -parser.add_argument('--version', action='version', version=pycbc.version.git_verbose_msg) pycbc.add_common_pycbc_options(parser) parser.add_argument('--gps-start-time', type=int, required=True, diff --git a/bin/pycbc_condition_strain b/bin/pycbc_condition_strain index e9ca518c113..4f65cc717a3 100644 --- a/bin/pycbc_condition_strain +++ b/bin/pycbc_condition_strain @@ -28,7 +28,6 @@ import logging import argparse import pycbc.strain -import pycbc.version import pycbc.frame import pycbc.fft from pycbc.types import float32, float64 @@ -47,8 +46,6 @@ def write_strain(file_name, channel, data): parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) -parser.add_argument("--version", action="version", - version=pycbc.version.git_verbose_msg) parser.add_argument('--output-strain-file', required=True, help='Name of output frame file. The file format is ' 'selected based on the extension (.gwf, .npy, .hdf ' diff --git a/bin/pycbc_convertinjfiletohdf b/bin/pycbc_convertinjfiletohdf index 57ccac2b6b5..5b983673ade 100755 --- a/bin/pycbc_convertinjfiletohdf +++ b/bin/pycbc_convertinjfiletohdf @@ -161,8 +161,6 @@ class LVKNewStyleInjectionSet(object): parser = argparse.ArgumentParser() pycbc.add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', - version=pycbc.version.git_verbose_msg) parser.add_argument('--injection-file', required=True, help="The injection file to load. 
Must end in '.xml[.gz]' " "and must contain a SimInspiral table") diff --git a/bin/pycbc_create_injections b/bin/pycbc_create_injections index 3fac44c3d66..6b388a34ad1 100644 --- a/bin/pycbc_create_injections +++ b/bin/pycbc_create_injections @@ -117,7 +117,6 @@ import h5py from numpy.random import uniform import pycbc -import pycbc.version from pycbc.inject import InjectionSet from pycbc import distributions from pycbc import transforms @@ -131,9 +130,6 @@ parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter) configuration.add_workflow_command_line_group(parser) pycbc.add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', - version=pycbc.version.git_verbose_msg, - help='Prints version information.') parser.add_argument('--ninjections', type=int, help='Number of injections to create.') parser.add_argument('--gps-start-time', type=int, help="Alternative to " diff --git a/bin/pycbc_data_store b/bin/pycbc_data_store index 9fdee6b3eb6..087b18fb87d 100755 --- a/bin/pycbc_data_store +++ b/bin/pycbc_data_store @@ -8,7 +8,6 @@ import numpy import pycbc import pycbc.strain import pycbc.dq -from pycbc.version import git_verbose_msg as version from pycbc.fft.fftw import set_measure_level from pycbc.events.veto import segments_to_start_end from pycbc.io.hdf import HFile @@ -17,7 +16,6 @@ set_measure_level(0) parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', version=version) parser.add_argument("--science-name", help="Science flag definition") parser.add_argument("--segment-server") parser.add_argument("--veto-definer-file") diff --git a/bin/pycbc_faithsim b/bin/pycbc_faithsim index e1bf979a15a..694b818ac46 100644 --- a/bin/pycbc_faithsim +++ b/bin/pycbc_faithsim @@ -31,7 +31,6 @@ import sys from ligo.lw import utils as ligolw_utils from ligo.lw import lsctables -import pycbc.version import pycbc.strain import pycbc.psd from pycbc.waveform import td_approximants, fd_approximants @@ -83,8 +82,6 @@ psd_names = pycbc.psd.get_lalsim_psd_list() taper_choices = ["start","end","startend"] parser = argparse.ArgumentParser(usage='', description="Calculate faithfulness for a set of waveforms.") -parser.add_argument('--version', action='version', - version=pycbc.version.git_verbose_msg) pycbc.add_common_pycbc_options(parser) parser.add_argument("--param-file", dest="bank_file", metavar="FILE", help="Sngl or Sim Inspiral Table containing waveform " diff --git a/bin/pycbc_fit_sngl_trigs b/bin/pycbc_fit_sngl_trigs index c449e41321f..7acde3384d3 100644 --- a/bin/pycbc_fit_sngl_trigs +++ b/bin/pycbc_fit_sngl_trigs @@ -20,10 +20,10 @@ use('Agg') from matplotlib import pyplot as plt import numpy as np +import pycbc from pycbc import io, events, bin_utils from pycbc.events import ranking from pycbc.events import trigger_fits as trstats -import pycbc.version #### DEFINITIONS AND FUNCTIONS #### @@ -56,7 +56,6 @@ parser = argparse.ArgumentParser(usage="", description="Perform maximum-likelihood fits of single inspiral trigger" "distributions to various functions") pycbc.add_common_pycbc_options(parser) -parser.add_argument("--version", action=pycbc.version.Version) parser.add_argument("--inputs", nargs="+", help="Input file or space-separated list of input files " "containing single triggers. 
Currently .xml(.gz) " diff --git a/bin/pycbc_hdf5_splitbank b/bin/pycbc_hdf5_splitbank index d080d5774bb..ad50d57aed9 100755 --- a/bin/pycbc_hdf5_splitbank +++ b/bin/pycbc_hdf5_splitbank @@ -27,15 +27,13 @@ import h5py import logging from numpy import random -import pycbc, pycbc.version +import pycbc from pycbc.waveform import bank __author__ = "Soumi De " parser = argparse.ArgumentParser(description=__doc__[1:]) pycbc.add_common_pycbc_options(parser) -parser.add_argument("--version", action="version", - version=pycbc.version.git_verbose_msg) parser.add_argument("--bank-file", type=str, help="Bank hdf file to load.") outbanks = parser.add_mutually_exclusive_group(required=True) diff --git a/bin/pycbc_hdf_splitinj b/bin/pycbc_hdf_splitinj index 1026a6f0fe9..288ecee39a6 100644 --- a/bin/pycbc_hdf_splitinj +++ b/bin/pycbc_hdf_splitinj @@ -8,16 +8,14 @@ Split sets are organized to maximize time between injections. import argparse import numpy as np +import pycbc from pycbc.inject import InjectionSet -import pycbc.version from pycbc.io.hdf import HFile # Parse command line parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) -parser.add_argument("--version", action="version", - version=pycbc.version.git_verbose_msg) parser.add_argument("-f", "--output-files", nargs='*', required=True, help="Names of output files") parser.add_argument("-i", "--input-file", required=True, diff --git a/bin/pycbc_inj_cut b/bin/pycbc_inj_cut index 1ca70bcb5ca..bdcf07d994f 100644 --- a/bin/pycbc_inj_cut +++ b/bin/pycbc_inj_cut @@ -36,11 +36,9 @@ from ligo.lw import lsctables import pycbc import pycbc.inject from pycbc.types import MultiDetOptionAction -import pycbc.version parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) -parser.add_argument('--version', action=pycbc.version.Version) parser.add_argument('--input', dest='inj_xml', required=True, help='Input LIGOLW injections file.') parser.add_argument('--output-missed', dest='output_missed', required=False, help="Output LIGOLW file containing injections we expect to miss.") diff --git a/bin/pycbc_inspiral b/bin/pycbc_inspiral index c7ea379cfe7..61e769ac2e7 100644 --- a/bin/pycbc_inspiral +++ b/bin/pycbc_inspiral @@ -27,7 +27,6 @@ import time from multiprocessing import Pool import pycbc -import pycbc.version from pycbc import vetoes, psd, waveform, strain, scheme, fft, DYN_RANGE_FAC, events from pycbc.vetoes.sgchisq import SingleDetSGChisq from pycbc.filter import MatchedFilterControl, make_frequency_series, qtransform @@ -54,7 +53,6 @@ parser = argparse.ArgumentParser(usage='', description="Find single detector gravitational-wave triggers.") pycbc.add_common_pycbc_options(parser) -parser.add_argument('--version', action=pycbc.version.Version) parser.add_argument("--update-progress", help="updates a file 'progress.txt' with a value 0 .. 
1.0 when this amount of (filtering) progress was made", type=float, default=0) diff --git a/bin/pycbc_make_html_page b/bin/pycbc_make_html_page index f4ea0f2e809..4b6d9c9e893 100644 --- a/bin/pycbc_make_html_page +++ b/bin/pycbc_make_html_page @@ -30,7 +30,6 @@ from ligo import segments import pycbc.results from pycbc.results.render import get_embedded_config, render_workflow_html_template, setup_template_render from pycbc.workflow import segment -import pycbc.version def examine_dir(cwd): """ @@ -167,8 +166,6 @@ default_logo_location = "https://raw.githubusercontent.com/gwastro/" + \ parser = argparse.ArgumentParser(usage='pycbc_make_html_page \ [--options]', description="Create static html pages of a filesystem's content.") -parser.add_argument("--version", action="version", - version=pycbc.version.git_verbose_msg) pycbc.add_common_pycbc_options(parser) parser.add_argument('-f', '--template-file', type=str, help='Template file to use for skeleton html page.') diff --git a/bin/pycbc_make_skymap b/bin/pycbc_make_skymap index cd522665ce5..ad62ce6fb71 100755 --- a/bin/pycbc_make_skymap +++ b/bin/pycbc_make_skymap @@ -481,7 +481,6 @@ if __name__ == '__main__': parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) - parser.add_argument('--version', action=pycbc.version.Version) # note that I am not using a MultiDetOptionAction for --trig-time as I # explicitly want to handle cases like `--trig-time 1234` and # `--trig-time H1:1234 L1:1234` in different ways diff --git a/bin/pycbc_merge_inj_hdf b/bin/pycbc_merge_inj_hdf index 9b208738d47..b09b0dce1a2 100755 --- a/bin/pycbc_merge_inj_hdf +++ b/bin/pycbc_merge_inj_hdf @@ -27,7 +27,6 @@ import h5py import pycbc import pycbc.inject -import pycbc.version def get_gc_end_time(injection): @@ -45,7 +44,6 @@ def get_gc_end_time(injection): if __name__ == '__main__': parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) - parser.add_argument("--version", action=pycbc.version.Version) parser.add_argument('--injection-files', '-i', dest='injection_file', required=True, nargs='+', help='Input HDF5 files defining injections') diff --git a/bin/pycbc_multi_inspiral b/bin/pycbc_multi_inspiral index 6737d99736c..114ed7fa88c 100755 --- a/bin/pycbc_multi_inspiral +++ b/bin/pycbc_multi_inspiral @@ -30,7 +30,6 @@ import time import argparse import numpy as np -import pycbc.version from pycbc import ( detector, fft, @@ -120,7 +119,6 @@ def slide_limiter(args): # pycbc_multi_inspiral executable. 
time_init = time.time() parser = argparse.ArgumentParser(description=__doc__) -parser.add_argument('--version', action=pycbc.version.Version) add_common_pycbc_options(parser) parser.add_argument("--output", type=str) parser.add_argument( diff --git a/bin/pycbc_optimal_snr b/bin/pycbc_optimal_snr index ff10f857bb7..08fa83c4ae9 100644 --- a/bin/pycbc_optimal_snr +++ b/bin/pycbc_optimal_snr @@ -33,7 +33,6 @@ from ligo.lw import lsctables import pycbc import pycbc.inject import pycbc.psd -import pycbc.version from pycbc.filter import sigma, make_frequency_series from pycbc.types import TimeSeries, FrequencySeries, zeros, float32, \ MultiDetOptionAction, load_frequencyseries @@ -120,7 +119,6 @@ def get_gc_end_time(injection): if __name__ == '__main__': parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) - parser.add_argument("--version", action=pycbc.version.Version) parser.add_argument('--input-file', '-i', dest='injection_file', required=True, help='Input LIGOLW file defining injections') diff --git a/bin/pycbc_optimize_snr b/bin/pycbc_optimize_snr index 11c9bcb91e7..6ce3abb9ba0 100755 --- a/bin/pycbc_optimize_snr +++ b/bin/pycbc_optimize_snr @@ -14,7 +14,7 @@ mpl_use_backend('agg') import pycbc from pycbc import ( - fft, scheme, version + fft, scheme ) from pycbc.types import MultiDetOptionAction, load_frequencyseries import pycbc.conversions as cv @@ -27,8 +27,6 @@ from pycbc.live import snr_optimizer parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', - version=version.git_verbose_msg) parser.add_argument('--params-file', required=True, help='Location of the attributes file created by PyCBC ' 'Live') diff --git a/bin/pycbc_single_template b/bin/pycbc_single_template index 42a21dc6909..5aaff2787f4 100755 --- a/bin/pycbc_single_template +++ b/bin/pycbc_single_template @@ -31,7 +31,6 @@ from pycbc.types import zeros, complex64 from pycbc.types import complex_same_precision_as from pycbc.detector import Detector import pycbc.waveform.utils -import pycbc.version def subtract_template(stilde, template, snr, trigger_time, flow): idx = int((trigger_time - snr.start_time) / snr.delta_t) @@ -92,7 +91,6 @@ def select_segments(fname, anal_name, data_name, ifo, time, pad_data): parser = argparse.ArgumentParser(usage='', description="Single template gravitational-wave followup") pycbc.add_common_pycbc_options(parser) -parser.add_argument('--version', action=pycbc.version.Version) parser.add_argument('--output-file', required=True) parser.add_argument('--subtract-template', action='store_true') parser.add_argument("--low-frequency-cutoff", type=float, diff --git a/bin/pycbc_source_probability_offline b/bin/pycbc_source_probability_offline index f12447603ef..3122d4583f3 100755 --- a/bin/pycbc_source_probability_offline +++ b/bin/pycbc_source_probability_offline @@ -29,8 +29,6 @@ parser.add_argument('--ifar-threshold', type=float, default=None, 'above threshold.') parser.add_argument('--include-mass-gap', action='store_true', help='Option to include the Mass Gap region.') -parser.add_argument("--version", action="version", - version=pycbc.version.git_verbose_msg) mchirp_area.insert_args(parser) args = parser.parse_args() diff --git a/bin/pycbc_split_inspinj b/bin/pycbc_split_inspinj index 28e5ee9c1ad..7cfaef6143b 100644 --- a/bin/pycbc_split_inspinj +++ b/bin/pycbc_split_inspinj @@ -6,15 +6,12 @@ from ligo.lw import utils as ligolw_utils from ligo.lw import lsctables from 
itertools import cycle -import pycbc.version +import pycbc from pycbc.io.ligolw import LIGOLWContentHandler, get_table_columns - # Parse command line parser = argparse.ArgumentParser() pycbc.add_common_pycbc_options(parser) -parser.add_argument("--version", action="version", - version=pycbc.version.git_verbose_msg) group = parser.add_mutually_exclusive_group(required=True) group.add_argument("-n", "--num-splits", type=int, help="Number of files to be generated") diff --git a/bin/pycbc_splitbank b/bin/pycbc_splitbank index 8919c31ac2f..ab4f59e6276 100644 --- a/bin/pycbc_splitbank +++ b/bin/pycbc_splitbank @@ -35,7 +35,6 @@ from ligo.lw import lsctables from ligo.lw import utils as ligolw_utils import pycbc -from pycbc import version from pycbc.io.ligolw import LIGOLWContentHandler, create_process_table from pycbc.conversions import mchirp_from_mass1_mass2 from pycbc.pnutils import frequency_cutoff_from_name @@ -47,8 +46,6 @@ __program__ = "pycbc_splitbank" # Command line parsing parser = argparse.ArgumentParser(description=__doc__) -parser.add_argument('--version', action='version', version=version.git_verbose_msg) - pycbc.add_common_pycbc_options(parser) group = parser.add_mutually_exclusive_group(required=True) group.add_argument('--templates-per-bank', metavar='SAMPLES', diff --git a/bin/pygrb/pycbc_grb_inj_finder b/bin/pygrb/pycbc_grb_inj_finder index 0eab460e463..05f6eebef51 100644 --- a/bin/pygrb/pycbc_grb_inj_finder +++ b/bin/pygrb/pycbc_grb_inj_finder @@ -39,7 +39,7 @@ from gwdatafind.utils import filename_metadata from ligo.segments import segmentlist from ligo.segments.utils import fromsegwizard -from pycbc import __version__, add_common_pycbc_options, init_logging +from pycbc import add_common_pycbc_options, init_logging from pycbc.inject import InjectionSet from pycbc.io.hdf import HFile from pycbc.results.pygrb_postprocessing_utils import template_hash_to_id @@ -155,14 +155,6 @@ parser = argparse.ArgumentParser( add_common_pycbc_options(parser) -parser.add_argument( - "-V", - "--version", - action="version", - version=__version__, - help="show version number and exit", -) - # input/output parser.add_argument( "-f", diff --git a/bin/pygrb/pycbc_grb_trig_cluster b/bin/pygrb/pycbc_grb_trig_cluster index 92b1cba65f4..c0fc0937474 100644 --- a/bin/pygrb/pycbc_grb_trig_cluster +++ b/bin/pygrb/pycbc_grb_trig_cluster @@ -34,7 +34,6 @@ import h5py from gwdatafind.utils import filename_metadata -from pycbc import __version__ from pycbc import init_logging, add_common_pycbc_options from pycbc.io.hdf import HFile @@ -121,13 +120,6 @@ parser = argparse.ArgumentParser( ) add_common_pycbc_options(parser) -parser.add_argument( - "-V", - "--version", - action="version", - version=__version__, - help="show version number and exit", -) # clustering parser.add_argument( diff --git a/bin/pygrb/pycbc_grb_trig_combiner b/bin/pygrb/pycbc_grb_trig_combiner index 65ee3b08bc5..af2e5620cb9 100644 --- a/bin/pygrb/pycbc_grb_trig_combiner +++ b/bin/pygrb/pycbc_grb_trig_combiner @@ -35,7 +35,7 @@ from gwdatafind.utils import (file_segment, filename_metadata) from ligo import segments from ligo.segments.utils import fromsegwizard -from pycbc import __version__, add_common_pycbc_options, init_logging +from pycbc import add_common_pycbc_options, init_logging from pycbc.results.pygrb_postprocessing_utils import template_hash_to_id from pycbc.io.hdf import HFile @@ -342,13 +342,6 @@ parser = argparse.ArgumentParser( ) add_common_pycbc_options(parser) -parser.add_argument( - "-V", - "--version", - action="version", 
- version=__version__, - help="show version number and exit", -) # tags parser.add_argument( diff --git a/bin/pygrb/pycbc_make_offline_grb_workflow b/bin/pygrb/pycbc_make_offline_grb_workflow index e8cb3db28d3..88d52304b94 100644 --- a/bin/pygrb/pycbc_make_offline_grb_workflow +++ b/bin/pygrb/pycbc_make_offline_grb_workflow @@ -20,13 +20,6 @@ Make workflow for the archival, targeted, coherent inspiral pipeline. """ -import pycbc.version - -__author__ = "Andrew Williamson " -__version__ = pycbc.version.git_verbose_msg -__date__ = pycbc.version.date -__program__ = "pycbc_make_offline_grb_workflow" - import sys import os import argparse @@ -46,7 +39,6 @@ workflow_name = "pygrb_offline" # Parse command line options and instantiate pycbc workflow object parser = argparse.ArgumentParser() add_common_pycbc_options(parser) -parser.add_argument("--version", action="version", version=__version__) _workflow.add_workflow_command_line_group(parser) _workflow.add_workflow_settings_cli(parser) args = parser.parse_args() diff --git a/bin/pygrb/pycbc_pygrb_efficiency b/bin/pygrb/pycbc_pygrb_efficiency index abba4bc8790..3f18a8f4703 100644 --- a/bin/pygrb/pycbc_pygrb_efficiency +++ b/bin/pygrb/pycbc_pygrb_efficiency @@ -92,8 +92,7 @@ def efficiency_with_errs(found_bestnr, num_injections, num_mc_injs=0): # ============================================================================= # Main script starts here # ============================================================================= -parser = ppu.pygrb_initialize_plot_parser(description=__doc__, - version=__version__) +parser = ppu.pygrb_initialize_plot_parser(description=__doc__) parser.add_argument("-F", "--trig-file", action="store", required=True, help="Location of off-source trigger file.") parser.add_argument("--onsource-file", action="store", diff --git a/bin/pygrb/pycbc_pygrb_exclusion_dist_table b/bin/pygrb/pycbc_pygrb_exclusion_dist_table index c170aae419a..873a44c9943 100644 --- a/bin/pygrb/pycbc_pygrb_exclusion_dist_table +++ b/bin/pygrb/pycbc_pygrb_exclusion_dist_table @@ -32,7 +32,7 @@ __program__ = "pycbc_pygrb_exclusion_dist_table" parser = argparse.ArgumentParser(description=__doc__, formatter_class= argparse.ArgumentDefaultsHelpFormatter) -parser.add_argument("--version", action="version", version=__version__) +pycbc.add_common_pycbc_options(parser) parser.add_argument("--input-files", nargs="+", required=True, help="List of JSON input files" + " output by pycbc_pygrb_efficiency" + @@ -42,6 +42,8 @@ parser.add_argument("--output-file", required=True, " of exclusion distances.") opts = parser.parse_args() +pycbc.init_logging(opts.verbose) + # Load JSON files as a list of dictionaries file_contents = [] for file_name in opts.input_files: diff --git a/bin/pygrb/pycbc_pygrb_grb_info_table b/bin/pygrb/pycbc_pygrb_grb_info_table index 9903886e8b7..4189a5a44bb 100644 --- a/bin/pygrb/pycbc_pygrb_grb_info_table +++ b/bin/pygrb/pycbc_pygrb_grb_info_table @@ -45,7 +45,6 @@ __program__ = "pycbc_pygrb_grb_info_table" parser = argparse.ArgumentParser(description=__doc__, formatter_class= argparse.ArgumentDefaultsHelpFormatter) add_common_pycbc_options(parser) -parser.add_argument("--version", action="version", version=__version__) parser.add_argument("--trigger-time", type=int, required=True, help="GPS time of the GRB.") diff --git a/bin/pygrb/pycbc_pygrb_minifollowups b/bin/pygrb/pycbc_pygrb_minifollowups index 2b89d66e594..1f13c549199 100644 --- a/bin/pygrb/pycbc_pygrb_minifollowups +++ b/bin/pygrb/pycbc_pygrb_minifollowups @@ -105,8 +105,6 @@ def 
make_timeseries_plot(workflow, trig_file, snr_type, central_time, # Main script starts here # ============================================================================= parser = argparse.ArgumentParser(description=__doc__[1:]) -parser.add_argument('--version', action='version', - version=pycbc.version.git_verbose_msg) pycbc.add_common_pycbc_options(parser) parser.add_argument('--trig-file', help="HDF file with the triggers found by PyGRB") diff --git a/bin/pygrb/pycbc_pygrb_page_tables b/bin/pygrb/pycbc_pygrb_page_tables index 5f37f2cc1a9..02d6d6c5411 100755 --- a/bin/pygrb/pycbc_pygrb_page_tables +++ b/bin/pygrb/pycbc_pygrb_page_tables @@ -180,8 +180,7 @@ def load_missed_found_injections(hdf_file, ifos, snr_threshold, bank_file, # ============================================================================= # Main script starts here # ============================================================================= -parser = ppu.pygrb_initialize_plot_parser(description=__doc__, - version=__version__) +parser = ppu.pygrb_initialize_plot_parser(description=__doc__) parser.add_argument("-F", "--offsource-file", action="store", required=True, help="Location of off-source trigger file") parser.add_argument("--onsource-file", action="store", diff --git a/bin/pygrb/pycbc_pygrb_plot_chisq_veto b/bin/pygrb/pycbc_pygrb_plot_chisq_veto index 815a9792224..2a624083a12 100644 --- a/bin/pygrb/pycbc_pygrb_plot_chisq_veto +++ b/bin/pygrb/pycbc_pygrb_plot_chisq_veto @@ -171,8 +171,7 @@ def calculate_contours(trig_data, opts, new_snrs=None): # ============================================================================= # Main script starts here # ============================================================================= -parser = ppu.pygrb_initialize_plot_parser(description=__doc__, - version=__version__) +parser = ppu.pygrb_initialize_plot_parser(description=__doc__) parser.add_argument("-t", "--trig-file", action="store", default=None, required=True, help="The location of the trigger file") diff --git a/bin/pygrb/pycbc_pygrb_plot_coh_ifosnr b/bin/pygrb/pycbc_pygrb_plot_coh_ifosnr index d378d1aea96..1770bb27e71 100644 --- a/bin/pygrb/pycbc_pygrb_plot_coh_ifosnr +++ b/bin/pygrb/pycbc_pygrb_plot_coh_ifosnr @@ -164,9 +164,7 @@ def plot_deviation(percentile, snr_grid, y, ax, style): # ============================================================================= # Main script starts here # ============================================================================= -parser = ppu.pygrb_initialize_plot_parser( - description=__doc__, version=__version__ -) +parser = ppu.pygrb_initialize_plot_parser(description=__doc__) parser.add_argument( "-t", "--trig-file", diff --git a/bin/pygrb/pycbc_pygrb_plot_injs_results b/bin/pygrb/pycbc_pygrb_plot_injs_results index 1802ada4966..7a67d1dfab3 100644 --- a/bin/pygrb/pycbc_pygrb_plot_injs_results +++ b/bin/pygrb/pycbc_pygrb_plot_injs_results @@ -170,8 +170,7 @@ def load_data(input_file_handle, keys, tag): # ============================================================================= # Main script starts here # ============================================================================= -parser = ppu.pygrb_initialize_plot_parser(description=__doc__, - version=__version__) +parser = ppu.pygrb_initialize_plot_parser(description=__doc__) parser.add_argument("--found-missed-file", help="The hdf injection results file", required=True) parser.add_argument("--trig-file", diff --git a/bin/pygrb/pycbc_pygrb_plot_null_stats b/bin/pygrb/pycbc_pygrb_plot_null_stats index 
5652ce8dab5..187b366962d 100644 --- a/bin/pygrb/pycbc_pygrb_plot_null_stats +++ b/bin/pygrb/pycbc_pygrb_plot_null_stats @@ -127,8 +127,7 @@ def calculate_contours(opts, new_snrs=None): # ============================================================================= # Main script starts here # ============================================================================= -parser = ppu.pygrb_initialize_plot_parser(description=__doc__, - version=__version__) +parser = ppu.pygrb_initialize_plot_parser(description=__doc__) parser.add_argument("-t", "--trig-file", action="store", default=None, required=True, help="The location of the trigger file") diff --git a/bin/pygrb/pycbc_pygrb_plot_skygrid b/bin/pygrb/pycbc_pygrb_plot_skygrid index 4cc37b76832..435f37f1d20 100644 --- a/bin/pygrb/pycbc_pygrb_plot_skygrid +++ b/bin/pygrb/pycbc_pygrb_plot_skygrid @@ -58,8 +58,7 @@ def load_data(input_file, ifos, vetoes, injections=False): # ============================================================================= # Main script starts here # ============================================================================= -parser = ppu.pygrb_initialize_plot_parser(description=__doc__, - version=__version__) +parser = ppu.pygrb_initialize_plot_parser(description=__doc__) parser.add_argument("-t", "--trig-file", action="store", default=None, required=True, help="The location of the trigger file") diff --git a/bin/pygrb/pycbc_pygrb_plot_snr_timeseries b/bin/pygrb/pycbc_pygrb_plot_snr_timeseries index 75b567d6483..7fe26f270f3 100644 --- a/bin/pygrb/pycbc_pygrb_plot_snr_timeseries +++ b/bin/pygrb/pycbc_pygrb_plot_snr_timeseries @@ -95,8 +95,7 @@ def reset_times(data_time, trig_time): # ============================================================================= # Main script starts here # ============================================================================= -parser = ppu.pygrb_initialize_plot_parser(description=__doc__, - version=__version__) +parser = ppu.pygrb_initialize_plot_parser(description=__doc__) parser.add_argument("-t", "--trig-file", action="store", default=None, required=True, help="The location of the trigger file") diff --git a/bin/pygrb/pycbc_pygrb_plot_stats_distribution b/bin/pygrb/pycbc_pygrb_plot_stats_distribution index bca30d940ca..827b8467eaa 100644 --- a/bin/pygrb/pycbc_pygrb_plot_stats_distribution +++ b/bin/pygrb/pycbc_pygrb_plot_stats_distribution @@ -47,8 +47,7 @@ __program__ = "pycbc_pygrb_plot_stats_distribution" # ============================================================================= # Main script starts here # ============================================================================= -parser = ppu.pygrb_initialize_plot_parser(description=__doc__, - version=__version__) +parser = ppu.pygrb_initialize_plot_parser(description=__doc__) parser.add_argument("-F", "--trig-file", action="store", required=True, help="Location of off-source trigger file") parser.add_argument("-x", "--x-variable", required=True, diff --git a/bin/pygrb/pycbc_pygrb_pp_workflow b/bin/pygrb/pycbc_pygrb_pp_workflow index fc14632d79d..73620850291 100644 --- a/bin/pygrb/pycbc_pygrb_pp_workflow +++ b/bin/pygrb/pycbc_pygrb_pp_workflow @@ -49,7 +49,6 @@ __program__ = "pycbc_pygrb_pp_workflow" # Use the standard workflow command-line parsing routines. 
parser = argparse.ArgumentParser(description=__doc__[1:]) pycbc.add_common_pycbc_options(parser) -parser.add_argument("--version", action="version", version=__version__) parser.add_argument("-t", "--trig-files", action="store", required=True, nargs="+", help="The locations of the trigger files " diff --git a/bin/workflows/pycbc_make_bank_verifier_workflow b/bin/workflows/pycbc_make_bank_verifier_workflow index 27349611f70..8f5b6808477 100644 --- a/bin/workflows/pycbc_make_bank_verifier_workflow +++ b/bin/workflows/pycbc_make_bank_verifier_workflow @@ -156,7 +156,6 @@ class BanksimTablePointInjsExecutable(wf.Executable): _desc = __doc__[1:] parser = argparse.ArgumentParser(description=_desc) add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', version=__version__) wf.add_workflow_command_line_group(parser) wf.add_workflow_settings_cli(parser) args = parser.parse_args() diff --git a/bin/workflows/pycbc_make_faithsim_workflow b/bin/workflows/pycbc_make_faithsim_workflow index 37aae89a3a5..9bcb6a13ed0 100755 --- a/bin/workflows/pycbc_make_faithsim_workflow +++ b/bin/workflows/pycbc_make_faithsim_workflow @@ -18,9 +18,6 @@ from pycbc import add_common_pycbc_options, init_logging from pycbc.workflow.plotting import PlotExecutable from pycbc.workflow import setup_splittable_dax_generated -__version__ = pycbc.version.git_verbose_msg - - def make_faithsim_plot(workflow, analysis_time, input_file, out_dir, tags=None): tags = [] if tags is None else tags secs = workflow.cp.get_subsections("pycbc_faithsim_plots") @@ -100,7 +97,6 @@ class CollectResultsExecutable(wf.Executable): parser = argparse.ArgumentParser(description=__doc__) add_common_pycbc_options(parser) -parser.add_argument("--version", action="version", version=__version__) wf.add_workflow_command_line_group(parser) wf.add_workflow_settings_cli(parser) args = parser.parse_args() diff --git a/bin/workflows/pycbc_make_inference_inj_workflow b/bin/workflows/pycbc_make_inference_inj_workflow index a0cee1d1a94..ffb6a47167a 100644 --- a/bin/workflows/pycbc_make_inference_inj_workflow +++ b/bin/workflows/pycbc_make_inference_inj_workflow @@ -23,11 +23,9 @@ import logging import os import shlex import numpy -import pycbc.version import socket import sys -from pycbc import __version__ from pycbc import results, init_logging, add_common_pycbc_options from pycbc.results import layout from pycbc.results import metadata @@ -91,9 +89,6 @@ core.add_workflow_settings_cli(parser, include_subdax_opts=True) parser.add_argument("--seed", type=int, default=0, help="Starting to seed to use. This will be incremented " "one for each injection analyzed. 
Default is 0.") -# version option -parser.add_argument("--version", action="version", version=__version__, - help="Prints version information.") # parser command line opts = parser.parse_args() diff --git a/bin/workflows/pycbc_make_inference_plots_workflow b/bin/workflows/pycbc_make_inference_plots_workflow index a74b6afe0b8..5ce1e74dccf 100644 --- a/bin/workflows/pycbc_make_inference_plots_workflow +++ b/bin/workflows/pycbc_make_inference_plots_workflow @@ -41,7 +41,6 @@ from pycbc.workflow import core from pycbc.workflow import datafind from pycbc.workflow import plotting from pycbc.workflow import versioning -from pycbc import __version__ import pycbc.workflow.inference_followups as inffu @@ -134,8 +133,6 @@ add_common_pycbc_options(parser) configuration.add_workflow_command_line_group(parser) # workflow options core.add_workflow_settings_cli(parser, include_subdax_opts=True) -parser.add_argument("--version", action="version", version=__version__, - help="Prints version information.") opts = parser.parse_args() posterior_file_dir = 'posterior_files' diff --git a/bin/workflows/pycbc_make_inference_workflow b/bin/workflows/pycbc_make_inference_workflow index 9139c3cc0f3..4806a6365e3 100644 --- a/bin/workflows/pycbc_make_inference_workflow +++ b/bin/workflows/pycbc_make_inference_workflow @@ -40,7 +40,6 @@ from pycbc.workflow import core from pycbc.workflow import datafind from pycbc.workflow import plotting from pycbc.workflow import versioning -from pycbc import __version__ import pycbc.workflow.inference_followups as inffu from pycbc.workflow.jobsetup import PycbcInferenceExecutable @@ -162,9 +161,6 @@ parser.add_argument("--seed", type=int, default=0, help="Seed to use for inference job(s). If multiple " "events are analyzed, the seed will be incremented " "by one for each event.") -# version option -parser.add_argument("--version", action="version", version=__version__, - help="Prints version information.") # parser command line diff --git a/bin/workflows/pycbc_make_offline_search_workflow b/bin/workflows/pycbc_make_offline_search_workflow index 50cc7dd8c01..2d6a2aef729 100755 --- a/bin/workflows/pycbc_make_offline_search_workflow +++ b/bin/workflows/pycbc_make_offline_search_workflow @@ -22,10 +22,6 @@ finding and ranking then generate post-processing and plots. 
""" import pycbc -import pycbc.version -__version__ = pycbc.version.git_verbose_msg -__date__ = pycbc.version.date -__program__ = "pycbc_offline" import sys import socket @@ -153,7 +149,6 @@ def check_stop(job_name, container, workflow, finalize_workflow): parser = argparse.ArgumentParser(description=__doc__[1:]) pycbc.add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', version=__version__) wf.add_workflow_command_line_group(parser) wf.add_workflow_settings_cli(parser) args = parser.parse_args() diff --git a/bin/workflows/pycbc_make_psd_estimation_workflow b/bin/workflows/pycbc_make_psd_estimation_workflow index 78ad36c24d6..7a5dc74861a 100644 --- a/bin/workflows/pycbc_make_psd_estimation_workflow +++ b/bin/workflows/pycbc_make_psd_estimation_workflow @@ -29,15 +29,12 @@ from ligo import segments as _segments import lal import pycbc -import pycbc.version import pycbc.workflow from pycbc.results import save_fig_with_metadata, two_column_layout import pycbc.workflow parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', - version=pycbc.version.git_verbose_msg) pycbc.workflow.add_workflow_command_line_group(parser) pycbc.workflow.add_workflow_settings_cli(parser) args = parser.parse_args() diff --git a/bin/workflows/pycbc_make_sbank_workflow b/bin/workflows/pycbc_make_sbank_workflow index d413b5cb86f..70133647625 100644 --- a/bin/workflows/pycbc_make_sbank_workflow +++ b/bin/workflows/pycbc_make_sbank_workflow @@ -28,16 +28,9 @@ import os import argparse import pycbc -import pycbc.version import pycbc.workflow as wf import pycbc.workflow.pegasus_workflow as pwf -# Boiler-plate stuff -__author__ = "Ian Harry " -__version__ = pycbc.version.git_verbose_msg -__date__ = pycbc.version.date -__program__ = "pycbc_make_sbank_workflow" - # We define classes for all executables used in the workflow class SbankExecutable(wf.Executable): @@ -171,7 +164,6 @@ class CombineHDFBanksExecutable(wf.Executable): _desc = __doc__[1:] parser = argparse.ArgumentParser(description=_desc) pycbc.add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', version=__version__) parser.add_argument("--output-file", type=str, default=None, help="Specify the output file name. Either a name can be " "provided or a full path to file. Is this is not " diff --git a/bin/workflows/pycbc_make_uberbank_workflow b/bin/workflows/pycbc_make_uberbank_workflow index c0498971706..8d33187ca0c 100644 --- a/bin/workflows/pycbc_make_uberbank_workflow +++ b/bin/workflows/pycbc_make_uberbank_workflow @@ -142,7 +142,6 @@ class SbankDaxGenerator(wf.Executable): _desc = __doc__[1:] parser = argparse.ArgumentParser(description=_desc) pycbc.add_common_pycbc_options(parser) -parser.add_argument('--version', action='version', version=__version__) wf.add_workflow_command_line_group(parser) wf.add_workflow_settings_cli(parser) args = parser.parse_args() diff --git a/pycbc/__init__.py b/pycbc/__init__.py index 3b6f6f1e8f2..02d8b95e961 100644 --- a/pycbc/__init__.py +++ b/pycbc/__init__.py @@ -39,9 +39,11 @@ # before version.py has been generated. 
from .version import git_hash from .version import version as pycbc_version + from .version import PyCBCVersionAction except: git_hash = 'none' pycbc_version = 'none' + PyCBCVersionAction = None __version__ = pycbc_version @@ -79,12 +81,23 @@ def add_common_pycbc_options(parser): title="PyCBC common options", description="Common options for PyCBC executables.", ) - group.add_argument('-v', '--verbose', action='count', default=0, - help='Add verbosity to logging. Adding the option ' - 'multiple times makes logging progressively ' - 'more verbose, e.g. --verbose or -v provides ' - 'logging at the info level, but -vv or ' - '--verbose --verbose provides debug logging.') + group.add_argument( + '-v', + '--verbose', + action='count', + default=0, + help=( + 'Add verbosity to logging. Adding the option ' + 'multiple times makes logging progressively ' + 'more verbose, e.g. --verbose or -v provides ' + 'logging at the info level, but -vv or ' + '--verbose --verbose provides debug logging.' + ) + ) + group.add_argument( + '--version', + action=PyCBCVersionAction, + ) def init_logging(verbose=False, default_level=0, to_file=None, diff --git a/pycbc/_version.py b/pycbc/_version.py index cdb4e915a3d..a68d1981160 100644 --- a/pycbc/_version.py +++ b/pycbc/_version.py @@ -75,40 +75,82 @@ def get_lal_info(module, lib_glob): return version_str -class Version(argparse.Action): - """Subclass of argparse.Action that prints version information for PyCBC, - LAL and LALSimulation. +class PyCBCVersionAction(argparse._StoreAction): + """Subclass of argparse._StoreAction that prints version information for + PyCBC, and for LAL and LALSimulation depending on an integer variable. + The option can be supplied without the modifier integer. """ - def __init__(self, nargs=0, **kw): - super(Version, self).__init__(nargs=nargs, **kw) - - def __call__(self, parser, namespace, values, option_string=None): - import pycbc + default_help = ( + 'Display PyCBC version information and exit. ' + 'Can optionally supply a modifier integer to control the ' + 'verbosity of the version information. 
0 and 1 are the ' + 'same as --version; 2 provides more detailed PyCBC library ' + 'information; 3 provides information about PyCBC, ' + 'LAL and LALSimulation packages (if installed)' + ) - version_str = ( - "--- PyCBC Version --------------------------\n" + - pycbc.version.git_verbose_msg + - "\n\nImported from: " + inspect.getfile(pycbc) + def __init__(self, + option_strings, + dest, + help=default_help, + **kw): + argparse._StoreAction.__init__( + self, + option_strings, + dest=dest, + nargs='?', + help=help, + type=int, + **kw, ) - version_str += "\n\n--- LAL Version ----------------------------\n" - try: - import lal.git_version - except ImportError: - version_str += "\nLAL not installed in environment\n" - else: - version_str += get_lal_info(lal, '_lal*.so') + def __call__(self, parser, namespace, values, option_string=None): + version_no = 0 if values is None else values + import pycbc + setattr(namespace, self.dest, version_no) + if version_no <= 1: + # --version called with zero or default - return the + # simple version string + version_str = "PyCBC version: " + pycbc.version.version + if version_no > 1: + # --version with flag above 1 - return the verbose version string + version_str = ( + "--- PyCBC Version --------------------------\n" + + pycbc.version.git_verbose_msg + ) + if version_no > 2: + # --version called more than twice - print all version information + # possible + import __main__ + version_str += ( + "\n\nCurrent Executable: " + __main__.__file__ + + "\nImported from: " + inspect.getfile(pycbc) + + "\n\n--- LAL Version ----------------------------\n" + ) - version_str += "\n\n--- LALSimulation Version-------------------\n" - try: - import lalsimulation.git_version - except ImportError: - version_str += "\nLALSimulation not installed in environment\n" - else: - version_str += get_lal_info(lalsimulation, '_lalsimulation*.so') + try: + import lal.git_version + except ImportError: + version_str += "\nLAL not installed in environment\n" + else: + version_str += get_lal_info( + lal, + '_lal*.so' + ) + + version_str += "\n\n--- LALSimulation Version-------------------\n" + try: + import lalsimulation.git_version + except ImportError: + version_str += "\nLALSimulation not installed in environment\n" + else: + version_str += get_lal_info( + lalsimulation, + '_lalsimulation*.so' + ) print(version_str) sys.exit(0) -__all__ = ['Version'] +__all__ = ['PyCBCVersionAction'] diff --git a/pycbc/results/pygrb_postprocessing_utils.py b/pycbc/results/pygrb_postprocessing_utils.py index 638cee6fe14..8f9fd82fb0b 100644 --- a/pycbc/results/pygrb_postprocessing_utils.py +++ b/pycbc/results/pygrb_postprocessing_utils.py @@ -56,14 +56,13 @@ # * Add to the parser object the arguments used for BestNR calculation # * Add to the parser object the arguments for found/missed injection files # ============================================================================= -def pygrb_initialize_plot_parser(description=None, version=None): +def pygrb_initialize_plot_parser(description=None): """Sets up a basic argument parser object for PyGRB plotting scripts""" formatter_class = argparse.ArgumentDefaultsHelpFormatter parser = argparse.ArgumentParser(description=description, formatter_class=formatter_class) add_common_pycbc_options(parser) - parser.add_argument("--version", action="version", version=version) parser.add_argument("-o", "--output-file", default=None, help="Output file.") parser.add_argument("--x-lims", action="store", default=None, diff --git a/tools/pycbc_test_suite.sh 
b/tools/pycbc_test_suite.sh index 6e1f745194e..3d220599b0c 100755 --- a/tools/pycbc_test_suite.sh +++ b/tools/pycbc_test_suite.sh @@ -10,78 +10,69 @@ echo -e "\\n>> [`date`] Python Minor Version:" $PYTHON_MINOR_VERSION LOG_FILE=$(mktemp -t pycbc-test-log.XXXXXXXXXX) RESULT=0 +cat_output=true + +function test_result { + if test $? -ne 0 ; then + RESULT=1 + echo -e " FAILED!" + if $cat_output ; then + echo -e "---------------------------------------------------------" + cat $LOG_FILE + echo -e "---------------------------------------------------------" + fi + else + echo -e " Pass" + fi +} if [ "$PYCBC_TEST_TYPE" = "unittest" ] || [ -z ${PYCBC_TEST_TYPE+x} ]; then for prog in `find test -name '*.py' -print | egrep -v '(long|lalsim|test_waveform)'` do echo -e ">> [`date`] running unit test for $prog" python $prog &> $LOG_FILE - if test $? -ne 0 ; then - RESULT=1 - echo -e " FAILED!" - echo -e "---------------------------------------------------------" - cat $LOG_FILE - echo -e "---------------------------------------------------------" - else - echo -e " Pass." - fi + test_result done fi if [ "$PYCBC_TEST_TYPE" = "help" ] || [ -z ${PYCBC_TEST_TYPE+x} ]; then # check that all executables that do not require # special environments can return a help message - for prog in `find ${PATH//:/ } -maxdepth 1 -name 'pycbc*' -print 2>/dev/null | egrep -v '(pycbc_live_nagios_monitor|pycbc_make_offline_grb_workflow|pycbc_mvsc_get_features|pycbc_upload_xml_to_gracedb|pycbc_coinc_time)' | sort | uniq` + for prog in `find ${PATH//:/ } -maxdepth 1 -name 'pycbc*' -print 2>/dev/null | egrep -v '(pycbc_live_nagios_monitor|pycbc_mvsc_get_features|pycbc_coinc_time)' | sort | uniq` do echo -e ">> [`date`] running $prog --help" $prog --help &> $LOG_FILE - if test $? -ne 0 ; then - RESULT=1 - echo -e " FAILED!" - echo -e "---------------------------------------------------------" - cat $LOG_FILE - echo -e "---------------------------------------------------------" - else - echo -e " Pass." + test_result + if [[ `echo $prog | egrep '(pycbc_copy_output_map|pycbc_submit_dax|pycbc_stageout_failed_workflow)'` ]] ; then + continue fi + echo -e ">> [`date`] running $prog --version" + $prog --version &> $LOG_FILE + test_result + done + # also check that --version with increased modifiers works for one executable + echo -e ">> [`date`] running pycbc_inspiral --version with modifiers" + for modifier in "" 0 1 2 3 + do + echo -e ">> [`date`] running pycbc_inspiral --version ${modifier}" + pycbc_inspiral --version ${modifier} &> $LOG_FILE + test_result done - # also check that --version works for one executable - echo -e ">> [`date`] running pycbc_inspiral --version" - pycbc_inspiral --version &> $LOG_FILE - if test $? -ne 0 ; then - RESULT=1 - echo -e " FAILED!" - echo -e "---------------------------------------------------------" - cat $LOG_FILE - echo -e "---------------------------------------------------------" - else - echo -e " Pass." - fi fi +cat_output=false + if [ "$PYCBC_TEST_TYPE" = "search" ] || [ -z ${PYCBC_TEST_TYPE+x} ]; then # run pycbc inspiral test pushd examples/inspiral bash -e run.sh - if test $? -ne 0 ; then - RESULT=1 - echo -e " FAILED!" - echo -e "---------------------------------------------------------" - else - echo -e " Pass." - fi + test_result popd # run a quick bank placement example pushd examples/tmpltbank bash -e testNonspin2.sh - if test $? -ne 0 ; then - RESULT=1 - echo -e " FAILED!" - echo -e "---------------------------------------------------------" - else - echo -e " Pass." 
- fi + test_result popd # run PyCBC Live test @@ -90,26 +81,14 @@ if [ "$PYCBC_TEST_TYPE" = "search" ] || [ -z ${PYCBC_TEST_TYPE+x} ]; then # broken by a new release of python-ligo-lw pushd examples/live bash -e run.sh - if test $? -ne 0 ; then - RESULT=1 - echo -e " FAILED!" - echo -e "---------------------------------------------------------" - else - echo -e " Pass." - fi + test_result popd fi # run pycbc_multi_inspiral (PyGRB) test pushd examples/multi_inspiral bash -e run.sh - if test $? -ne 0 ; then - RESULT=1 - echo -e " FAILED!" - echo -e "---------------------------------------------------------" - else - echo -e " Pass." - fi + test_result popd fi @@ -118,97 +97,49 @@ if [ "$PYCBC_TEST_TYPE" = "inference" ] || [ -z ${PYCBC_TEST_TYPE+x} ]; then ## Run inference on 2D-normal analytic likelihood function pushd examples/inference/analytic-normal2d bash -e run.sh - if test $? -ne 0 ; then - RESULT=1 - echo -e " FAILED!" - echo -e "---------------------------------------------------------" - else - echo -e " Pass." - fi + test_result popd ## Run inference on BBH example; this will also run ## a test of create_injections pushd examples/inference/bbh-injection bash -e make_injection.sh - if test $? -ne 0 ; then - RESULT=1 - echo -e " FAILED!" - echo -e "---------------------------------------------------------" - else - echo -e " Pass." - fi + test_result # now run inference bash -e run_test.sh - if test $? -ne 0 ; then - RESULT=1 - echo -e " FAILED!" - echo -e "---------------------------------------------------------" - else - echo -e " Pass." - fi + test_result popd ## Run inference on GW150914 data pushd examples/inference/gw150914 bash -e run_test.sh - if test $? -ne 0 ; then - RESULT=1 - echo -e " FAILED!" - echo -e "---------------------------------------------------------" - else - echo -e " Pass." - fi + test_result popd ## Run inference using single template model pushd examples/inference/single bash -e get.sh bash -e run.sh - if test $? -ne 0 ; then - RESULT=1 - echo -e " FAILED!" - echo -e "---------------------------------------------------------" - else - echo -e " Pass." - fi + test_result popd ## Run inference using relative model pushd examples/inference/relative bash -e get.sh bash -e run.sh - if test $? -ne 0 ; then - RESULT=1 - echo -e " FAILED!" - echo -e "---------------------------------------------------------" - else - echo -e " Pass." - fi + test_result popd ## Run inference using the hierarchical model pushd examples/inference/hierarchical bash -e run_test.sh - if test $? -ne 0 ; then - RESULT=1 - echo -e " FAILED!" - echo -e "---------------------------------------------------------" - else - echo -e " Pass." - fi + test_result popd ## Run inference samplers pushd examples/inference/samplers bash -e run.sh - if test $? -ne 0 ; then - RESULT=1 - echo -e " FAILED!" - echo -e "---------------------------------------------------------" - else - echo -e " Pass." - fi + test_result popd ## Run pycbc_make_skymap example @@ -217,13 +148,7 @@ if [ "$PYCBC_TEST_TYPE" = "inference" ] || [ -z ${PYCBC_TEST_TYPE+x} ]; then # broken by a new release of python-ligo-lw pushd examples/make_skymap bash -e simulated_data.sh - if test $? -ne 0 ; then - RESULT=1 - echo -e " FAILED!" - echo -e "---------------------------------------------------------" - else - echo -e " Pass." 
- fi + test_result popd fi fi @@ -232,11 +157,7 @@ if [ "$PYCBC_TEST_TYPE" = "docs" ] || [ -z ${PYCBC_TEST_TYPE+x} ]; then echo -e "\\n>> [`date`] Building documentation" python setup.py build_gh_pages - if test $? -ne 0 ; then - echo -e " FAILED!" - echo -e "---------------------------------------------------------" - RESULT=1 - fi + test_result fi exit ${RESULT} From 4553ed70f063b872f0157b5a416d743fb8916635 Mon Sep 17 00:00:00 2001 From: Yifan Wang Date: Sun, 14 Jul 2024 01:22:25 +0200 Subject: [PATCH 02/35] fix a memory leakage bug in pycbc_brute_bank (#4814) * fix the memory leakage bug * typo --- bin/bank/pycbc_brute_bank | 1 + pycbc/pool.py | 14 +++++++++++++- 2 files changed, 14 insertions(+), 1 deletion(-) mode change 100644 => 100755 bin/bank/pycbc_brute_bank diff --git a/bin/bank/pycbc_brute_bank b/bin/bank/pycbc_brute_bank old mode 100644 new mode 100755 index 58fdea4c5a4..61626e48832 --- a/bin/bank/pycbc_brute_bank +++ b/bin/bank/pycbc_brute_bank @@ -259,6 +259,7 @@ class TriangleBank(object): ({k: params[k][idx] for k in params} for idx in range(total_num)) ): waveform_cache += [return_wf] + pool.close_pool() del pool for hp in waveform_cache: diff --git a/pycbc/pool.py b/pycbc/pool.py index e3606b151cb..a770b9537ec 100644 --- a/pycbc/pool.py +++ b/pycbc/pool.py @@ -91,7 +91,7 @@ def allmap(self, fcn, args): return results def map(self, func, items, chunksize=None): - """ Catch keyboard interuppts to allow the pool to exit cleanly. + """ Catch keyboard interrupts to allow the pool to exit cleanly. Parameters ---------- @@ -113,6 +113,13 @@ def map(self, func, items, chunksize=None): self.join() raise KeyboardInterrupt + def close_pool(self): + """ Close the pool and remove the reference + """ + self.close() + self.join() + atexit.unregister(_shutdown_pool) + def _dummy_broadcast(self, f, args): self.map(f, [args] * self.size) @@ -130,6 +137,11 @@ def map(self, f, items): imap = map imap_unordered = map + def close_pool(self): + ''' Dummy function to be consistent with BroadcastPool + ''' + pass + def use_mpi(require_mpi=False, log=True): """ Get whether MPI is enabled and if so the current size and rank """ From e1d273485105e229cf1ad51c3a9cc3b99b81c822 Mon Sep 17 00:00:00 2001 From: Marco Cusinato Date: Thu, 18 Jul 2024 14:15:17 +0200 Subject: [PATCH 03/35] Update pycbc_pygrb_efficiency (#4812) --- bin/pygrb/pycbc_pygrb_efficiency | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/bin/pygrb/pycbc_pygrb_efficiency b/bin/pygrb/pycbc_pygrb_efficiency index 3f18a8f4703..ebb40b6eda8 100644 --- a/bin/pygrb/pycbc_pygrb_efficiency +++ b/bin/pygrb/pycbc_pygrb_efficiency @@ -551,7 +551,7 @@ ax.plot(dist_plot_vals, (fraction_no_mc), 'g-', ax.errorbar(dist_plot_vals, (fraction_no_mc), yerr=[yerr_low_no_mc, yerr_high_no_mc], c='green') marg_eff = fraction_mc -if not np.isnan(marg_eff.sum()): +if np.nansum(marg_eff) > 0: ax.plot(dist_plot_vals, marg_eff, 'r-', label='Marginalised') ax.errorbar(dist_plot_vals, marg_eff, yerr=[yerr_low_mc, yerr_high_mc], c='red') @@ -624,10 +624,10 @@ ax.plot(dist_plot_vals, (fraction_no_mc), 'g-', ax.errorbar(dist_plot_vals, (fraction_no_mc), yerr=[yerr_low_no_mc, yerr_high_no_mc], c='green') marg_eff = fraction_mc -if not np.isnan(marg_eff.sum()): +if not np.nansum(marg_eff) > 0: ax.plot(dist_plot_vals, marg_eff, 'r-', label='Marginalised') ax.errorbar(dist_plot_vals, marg_eff, yerr=[yerr_low, yerr_high], c='red') -if not np.isnan(red_efficiency.sum()): +if not np.nansum(red_efficiency) > 0: ax.plot(dist_plot_vals, red_efficiency, 'm-', 
label='Inc. counting errors') ax.set_ylim([0, 1]) From 50a5caf4e4f4fa6581b66bea23e6b02a2b33b143 Mon Sep 17 00:00:00 2001 From: Francesco Pannarale Date: Sun, 21 Jul 2024 16:02:35 +0200 Subject: [PATCH 04/35] PyGRB trigger clustering with short timeslides (#4820) * grb_trig_cluster now works for timeslides * Fixes for PR --- bin/pygrb/pycbc_grb_trig_cluster | 196 +++++++++++++++++++------------ 1 file changed, 118 insertions(+), 78 deletions(-) diff --git a/bin/pygrb/pycbc_grb_trig_cluster b/bin/pygrb/pycbc_grb_trig_cluster index c0fc0937474..5f0f1007256 100644 --- a/bin/pygrb/pycbc_grb_trig_cluster +++ b/bin/pygrb/pycbc_grb_trig_cluster @@ -168,105 +168,145 @@ outfile = os.path.join( ), ) -# -- generate clustering bins ------------------- +# this list contains the indexing of clusters from all slides +all_clusters = [] -nbins = int((end - start) // win + 1) -bins = [[] for i in range(nbins)] -loudsnr = numpy.zeros(nbins) -loudtime = numpy.zeros(nbins) -clusters = [] - -# -- cluster ------------------------------------ +# load necessary information from all triggers with HFile(args.trig_file, "r") as h5f: - time = h5f["network"]["end_time_gc"][()] - snr = h5f["network"][args.rank_column][()] + all_times = h5f["network/end_time_gc"][()] + all_snrs = h5f[f"network/{args.rank_column}"][()] + slide_ids = h5f["network/slide_id"][()] # empty file (no triggers), so just copy the file -if not time.size: +if not all_times.size: shutil.copyfile(args.trig_file, outfile) msg = "trigger file is empty\n" msg += "copied input file to {}".format(outfile) logging.info(msg) sys.exit(0) -# find loudest trigger in each bin -for i in tqdm.tqdm(range(time.size), desc="Initialising bins", - disable=not args.verbose, total=time.size, unit='triggers', - **TQDM_KW): - t, s = time[i], snr[i] - idx = int(float(t - start) // win) - bins[idx].append(i) - if s > loudsnr[idx]: - loudsnr[idx] = s - loudtime[idx] = t - -prev = -1 -nxt_ = 1 -first = True -last = False -add_cluster = clusters.append -nclusters = 0 - -# cluster -bar = tqdm.tqdm(bins, desc="Clustering bins", - disable=not args.verbose, total=nbins, unit='bins', - postfix=dict(nclusters=0), **TQDM_KW) -for i, bin_ in enumerate(bar): - if not bin_: # empty - continue - - for idx in bin_: - t, s = time[idx], snr[idx] - - if s < loudsnr[i]: # not loudest in own bin +# -- cluster ------------------------------------ + +unique_slide_ids = numpy.unique(slide_ids) +max_slide_id = max(unique_slide_ids) +msg = 'Clustering '+str(len(slide_ids))+' triggers from ' +msg += str(len(unique_slide_ids))+' slides' +logging.info(msg) + +for slide_id in unique_slide_ids: + # indices to slice current slide + slide_id_pos = numpy.where(slide_ids == slide_id)[0] + # all time and snr values for the current slide + time = all_times[slide_id_pos] + snr = all_snrs[slide_id_pos] + + # generate clustering bins + nbins = int((end - start) // win + 1) + bins = [[] for i in range(nbins)] + loudsnr = numpy.zeros(nbins) + loudtime = numpy.zeros(nbins) + # list to index clusters for current slide + clusters = [] + + # find loudest trigger in each bin, for the current slide + for i in tqdm.tqdm(range(time.size), + desc="Initialising bins", + disable=not args.verbose, + total=time.size, + unit='triggers', + **TQDM_KW): + t, s = time[i], snr[i] + idx = int(float(t - start) // win) + bins[idx].append(i) + if s > loudsnr[idx]: + loudsnr[idx] = s + loudtime[idx] = t + + prev = -1 + nxt_ = 1 + first = True + last = False + add_cluster = clusters.append + nclusters = 0 + + # cluster + bar = 
tqdm.tqdm(bins, + desc="Clustering bins", + disable=not args.verbose, + total=nbins, + unit='bins', + postfix=dict(nclusters=0), + **TQDM_KW) + for i, bin_ in enumerate(bar): + if not bin_: # empty continue - # check loudest event in previous bin - if not first: - prevt = loudtime[prev] - if prevt and abs(prevt - t) < win and s < loudsnr[prev]: - continue + for idx in bin_: + t, s = time[idx], snr[idx] - # check loudest event in next bin - if not last: - nextt = loudtime[nxt_] - if nextt and abs(nextt - t) < win and s < loudsnr[nxt_]: + if s < loudsnr[i]: # not loudest in own bin continue - loudest = True - - # check all events in previous bin - if not first and prevt and abs(prevt - t) < win: - for id2 in bins[prev]: - if abs(time[id2] - t) < win and s < snr[id2]: - loudest = False - break - - # check all events in next bin - if loudest and not last and nextt and abs(nextt - t) < win: - for id2 in bins[nxt_]: - if abs(time[id2] - t) < win and s < snr[id2]: - loudest = False - break - - # this is loudest in its vicinity, keep it - if loudest: - add_cluster(idx) - nclusters += 1 - bar.set_postfix(nclusters=nclusters) + # check loudest event in previous bin + if not first: + prevt = loudtime[prev] + if prevt and abs(prevt - t) < win and s < loudsnr[prev]: + continue + + # check loudest event in next bin + if not last: + nextt = loudtime[nxt_] + if nextt and abs(nextt - t) < win and s < loudsnr[nxt_]: + continue + + loudest = True + + # check all events in previous bin + if not first and prevt and abs(prevt - t) < win: + for id2 in bins[prev]: + if abs(time[id2] - t) < win and s < snr[id2]: + loudest = False + break + + # check all events in next bin + if loudest and not last and nextt and abs(nextt - t) < win: + for id2 in bins[nxt_]: + if abs(time[id2] - t) < win and s < snr[id2]: + loudest = False + break + + # this is loudest in its vicinity, keep it + if loudest: + add_cluster(idx) + nclusters += 1 + bar.set_postfix(nclusters=nclusters) + + # update things for next time + first = False + last = i == nbins - 1 + prev += 1 + nxt_ += 1 + + bar.update() + + # clusters is the indexing array for a specific slide_id + # all_clusters is the (absolute) indexing of all clustered triggers + # so look up the indices [clusters] within the absolute indexing array + # slide_id_pos which is built at each slide_id + all_clusters += list(slide_id_pos[clusters]) + msg = 'Slide '+str(slide_id)+'/'+str(max_slide_id) + msg += ' has '+str(len(slide_id_pos)) + msg += ' trigers that were clustered to '+str(len(clusters)) + logging.info(msg) - # update things for next time - first = False - last = i == nbins - 1 - prev += 1 - nxt_ += 1 +logging.info('Total clustered triggers: '+str(len(all_clusters))) - bar.update() +# -- write output -------------------------------- slice_hdf5( args.trig_file, outfile, - numpy.asarray(clusters), + numpy.asarray(all_clusters), verbose=args.verbose, ) From 3fca67fa79bbcc3516274e8f14638f7d7c4943ae Mon Sep 17 00:00:00 2001 From: Gareth S Cabourn Davies Date: Mon, 22 Jul 2024 11:20:07 +0100 Subject: [PATCH 05/35] Use singles FAR calculation properly in the example search (#4819) --- examples/search/analysis.ini | 3 +++ 1 file changed, 3 insertions(+) diff --git a/examples/search/analysis.ini b/examples/search/analysis.ini index 2bed5df7b2c..4361ef1eb2c 100644 --- a/examples/search/analysis.ini +++ b/examples/search/analysis.ini @@ -217,6 +217,9 @@ fit-threshold = ${sngls_statmap|fit-threshold} [combine_statmap] cluster-window = ${statmap|cluster-window} +far-calculation-method = 
${sngls_statmap|far-calculation-method} +fit-function = ${sngls_statmap|fit-function} +fit-threshold = ${sngls_statmap|fit-threshold} [combine_statmap-full_data] max-hierarchical-removal = ${workflow-results|max-hierarchical-removal} From f756e18fedfb31b7a5094add7d9c010f72bf5b68 Mon Sep 17 00:00:00 2001 From: Alex Nitz Date: Mon, 22 Jul 2024 21:58:34 +0100 Subject: [PATCH 06/35] =?UTF-8?q?add=20ability=20to=20cache=20detector=20r?= =?UTF-8?q?esponse=20to=20marginalize=20time=20model=20and=20us=E2=80=A6?= =?UTF-8?q?=20(#4806)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * add ability to cache detect response to marginalize time model and use alternate sampler rate than data * cc * update * update example * update example --- examples/inference/margtime/margtime.ini | 5 ++- examples/inference/margtime/run.sh | 3 +- .../models/marginalized_gaussian_noise.py | 43 ++++++++++++++----- 3 files changed, 39 insertions(+), 12 deletions(-) diff --git a/examples/inference/margtime/margtime.ini b/examples/inference/margtime/margtime.ini index ce2bf7bebbd..fd17c0e7e5a 100644 --- a/examples/inference/margtime/margtime.ini +++ b/examples/inference/margtime/margtime.ini @@ -3,8 +3,11 @@ name = marginalized_time low-frequency-cutoff = 30.0 +# This is the sample rate used for the model and marginalization +sample_rate = 4096 + marginalize_vector_params = tc, ra, dec, polarization -marginalize_vector_samples = 500 +marginalize_vector_samples = 2000 ; You shouldn't use phase marginalization if the approximant has ; higher-order modes diff --git a/examples/inference/margtime/run.sh b/examples/inference/margtime/run.sh index 383d348ef29..3d9a7f32a73 100644 --- a/examples/inference/margtime/run.sh +++ b/examples/inference/margtime/run.sh @@ -1,6 +1,6 @@ OMP_NUM_THREADS=1 pycbc_inference \ --config-file `dirname "$0"`/margtime.ini \ ---nprocesses 2 \ +--nprocesses 1 \ --processing-scheme mkl \ --output-file marg_150914.hdf \ --seed 0 \ @@ -23,4 +23,5 @@ pycbc_inference_plot_posterior \ "primary_mass(mass1, mass2) / (1 + redshift(distance)):srcmass1" \ "secondary_mass(mass1, mass2) / (1 + redshift(distance)):srcmass2" \ ra dec tc inclination coa_phase polarization distance \ +--vmin 23.2 \ --z-arg snr diff --git a/pycbc/inference/models/marginalized_gaussian_noise.py b/pycbc/inference/models/marginalized_gaussian_noise.py index 0a1342d0432..9052a5018ed 100644 --- a/pycbc/inference/models/marginalized_gaussian_noise.py +++ b/pycbc/inference/models/marginalized_gaussian_noise.py @@ -19,6 +19,7 @@ """ import itertools +import logging import numpy from scipy import special @@ -207,8 +208,10 @@ class MarginalizedTime(DistMarg, BaseGaussianNoise): def __init__(self, variable_params, data, low_frequency_cutoff, psds=None, high_frequency_cutoff=None, normalize=False, + sample_rate=None, **kwargs): + self.sample_rate = float(sample_rate) self.kwargs = kwargs variable_params, kwargs = self.setup_marginalization( variable_params, @@ -241,6 +244,14 @@ def __init__(self, variable_params, self.dets = {} + if sample_rate is not None: + for ifo in self.data: + if self.sample_rate < self.data[ifo].sample_rate: + raise ValueError("Model sample rate was set less than the" + " data. ") + logging.info("Using %s sample rate for marginalization", + sample_rate) + def _nowaveform_loglr(self): """Convenience function to set loglr values if no waveform generated. 
""" @@ -296,8 +307,15 @@ def _loglr(self): hp[self._kmin[det]:kmax] *= self._weight[det][slc] hc[self._kmin[det]:kmax] *= self._weight[det][slc] - hp.resize(len(self._whitened_data[det])) - hc.resize(len(self._whitened_data[det])) + # Use a higher sample rate if requested + if self.sample_rate is not None: + tlen = int(round(self.sample_rate * + self.whitened_data[det].duration)) + flen = tlen // 2 + 1 + hp.resize(flen) + hc.resize(flen) + self._whitened_data[det].resize(flen) + cplx_hpd[det], _, _ = matched_filter_core( hp, self._whitened_data[det], @@ -325,15 +343,20 @@ def _loglr(self): for det in wfs: if det not in self.dets: self.dets[det] = Detector(det) - fp, fc = self.dets[det].antenna_pattern( - params['ra'], - params['dec'], - params['polarization'], - params['tc']) - dt = self.dets[det].time_delay_from_earth_center(params['ra'], - params['dec'], - params['tc']) + + if self.precalc_antenna_factors: + fp, fc, dt = self.get_precalc_antenna_factors(det) + else: + fp, fc = self.dets[det].antenna_pattern( + params['ra'], + params['dec'], + params['polarization'], + params['tc']) + dt = self.dets[det].time_delay_from_earth_center(params['ra'], + params['dec'], + params['tc']) dtc = params['tc'] + dt + cplx_hd = fp * cplx_hpd[det].at_time(dtc, interpolate='quadratic') cplx_hd += fc * cplx_hcd[det].at_time(dtc, From 3d9f7e2590a6d06daedb0ba012d30f24ddb9c604 Mon Sep 17 00:00:00 2001 From: Gareth S Cabourn Davies Date: Tue, 23 Jul 2024 09:01:50 +0100 Subject: [PATCH 07/35] Add mechanism for re-loading the statistic files in live (#4816) * Add mechanism for re-loading the statistic files * CC fixes * CC 2 * Fix for/else statement - wrongly implemented --- bin/pycbc_live | 11 ++ pycbc/events/coinc.py | 82 ++++++++-- pycbc/events/single.py | 103 +++++++++++- pycbc/events/stat.py | 270 ++++++++++++++++++++++++++++++-- test/test_live_coinc_compare.py | 3 +- 5 files changed, 443 insertions(+), 26 deletions(-) diff --git a/bin/pycbc_live b/bin/pycbc_live index 2595adf44b4..1b45e22cbb4 100755 --- a/bin/pycbc_live +++ b/bin/pycbc_live @@ -1048,6 +1048,8 @@ parser.add_argument('--psd-variation', action='store_true', "values for each single detector triggers found by " "the search. 
Required when using a single detector " "ranking statistic that includes psd variation.") +parser.add_argument("--statistic-refresh-rate", type=float, help="How often to refresh the statistic object") scheme.insert_processing_option_group(parser) LiveSingle.insert_args(parser) @@ -1166,6 +1168,8 @@ with ctx: if analyze_singles and evnt.rank == 0: sngl_estimator = {ifo: LiveSingle.from_cli(args, ifo) for ifo in evnt.trigg_ifos} + for estim in sngl_estimator.values(): + estim.start_refresh_thread() # Create double coincident background estimator # for every pair of triggering interferometers @@ -1186,6 +1190,10 @@ with ctx: setproctitle('PyCBC Live {} bg estimator'.format( ppdets(c.ifos, '-'))) + def estimator_refresh_threads(_): + c = estimators[my_coinc_id] + c.start_refresh_thread() + def get_coinc(results): c = estimators[my_coinc_id] r = c.add_singles(results) @@ -1200,6 +1208,7 @@ with ctx: coinc_pool = BroadcastPool(len(estimators)) coinc_pool.allmap(set_coinc_id, range(len(estimators))) + coinc_pool.broadcast(estimator_refresh_threads, None) logging.info('Starting') @@ -1428,3 +1437,5 @@ if evnt.rank == 1: if args.enable_profiling is not None and evnt.rank == args.enable_profiling: pr.dump_stats(f'profiling_rank_{evnt.rank:03d}') + +logging.info("Exiting as the end time has been reached") diff --git a/pycbc/events/coinc.py b/pycbc/events/coinc.py index 7bdf298a23b..2fcf4a48a30 100644 --- a/pycbc/events/coinc.py +++ b/pycbc/events/coinc.py @@ -25,9 +25,18 @@ coincident triggers. """ -import numpy, logging, pycbc.pnutils, copy +import numpy +import logging +import copy +from datetime import datetime as dt +import time as timemod +import threading + +import pycbc.pnutils from pycbc.detector import Detector, ppdets from pycbc import conversions as conv + +from . import stat as pycbcstat from .eventmgr_cython import coincbuffer_expireelements from .eventmgr_cython import coincbuffer_numgreater from .eventmgr_cython import timecoincidence_constructidxs @@ -829,6 +838,7 @@ def __init__(self, num_templates, analysis_block, background_statistic, ifar_limit=100, timeslide_interval=.035, coinc_window_pad=.002, + statistic_refresh_rate=None, return_background=False, **kwargs): """ @@ -856,6 +866,9 @@ def __init__(self, num_templates, analysis_block, background_statistic, coinc_window_pad: float Amount of time allowed to form a coincidence in addition to the time of flight in seconds. + statistic_refresh_rate: float + How regularly to run the update_files method on the statistic + class (in seconds); by default this is not done return_background: boolean If true, background triggers will also be included in the file output. @@ -863,11 +876,10 @@ def __init__(self, num_templates, analysis_block, background_statistic, Additional options for the statistic to use. See stat.py for more details on statistic options. """ - from . 
import stat self.num_templates = num_templates self.analysis_block = analysis_block - stat_class = stat.get_statistic(background_statistic) + stat_class = pycbcstat.get_statistic(background_statistic) self.stat_calculator = stat_class( sngl_ranking, stat_files, @@ -875,6 +887,10 @@ def __init__(self, num_templates, analysis_block, background_statistic, **kwargs ) + self.time_stat_refreshed = dt.now() + self.stat_calculator_lock = threading.Lock() + self.statistic_refresh_rate = statistic_refresh_rate + self.timeslide_interval = timeslide_interval self.return_background = return_background self.coinc_window_pad = coinc_window_pad @@ -955,7 +971,6 @@ def pick_best_coinc(cls, coinc_results): @classmethod def from_cli(cls, args, num_templates, analysis_chunk, ifos): - from . import stat # Allow None inputs stat_files = args.statistic_files or [] @@ -964,7 +979,7 @@ def from_cli(cls, args, num_templates, analysis_chunk, ifos): # flatten the list of lists of filenames to a single list (may be empty) stat_files = sum(stat_files, []) - kwargs = stat.parse_statistic_keywords_opt(stat_keywords) + kwargs = pycbcstat.parse_statistic_keywords_opt(stat_keywords) return cls(num_templates, analysis_chunk, args.ranking_statistic, @@ -975,13 +990,13 @@ def from_cli(cls, args, num_templates, analysis_chunk, ifos): timeslide_interval=args.timeslide_interval, ifos=ifos, coinc_window_pad=args.coinc_window_pad, + statistic_refresh_rate=args.statistic_refresh_rate, **kwargs) @staticmethod def insert_args(parser): - from . import stat - stat.insert_statistic_option_group(parser) + pycbcstat.insert_statistic_option_group(parser) group = parser.add_argument_group('Coincident Background Estimation') group.add_argument('--store-background', action='store_true', @@ -1374,11 +1389,12 @@ def add_singles(self, results): valid_ifos = [k for k in results.keys() if results[k] and k in self.ifos] if len(valid_ifos) == 0: return {} - # Add single triggers to the internal buffer - self._add_singles_to_buffer(results, ifos=valid_ifos) + with self.stat_calculator_lock: + # Add single triggers to the internal buffer + self._add_singles_to_buffer(results, ifos=valid_ifos) - # Calculate zerolag and background coincidences - _, coinc_results = self._find_coincs(results, valid_ifos=valid_ifos) + # Calculate zerolag and background coincidences + _, coinc_results = self._find_coincs(results, valid_ifos=valid_ifos) # record if a coinc is possible in this chunk if len(valid_ifos) == 2: @@ -1386,6 +1402,50 @@ def add_singles(self, results): return coinc_results + def start_refresh_thread(self): + """ + Start a thread managing whether the stat_calculator will be updated + """ + thread = threading.Thread( + target=self.refresh_statistic, + daemon=True + ) + logger.info( + "Starting %s statistic refresh thread", + ''.join(self.ifos), + ) + thread.start() + + def refresh_statistic(self): + """ + Function to refresh the stat_calculator at regular intervals + """ + while True: + # How long since the statistic was last updated? 
+ since_stat_refresh = \ + (dt.now() - self.time_stat_refreshed).total_seconds() + logger.debug( + "%s statistic: Waiting %.3fs for next refresh", + ''.join(self.ifos), + self.statistic_refresh_rate - since_stat_refresh, + ) + timemod.sleep( + self.statistic_refresh_rate - since_stat_refresh + 1 + ) __all__ = [ "background_bin_from_string", diff --git a/pycbc/events/single.py b/pycbc/events/single.py index 2df8fb8f4e6..e62b3d8379d 100644 --- a/pycbc/events/single.py +++ b/pycbc/events/single.py @@ -2,12 +2,15 @@ """ import logging import copy -import h5py +import threading +from datetime import datetime as dt +import time import numpy as np from pycbc.events import trigger_fits as fits, stat from pycbc.types import MultiDetOptionAction from pycbc import conversions as conv +from pycbc.io.hdf import HFile from pycbc import bin_utils logger = logging.getLogger('pycbc.events.single') @@ -25,13 +28,58 @@ def __init__(self, ifo, statistic=None, sngl_ranking=None, stat_files=None, + statistic_refresh_rate=None, **kwargs): + """ + Parameters + ---------- + ifo: str + Name of the ifo that is being analyzed + newsnr_threshold: float + Minimum value for the reweighted SNR of the event under + consideration; which reweighted SNR is used is defined by sngl_ranking + reduced_chisq_threshold: float + Maximum value for the reduced chisquared of the event under + consideration + duration_threshold: float + Minimum value for the duration of the template which found the + event under consideration + fit_file: str or path + (optional) the file containing information about the + single-detector event significance distribution fits + sngl_ifar_est_dist: str + Which trigger distribution to use when calculating IFAR of + single-detector events + fixed_ifar: float + (optional) give a fixed IFAR value to any event which passes the + threshold criteria + statistic: str + The name of the statistic to rank events. + sngl_ranking: str + The single detector ranking to use with the background statistic + stat_files: list of strs + List of filenames that contain information used to construct + various coincident statistics. + maximum_ifar: float + The largest inverse false alarm rate in years that we would like to + calculate. + statistic_refresh_rate: float + How regularly to run the update_files method on the statistic + class (in seconds); by default this is not done + kwargs: dict + Additional options for the statistic to use. See stat.py + for more details on statistic options. 
+ """ self.ifo = ifo self.fit_file = fit_file self.sngl_ifar_est_dist = sngl_ifar_est_dist self.fixed_ifar = fixed_ifar self.maximum_ifar = maximum_ifar + self.time_stat_refreshed = dt.now() + self.stat_calculator_lock = threading.Lock() + self.statistic_refresh_rate = statistic_refresh_rate + stat_class = stat.get_statistic(statistic) self.stat_calculator = stat_class( sngl_ranking, @@ -188,6 +236,7 @@ def from_cli(cls, args, ifo): statistic=args.ranking_statistic, sngl_ranking=args.sngl_ranking, stat_files=stat_files, + statistic_refresh_rate=args.statistic_refresh_rate, **kwargs ) @@ -227,7 +276,9 @@ def check(self, trigs, data_reader): trigsc['chisq_dof'] = (cut_trigs['chisq_dof'] + 2) / 2 # Calculate the ranking reweighted SNR for cutting - single_rank = self.stat_calculator.get_sngl_ranking(trigsc) + with self.stat_calculator_lock: + single_rank = self.stat_calculator.get_sngl_ranking(trigsc) + sngl_idx = single_rank > self.thresholds['ranking'] if not np.any(sngl_idx): return None @@ -236,8 +287,9 @@ def check(self, trigs, data_reader): for k in trigs} # Calculate the ranking statistic - sngl_stat = self.stat_calculator.single(cutall_trigs) - rank = self.stat_calculator.rank_stat_single((self.ifo, sngl_stat)) + with self.stat_calculator_lock: + sngl_stat = self.stat_calculator.single(cutall_trigs) + rank = self.stat_calculator.rank_stat_single((self.ifo, sngl_stat)) # 'cluster' by taking the maximal statistic value over the trigger set i = rank.argmax() @@ -265,7 +317,7 @@ def calculate_ifar(self, sngl_ranking, duration): return self.fixed_ifar[self.ifo] try: - with h5py.File(self.fit_file, 'r') as fit_file: + with HFile(self.fit_file, 'r') as fit_file: bin_edges = fit_file['bins_edges'][:] live_time = fit_file[self.ifo].attrs['live_time'] thresh = fit_file.attrs['fit_threshold'] @@ -303,3 +355,44 @@ def calculate_ifar(self, sngl_ranking, duration): rate_louder *= len(rates) return min(conv.sec_to_year(1. / rate_louder), self.maximum_ifar) + + def start_refresh_thread(self): + """ + Start a thread managing whether the stat_calculator will be updated + """ + thread = threading.Thread( + target=self.refresh_statistic, + daemon=True + ) + logger.info("Starting %s statistic refresh thread", self.ifo) + thread.start() + + def refresh_statistic(self): + """ + Function to refresh the stat_calculator at regular intervals + """ + while True: + # How long since the statistic was last updated? + since_stat_refresh = \ + (dt.now() - self.time_stat_refreshed).total_seconds() + if since_stat_refresh > self.statistic_refresh_rate: + self.time_stat_refreshed = dt.now() + logger.info( + "Checking %s statistic for updated files", + self.ifo, + ) + with self.stat_calculator_lock: + self.stat_calculator.check_update_files() + # Sleep one second for safety + time.sleep(1) + # Now use the time it took the check / update the statistic + since_stat_refresh = \ + (dt.now() - self.time_stat_refreshed).total_seconds() + logger.debug( + "%s statistic: Waiting %.3fs for next refresh", + self.ifo, + self.statistic_refresh_rate - since_stat_refresh + ) + time.sleep( + self.statistic_refresh_rate - since_stat_refresh + ) diff --git a/pycbc/events/stat.py b/pycbc/events/stat.py index fb4bc8eee9a..f61e7c55b66 100644 --- a/pycbc/events/stat.py +++ b/pycbc/events/stat.py @@ -26,8 +26,11 @@ values. """ import logging +from hashlib import sha1 +from datetime import datetime as dt import numpy import h5py + from . import ranking from . 
import coinc_rate from .eventmgr_cython import logsignalrateinternals_computepsignalbins @@ -68,6 +71,9 @@ def __init__(self, sngl_ranking, files=None, ifos=None, **kwargs): " %s. Can't provide more than one!" % stat) logger.info("Found file %s for stat %s", filename, stat) self.files[stat] = filename + # Keep track of when stat files hashes so it can be + # reloaded if it has changed + self.file_hashes = self.get_file_hashes() # Provide the dtype of the single detector method's output # This is used by background estimation codes that need to maintain @@ -85,6 +91,67 @@ def __init__(self, sngl_ranking, files=None, ifos=None, **kwargs): if key.startswith('sngl_ranking_'): self.sngl_ranking_kwargs[key[13:]] = value + def get_file_hashes(self): + """ + Get sha1 hashes for all the files + """ + logger.debug( + "Getting file hashes" + ) + start = dt.now() + file_hashes = {} + for stat, filename in self.files.items(): + with open(filename, 'rb') as file_binary: + file_hashes[stat] = sha1(file_binary.read()).hexdigest() + logger.debug( + "Got file hashes for %d files, took %.3es", + len(self.files), + (dt.now() - start).total_seconds() + ) + return file_hashes + + def files_changed(self): + """ + Compare hashes of files now with the ones we have cached + """ + changed_file_hashes = self.get_file_hashes() + for stat, old_hash in self.file_hashes.items(): + if changed_file_hashes[stat] != old_hash: + logger.info( + "%s statistic file %s has changed", + ''.join(self.ifos), + stat, + ) + else: + # Remove the dataset from the dictionary of hashes + del changed_file_hashes[stat] + + if changed_file_hashes == {}: + logger.debug( + "No %s statistic files have changed", + ''.join(self.ifos) + ) + + return list(changed_file_hashes.keys()) + + def check_update_files(self): + """ + Check whether files associated with the statistic need updated, + then do so for each file which needs changing + """ + files_changed = self.files_changed() + for file_key in files_changed: + self.update_file(file_key) + self.file_hashes = self.get_file_hashes() + + def update_file(self, key): + """ + Update file used in this statistic referenced by key. + """ + err_msg = "This function is a stub that should be overridden by the " + err_msg += "sub-classes. You shouldn't be seeing this error!" + raise NotImplementedError(err_msg) + def get_sngl_ranking(self, trigs): """ Returns the ranking for the single detector triggers. @@ -351,6 +418,13 @@ def __init__(self, sngl_ranking, files=None, ifos=None, if pregenerate_hist and not len(ifos) == 1: self.get_hist() + elif len(ifos) == 1: + # remove all phasetd files from self.files and self.file_hashes, + # as they are not needed + for k in list(self.files.keys()): + if 'phasetd_newsnr' in k: + del self.files[k] + del self.file_hashes[k] def get_hist(self, ifos=None): """ @@ -380,8 +454,20 @@ def get_hist(self, ifos=None): selected = name break + # If there are other phasetd_newsnr files, they aren't needed. 
+ # So tidy them out of the self.files dictionary + rejected = [key for key in self.files.keys() + if 'phasetd_newsnr' in key and not key == selected] + for k in rejected: + del self.files[k] + del self.file_hashes[k] + if selected is None and len(ifos) > 1: raise RuntimeError("Couldn't figure out which stat file to use") + if len(ifos) == 1: + # We dont need the histogram file, but we are trying to get one + # just skip it in this case + return logger.info("Using signal histogram %s for ifos %s", selected, ifos) weights = {} @@ -495,6 +581,30 @@ def get_hist(self, ifos=None): self.has_hist = True + def update_file(self, key): + """ + Update file used in this statistic. + If others are used (i.e. this statistic is inherited), they will + need updated separately + """ + if 'phasetd_newsnr' in key and not len(self.ifos) == 1: + if ''.join(sorted(self.ifos)) not in key: + logger.debug( + "%s file is not used for %s statistic", + key, + ''.join(self.ifos) + ) + return False + logger.info( + "Updating %s statistic %s file", + ''.join(self.ifos), + key + ) + # This is a PhaseTDStatistic file which needs updating + self.get_hist() + return True + return False + def logsignalrate(self, stats, shift, to_shift): """ Calculate the normalized log rate density of signals via lookup @@ -711,7 +821,9 @@ def coinc_lim_for_thresh(self, sngls_list, thresh, limifo, if not self.has_hist: self.get_hist() - fixed_statsq = sum([b['snglstat'] ** 2 for a, b in sngls_list]) + fixed_statsq = sum( + [b['snglstat'] ** 2 for a, b in sngls_list if a != limifo] + ) s1 = thresh ** 2. - fixed_statsq # Assume best case scenario and use maximum signal rate s1 -= 2. * self.hist_max @@ -752,9 +864,11 @@ def __init__(self, sngl_ranking, files=None, ifos=None, **kwargs): parsed_attrs = [f.split('-') for f in self.files.keys()] self.bg_ifos = [at[0] for at in parsed_attrs if (len(at) == 2 and at[1] == 'fit_coeffs')] + if not len(self.bg_ifos): raise RuntimeError("None of the statistic files has the required " "attribute called {ifo}-fit_coeffs !") + self.fits_by_tid = {} self.alphamax = {} for i in self.bg_ifos: @@ -809,6 +923,26 @@ def assign_fits(self, ifo): return fits_by_tid_dict + def update_file(self, key): + """ + Update file used in this statistic. + If others are used (i.e. this statistic is inherited), they will + need updated separately + """ + if key.endswith('-fit_coeffs'): + # This is a ExpFitStatistic file which needs updating + # Which ifo is it? + ifo = key[:2] + self.fits_by_tid[ifo] = self.assign_fits(ifo) + self.get_ref_vals(ifo) + logger.info( + "Updating %s statistic %s file", + ''.join(self.ifos), + key + ) + return True + return False + def get_ref_vals(self, ifo): """ Get the largest `alpha` value over all templates for given ifo. @@ -1151,6 +1285,18 @@ def __init__(self, sngl_ranking, files=None, ifos=None, **kwargs): PhaseTDStatistic.__init__(self, sngl_ranking, files=files, ifos=ifos, **kwargs) + def update_file(self, key): + """ + Update file used in this statistic. + If others are used (i.e. 
this statistic is inherited), they will + need updated separately + """ + # Here we inherit the PhaseTD and ExpFit file checks, + # nothing else needs doing + uf_exp_fit = ExpFitCombinedSNR.update_file(self, key) + uf_phasetd = PhaseTDStatistic.update_file(self, key) + return uf_exp_fit or uf_phasetd + def single(self, trigs): """ Calculate the necessary single detector information @@ -1299,6 +1445,24 @@ def reassign_rate(self, ifo): self.fits_by_tid[ifo]['fit_by_rate_above_thresh'] /= analysis_time self.fits_by_tid[ifo]['fit_by_rate_in_template'] /= analysis_time + def update_file(self, key): + """ + Update file used in this statistic. + If others are used (i.e. this statistic is inherited), they will + need updated separately + """ + # Check if the file to update is an ExpFit file + uf_expfit = ExpFitStatistic.update_file(self, key) + # If this has been updated we must do the reassign_rate step here + # on top of the file update from earlier + if uf_expfit: + # This is a fit coeff file which needs updating + # Which ifo is it? + ifo = key[:2] + self.reassign_rate(ifo) + return True + return False + def rank_stat_coinc(self, s, slide, step, to_shift, **kwargs): # pylint:disable=unused-argument """ @@ -1399,12 +1563,9 @@ def __init__(self, sngl_ranking, files=None, ifos=None, for ifo in self.bg_ifos: self.assign_median_sigma(ifo) - ref_ifos = reference_ifos.split(',') - - # benchmark_logvol is a benchmark sensitivity array over template id - hl_net_med_sigma = numpy.amin([self.fits_by_tid[ifo]['median_sigma'] - for ifo in ref_ifos], axis=0) - self.benchmark_logvol = 3. * numpy.log(hl_net_med_sigma) + self.ref_ifos = reference_ifos.split(',') + self.benchmark_logvol = None + self.assign_benchmark_logvol() self.single_increasing = False # Initialize variable to hold event template id(s) self.curr_tnum = None @@ -1425,6 +1586,41 @@ def assign_median_sigma(self, ifo): self.fits_by_tid[ifo]['median_sigma'] = \ coeff_file['median_sigma'][:][tid_sort] + def assign_benchmark_logvol(self): + """ + Assign the benchmark log-volume used by the statistic. + This is the sensitive log-volume of each template in the + network of reference IFOs + """ + # benchmark_logvol is a benchmark sensitivity array over template id + bench_net_med_sigma = numpy.amin( + [self.fits_by_tid[ifo]['median_sigma'] for ifo in self.ref_ifos], + axis=0, + ) + self.benchmark_logvol = 3. * numpy.log(bench_net_med_sigma) + + def update_file(self, key): + """ + Update file used in this statistic. + If others are used (i.e. this statistic is inherited), they will + need updated separately + """ + # Here we inherit the PhaseTD file checks + uf_phasetd = PhaseTDStatistic.update_file(self, key) + uf_exp_fit = ExpFitBgRateStatistic.update_file(self, key) + if uf_phasetd: + # The key to update refers to a PhaseTDStatistic file + return True + if uf_exp_fit: + # The key to update refers to a ExpFitBgRateStatistic file + # In this case we must reload some statistic information + # Which ifo is it? 
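+            # (fit coefficient files are keyed like '{ifo}-fit_coeffs', e.g.
+            #  'H1-fit_coeffs', so the first two characters name the detector)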
+ ifo = key[:2] + self.assign_median_sigma(ifo) + self.assign_benchmark_logvol() + return True + return False + def lognoiserate(self, trigs, alphabelow=6): """ Calculate the log noise rate density over single-ifo ranking @@ -1555,7 +1751,6 @@ def rank_stat_coinc(self, s, slide, step, to_shift, ln_noise_rate = coinc_rate.combination_noise_lograte( sngl_rates, kwargs['time_addition'], kwargs['dets']) - # Extent of time-difference space occupied noise_twindow = coinc_rate.multiifo_noise_coincident_area( self.hist_ifos, kwargs['time_addition'], @@ -1934,6 +2129,29 @@ def assign_kdes(self, kname): with h5py.File(self.files[kname + '-kde_file'], 'r') as kde_file: self.kde_by_tid[kname + '_kdevals'] = kde_file['data_kde'][:] + def update_file(self, key): + """ + Update file used in this statistic. + If others are used (i.e. this statistic is inherited), they will + need updated separately + """ + # Inherit from ExpFitFgBgNormStatistic + uf_expfit = ExpFitFgBgNormStatistic.update_file(self, key) + if uf_expfit: + # The key to update refers to a ExpFitFgBgNormStatistic file + return True + # Is the key a KDE statistic file that we update here? + if key.endswith('kde_file'): + logger.info( + "Updating %s statistic %s file", + ''.join(self.ifos), + key + ) + kde_style = key.split('-')[0] + self.assign_kdes(kde_style) + return True + return False + def kde_ratio(self): """ Calculate the weighting factor according to the ratio of the @@ -2137,6 +2355,30 @@ def setup_segments(self, key): return dq_state_segs_dict + def update_file(self, key): + """ + Update file used in this statistic. + If others are used (i.e. this statistic is inherited), they will + need updated separately + """ + # Inherit from ExpFitFgBgNormStatistic + uf_expfit = ExpFitFgBgNormStatistic.update_file(self, key) + if uf_expfit: + # We have updated a ExpFitFgBgNormStatistic file already + return True + # We also need to check if the DQ files have updated + if key.endswith('dq_stat_info'): + logger.info( + "Updating %s statistic %s file", + ''.join(self.ifos), + key + ) + self.assign_dq_rates(key) + self.assign_template_bins(key) + self.setup_segments(key) + return True + return False + def find_dq_noise_rate(self, trigs, dq_state): """Get dq values for a specific ifo and dq states""" @@ -2196,7 +2438,6 @@ def lognoiserate(self, trigs): """ # make sure every trig has a dq state - try: ifo = trigs.ifo except AttributeError: @@ -2245,6 +2486,17 @@ def __init__(self, sngl_ranking, files=None, ifos=None, **kwargs): for kname in self.kde_names: ExpFitFgBgKDEStatistic.assign_kdes(self, kname) + def update_file(self, key): + """ + Update file used in this statistic. + If others are used (i.e. 
this statistic is inherited), they will
+        need updated separately
+        """
+        # Inherit from DQExpFitFgBgNormStatistic and ExpFitFgBgKDEStatistic
+        uf_dq = DQExpFitFgBgNormStatistic.update_file(self, key)
+        uf_kde = ExpFitFgBgKDEStatistic.update_file(self, key)
+        return uf_dq or uf_kde
+
     def kde_ratio(self):
         """
         Inherited, see docstring for ExpFitFgBgKDEStatistic.kde_signalrate
diff --git a/test/test_live_coinc_compare.py b/test/test_live_coinc_compare.py
index 68c7cdc6e37..531bb8d16f0 100644
--- a/test/test_live_coinc_compare.py
+++ b/test/test_live_coinc_compare.py
@@ -76,7 +76,8 @@ def setUp(self, *args):
             timeslide_interval=0.1,
             background_ifar_limit=100,
             store_background=True,
-            coinc_window_pad=0.002
+            coinc_window_pad=0.002,
+            statistic_refresh_rate=None,
         )

         # number of templates in the bank

From 65b8a89ac5568f6410ae35f5bcb9fd8c0cef78a0 Mon Sep 17 00:00:00 2001
From: Gareth S Cabourn Davies
Date: Wed, 24 Jul 2024 15:08:39 +0100
Subject: [PATCH 08/35] Add a script to supervise template fitting in pycbc live (#4813)

* Move code which will be shared into a module
* getting more things ready for the trigger fits / dq supervision
* start work on fit_by, fit_over and DQ trigger supervision
* some work - bring in latest trigger file finding
* updates to use latest trigger file finding
* pass cuts through to the collation script
* symlink fit_over_multiparam output to variable file
* remove unused imports, argument ordering, plot titles
* CC
* Fix docstring, formatting
* Move the trigger collation/fitting supervisor to use configparser
* start moving significance fit supervision to configparser
* Start putting template fits into supervision code shared with significance fits
* Deal properly with when there are no triggers, so we can't calculate the median_sigma
* Remove no-longer-valid efficiency savings
* Some minor fixes for when sending mail from supervision
* Move to a single supervision file for both trigger and significance fits
* Remove FIXMEs
* Use check=True with subprocess.run
* Remove function checking fit coefficients - it is now unclear what the 'safe' ranges should be for the various different stages
* Fix minor bug
* typo
* Some minor fixes and TDC comments
* CC

---
 .../pycbc_fit_sngls_by_template               |   3 +
 .../pycbc_fit_sngls_over_multiparam           |  10 +-
 .../pycbc_live_plot_single_significance_fits  |   4 +-
 bin/live/pycbc_live_single_significance_fits  |   7 -
 ...pycbc_live_supervise_collated_trigger_fits | 634 ++++++++++++++++++
 ...bc_live_supervise_single_significance_fits | 521 --------------
 bin/plotting/pycbc_plot_bank_corner           |   4 -
 pycbc/live/__init__.py                        |   1 +
 pycbc/live/supervision.py                     | 154 +++++
 9 files changed, 802 insertions(+), 536 deletions(-)
 create mode 100755 bin/live/pycbc_live_supervise_collated_trigger_fits
 delete mode 100755 bin/live/pycbc_live_supervise_single_significance_fits
 create mode 100644 pycbc/live/supervision.py

diff --git a/bin/all_sky_search/pycbc_fit_sngls_by_template b/bin/all_sky_search/pycbc_fit_sngls_by_template
index 83b55ff5676..5e7d31e87e3 100755
--- a/bin/all_sky_search/pycbc_fit_sngls_by_template
+++ b/bin/all_sky_search/pycbc_fit_sngls_by_template
@@ -352,6 +352,9 @@ sigma_regions = trigf[args.ifo + '/sigmasq_template'][:]
 median_sigma = []
 for reg in sigma_regions:
     strigs = trigf[args.ifo + '/sigmasq'][reg]
+    if len(strigs) == 0:
+        median_sigma.append(np.nan)
+        continue
     median_sigma.append(np.median(strigs) ** 0.5)

 outfile = HFile(args.output, 'w')
diff --git a/bin/all_sky_search/pycbc_fit_sngls_over_multiparam
b/bin/all_sky_search/pycbc_fit_sngls_over_multiparam index 4230efd71ef..45b46fa32e3 100755 --- a/bin/all_sky_search/pycbc_fit_sngls_over_multiparam +++ b/bin/all_sky_search/pycbc_fit_sngls_over_multiparam @@ -342,14 +342,20 @@ if len(args.template_fit_file) > 1: nasum = nabove[tidsort].cumsum() invsum = invalphan[tidsort].cumsum() ntsum = ntotal[tidsort].cumsum() - mssum = median_sigma[tidsort].cumsum() num = right - left tid = tid_unique nabove = (nasum[right] - nasum[left]) / num invalphan = (invsum[right] - invsum[left]) / num ntotal = (ntsum[right] - ntsum[left]) / num - median_sigma = (mssum[right] - mssum[left]) / num + if median_sigma is not None: + # Median sigma is a special one - we need to make sure that + # we do not mess things up when nan values are given, so we + # can't use the special cumsum fast option + median_sigma = [ + numpy.nanmean(median_sigma[tidsort[l:r]]) + for l, r in zip(left, right) + ] if args.output_fits_by_template: # Store fit_by_template values for output file diff --git a/bin/live/pycbc_live_plot_single_significance_fits b/bin/live/pycbc_live_plot_single_significance_fits index 5010066856a..3dc2414a297 100644 --- a/bin/live/pycbc_live_plot_single_significance_fits +++ b/bin/live/pycbc_live_plot_single_significance_fits @@ -131,10 +131,10 @@ for ifo in all_ifos: continue # Keep track of some maxima for use in setting the plot limits - maxstat = stats[ifo].max() + maxstat = np.nanmax(stats[ifo]) max_rate = 0 - statrange = maxstat - max(stats[ifo].min(), fit_threshold[ifo]) + statrange = maxstat - max(np.nanmin(stats[ifo]), fit_threshold[ifo]) plotmax = maxstat + statrange * 0.05 plotbins = np.linspace(fit_threshold[ifo], plotmax, 400) diff --git a/bin/live/pycbc_live_single_significance_fits b/bin/live/pycbc_live_single_significance_fits index d122d6b2787..b1e76c32612 100644 --- a/bin/live/pycbc_live_single_significance_fits +++ b/bin/live/pycbc_live_single_significance_fits @@ -98,13 +98,6 @@ args.trigger_cuts = args.trigger_cuts or [] args.trigger_cuts.append(f"end_time:{args.gps_start_time}:lower_inc") args.trigger_cuts.append(f"end_time:{args.gps_end_time}:upper_inc") -# Efficiency saving: add SNR cut before any others as sngl_ranking can -# only be less than SNR. -args.trigger_cuts.insert(0, f"snr:{args.fit_threshold}:lower_inc") - -# Cut triggers with sngl-ranking below threshold -args.trigger_cuts.append(f"{args.sngl_ranking}:{args.fit_threshold}:lower_inc") - trigger_cut_dict, template_cut_dict = cuts.ingest_cuts_option_group(args) logging.info("Setting up duration bins") diff --git a/bin/live/pycbc_live_supervise_collated_trigger_fits b/bin/live/pycbc_live_supervise_collated_trigger_fits new file mode 100755 index 00000000000..3206f549c44 --- /dev/null +++ b/bin/live/pycbc_live_supervise_collated_trigger_fits @@ -0,0 +1,634 @@ +#!/usr/bin/env python + +"""Supervise the periodic re-fitting of PyCBC Live single-detector triggers, +and the associated plots. 
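+
+The individual stages (trigger collation, per-template fitting, fit
+smoothing, and the daily and combined single-detector significance fits)
+are enabled by command-line flags and configured through a single
+configuration file.
+
+An abridged, purely illustrative sketch of such a configuration file is
+shown below; the section and option names are taken from the ones this
+script reads, while the values and paths are placeholders. The full set of
+sections expected can be seen in supervise_collation_fits_dq() below:
+
+    [control]
+    ifos = H1 L1
+    output-directory = /path/to/output
+    public-dir = /path/to/public_html/fits
+
+    [collation_control]
+    collated-triggers-format = {ifos}-COLLATED-{start}-{duration}.hdf
+
+    [fit_by_template_control]
+    fit-by-template-format = {ifo}-FIT_BY_TEMPLATE-{date}.hdf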
+""" + +import re +import logging +import argparse +from datetime import datetime, timedelta +from dateutil.relativedelta import relativedelta +import os +import shutil +import subprocess +import numpy as np + +from lal import gpstime + +import pycbc +from pycbc.io.hdf import HFile +from pycbc.live import supervision as sv +from pycbc.types.config import InterpolatingConfigParser as icp + +def read_options(args): + """ + read the options into a dictionary + """ + logging.info("Reading config file") + cp = icp(configFiles=[args.config_file]) + config_opts = { + section: {k: v for k, v in cp[section].items()} + for section in cp.sections() + } + del config_opts['environment'] + return config_opts + +def trigger_collation( + day_dt, + day_str, + collation_control_options, + collation_options, + output_dir, + controls + ): + """ + Perform the trigger collation as specified + """ + logging.info("Performing trigger collation") + collate_args = [ + 'pycbc_live_collate_triggers', + ] + collate_args += sv.dict_to_args(collation_options) + gps_start = gpstime.utc_to_gps(day_dt).gpsSeconds + gps_end = gpstime.utc_to_gps(day_dt + timedelta(days=1)).gpsSeconds + + trig_merge_file = os.path.join( + output_dir, + collation_control_options['collated-triggers-format'].format( + ifos=''.join(controls['ifos'].split()), + start=gps_start, + duration=(gps_end - gps_start) + ) + ) + collate_args += [ + '--gps-start-time', f'{gps_start:d}', + '--gps-end-time', f'{gps_end:d}', + '--output-file', trig_merge_file, + ] + + sv.run_and_error(collate_args, controls) + + return trig_merge_file + + +def fit_by_template( + trigger_merge_file, + day_str, + fbt_control_options, + fbt_options, + output_dir, + ifo, + controls + ): + """ + Supervise the running of pycbc_fit_sngls_by_template on live triggers + """ + logging.info("Performing daily fit_by_template") + fbt_out_fname = fbt_control_options['fit-by-template-format'].format( + date=day_str, + ifo=ifo, + ) + fbt_out_full = os.path.join(output_dir, fbt_out_fname) + fit_by_args = ['pycbc_fit_sngls_by_template'] + fit_by_args += ['--trigger-file', trigger_merge_file] + fit_by_args += sv.dict_to_args(fbt_options) + fit_by_args += ['--output', fbt_out_full, '--ifo', ifo] + sv.run_and_error(fit_by_args, controls) + + return fbt_out_full, day_str + + +def find_daily_fit_files( + combined_control_options, + daily_fname_format, + daily_files_dir, + ifo=None + ): + """ + Find files which match the specified formats + """ + log_str = f"Finding files in {daily_files_dir} with format {daily_fname_format}" + if ifo is not None: + log_str += f"in detector {ifo}" + logging.info(log_str) + combined_days = int(combined_control_options['combined-days']) + if 'replay-start-time' in combined_control_options: + replay_start_time = int(combined_control_options['replay-start-time']) + true_start_time = int(combined_control_options['true-start-time']) + replay_duration = int(combined_control_options['replay-duration']) + rep_start_utc = lal.GPSToUTC(replay_start_time)[0:6] + + dt_replay_start = datetime( + year=rep_start_utc[0], + month=rep_start_utc[1], + day=rep_start_utc[2], + hour=rep_start_utc[3], + minute=rep_start_utc[4], + second=rep_start_utc[5] + ) + + td = (day_dt - dt_replay_start).total_seconds() + + # Time since the start of this replay + time_since_replay = np.remainder(td, replay_duration) + + # Add this on to the original start time to get the current time of + # the replay data + true_utc = lal.GPSToUTC(true_start_time)[0:6] + dt_true_start = datetime( + year=true_utc[0], + 
month=true_utc[1], + day=true_utc[2], + hour=true_utc[3], + minute=true_utc[4], + second=true_utc[5] + ) + # Original time of the data being replayed right now + current_date = dt_true_start + timedelta(seconds=time_since_replay) + else: + current_date = day_dt + + date_test = current_date + timedelta(days=1) + + daily_files = [] + missed_files = 0 + # Maximum consecutive number of days between files before a warning is raised + # 10 days of the detector being off would be unusual for current detectors + max_nmissed = combined_control_options.get('maximum_missed_files', 10) + found_files = 0 + while found_files < combined_days and missed_files < max_nmissed: + # Loop through the possible file locations and see if the file exists + date_test -= timedelta(days=1) + date_out = date_test.strftime("%Y_%m_%d") + daily_fname = daily_fname_format.format( + date=date_out, + ifo=ifo, + ) + + output_dir = os.path.join( + daily_files_dir, + date_out + ) + daily_full = os.path.join( + output_dir, + daily_fname + ) + # Check that the file exists: + if not os.path.exists(daily_full): + missed_files += 1 + logging.info("File %s does not exist - skipping", daily_full) + continue + if not len(daily_files): + end_date = date_out + # This is now the oldest file + first_date = date_out + # reset the "missed files" counter, and add to the "found files" + missed_files = 0 + found_files += 1 + daily_files.append(daily_full) + + if found_files == 0: + raise RuntimeError("No files found") + + if missed_files == max_nmissed: + # If more than a set maximum days between files, something + # is wrong with the analysis. Warn about this and use fewer + # files + logging.warning( + f'More than {max_nmissed} days between files, only using ' + f'{found_files} files!' + ) + + return daily_files, first_date, end_date + + +def fit_over_multiparam( + fit_over_controls, + fit_over_options, + ifo, + day_str, + output_dir, + controls + ): + """ + Supervise the smoothing of live trigger fits using + pycbc_fit_sngls_over_multiparam + """ + daily_files, first_date, end_date = find_daily_fit_files( + fit_over_controls, + fit_over_controls['fit-by-format'], + controls['output-directory'], + ifo=ifo + ) + logging.info( + "Smoothing fits using fit_over_multiparam with %d files and " + "specified parameters", + len(daily_files) + ) + file_id_str = f'{first_date}-{end_date}' + out_fname = fit_over_controls['fit-over-format'].format( + dates=file_id_str, + ifo=ifo, + ) + + fit_over_args = ['pycbc_fit_sngls_over_multiparam', '--template-fit-file'] + fit_over_args += daily_files + fit_over_args += sv.dict_to_args(fit_over_options) + fit_over_full = os.path.join(output_dir, out_fname) + fit_over_args += ['--output', fit_over_full] + sv.run_and_error(fit_over_args, controls) + if 'variable-fit-over-param' in fit_over_controls: + variable_fits = fit_over_controls['variable-fit-over-param'].format( + ifo=ifo + ) + sv.symlink(fit_over_full, variable_fits) + + return fit_over_full, file_id_str + +def plot_fits( + fits_file, + ifo, + day_title_str, + plot_fit_options, + controls, + smoothed=False + ): + """Plotting for fit_by files, and linking to the public directory""" + fits_plot_output = fits_file[:-3] + 'png' + logging.info( + "Plotting template fits %s to %s", + fits_file, + fits_plot_output + ) + fits_plot_arguments = [ + 'pycbc_plot_bank_corner', + '--fits-file', + fits_file, + '--output-plot-file', + fits_plot_output, + ] + fits_plot_arguments += sv.dict_to_args(plot_fit_options) + + title = "Fit parameters for pycbc-live, triggers from " + 
day_title_str + if smoothed == True: + title += ', smoothed' + fits_plot_arguments += ['--title', title] + sv.run_and_error(fits_plot_arguments, controls) + if 'public-dir' in controls: + public_dir = os.path.abspath(os.path.join( + controls['public-dir'], + *day_str.split('_') + )) + sv.symlink(fits_plot_output, public_dir) + + +def single_significance_fits( + daily_controls, + daily_options, + output_dir, + day_str, + day_dt, + controls, + test_options, + stat_files=None, + ): + """ + Supervise the significance fits for live triggers using + pycbc_live_single_significance_fits + """ + daily_options['output'] = os.path.join( + output_dir, + daily_controls['sig-daily-format'].format(date=day_str), + ) + daily_args = ['pycbc_live_single_significance_fits'] + + gps_start_time = gpstime.utc_to_gps(day_dt).gpsSeconds + gps_end_time = gpstime.utc_to_gps(day_dt + timedelta(days=1)).gpsSeconds + daily_options['gps-start-time'] = f'{gps_start_time:d}' + daily_options['gps-end-time'] = f'{gps_end_time:d}' + daily_args += sv.dict_to_args(daily_options) + if stat_files is not None: + daily_args += ['--statistic-files'] + stat_files + + sv.run_and_error(daily_args, controls) + + return daily_options['output'] + +def plot_single_significance_fits(daily_output, daily_plot_options, controls): + """ + Plotting daily significance fits, and link to public directory if wanted + """ + daily_plot_output = f'{daily_output[:-4]}_{{ifo}}.png' + logging.info( + "Plotting daily significance fits from %s to %s", + daily_output, + daily_plot_output + ) + daily_plot_arguments = [ + 'pycbc_live_plot_single_significance_fits', + '--trigger-fits-file', + daily_output, + '--output-plot-name-format', + daily_plot_output, + ] + daily_plot_arguments += sv.dict_to_args(daily_plot_options) + sv.run_and_error(daily_plot_arguments, controls) + + # Link the plots to the public-dir if wanted + if 'public-dir' in controls: + daily_plot_outputs = [ + daily_plot_output.format(ifo=ifo) + for ifo in controls['ifos'].split() + ] + logging.info("Linking daily fits plots") + for dpo in daily_plot_outputs: + public_dir = os.path.abspath(os.path.join( + controls['public-dir'], + *day_str.split('_') + )) + + sv.symlink(dpo, public_dir) + + +def combine_significance_fits( + combined_options, + combined_controls, + output_dir, + day_str, + controls + ): + """ + Supervise the smoothing of live trigger significance fits using + pycbc_live_combine_single_significance_fits + """ + daily_files, first_date, end_date = find_daily_fit_files( + combined_controls, + combined_controls['daily-format'], + controls['output-directory'], + ) + logging.info( + "Smoothing significance fits over %d files", + len(daily_files) + ) + date_range = f'{first_date}-{end_date}' + outfile_name = combined_controls['outfile-format'].format( + date=day_str, + date_range=date_range, + ) + combined_options['output'] = os.path.join(output_dir, outfile_name) + combined_options['trfits-files'] = ' '.join(daily_files) + + combined_args = ['pycbc_live_combine_single_significance_fits'] + combined_args += sv.dict_to_args(combined_options) + + sv.run_and_error(combined_args, controls) + + if 'variable-significance-fits' in combined_controls: + logging.info("Linking to variable significance fits file") + sv.symlink( + combined_options['output'], + combined_controls['variable-significance-fits'] + ) + + return combined_options['output'], date_range + +def plot_combined_significance_fits( + csf_file, + date_range, + output_dir, + combined_plot_options, + combined_plot_control_options, 
+ controls + ): + """ + Plotting combined significance fits, and link to public directory if wanted + """ + + oput_fmt = combined_plot_control_options['output-plot-name-format'] + if not '{date_range}' in oput_fmt: + raise RuntimeError( + "Must specify {date_range} in output-plot-name-format" + ) + oput_fmt = oput_fmt.replace('{date_range}', date_range) + oput_full = os.path.join(output_dir, oput_fmt) + combined_plot_arguments = [ + 'pycbc_live_plot_combined_single_significance_fits', + '--combined-fits-file', + csf_file, + '--output-plot-name-format', + oput_full + ] + combined_plot_arguments += sv.dict_to_args(combined_plot_options) + + sv.run_and_error(combined_plot_arguments, controls) + + # Get the list of combined plotting output files: + combined_plot_outputs = [ + oput_full.format(ifo=ifo, type='fit_coeffs') for ifo in + controls['ifos'].split() + ] + combined_plot_outputs += [ + oput_full.format(ifo=ifo, type='counts') for ifo in + controls['ifos'].split() + ] + + if 'public-dir' in controls: + logging.info("Linking combined fits to public dir") + public_dir = os.path.abspath(os.path.join( + controls['public-dir'], + *day_str.split('_') + )) + for cpo in combined_plot_outputs: + sv.symlink(cpo, public_dir) + +def supervise_collation_fits_dq(args, day_dt, day_str): + """ + Perform the trigger collation and fits etc. as specified + """ + # Read in the config file and pack into appropriate dictionaries + config_opts = read_options(args) + controls = config_opts['control'] + collation_options = config_opts['collation'] + collation_control_options = config_opts['collation_control'] + fit_by_template_options = config_opts['fit_by_template'] + fit_by_template_control_options = config_opts['fit_by_template_control'] + fit_over_options = config_opts['fit_over_multiparam'] + fit_over_control_options = config_opts['fit_over_multiparam_control'] + plot_fit_options = config_opts['plot_fit'] + daily_options = config_opts['significance_daily_fits'] + daily_control_options = config_opts['significance_daily_fits_control'] + daily_plot_options = config_opts['plot_significance_daily'] + combined_options = config_opts['significance_combined_fits'] + combined_control_options = config_opts['significance_combined_fits_control'] + combined_plot_options = config_opts['plot_significance_combined'] + combined_plot_control_options = config_opts['plot_significance_combined_control'] + test_options = config_opts['test'] + + # The main output directory will have a date subdirectory which we + # put the output into + sv.ensure_directories(controls, day_str) + + ifos = controls['ifos'].split() + output_dir = os.path.join( + controls['output-directory'], + day_str + ) + logging.info("Outputs to %s", output_dir) + if 'public_dir' in controls: + public_dir = os.path.abspath(os.path.join( + controls['public-dir'], + *day_str.split('_') + )) + logging.info("Outputs to be linked to % ", public_dir) + + merged_triggers = trigger_collation( + day_dt, + day_str, + collation_control_options, + collation_options, + output_dir, + controls + ) + # Store the locations of files needed for the statistic + stat_files = [] + for ifo in config_opts['control']['ifos'].split(): + if args.fit_by_template: + fbt_file, date_str = fit_by_template( + merged_triggers, + day_str, + fit_by_template_control_options, + fit_by_template_options, + output_dir, + ifo, + controls, + ) + plot_fits( + fbt_file, + ifo, + date_str, + plot_fit_options, + controls + ) + + if args.fit_over_multiparam: + fom_file, date_str = fit_over_multiparam( + 
fit_over_control_options,
+                fit_over_options,
+                ifo,
+                day_str,
+                output_dir,
+                controls
+            )
+            stat_files.append(fom_file)
+            plot_fits(
+                fom_file,
+                ifo,
+                date_str,
+                plot_fit_options,
+                controls,
+                smoothed=True,
+            )
+
+    if args.single_significance_fits:
+        ssf_file = single_significance_fits(
+            daily_control_options,
+            daily_options,
+            output_dir,
+            day_str,
+            day_dt,
+            controls,
+            test_options,
+            stat_files=stat_files,
+        )
+        plot_single_significance_fits(
+            ssf_file,
+            daily_plot_options,
+            controls
+        )
+    if args.combine_significance_fits:
+        csf_file, date_str = combine_significance_fits(
+            combined_options,
+            combined_control_options,
+            output_dir,
+            day_str,
+            controls
+        )
+        plot_combined_significance_fits(
+            csf_file,
+            date_str,
+            output_dir,
+            combined_plot_options,
+            combined_plot_control_options,
+            controls
+        )
+
+
+def get_yesterday_date():
+    """ Get the date string for yesterday's triggers """
+    day_dt = datetime.utcnow() - timedelta(days=1)
+    day_dt = datetime.combine(day_dt, datetime.min.time())
+    day_str = day_dt.strftime('%Y_%m_%d')
+    return day_dt, day_str
+
+
+parser = argparse.ArgumentParser(description=__doc__)
+pycbc.add_common_pycbc_options(parser)
+parser.add_argument(
+    '--config-file',
+    required=True
+)
+parser.add_argument(
+    '--date',
+    help='Date to analyse, if not given, will analyse yesterday (UTC). '
+         'Format YYYY_MM_DD. Do not use if using --run-daily-at.'
+)
+parser.add_argument(
+    '--fit-by-template',
+    action='store_true',
+    help="Perform template fits calculation."
+)
+parser.add_argument(
+    '--fit-over-multiparam',
+    action='store_true',
+    help="Perform template fits smoothing."
+)
+parser.add_argument(
+    '--single-significance-fits',
+    action='store_true',
+    help="Perform daily singles significance fits."
+)
+parser.add_argument(
+    '--combine-significance-fits',
+    action='store_true',
+    help="Do combination of singles significance fits."
+)
+parser.add_argument(
+    '--run-daily-at',
+    metavar='HH:MM:SS',
+    help='Stay running and repeat the fitting daily at the given UTC hour.'
+)
+args = parser.parse_args()
+
+pycbc.init_logging(args.verbose, default_level=1)
+
+if args.run_daily_at is not None and args.date is not None:
+    parser.error('Cannot take --run-daily-at and --date at the same time')
+
+if args.run_daily_at is not None:
+    # keep running and repeat the fitting every day at the given hour
+    if not re.match('[0-9][0-9]:[0-9][0-9]:[0-9][0-9]', args.run_daily_at):
+        parser.error('--run-daily-at takes a UTC time in the format HH:MM:SS')
+    logging.info('Starting in daily run mode')
+    while True:
+        sv.wait_for_utc_time(args.run_daily_at)
+        day_dt, day_str = get_yesterday_date()
+        logging.info('==== Time to update the single fits, waking up ====')
+        supervise_collation_fits_dq(args, day_dt, day_str)
+else:
+    # run just once
+    if args.date:
+        day_str = args.date
+        day_dt = datetime.strptime(args.date, '%Y_%m_%d')
+    else:
+        day_dt, day_str = get_yesterday_date()
+    supervise_collation_fits_dq(args, day_dt, day_str)
diff --git a/bin/live/pycbc_live_supervise_single_significance_fits b/bin/live/pycbc_live_supervise_single_significance_fits
deleted file mode 100755
index 0c6561e9cea..00000000000
--- a/bin/live/pycbc_live_supervise_single_significance_fits
+++ /dev/null
@@ -1,521 +0,0 @@
-#!/usr/bin/env python
-
-"""Supervise the periodic re-fitting of PyCBC Live single-detector triggers,
-and the associated plots.
-""" - -import re -import logging -import argparse -from datetime import datetime, timedelta -from dateutil.relativedelta import relativedelta -import time -import copy -import os -import shutil -import subprocess -import numpy as np - -from lal import gpstime - -import pycbc -from pycbc.io.hdf import HFile - - -def symlink(target, link_name): - """Create a symbolic link replacing the destination and checking for - errors. - """ - cp = subprocess.run([ - 'ln', '-sf', target, link_name - ]) - if cp.returncode: - raise subprocess.SubprocessError( - f"Could not link plot {target} to {link_name}" - ) - - -def dict_to_args(opts_dict): - """ - Convert an option dictionary into a list to be used by subprocess.run - """ - dargs = [] - for option in opts_dict.keys(): - dargs.append('--' + option.strip()) - value = opts_dict[option] - if len(value.split()) > 1: - # value is a list, append individually - for v in value.split(): - dargs.append(v) - elif not value: - # option is a flag, do nothing - continue - else: - # Single value option - easy enough - dargs.append(value) - return dargs - - -def mail_volunteers_error(controls, mail_body_lines, subject): - """ - Email a list of people, defined by mail-volunteers-file - To be used for errors or unusual occurences - """ - if 'mail_volunteers_file' not in controls: - logging.info( - "No file containing people to email, logging the error instead" - ) - for line in mail_body_lines: - logging.warning(line) - return - with open(controls['mail_volunteers_file'], 'r') as mail_volunteers_file: - volunteers = [volunteer.strip() for volunteer in - mail_volunteers_file.readlines()] - logging.info("Emailing %s with warnings", ' '.join(volunteers)) - mail_command = [ - 'mail', - '-s', - subject - ] - mail_command += volunteers - mail_body = '\n'.join(mail_body_lines) - subprocess.run(mail_command, input=mail_body, text=True) - - -def check_trigger_files(filenames, test_options, controls): - """ - Check that the fit coefficients meet criteria set - """ - coeff_upper_limit = float(test_options['upper-limit-coefficient']) - coeff_lower_limit = float(test_options['lower-limit-coefficient']) - warnings = [] - warning_files = [] - for filename in filenames: - warnings_thisfile = [] - with HFile(filename, 'r') as trff: - ifos = [k for k in trff.keys() if not k.startswith('bins')] - fit_coeffs = {ifo: trff[ifo]['fit_coeff'][:] for ifo in ifos} - bins_upper = trff['bins_upper'][:] - bins_lower = trff['bins_lower'][:] - # Which bins have at least *some* triggers within the limit - use_bins = bins_lower > float(test_options['duration-bin-lower-limit']) - for ifo in ifos: - coeffs_above = fit_coeffs[ifo][use_bins] > coeff_upper_limit - coeffs_below = fit_coeffs[ifo][use_bins] < coeff_lower_limit - if not any(coeffs_above) and not any(coeffs_below): - continue - # Problem - the fit coefficient is outside the limits - for bl, bu, fc in zip(bins_lower[use_bins], bins_upper[use_bins], - fit_coeffs[ifo][use_bins]): - if fc < coeff_lower_limit or fc > coeff_upper_limit: - warnings_thisfile.append( - f"WARNING - {ifo} fit coefficient {fc:.3f} in bin " - f"{bl}-{bu} outwith limits " - f"{coeff_lower_limit}-{coeff_upper_limit}" - ) - if warnings_thisfile: - warning_files.append(filename) - warnings.append(warnings_thisfile) - - if warnings: - # Some coefficients are outside the range - # Add the fact that this check failed in the logs - logging.warning("Extreme daily fits values found:") - mail_body_lines = ["Extreme daily fits values found:"] - for filename, filewarnings in 
zip(warning_files, warnings): - logging.warning(filename) - mail_body_lines.append(f"Values in {filename}") - for fw in filewarnings: - logging.warning(" " + fw) - mail_body_lines.append(" " + fw) - mail_volunteers_error( - controls, - mail_body_lines, - 'PyCBC Live single trigger fits extreme value(s)' - ) - - -def run_and_error(command_arguments, controls): - """ - Wrapper around subprocess.run to catch errors and send emails if required - """ - logging.info("Running " + " ".join(command_arguments)) - command_output = subprocess.run(command_arguments, capture_output=True) - if command_output.returncode: - error_contents = [' '.join(command_arguments), - command_output.stderr.decode()] - mail_volunteers_error( - controls, - error_contents, - f"PyCBC live could not run {command_arguments[0]}" - ) - err_msg = f"Could not run {command_arguments[0]}" - raise subprocess.SubprocessError(err_msg) - - -# These are the option used to control the supervision, and will not be passed -# to the subprocesses -control_options = [ - "check-daily-output", - "combined-days", - "mail-volunteers-file", - "output-directory", - "output-id-str", - "public-dir", - "replay-duration", - "replay-start-time", - "submit-dir", - "trfits-format", - "true-start-time", - "variable-trigger-fits", -] - -# these are options which can be taken by both the daily fit code and the -# combined fitting code -options_both = ['ifos', 'verbose'] - -# These options are only for the daily fit code -daily_fit_options = [ - 'cluster', - 'date-directories', - 'duration-bin-edges', - 'duration-bin-spacing', - 'duration-from-bank', - 'file-identifier', - 'fit-function', - 'fit-threshold', - 'num-duration-bins', - 'prune-loudest', - 'prune-stat-threshold', - 'prune-window', - 'sngl-ranking', - 'template-cuts', - 'trigger-cuts', - 'trigger-directory', -] - -combined_fit_options = [ - 'conservative-percentile', -] - -coeff_test_options = [ - 'duration-bin-lower-limit', - 'lower-limit-coefficient', - 'upper-limit-coefficient', -] - -all_options = control_options + options_both + daily_fit_options \ - + combined_fit_options + coeff_test_options - - -def do_fitting(args, day_dt, day_str): - """ - Perform the fits as specified - """ - # Read in the config file and pack into appropriate dictionaries - daily_options = {} - combined_options = {} - test_options = {} - controls = {} - - with open(args.config_file, 'r') as conf_file: - all_lines = conf_file.readlines() - - for line in all_lines: - # Ignore whitespace and comments - line = line.strip() - if not line: - continue - if line.startswith(';'): - continue - - option, value = line.split('=') - option = option.strip() - value = value.strip() - - # If it is a control option, add to the controls dictionary - if option in control_options: - controls[option] = value - - # If the option is not to control the input, then it is passed - # straight to the executable - if option in daily_fit_options or option in options_both: - daily_options[option] = value - - if option in options_both or option in combined_fit_options: - combined_options[option] = value - - if option in coeff_test_options: - test_options[option] = value - - if option not in all_options: - logging.warning("Option %s unrecognised, ignoring", option) - - # The main output directory will have a date subdirectory which we - # put the output into - output_dir = os.path.join(controls['output-directory'], day_str) - subprocess.run(['mkdir', '-p', output_dir]) - if 'public-dir' in controls: - public_dir = os.path.join(controls['public-dir'], 
*day_str.split('_')) - subprocess.run(['mkdir', '-p', public_dir]) - - if not args.combine_only: - ##### DAILY FITTING ##### - file_id_str = f'{day_str}' - if 'output-id-str' in controls: - file_id_str += f"-{controls['output-id-str']}" - out_fname = f'{file_id_str}-TRIGGER-FITS.hdf' - daily_options['output'] = os.path.join(output_dir, out_fname) - daily_args = ['pycbc_live_single_significance_fits'] - - daily_options['gps-start-time'] = f'{gpstime.utc_to_gps(day_dt).gpsSeconds:d}' - daily_options['gps-end-time'] = f'{gpstime.utc_to_gps(day_dt + timedelta(days=1)).gpsSeconds:d}' - daily_args += dict_to_args(daily_options) - - run_and_error(daily_args, controls) - - # Add plotting for daily fits, and linking to the public directory - logging.info("Plotting daily fits") - daily_plot_output = os.path.join(output_dir, - '{ifo}-' + f'{out_fname[:-3]}png') - daily_plot_arguments = [ - 'pycbc_live_plot_single_significance_fits', - '--trigger-fits-file', - daily_options['output'], - '--output-plot-name-format', - daily_plot_output, - '--log-colormap' - ] - run_and_error(daily_plot_arguments, controls) - - # Link the plots to the public-dir if wanted - if 'public-dir' in controls: - daily_plot_outputs = [daily_plot_output.format(ifo=ifo) for ifo in - daily_options['ifos'].split()] - logging.info("Linking daily fits plots") - for dpo in daily_plot_outputs: - symlink(dpo, public_dir) - - if args.daily_only: - if 'check-daily-output' in controls: - logging.info( - "Checking that fit coefficients above %s for bins above %ss", - test_options['lower-limit-coefficient'], - test_options['duration-bin-lower-limit'] - ) - check_trigger_files( - [daily_options['output']], - test_options, - controls - ) - logging.info('Done') - exit() - - ##### COMBINED FITTING ##### - combined_days = int(controls['combined-days']) - if 'replay-start-time' in controls: - replay_start_time = int(controls['replay-start-time']) - true_start_time = int(controls['true-start-time']) - replay_duration = int(controls['replay-duration']) - dt_replay_start = gpstime.gps_to_utc(replay_start_time) - - td = (day_dt - dt_replay_start).total_seconds() - - # Time since the start of this replay - time_since_replay = np.remainder(td, replay_duration) - - # Add this on to the original start time to get the current time of - # the replay data - dt_true_start = gpstime.gps_to_utc(true_start_time) - - # Original time of the data being replayed right now - current_date = dt_true_start + timedelta(seconds=time_since_replay) - else: - current_date = day_dt - - date_test = current_date + timedelta(days=1) - - logging.info("Finding trigger fit files for combination") - if 'check-daily-output' in controls: - logging.info( - "Checking all files that fit coefficients above %s for bins " - "above %ss", - test_options['lower-limit-coefficient'], - test_options['duration-bin-lower-limit'] - ) - - trfits_files = [] - missed_files = 0 - found_files = 0 - while found_files < combined_days and missed_files < 10: - # Loop through the possible file locations and see if the file exists - date_test -= timedelta(days=1) - date_out = date_test.strftime("%Y_%m_%d") - trfits_filename = controls['trfits-format'].format(date=date_out) - # Check that the file exists: - if not os.path.exists(trfits_filename): - missed_files += 1 - logging.info(f"File {trfits_filename} does not exist - skipping") - continue - if not len(trfits_files): - end_date = date_out - # This is now the oldest file - first_date = date_out - # reset the "missed files" counter, and add to the "found 
files" - missed_files = 0 - found_files += 1 - trfits_files.append(trfits_filename) - - if 'check-daily-output' in controls: - check_trigger_files(trfits_files, test_options, controls) - - if missed_files == 10: - # If more than 10 days between files, something wrong with analysis. - # warn and use fewer files - 10 here is chosen to be an unusual amount - # of time for the analysis to be down in standard operation - logging.warning('More than 10 days between files, only using ' - f'{found_files} files for combination!') - - file_id_str = f'{first_date}-{end_date}' - if 'output-id-str' in controls: - file_id_str += f"-{controls['output-id-str']}" - out_fname = f'{file_id_str}-TRIGGER_FITS_COMBINED' - combined_options['output'] = os.path.join(output_dir, out_fname + '.hdf') - - if not trfits_files: - raise ValueError("No files meet the criteria") - - combined_options['trfits-files'] = ' '.join(trfits_files) - - combined_args = ['pycbc_live_combine_single_significance_fits'] - combined_args += dict_to_args(combined_options) - - run_and_error(combined_args, controls) - - if 'variable-trigger-fits' in controls: - logging.info('Copying combined fits file to local filesystem') - try: - shutil.copyfile( - combined_options['output'], - controls['variable-trigger-fits'] - ) - except Exception as e: - mail_volunteers_error( - controls, - [str(e)], - "PyCBC live could not copy to variable trigger fits file" - ) - raise e - logging.info( - "%s updated to link to %s", - controls['variable-trigger-fits'], - combined_options['output'] - ) - - logging.info("Plotting combined fits") - # Add plotting for combined fits, and linking to the public directory - combined_plot_output = os.path.join(output_dir, - f"{{ifo}}-{out_fname}-{{type}}.png") - combined_plot_arguments = [ - 'pycbc_live_plot_combined_single_significance_fits', - '--combined-fits-file', - combined_options['output'], - '--output-plot-name-format', - combined_plot_output, - '--log-colormap' - ] - - run_and_error(combined_plot_arguments, controls) - - combined_plot_outputs = [ - combined_plot_output.format(ifo=ifo, type='fit_coeffs') for ifo in - combined_options['ifos'].split() - ] - combined_plot_outputs += [ - combined_plot_output.format(ifo=ifo, type='counts') for ifo in - combined_options['ifos'].split() - ] - - # Link the plots to the public-dir if wanted - if 'public-dir' in controls: - logging.info("Linking combined fits") - for cpo in combined_plot_outputs: - symlink(cpo, public_dir) - - logging.info('Done') - - -def wait_for_utc_time(target_str): - """Wait until the UTC time is as given by `target_str`, in HH:MM:SS format. - """ - target_hour, target_minute, target_second = map(int, target_str.split(':')) - now = datetime.utcnow() - # for today's target, take now and replace the time - target_today = now + relativedelta( - hour=target_hour, minute=target_minute, second=target_second - ) - # for tomorrow's target, take now, add one day, and replace the time - target_tomorrow = now + relativedelta( - days=1, hour=target_hour, minute=target_minute, second=target_second - ) - next_target = target_today if now <= target_today else target_tomorrow - sleep_seconds = (next_target - now).total_seconds() - logging.info('Waiting %.0f s', sleep_seconds) - time.sleep(sleep_seconds) - - -parser = argparse.ArgumentParser(description=__doc__) -pycbc.add_common_pycbc_options(parser) -parser.add_argument( - '--config-file', - required=True -) -parser.add_argument( - '--date', - help='Date to analyse, if not given, will analyse yesterday (UTC). 
' - 'Format YYYY_MM_DD. Do not use if using --run-daily-at.' -) -parser.add_argument( - '--combine-only', - action='store_true', - help="Only do the combination of singles fit files." -) -parser.add_argument( - '--daily-only', - action='store_true', - help="Only do the daily singles fitting." -) -parser.add_argument( - '--run-daily-at', - metavar='HH:MM:SS', - help='Stay running and repeat the fitting daily at the given UTC hour.' -) -args = parser.parse_args() - -pycbc.init_logging(args.verbose, default_level=1) - -if args.run_daily_at is not None and args.date is not None: - parser.error('Cannot take --run-daily-at and --date at the same time') - -if args.run_daily_at is not None: - # keep running and repeat the fitting every day at the given hour - if not re.match('[0-9][0-9]:[0-9][0-9]:[0-9][0-9]', args.run_daily_at): - parser.error('--run-daily-at takes a UTC time in the format HH:MM:SS') - logging.info('Starting in daily run mode') - while True: - wait_for_utc_time(args.run_daily_at) - logging.info('==== Time to update the single fits, waking up ====') - # Get the date string for yesterday's triggers - day_dt = datetime.utcnow() - timedelta(days=1) - day_str = day_dt.strftime('%Y_%m_%d') - do_fitting(args, day_dt, day_str) -else: - # run just once - if args.date: - day_str = args.date - day_dt = datetime.strptime(args.date, '%Y_%m_%d') - else: - # Get the date string for yesterday's triggers - day_dt = datetime.utcnow() - timedelta(days=1) - day_str = day_dt.strftime('%Y_%m_%d') - do_fitting(args, day_dt, day_str) diff --git a/bin/plotting/pycbc_plot_bank_corner b/bin/plotting/pycbc_plot_bank_corner index 4494ab56eb8..01fdf9100cf 100644 --- a/bin/plotting/pycbc_plot_bank_corner +++ b/bin/plotting/pycbc_plot_bank_corner @@ -72,10 +72,6 @@ parser.add_argument("--parameters", "property of that parameter will be used. If not " "provided, will plot all of the parameters in the " "bank.") -parser.add_argument("--log-parameters", - nargs="+", - default=[], - help="Plot these parameters on a log scale") parser.add_argument('--plot-histogram', action='store_true', help="Plot 1D histograms of parameters on the " diff --git a/pycbc/live/__init__.py b/pycbc/live/__init__.py index 4037f6634ae..5a3a40c901a 100644 --- a/pycbc/live/__init__.py +++ b/pycbc/live/__init__.py @@ -4,3 +4,4 @@ from .snr_optimizer import * from .significance_fits import * +from .supervision import * diff --git a/pycbc/live/supervision.py b/pycbc/live/supervision.py new file mode 100644 index 00000000000..858dc6c782b --- /dev/null +++ b/pycbc/live/supervision.py @@ -0,0 +1,154 @@ +# Copyright (C) 2023 Arthur Tolley, Gareth Cabourn Davies +# +# This program is free software; you can redistribute it and/or modify it +# under the terms of the GNU General Public License as published by the +# Free Software Foundation; either version 3 of the License, or (at your +# option) any later version. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General +# Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
+ +""" +This module contains functions for supervising codes to run regularly +during pycbc_live production, taking input from the search and returning +files which can be used in the search. +This module is primarily used in the pycbc_live_supervise_* programs. +""" + +import logging +import subprocess +import time +import os +from datetime import datetime +from dateutil.relativedelta import relativedelta + +import pycbc + +logger = logging.getLogger('pycbc.live.supervision') + + +def symlink(target, link_name): + """ + Create a symbolic link replacing the destination and checking for + errors. + """ + # Ensure that the target and link name are absolute paths + target = os.path.abspath(target) + link_name = os.path.abspath(link_name) + logger.info("Linking %s to %s", target, link_name) + try: + subprocess.run(['ln', '-sf', target, link_name], check=True) + except subprocess.CalledProcessError as sub_err: + logging.error("Could not link %s to %s", target, link_name) + raise sub_err + + +def dict_to_args(opts_dict): + """ + Convert an option dictionary into a list to be used by subprocess.run + """ + dargs = [] + for option, value in opts_dict.items(): + dargs.append('--' + option.strip()) + if value == '': + # option is a flag, do nothing + continue + if len(value.split()) > 1: + # value is a list, append individually + for v in value.split(): + dargs.append(v) + else: + # Single value option - append once + dargs.append(value) + return dargs + + +def mail_volunteers_error(controls, mail_body_lines, subject): + """ + Email a list of people, defined by mail-volunteers-file + To be used for errors or unusual occurences + """ + with open(controls['mail-volunteers-file'], 'r') as mail_volunteers_file: + volunteers = [volunteer.strip() for volunteer in + mail_volunteers_file.readlines()] + logger.info("Emailing %s with warnings", ' '.join(volunteers)) + mail_command = [ + 'mail', + '-s', + subject + ] + mail_command += volunteers + mail_body = '\n'.join(mail_body_lines) + try: + subprocess.run(mail_command, input=mail_body, text=True, check=True) + except subprocess.CalledProcessError as sub_err: + logging.error("Could not send mail on error") + raise sub_err + + +def run_and_error(command_arguments, controls): + """ + Wrapper around subprocess.run to catch errors and send emails if required + """ + logger.info("Running %s", " ".join(command_arguments)) + command_output = subprocess.run( + command_arguments, + capture_output=True + ) + + if command_output.returncode: + error_contents = [' '.join(command_arguments), '\n', + command_output.stderr.decode()] + if 'mail-volunteers-file' in controls: + mail_volunteers_error( + controls, + error_contents, + f"PyCBC live could not run {command_arguments[0]}" + ) + err_msg = f"Could not run {command_arguments[0]}:\n" + err_msg += ' '.join(error_contents) + raise subprocess.SubprocessError(err_msg) + + +def wait_for_utc_time(target_str): + """Wait until the UTC time is as given by `target_str`, in HH:MM:SS format. 
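+
+    For example, wait_for_utc_time('08:00:00') sleeps until the next time the
+    UTC clock reads 08:00:00, whether that falls later today or tomorrow.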
+ """ + target_hour, target_minute, target_second = map(int, target_str.split(':')) + now = datetime.utcnow() + # for today's target, take now and replace the time + target_today = now + relativedelta( + hour=target_hour, minute=target_minute, second=target_second + ) + # for tomorrow's target, take now, add one day, and replace the time + target_tomorrow = now + relativedelta( + days=1, hour=target_hour, minute=target_minute, second=target_second + ) + next_target = target_today if now <= target_today else target_tomorrow + sleep_seconds = (next_target - now).total_seconds() + logger.info('Waiting %.0f s', sleep_seconds) + time.sleep(sleep_seconds) + + +def ensure_directories(control_values, day_str): + """ + Ensure that the required directories exist + """ + output_dir = os.path.join( + control_values['output-directory'], + day_str + ) + pycbc.makedir(output_dir) + if 'public-dir' in control_values: + # The public directory wil be in subdirectories for the year, month, + # day, e.g. 2024_04_12 will be in 2024/04/12. + public_dir = os.path.join( + control_values['public-dir'], + *day_str.split('_') + ) + pycbc.makedir(public_dir) From ab7cf0cacdcf24f47399c036e7d558be7041ecd2 Mon Sep 17 00:00:00 2001 From: Duncan Macleod Date: Sat, 27 Jul 2024 09:11:55 +0100 Subject: [PATCH 09/35] Remove pycbc_coinc_time (#4822) * remove pycbc_coinc_time * remove dqsegdb requirement this is no longer required by any modules, scripts, or actively supported workflow configuration --- bin/pycbc_coinc_time | 185 ---------------------------------- requirements-igwn.txt | 1 - tools/pycbc_test_suite.sh | 2 +- tools/static/cant_be_built | 1 - tools/static/needs_full_build | 1 - 5 files changed, 1 insertion(+), 189 deletions(-) delete mode 100644 bin/pycbc_coinc_time diff --git a/bin/pycbc_coinc_time b/bin/pycbc_coinc_time deleted file mode 100644 index c4476deb1d3..00000000000 --- a/bin/pycbc_coinc_time +++ /dev/null @@ -1,185 +0,0 @@ -#!/bin/env python -import argparse -import logging -import numpy -from dqsegdb.apicalls import dqsegdbQueryTimes as query - -import ligo.segments - -import pycbc - -def sane(seg_list): - """ Convert list of len two lists containing strs to segment list """ - segs = ligo.segments.segmentlist([]) - for seg in seg_list: - segs.append(ligo.segments.segment(int(seg[0]), int(seg[1]))) - return segs - -def parse_veto_definer(veto_def_filename): - """ Parse a veto definer file from the filename and return a dictionary - indexed by ifo and veto definer category level. - - Parameters - ---------- - veto_def_filename: str - The path to the veto definer file - - Returns: - parsed_definition: dict - Returns a dictionary first indexed by ifo, then category level, and - finally a list of veto definitions. 
- """ - from ligo.lw import table, utils as ligolw_utils - from pycbc.io.ligolw import LIGOLWContentHandler as h - - indoc = ligolw_utils.load_filename(veto_def_filename, False, contenthandler=h) - veto_table = table.Table.get_table(indoc, 'veto_definer') - - ifo = veto_table.getColumnByName('ifo') - name = veto_table.getColumnByName('name') - version = numpy.array(veto_table.getColumnByName('version')) - category = numpy.array(veto_table.getColumnByName('category')) - start = numpy.array(veto_table.getColumnByName('start_time')) - end = numpy.array(veto_table.getColumnByName('end_time')) - start_pad = numpy.array(veto_table.getColumnByName('start_pad')) - end_pad = numpy.array(veto_table.getColumnByName('end_pad')) - - data = {} - for i in range(len(veto_table)): - if ifo[i] not in data: - data[ifo[i]] = {} - - if category[i] not in data[ifo[i]]: - data[ifo[i]][category[i]] = [] - - veto_info = {'name': name[i], - 'version': version[i], - 'start': start[i], - 'end': end[i], - 'start_pad': start_pad[i], - 'end_pad': end_pad[i], - } - data[ifo[i]][category[i]].append(veto_info) - return data - -def get_vetoes(veto_def, ifo, server, veto_name, start_default, end_default): - """ Cycle through the veto name string and collect the vetoes for the - selected categories. Return the final segment list - """ - raise ValueError("This code needs updating to work with the new segment " - "interface. If it's still used please fix this, " - "otherwise we can just remove this code.") - veto_segments = ligo.segments.segmentlist([]) - for cat in veto_name: - #cat = convert_cat(cat) - flags = veto_def[ifo][cat] - - for flag in flags: - start = flag['start'] if flag['start'] >= start_default else start_default - end = flag['end'] if flag['end'] !=0 else end_default - - raw_segs = sane(query("https", server, ifo, - flag['name'], flag['version'], - 'active', start, end)[0]['active']) - - for rseg in raw_segs: - s, e = rseg[0] + flag['start_pad'], rseg[1] + flag['end_pad'] - veto_segments.append(ligo.segments.segment(s, e)) - return veto_segments.coalesce() - - -parser = argparse.ArgumentParser() -pycbc.add_common_pycbc_options(parser) - -parser.add_argument('--gps-start-time', type=int, required=True, - help="integer gps start time") -parser.add_argument('--gps-end-time', type=int, required=True, - help="integer gps end time") -parser.add_argument('--veto-definer', type=str, required=True, - help="path to veto definer xml file") -parser.add_argument('--science-veto-levels', type=str, - help="Veto levels to apply by removing strain data before analysis ex. '1' for CAT1 veto") -parser.add_argument('--trigger-veto-levels', type=str, - help="Veto levels to apply by removing triggers from analyzed times ex. 
'12H' for CAT 1 and CAT2 vetoes plus hardware injections") -parser.add_argument('--segment-server', type=str, - help="segment server string") -parser.add_argument('--science-names', nargs=2, - help="name of the segment flag IFO:NAME:VERSION to use for science") - -group = parser.add_argument_group("pycbc_inspiral options that determine padding and minimum time analyzable.") -group.add_argument('--segment-length', type=int) -group.add_argument('--min-analysis-segments', type=int) -group.add_argument('--pad-data', type=int) -group.add_argument('--segment-start-pad', type=int) -group.add_argument('--segment-end-pad', type=int) - -args = parser.parse_args() - - -analysis_start_pad = args.segment_start_pad + args.pad_data -analysis_end_pad = args.segment_end_pad + args.pad_data -minimum_segment_length = ((args.segment_length - args.segment_start_pad - - args.segment_end_pad) * args.min_analysis_segments - + analysis_start_pad + analysis_end_pad) - -pycbc.init_logging(args.verbose) - -ifo_segs = [] - -veto_def = parse_veto_definer(args.veto_definer) - -# Read in the science segments for the requested time -for science_name in args.science_names: - ifo, name, version = science_name.split(':') - - logging.info("For IFO: %s, querying science time (%s, %s)" % (ifo, name, version)) - segments = sane(query("https", args.segment_server, ifo, name, version, - 'active', args.gps_start_time, args.gps_end_time)[0]['active']) - - #trim segments to the request time - request = ligo.segments.segment(args.gps_start_time, args.gps_end_time) - segments = (ligo.segments.segmentlist([request]) & segments) - - # apply cat 1 vetoes here - logging.info('Found %ss of data' % abs(segments)) - segments = segments.coalesce() - - cat1_segs = get_vetoes(veto_def, ifo, - args.segment_server, - args.science_veto_levels, - args.gps_start_time, - args.gps_end_time, - ).coalesce() - - segments -= cat1_segs - logging.info('Found %ss after applying CAT1 vetoes' % abs(segments)) - # remove short segments, and account for filter padding - logging.info('Removing segments shorter than %ss' % minimum_segment_length) - lsegments = ligo.segments.segmentlist([]) - segments = segments.coalesce() - for seg in segments: - if abs(seg) >= minimum_segment_length: - start = seg[0] + analysis_start_pad - end = seg[1] - analysis_end_pad - lsegments.append(ligo.segments.segment(start, end)) - segments = lsegments - logging.info('Found %ss after applying removing padding / short segments' % abs(segments)) - - # apply vetoes that remove triggers here - segments = segments.coalesce() - vtrig_segs = get_vetoes(veto_def, ifo, - args.segment_server, - args.trigger_veto_levels, - args.gps_start_time, - args.gps_end_time, - ).coalesce() - segments -= vtrig_segs - - logging.info('Found %ss after applying trigger vetoes' % abs(segments)) - segments.coalesce() - - ifo_segs += [segments] - -coinc_time = abs(ifo_segs[0] & ifo_segs[1]) -print("Available Coincident Time from %s-%s" % (args.gps_start_time, args.gps_end_time)) -print("%s seconds, %5.5f days" % (coinc_time, coinc_time / 86400.0)) diff --git a/requirements-igwn.txt b/requirements-igwn.txt index 3b7e2d2ce84..9486aa71255 100644 --- a/requirements-igwn.txt +++ b/requirements-igwn.txt @@ -3,4 +3,3 @@ ciecplib[kerberos] >= 0.7.0 dqsegdb2 >= 1.1.4 amqplib htchirp >= 2.0 -dqsegdb >= 2.0.0 diff --git a/tools/pycbc_test_suite.sh b/tools/pycbc_test_suite.sh index 3d220599b0c..56dcdcc30a8 100755 --- a/tools/pycbc_test_suite.sh +++ b/tools/pycbc_test_suite.sh @@ -38,7 +38,7 @@ fi if [ "$PYCBC_TEST_TYPE" = 
"help" ] || [ -z ${PYCBC_TEST_TYPE+x} ]; then # check that all executables that do not require # special environments can return a help message - for prog in `find ${PATH//:/ } -maxdepth 1 -name 'pycbc*' -print 2>/dev/null | egrep -v '(pycbc_live_nagios_monitor|pycbc_mvsc_get_features|pycbc_coinc_time)' | sort | uniq` + for prog in `find ${PATH//:/ } -maxdepth 1 -name 'pycbc*' -print 2>/dev/null | egrep -v '(pycbc_live_nagios_monitor|pycbc_mvsc_get_features)' | sort | uniq` do echo -e ">> [`date`] running $prog --help" $prog --help &> $LOG_FILE diff --git a/tools/static/cant_be_built b/tools/static/cant_be_built index a6e0a2a7339..3f43e30cd29 100644 --- a/tools/static/cant_be_built +++ b/tools/static/cant_be_built @@ -5,7 +5,6 @@ pycbc_mvsc_dag pycbc_inspinjfind pycbc_get_loudest_params pycbc_randomize_inj_dist_by_optsnr -pycbc_coinc_time pycbc_inference_plot_prior pycbc_inference_plot_posterior pycbc_fit_sngls_over_param diff --git a/tools/static/needs_full_build b/tools/static/needs_full_build index 941d3e9cf4b..e5349c77b62 100644 --- a/tools/static/needs_full_build +++ b/tools/static/needs_full_build @@ -1,4 +1,3 @@ -pycbc_coinc_time pycbc_geom_aligned_2dstack pycbc_plot_glitchgram pycbc_compute_durations From 7eac2b49e4fe591b43e7fec94f11c2339eb22132 Mon Sep 17 00:00:00 2001 From: Alex Nitz Date: Mon, 29 Jul 2024 09:28:04 +0100 Subject: [PATCH 10/35] qtransform interpolation should be linear to avoid negative values (#4826) --- pycbc/types/timeseries.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/pycbc/types/timeseries.py b/pycbc/types/timeseries.py index a004404e125..d3d378bf0e2 100644 --- a/pycbc/types/timeseries.py +++ b/pycbc/types/timeseries.py @@ -760,10 +760,11 @@ def qtransform(self, delta_t=None, delta_f=None, logfsteps=None, # Interpolate if requested if delta_f or delta_t or logfsteps: if return_complex: - interp_amp = interp2d(times, freqs, abs(q_plane.T)) - interp_phase = interp2d(times, freqs, _numpy.angle(q_plane.T)) + interp_amp = interp2d(freqs, times, abs(q_plane), kx=1, ky=1) + interp_phase = interp2d(freqs, times, _numpy.angle(q_plane), + kx=1, ky=1) else: - interp = interp2d(times, freqs, q_plane.T) + interp = interp2d(freqs, times, q_plane, kx=1, ky=1) if delta_t: times = _numpy.arange(float(self.start_time), @@ -777,12 +778,12 @@ def qtransform(self, delta_t=None, delta_f=None, logfsteps=None, if delta_f or delta_t or logfsteps: if return_complex: - q_plane = _numpy.exp(1.0j * interp_phase(times, freqs)) - q_plane *= interp_amp(times, freqs) + q_plane = _numpy.exp(1.0j * interp_phase(freqs, times)) + q_plane *= interp_amp(freqs, times) else: - q_plane = interp(times, freqs) + q_plane = interp(freqs, times) - return times, freqs, q_plane.T + return times, freqs, q_plane def notch_fir(self, f1, f2, order, beta=5.0, remove_corrupted=True): """ notch filter the time series using an FIR filtered generated from From ba7b9c0a8b2c878a0a25a4af9d9e23059cef68a8 Mon Sep 17 00:00:00 2001 From: segomezlo <92443572+sebastiangomezlopez@users.noreply.github.com> Date: Mon, 29 Jul 2024 11:23:20 +0200 Subject: [PATCH 11/35] PyGRB: Propagating slide_id information across postprocessing plotting scripts (#4809) * Update pygrb_plotting_utils.py Adding the option to feed a pyplot figure object to ease debugging * Update pygrb_postprocessing_utils.py load_triggers: Adding the capability to select triggers with certain slide_id * Update pycbc_pygrb_plot_snr_timeseries pycbc_pygrb_plot_snr_timeseries: propagating the slide_id functionalities across 
plotting scripts. We only want to show slide_id=0 in this plots * Update pycbc_pygrb_plot_chisq_veto Update pycbc_pygrb_plot_chisq_veto: propagating slide_id functionalities across plotting scripts. Here we only want to display slide_id=0 * Update pycbc_pygrb_plot_coh_ifosnr pycbc_pygrb_plot_coh_ifosnr: propagating slide_id functionalities, here we only want to display slide_id=0 * Update pycbc_pygrb_plot_null_stats pycbc_pygrb_plot_null_stats: propagating slide_id information, here we only want to display slide_id=0 * Update pygrb_postprocessing_utils.py * Update pycbc_pygrb_plot_chisq_veto * Update pycbc_pygrb_plot_chisq_veto * Update pycbc_pygrb_plot_chisq_veto * Update pycbc_pygrb_plot_snr_timeseries * Update pygrb_postprocessing_utils.py Minor fixes to satisfy codeclimate * Update pygrb_postprocessing_utils.py * Update pygrb_postprocessing_utils.py * Update pygrb_postprocessing_utils.py * Update pygrb_postprocessing_utils.py * Update pycbc_pygrb_plot_snr_timeseries * Update pygrb_plotting_utils.py * Update pycbc_pygrb_plot_snr_timeseries * Update pygrb_plotting_utils.py * Update pygrb_plotting_utils.py * Propagating slide_id information to some pygrb plotting scripts * Update pygrb_postprocessing_utils.py * Modification to slides parser. Update to plotting scripts * Update pycbc_pygrb_plot_snr_timeseries * Update pygrb_postprocessing_utils.py * Update pygrb_postprocessing_utils.py * Update pycbc_pygrb_plot_snr_timeseries * Update pycbc_pygrb_plot_snr_timeseries * Update pycbc_pygrb_plot_snr_timeseries * Update pygrb_postprocessing_utils.py * Update pygrb_postprocessing_utils.py * Update pycbc_pygrb_plot_snr_timeseries * Update pygrb_postprocessing_utils.py * Update pygrb_postprocessing_utils.py * Update pycbc/results/pygrb_postprocessing_utils.py Co-authored-by: Francesco Pannarale --------- Co-authored-by: Francesco Pannarale --- bin/pygrb/pycbc_pygrb_plot_chisq_veto | 16 +++++--- bin/pygrb/pycbc_pygrb_plot_coh_ifosnr | 10 +++-- bin/pygrb/pycbc_pygrb_plot_null_stats | 16 +++++--- bin/pygrb/pycbc_pygrb_plot_snr_timeseries | 21 +++++++---- pycbc/results/pygrb_postprocessing_utils.py | 42 ++++++++++++++++++++- 5 files changed, 83 insertions(+), 22 deletions(-) diff --git a/bin/pygrb/pycbc_pygrb_plot_chisq_veto b/bin/pygrb/pycbc_pygrb_plot_chisq_veto index 2a624083a12..966798fc0f2 100644 --- a/bin/pygrb/pycbc_pygrb_plot_chisq_veto +++ b/bin/pygrb/pycbc_pygrb_plot_chisq_veto @@ -49,7 +49,7 @@ __program__ = "pycbc_pygrb_plot_chisq_veto" # Functions # ============================================================================= # Function to load trigger data: includes applying cut in reweighted SNR -def load_data(input_file, ifos, vetoes, opts, injections=False): +def load_data(input_file, ifos, vetoes, opts, injections=False, slide_id=None): """Load data from a trigger/injection file""" snr_type = opts.snr_type @@ -71,12 +71,14 @@ def load_data(input_file, ifos, vetoes, opts, injections=False): # This will eventually become load_injections trigs_or_injs = \ ppu.load_triggers(input_file, ifos, vetoes, - rw_snr_threshold=rw_snr_threshold) + rw_snr_threshold=rw_snr_threshold, + slide_id=slide_id) else: logging.info("Loading triggers...") trigs_or_injs = \ ppu.load_triggers(input_file, ifos, vetoes, - rw_snr_threshold=rw_snr_threshold) + rw_snr_threshold=rw_snr_threshold, + slide_id=slide_id) # Count surviving points num_trigs_or_injs = len(trigs_or_injs['network/reweighted_snr']) @@ -187,7 +189,9 @@ parser.add_argument("--snr-type", default='coherent', 'single'], help="SNR value to plot on 
x-axis.") ppu.pygrb_add_bestnr_cut_opt(parser) ppu.pygrb_add_bestnr_opts(parser) +ppu.pygrb_add_slide_opts(parser) opts = parser.parse_args() +ppu.slide_opts_helper(opts) init_logging(opts.verbose, format="%(asctime)s: %(levelname)s: %(message)s") @@ -249,10 +253,12 @@ if ifo and ifo not in ifos: raise RuntimeError(err_msg) # Extract trigger data -trig_data = load_data(trig_file, ifos, vetoes, opts) +trig_data = load_data(trig_file, ifos, vetoes, opts, + slide_id=opts.slide_id) # Extract (or initialize) injection data -inj_data = load_data(found_missed_file, ifos, vetoes, opts, injections=True) +inj_data = load_data(found_missed_file, ifos, vetoes, opts, + injections=True, slide_id=0) # Sanity checks if trig_data[snr_type] is None and inj_data[snr_type] is None: diff --git a/bin/pygrb/pycbc_pygrb_plot_coh_ifosnr b/bin/pygrb/pycbc_pygrb_plot_coh_ifosnr index 1770bb27e71..5a2b88321e2 100644 --- a/bin/pygrb/pycbc_pygrb_plot_coh_ifosnr +++ b/bin/pygrb/pycbc_pygrb_plot_coh_ifosnr @@ -54,7 +54,7 @@ __program__ = "pycbc_pygrb_plot_coh_ifosnr" # Functions # ============================================================================= # Function to load trigger data -def load_data(input_file, ifos, vetoes, opts, injections=False): +def load_data(input_file, ifos, vetoes, opts, injections=False, slide_id=None): """Load data from a trigger/injection file""" # Initialize the dictionary @@ -75,6 +75,7 @@ def load_data(input_file, ifos, vetoes, opts, injections=False): ifos, vetoes, rw_snr_threshold=opts.newsnr_threshold, + slide_id=slide_id ) else: logging.info("Loading triggers...") @@ -83,6 +84,7 @@ def load_data(input_file, ifos, vetoes, opts, injections=False): ifos, vetoes, rw_snr_threshold=opts.newsnr_threshold, + slide_id=slide_id ) # Load SNR data @@ -186,7 +188,9 @@ parser.add_argument( help="Output file a zoomed in version of the plot.", ) ppu.pygrb_add_bestnr_cut_opt(parser) +ppu.pygrb_add_slide_opts(parser) opts = parser.parse_args() +ppu.slide_opts_helper(opts) init_logging(opts.verbose, format="%(asctime)s: %(levelname)s: %(message)s") @@ -232,10 +236,10 @@ ifos, vetoes = ppu.extract_ifos_and_vetoes( ) # Extract trigger data -trig_data = load_data(trig_file, ifos, vetoes, opts) +trig_data = load_data(trig_file, ifos, vetoes, opts, slide_id=opts.slide_id) # Extract (or initialize) injection data -inj_data = load_data(found_file, ifos, vetoes, opts, injections=True) +inj_data = load_data(found_file, ifos, vetoes, opts, injections=True, slide_id=0) # Generate plots logging.info("Plotting...") diff --git a/bin/pygrb/pycbc_pygrb_plot_null_stats b/bin/pygrb/pycbc_pygrb_plot_null_stats index 187b366962d..2a3f1b95029 100644 --- a/bin/pygrb/pycbc_pygrb_plot_null_stats +++ b/bin/pygrb/pycbc_pygrb_plot_null_stats @@ -47,7 +47,7 @@ __program__ = "pycbc_pygrb_plot_null_stats" # Functions # ============================================================================= # Function to load trigger data -def load_data(input_file, ifos, vetoes, opts, injections=False): +def load_data(input_file, ifos, vetoes, opts, injections=False, slide_id=None): """Load data from a trigger/injection file""" null_stat_type = opts.y_variable @@ -63,12 +63,14 @@ def load_data(input_file, ifos, vetoes, opts, injections=False): # This will eventually become ppu.load_injections() trigs_or_injs = \ ppu.load_triggers(input_file, ifos, vetoes, - rw_snr_threshold=opts.newsnr_threshold) + rw_snr_threshold=opts.newsnr_threshold, + slide_id=slide_id) else: logging.info("Loading triggers...") trigs_or_injs = \ 
ppu.load_triggers(input_file, ifos, vetoes, - rw_snr_threshold=opts.newsnr_threshold) + rw_snr_threshold=opts.newsnr_threshold, + slide_id=slide_id) # Coherent SNR is always used data['coherent'] = trigs_or_injs['network/coherent_snr'] @@ -140,7 +142,9 @@ parser.add_argument("-y", "--y-variable", default=None, help="Quantity to plot on the vertical axis.") ppu.pygrb_add_null_snr_opts(parser) ppu.pygrb_add_bestnr_cut_opt(parser) +ppu.pygrb_add_slide_opts(parser) opts = parser.parse_args() +ppu.slide_opts_helper(opts) init_logging(opts.verbose, format="%(asctime)s: %(levelname)s: %(message)s") @@ -182,10 +186,12 @@ ifos, vetoes = ppu.extract_ifos_and_vetoes(trig_file, opts.veto_files, opts.veto_category) # Extract trigger data -trig_data = load_data(trig_file, ifos, vetoes, opts) +trig_data = load_data(trig_file, ifos, vetoes, opts, + slide_id=opts.slide_id) # Extract (or initialize) injection data -inj_data = load_data(found_missed_file, ifos, vetoes, opts, injections=True) +inj_data = load_data(found_missed_file, ifos, vetoes, opts, + injections=True, slide_id=0) # Generate plots logging.info("Plotting...") diff --git a/bin/pygrb/pycbc_pygrb_plot_snr_timeseries b/bin/pygrb/pycbc_pygrb_plot_snr_timeseries index 7fe26f270f3..2f8266cca32 100644 --- a/bin/pygrb/pycbc_pygrb_plot_snr_timeseries +++ b/bin/pygrb/pycbc_pygrb_plot_snr_timeseries @@ -50,7 +50,7 @@ __program__ = "pycbc_pygrb_plot_snr_timeseries" # ============================================================================= # Load trigger data def load_data(input_file, ifos, vetoes, rw_snr_threshold=None, - injections=False): + injections=False, slide_id=None): """Load data from a trigger/injection file""" trigs_or_injs = None @@ -60,12 +60,14 @@ def load_data(input_file, ifos, vetoes, rw_snr_threshold=None, # This will eventually become load_injections trigs_or_injs = \ ppu.load_triggers(input_file, ifos, vetoes, - rw_snr_threshold=rw_snr_threshold) + rw_snr_threshold=rw_snr_threshold, + slide_id=slide_id) else: logging.info("Loading triggers...") trigs_or_injs = \ ppu.load_triggers(input_file, ifos, vetoes, - rw_snr_threshold=rw_snr_threshold) + rw_snr_threshold=rw_snr_threshold, + slide_id=slide_id) return trigs_or_injs @@ -107,7 +109,9 @@ parser.add_argument("-y", "--y-variable", default=None, choices=['coherent', 'single', 'reweighted', 'null'], help="Quantity to plot on the vertical axis.") ppu.pygrb_add_bestnr_cut_opt(parser) +ppu.pygrb_add_slide_opts(parser) opts = parser.parse_args() +ppu.slide_opts_helper(opts) init_logging(opts.verbose, format="%(asctime)s: %(levelname)s: %(message)s") @@ -136,21 +140,24 @@ ifos, vetoes = ppu.extract_ifos_and_vetoes(trig_file, opts.veto_files, # points to show the impact of the cut, otherwise remove points with # reweighted SNR below threshold if snr_type == 'reweighted': - trig_data = load_data(trig_file, ifos, vetoes) + trig_data = load_data(trig_file, ifos, vetoes, + slide_id=opts.slide_id) trig_data['network/reweighted_snr'] = \ reweightedsnr_cut(trig_data['network/reweighted_snr'], opts.newsnr_threshold) - inj_data = load_data(inj_file, ifos, vetoes, injections=True) + inj_data = load_data(inj_file, ifos, vetoes, injections=True, + slide_id=0) if inj_data is not None: inj_data['network/reweighted_snr'] = \ reweightedsnr_cut(inj_data['network/reweighted_snr'], opts.newsnr_threshold) else: trig_data = load_data(trig_file, ifos, vetoes, - rw_snr_threshold=opts.newsnr_threshold) + rw_snr_threshold=opts.newsnr_threshold, + slide_id=opts.slide_id) inj_data = load_data(inj_file, ifos, vetoes, 
rw_snr_threshold=opts.newsnr_threshold, - injections=True) + injections=True, slide_id=0) # Specify HDF file keys for x quantity (time) and y quantity (SNR) if snr_type == 'single': diff --git a/pycbc/results/pygrb_postprocessing_utils.py b/pycbc/results/pygrb_postprocessing_utils.py index 8f9fd82fb0b..99e562f1df5 100644 --- a/pycbc/results/pygrb_postprocessing_utils.py +++ b/pycbc/results/pygrb_postprocessing_utils.py @@ -92,10 +92,29 @@ def pygrb_initialize_plot_parser(description=None): parser.add_argument('--plot-caption', default=None, help="If provided, use the given string as the plot " + "caption") - return parser +def pygrb_add_slide_opts(parser): + """Add to parser object arguments related to short timeslides""" + parser.add_argument("--slide-id", type=str, default='0', + help="If all, the plotting scripts will use triggers" + + "from all short slides.") + + +def slide_opts_helper(args): + """ + This function overwrites the types of input slide_id information + when loading data in postprocessing scripts. + """ + if args.slide_id.isdigit(): + args.slide_id = int(args.slide_id) + elif args.slide_id.lower() == "all": + args.slide_id = None + else: + raise ValueError("--slide-id must be the string all or an int") + + def pygrb_add_injmc_opts(parser): """Add to parser object the arguments used for Monte-Carlo on distance.""" if parser is None: @@ -175,6 +194,20 @@ def pygrb_add_bestnr_cut_opt(parser): "Default 0: all events are considered.") +# ============================================================================= +# Wrapper to pick triggers with certain slide_ids +# ============================================================================= +def slide_filter(trig_file, data, slide_id=None): + """ + This function adds the capability to select triggers with specific + slide_ids during the postprocessing stage of PyGRB. 
+ """ + if slide_id is None: + return data + mask = numpy.where(trig_file['network/slide_id'][:] == slide_id)[0] + return data[mask] + + # ============================================================================= # Wrapper to read segments files # ============================================================================= @@ -359,7 +392,8 @@ def dataset_iterator(g, prefix=''): yield from dataset_iterator(item, path) -def load_triggers(input_file, ifos, vetoes, rw_snr_threshold=None): +def load_triggers(input_file, ifos, vetoes, rw_snr_threshold=None, + slide_id=None): """Loads triggers from PyGRB output file, returning a dictionary""" trigs = HFile(input_file, 'r') @@ -410,6 +444,10 @@ def load_triggers(input_file, ifos, vetoes, rw_snr_threshold=None): else: trigs_dict[path] = dset[above_thresh] + if trigs_dict[path].size == trigs['network/slide_id'][:].size: + trigs_dict[path] = slide_filter(trigs, trigs_dict[path], + slide_id=slide_id) + return trigs_dict From e4a9db6cba7e62cd203a44214e9813c40a2ddba2 Mon Sep 17 00:00:00 2001 From: Gareth S Cabourn Davies Date: Mon, 29 Jul 2024 16:39:50 +0100 Subject: [PATCH 12/35] Fix sngls_minifollowup event selection/ordering [master] (#4823) * Fix sngls_minifollowup event selection/ordering [master] * Remove testing printing * Still create stat class variable even when there are no triggers * Ranking -> statistic rename * minor bugs caught duiring review * oops, accidentally added to this PR --- bin/minifollowups/pycbc_sngl_minifollowup | 57 +++++++++-------------- pycbc/io/hdf.py | 25 ++++++---- 2 files changed, 37 insertions(+), 45 deletions(-) diff --git a/bin/minifollowups/pycbc_sngl_minifollowup b/bin/minifollowups/pycbc_sngl_minifollowup index a73a7f1cd68..ea65cfb97c7 100644 --- a/bin/minifollowups/pycbc_sngl_minifollowup +++ b/bin/minifollowups/pycbc_sngl_minifollowup @@ -33,7 +33,7 @@ import pycbc.workflow.minifollowups as mini import pycbc.workflow as wf import pycbc.events from pycbc.workflow.core import resolve_url_to_file -from pycbc.events import stat, veto, coinc +from pycbc.events import stat, veto from pycbc.io import hdf parser = argparse.ArgumentParser(description=__doc__[1:]) @@ -71,7 +71,8 @@ parser.add_argument('--inspiral-data-analyzed-name', "analyzed by each analysis job.") parser.add_argument('--min-sngl-ranking', type=float, default=6.5, help="Minimum sngl-ranking to consider for loudest " - "triggers. Default=6.5.") + "triggers. Useful for efficiency savings. " + "Default=6.5.") parser.add_argument('--non-coinc-time-only', action='store_true', help="If given remove (veto) single-detector triggers " "that occur during a time when at least one other " @@ -175,7 +176,7 @@ if args.veto_file: logging.info('Getting file vetoes') # veto_mask is an array of indices into the trigger arrays # giving the surviving triggers - veto_file_idx, _ = events.veto.indices_within_segments( + veto_file_idx, _ = pycbc.events.veto.indices_within_segments( trigs.end_time, [args.veto_file], ifo=args.instrument, @@ -237,48 +238,32 @@ if args.maximum_duration is not None: logging.info('Finding loudest clustered events') rank_method = stat.get_statistic_from_opts(args, [args.instrument]) -extra_kwargs = {} -for inputstr in args.statistic_keywords: - try: - key, value = inputstr.split(':') - extra_kwargs[key] = value - except ValueError: - err_txt = "--statistic-keywords must take input in the " \ - "form KWARG1:VALUE1 KWARG2:VALUE2 KWARG3:VALUE3 ... 
" \ - "Received {}".format(args.statistic_keywords) - raise ValueError(err_txt) - -logging.info("Calculating statistic for %d triggers", len(trigs.snr)) -sds = rank_method.single(trigs) -stat = rank_method.rank_stat_single((args.instrument, sds), **extra_kwargs) -logging.info("Clustering events over %.3fs window", args.cluster_window) -cid = coinc.cluster_over_time(stat, trigs.end_time, - args.cluster_window) -trigs.apply_mask(cid) -stat = stat[cid] -if len(trigs.snr) < num_events: - num_events = len(trigs.snr) - -logging.info("Finding the loudest triggers") -loudest_idx = sorted(numpy.argsort(stat)[::-1][:num_events]) -trigs.apply_mask(loudest_idx) -stat = stat[loudest_idx] +extra_kwargs = stat.parse_statistic_keywords_opt(args.statistic_keywords) + +trigs.mask_to_n_loudest_clustered_events( + rank_method, + n_loudest=num_events, + cluster_window=args.cluster_window, + statistic_kwargs=extra_kwargs, +) times = trigs.end_time -tids = trigs.template_id +if isinstance(trigs.mask, numpy.ndarray) and trigs.mask.dtype == bool: + trigger_ids = numpy.flatnonzero(trigs.mask) +else: + trigger_ids = trigs.mask + +trig_stat = trigs.stat # loop over number of loudest events to be followed up -order = stat.argsort()[::-1] +order = trig_stat.argsort()[::-1] for rank, num_event in enumerate(order): - logging.info('Processing event: %s', num_event) + logging.info('Processing event: %s', rank) files = wf.FileList([]) time = times[num_event] ifo_time = '%s:%s' %(args.instrument, str(time)) - if isinstance(trigs.mask, numpy.ndarray) and trigs.mask.dtype == bool: - tid = numpy.flatnonzero(trigs.mask)[num_event] - else: - tid = trigs.mask[num_event] + tid = trigger_ids[num_event] ifo_tid = '%s:%s' %(args.instrument, str(tid)) layouts += (mini.make_sngl_ifo(workflow, sngl_file, tmpltbank_file, diff --git a/pycbc/io/hdf.py b/pycbc/io/hdf.py index b89681ecc9f..a551fe37f1f 100644 --- a/pycbc/io/hdf.py +++ b/pycbc/io/hdf.py @@ -626,6 +626,7 @@ def trig_dict(self): mtrigs[k] = self.trigs[k][self.mask] else: mtrigs[k] = self.trigs[k][:] + mtrigs['ifo'] = self.ifo return mtrigs @classmethod @@ -687,30 +688,35 @@ def and_masks(self, logic_mask): self.mask[and_indices.astype(np.uint64)] = True def mask_to_n_loudest_clustered_events(self, rank_method, - ranking_threshold=6, + statistic_threshold=None, n_loudest=10, - cluster_window=10): + cluster_window=10, + statistic_kwargs=None): """Edits the mask property of the class to point to the N loudest single detector events as ranked by ranking statistic. Events are clustered so that no more than 1 event within +/- cluster_window will be considered. 
Can apply a threshold on the - ranking using ranking_threshold + statistic using statistic_threshold """ + if statistic_kwargs is None: + statistic_kwargs = {} sds = rank_method.single(self.trig_dict()) - stat = rank_method.rank_stat_single((self.ifo, sds)) + stat = rank_method.rank_stat_single( + (self.ifo, sds), + **statistic_kwargs + ) if len(stat) == 0: # No triggers at all, so just return here self.apply_mask(np.array([], dtype=np.uint64)) + self.stat = np.array([], dtype=np.uint64) return times = self.end_time - if ranking_threshold: - # Threshold on sngl_ranking - # Note that we can provide None or zero to do no thresholding - # but the default is to do some - keep = stat >= ranking_threshold + if statistic_threshold is not None: + # Threshold on statistic + keep = stat >= statistic_threshold stat = stat[keep] times = times[keep] self.apply_mask(keep) @@ -744,6 +750,7 @@ def mask_to_n_loudest_clustered_events(self, rank_method, index.sort() # Apply to the existing mask self.apply_mask(index) + self.stat = stat[index] @property def mask_size(self): From 013bdd192e325fae0a58f6f4d963bb70e8bfd0e1 Mon Sep 17 00:00:00 2001 From: Alex Nitz Date: Tue, 30 Jul 2024 02:47:18 +0100 Subject: [PATCH 13/35] add python 3.12 to the build and drop python 3.8 (#4716) * drop python3.7, 3.8, 3.9 * add missing quotes * banish distutils * banish p39 in more places * tox needs some things early * move here * maybe? * try el9 * must use 311 for el9 * this might be the right name * updates * try this * more stuff * update * do not use imp module * more places where imp was used * bring back 39 * more reverts * update * update * fixes * this should be here * cc * fix web url * add back 3.9 to basic tests * centralize python12 backwards compatibility function * cc * simplify setup.py, ensure math is linked * cc --- .github/workflows/basic-tests.yml | 2 +- .github/workflows/distribution.yml | 8 +- .github/workflows/inference-workflow.yml | 2 +- .github/workflows/mac-test.yml | 3 +- .github/workflows/search-workflow.yml | 2 +- .github/workflows/tmpltbank-workflow.yml | 2 +- .github/workflows/tut-test.yml | 2 +- .github/workflows/workflow-tests.yml | 2 +- .../pycbc_cut_merge_triggers_to_tmpltbank | 4 +- bin/all_sky_search/pycbc_reduce_template_bank | 4 +- bin/pycbc_banksim_match_combine | 5 +- pycbc/__init__.py | 15 +++ pyproject.toml | 14 +-- requirements-igwn.txt | 1 + setup.py | 103 ++++-------------- tools/docker_build_dist.sh | 1 - tox.ini | 5 +- 17 files changed, 62 insertions(+), 113 deletions(-) diff --git a/.github/workflows/basic-tests.yml b/.github/workflows/basic-tests.yml index d962026e09a..55e9c24ee9a 100644 --- a/.github/workflows/basic-tests.yml +++ b/.github/workflows/basic-tests.yml @@ -13,7 +13,7 @@ jobs: max-parallel: 60 matrix: os: [ubuntu-20.04] - python-version: [3.8, 3.9, '3.10', '3.11'] + python-version: ['3.9', '3.10', '3.11', '3.12'] test-type: [unittest, search, docs] steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/distribution.yml b/.github/workflows/distribution.yml index 122f01ae03d..1b54b501a3d 100644 --- a/.github/workflows/distribution.yml +++ b/.github/workflows/distribution.yml @@ -20,13 +20,13 @@ jobs: fetch-depth: 0 - uses: actions/setup-python@v4 with: - python-version: 3.8 + python-version: '3.10' - name: Install cibuildwheel run: python -m pip install cibuildwheel - name: Build wheels run: python -m cibuildwheel --output-dir wheelhouse env: - CIBW_BUILD: cp38-* cp39-* cp310-* cp311-* + CIBW_BUILD: cp39-* cp310-* cp311-* cp312-* CIBW_SKIP: "*musllinux*" 
CIBW_ARCHS_MACOS: x86_64 arm64 - uses: actions/upload-artifact@v2 @@ -40,10 +40,10 @@ jobs: - uses: actions/checkout@v3 with: fetch-depth: 0 - - name: Set up Python 3.8 + - name: Set up Python 3.10 uses: actions/setup-python@v4 with: - python-version: 3.8 + python-version: '3.10' - uses: actions/download-artifact@v2 with: path: ./ diff --git a/.github/workflows/inference-workflow.yml b/.github/workflows/inference-workflow.yml index d95e1c5b714..d31076da5e6 100644 --- a/.github/workflows/inference-workflow.yml +++ b/.github/workflows/inference-workflow.yml @@ -10,7 +10,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v4 with: - python-version: 3.8 + python-version: '3.10' - name: install condor run: | wget -qO - https://research.cs.wisc.edu/htcondor/ubuntu/HTCondor-Release.gpg.key | sudo apt-key add - diff --git a/.github/workflows/mac-test.yml b/.github/workflows/mac-test.yml index f441f648d5c..7501eb9d9b5 100644 --- a/.github/workflows/mac-test.yml +++ b/.github/workflows/mac-test.yml @@ -13,7 +13,7 @@ jobs: max-parallel: 4 matrix: os: [macos-12] - python-version: [3.8, 3.9, '3.10', '3.11'] + python-version: ['3.10', '3.11', '3.12'] steps: - uses: actions/checkout@v1 - name: Set up Python ${{ matrix.python-version }} @@ -25,5 +25,4 @@ jobs: pip install --upgrade pip setuptools "tox<4.0.0" - name: run basic pycbc test suite run: | - sudo chmod -R 777 /usr/local/miniconda/ tox -e py-unittest diff --git a/.github/workflows/search-workflow.yml b/.github/workflows/search-workflow.yml index 9383c5066cb..f7f8c744ad5 100644 --- a/.github/workflows/search-workflow.yml +++ b/.github/workflows/search-workflow.yml @@ -15,7 +15,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v4 with: - python-version: 3.8 + python-version: '3.10' - name: install condor run: | wget -qO - https://research.cs.wisc.edu/htcondor/ubuntu/HTCondor-Release.gpg.key | sudo apt-key add - diff --git a/.github/workflows/tmpltbank-workflow.yml b/.github/workflows/tmpltbank-workflow.yml index 66a4f1ba3b2..e971e97b2b7 100644 --- a/.github/workflows/tmpltbank-workflow.yml +++ b/.github/workflows/tmpltbank-workflow.yml @@ -14,7 +14,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v4 with: - python-version: 3.8 + python-version: '3.10' - name: install condor run: | wget -qO - https://research.cs.wisc.edu/htcondor/ubuntu/HTCondor-Release.gpg.key | sudo apt-key add - diff --git a/.github/workflows/tut-test.yml b/.github/workflows/tut-test.yml index 5abd59c9d75..11928d632f6 100644 --- a/.github/workflows/tut-test.yml +++ b/.github/workflows/tut-test.yml @@ -13,7 +13,7 @@ jobs: max-parallel: 60 matrix: os: [ubuntu-20.04] - python-version: [3.8, 3.9, '3.10', '3.11'] + python-version: ['3.10', '3.11', '3.12'] steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} diff --git a/.github/workflows/workflow-tests.yml b/.github/workflows/workflow-tests.yml index 86410066aff..79643cc8793 100644 --- a/.github/workflows/workflow-tests.yml +++ b/.github/workflows/workflow-tests.yml @@ -19,7 +19,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v4 with: - python-version: 3.8 + python-version: '3.10' - name: install condor run: | wget -qO - https://research.cs.wisc.edu/htcondor/ubuntu/HTCondor-Release.gpg.key | sudo apt-key add - diff --git a/bin/all_sky_search/pycbc_cut_merge_triggers_to_tmpltbank b/bin/all_sky_search/pycbc_cut_merge_triggers_to_tmpltbank index 0848aa949c2..fafbb03bf12 100644 --- a/bin/all_sky_search/pycbc_cut_merge_triggers_to_tmpltbank +++ 
b/bin/all_sky_search/pycbc_cut_merge_triggers_to_tmpltbank @@ -21,12 +21,12 @@ Reduce a MERGE triggers file to a reduced template bank """ import logging -import imp import argparse import numpy import h5py import pycbc from pycbc.io import HFile +from pycbc import load_source parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) @@ -50,7 +50,7 @@ pycbc.init_logging(opt.verbose) bank_fd = HFile(opt.full_template_bank, 'r') -modl = imp.load_source('filter_func', opt.filter_func_file) +modl = load_source('filter_func', opt.filter_func_file) func = modl.filter_tmpltbank bool_arr = func(bank_fd) diff --git a/bin/all_sky_search/pycbc_reduce_template_bank b/bin/all_sky_search/pycbc_reduce_template_bank index 9345ac95dc1..c8010edb36f 100644 --- a/bin/all_sky_search/pycbc_reduce_template_bank +++ b/bin/all_sky_search/pycbc_reduce_template_bank @@ -23,10 +23,10 @@ Reduce a template bank using some input parameter cuts import numpy import logging -import imp import argparse import pycbc from pycbc.io import HFile +from pycbc import load_source parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) @@ -48,7 +48,7 @@ pycbc.init_logging(opt.verbose) bank_fd = HFile(opt.input_bank, 'r') -modl = imp.load_source('filter_func', opt.filter_func_file) +modl = load_source('filter_func', opt.filter_func_file) func = modl.filter_tmpltbank bool_arr = func(bank_fd) diff --git a/bin/pycbc_banksim_match_combine b/bin/pycbc_banksim_match_combine index 111329e8e7e..652fd0b8813 100644 --- a/bin/pycbc_banksim_match_combine +++ b/bin/pycbc_banksim_match_combine @@ -22,7 +22,6 @@ a set of injection files. The *order* of the injection files *must* match the bank files, and the number of injections in each must correspond one-to-one. """ -import imp import argparse import numpy as np @@ -33,7 +32,7 @@ from pycbc import pnutils from pycbc.waveform import TemplateBank from pycbc.io.ligolw import LIGOLWContentHandler from pycbc.io.hdf import HFile - +from pycbc import load_source __author__ = "Ian Harry " __version__ = pycbc.version.git_verbose_msg @@ -137,7 +136,7 @@ for val in trig_par_list: f['trig_params/{}'.format(val)] = trig_params[val] if options.filter_func_file: - modl = imp.load_source('filter_func', options.filter_func_file) + modl = load_source('filter_func', options.filter_func_file) func = modl.filter_injections bool_arr = func(inj_params['mass1'], inj_params['mass2'], inj_params['spin1z'], inj_params['spin2z']) diff --git a/pycbc/__init__.py b/pycbc/__init__.py index 02d8b95e961..d6db0026e0a 100644 --- a/pycbc/__init__.py +++ b/pycbc/__init__.py @@ -32,6 +32,8 @@ import logging import random import string +import importlib.util +import importlib.machinery from datetime import datetime as dt try: @@ -227,3 +229,16 @@ def gps_now(): from astropy.time import Time return float(Time.now().gps) + +# This is needed as a backwards compatibility. The function was removed in +# python 3.12. +def load_source(modname, filename): + loader = importlib.machinery.SourceFileLoader(modname, filename) + spec = importlib.util.spec_from_file_location(modname, filename, + loader=loader) + module = importlib.util.module_from_spec(spec) + # The module is always executed and not cached in sys.modules. + # Uncomment the following line to cache the module. 
+ # sys.modules[module.__name__] = module + loader.exec_module(module) + return module diff --git a/pyproject.toml b/pyproject.toml index af1b3aa113d..1fcf4c3ca7d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,16 +1,6 @@ [build-system] requires = ["setuptools", "wheel", - "cython>=3.0.0", - "numpy==1.16.0; python_version <= '3.7'", - "numpy==1.17.3; python_version == '3.8'", - "numpy==1.19.3; python_version == '3.9'", - "numpy==1.21.4; python_version =='3.10'", - "numpy==1.23.4; python_version=='3.11'", - "numpy; python_version >= '3.12'", + "cython>=0.29.21", + "numpy>=2.0.0", ] - -# To ensure the best compatibility, try to match the numpy reqs -# where possible to the following used by scipy and the minimum -# of our normal numpy requirements in setup.py -# https://github.com/scipy/oldest-supported-numpy/blob/master/setup.cfg diff --git a/requirements-igwn.txt b/requirements-igwn.txt index 9486aa71255..f6199d03283 100644 --- a/requirements-igwn.txt +++ b/requirements-igwn.txt @@ -1,4 +1,5 @@ # For LDG service access +ligo-proxy-utils ciecplib[kerberos] >= 0.7.0 dqsegdb2 >= 1.1.4 amqplib diff --git a/setup.py b/setup.py index d5f769271c9..f39558046d5 100755 --- a/setup.py +++ b/setup.py @@ -20,11 +20,9 @@ """ import sys -import os, subprocess, shutil +import os, subprocess import platform -from distutils.command.clean import clean as _clean - from setuptools import Extension, setup, Command from setuptools.command.build_ext import build_ext as _build_ext from setuptools import find_packages @@ -83,27 +81,6 @@ def run(self): _build_ext.run(self) - -# Add swig-generated files to the list of things to clean, so they -# get regenerated each time. -class clean(_clean): - def finalize_options (self): - _clean.finalize_options(self) - self.clean_files = [] - self.clean_folders = ['docs/_build'] - def run(self): - _clean.run(self) - for f in self.clean_files: - try: - os.unlink(f) - print('removed ' + f) - except: - pass - - for fol in self.clean_folders: - shutil.rmtree(fol, ignore_errors=True) - print('removed ' + fol) - def get_version_info(): """Get VCS info and write version info to version.py. 
""" @@ -192,7 +169,6 @@ def run(self): cmdclass = { 'build_docs': build_docs, 'build_gh_pages': build_gh_pages, - 'clean': clean, 'build_ext': cbuild_ext } @@ -209,12 +185,21 @@ def run(self): # do the actual work of building the package VERSION = get_version_info() -cythonext = ['waveform.spa_tmplt', - 'waveform.utils', - 'types.array', - 'filter.matchedfilter', - 'vetoes.chisq'] +cythonext = ['pycbc.waveform.spa_tmplt_cpu', + 'pycbc.waveform.utils_cpu', + 'pycbc.types.array_cpu', + 'pycbc.filter.matchedfilter_cpu', + 'pycbc.vetoes.chisq_cpu', + "pycbc.fft.fftw_pruned_cython", + "pycbc.events.eventmgr_cython", + "pycbc.events.simd_threshold_cython", + "pycbc.filter.simd_correlate_cython", + "pycbc.waveform.decompress_cpu_cython", + "pycbc.inference.models.relbin_cpu", + ] ext = [] + +libraries = ['m'] # Some platforms / toolchains don't implicitly link this cython_compile_args = ['-O3', '-w', '-ffast-math', '-ffinite-math-only'] @@ -233,57 +218,18 @@ def run(self): cython_compile_args += ["-stdlib=libc++"] cython_link_args += ["-stdlib=libc++"] + for name in cythonext: - e = Extension("pycbc.%s_cpu" % name, - ["pycbc/%s_cpu.pyx" % name.replace('.', '/')], + fname = name.replace('.', '/') + e = Extension(name, + [f"{fname}.pyx"], + language='c++', extra_compile_args=cython_compile_args, extra_link_args=cython_link_args, + libraries=libraries, compiler_directives={'embedsignature': True}) ext.append(e) -# Not all modules work like this: -e = Extension("pycbc.fft.fftw_pruned_cython", - ["pycbc/fft/fftw_pruned_cython.pyx"], - extra_compile_args=cython_compile_args, - extra_link_args=cython_link_args, - compiler_directives={'embedsignature': True}) -ext.append(e) -e = Extension("pycbc.events.eventmgr_cython", - ["pycbc/events/eventmgr_cython.pyx"], - extra_compile_args=cython_compile_args, - extra_link_args=cython_link_args, - compiler_directives={'embedsignature': True}) -ext.append(e) -e = Extension("pycbc.events.simd_threshold_cython", - ["pycbc/events/simd_threshold_cython.pyx"], - language='c++', - extra_compile_args=cython_compile_args, - extra_link_args=cython_link_args, - compiler_directives={'embedsignature': True}) -ext.append(e) -e = Extension("pycbc.filter.simd_correlate_cython", - ["pycbc/filter/simd_correlate_cython.pyx"], - language='c++', - extra_compile_args=cython_compile_args, - extra_link_args=cython_link_args, - compiler_directives={'embedsignature': True}) -ext.append(e) -e = Extension("pycbc.waveform.decompress_cpu_cython", - ["pycbc/waveform/decompress_cpu_cython.pyx"], - language='c++', - extra_compile_args=cython_compile_args, - extra_link_args=cython_link_args, - compiler_directives={'embedsignature': True}) -ext.append(e) -e = Extension("pycbc.inference.models.relbin_cpu", - ["pycbc/inference/models/relbin_cpu.pyx"], - language='c++', - extra_compile_args=cython_compile_args, - extra_link_args=cython_link_args, - compiler_directives={'embedsignature': True}) -ext.append(e) - - setup( name = 'PyCBC', version = VERSION, @@ -292,7 +238,7 @@ def run(self): long_description_content_type='text/markdown', author = 'The PyCBC team', author_email = 'alex.nitz@gmail.org', - url = 'http://www.pycbc.org/', + url = 'http://pycbc.org/', download_url = f'https://github.com/gwastro/pycbc/tarball/v{VERSION}', keywords = [ 'ligo', @@ -313,14 +259,13 @@ def run(self): 'pycbc.neutron_stars': find_files('pycbc/neutron_stars') }, ext_modules = ext, - python_requires='>=3.7', + python_requires='>=3.9', classifiers=[ 'Programming Language :: Python', - 'Programming Language :: Python :: 
3.7', - 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', 'Programming Language :: Python :: 3.10', 'Programming Language :: Python :: 3.11', + 'Programming Language :: Python :: 3.12', 'Intended Audience :: Science/Research', 'Natural Language :: English', 'Topic :: Scientific/Engineering', diff --git a/tools/docker_build_dist.sh b/tools/docker_build_dist.sh index 3acaa64cbe7..14d26c33f29 100755 --- a/tools/docker_build_dist.sh +++ b/tools/docker_build_dist.sh @@ -43,7 +43,6 @@ if [ "x${PYCBC_CONTAINER}" == "xpycbc_rhel_virtualenv" ]; then yum clean all yum makecache yum -y install openssl-devel - yum -y install ligo-proxy-utils yum -y install python3-virtualenv yum -y install hdf5-static libxml2-static zlib-static libstdc++-static cfitsio-static glibc-static swig fftw-static gsl-static --skip-broken diff --git a/tox.ini b/tox.ini index 4d0538c3968..ee0d9866c71 100644 --- a/tox.ini +++ b/tox.ini @@ -3,7 +3,8 @@ recreate = true envlist = py-unittest indexserver = preinstall = https://pypi.python.org/simple -requires=tox-conda +requires=tox-conda + setuptools [base] deps = @@ -14,7 +15,7 @@ deps = [testenv] allowlist_externals = bash passenv=LAL_DATA_PATH -conda_deps=openssl=1.1 +conda_deps=openssl conda_channels=conda-forge platform = lin: linux mac: darwin From a2aafefb3f8c6cef64f0288b5ff144f81bd440cc Mon Sep 17 00:00:00 2001 From: Alex Nitz Date: Tue, 30 Jul 2024 10:33:32 +0100 Subject: [PATCH 14/35] deployment of docs mistake (#4830) --- .github/workflows/basic-tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/basic-tests.yml b/.github/workflows/basic-tests.yml index 55e9c24ee9a..ab22c715793 100644 --- a/.github/workflows/basic-tests.yml +++ b/.github/workflows/basic-tests.yml @@ -50,7 +50,7 @@ jobs: export LAL_DATA_PATH=$PWD tox -e py-inference - name: store documentation page - if: matrix.test-type == 'docs' && matrix.python-version == '3.8' + if: matrix.test-type == 'docs' && matrix.python-version == '3.12' uses: actions/upload-artifact@v2 with: name: documentation-page From e32b429d0144d2b377d764d57289c087a7d62cef Mon Sep 17 00:00:00 2001 From: Alex Nitz Date: Wed, 31 Jul 2024 09:57:22 +0100 Subject: [PATCH 15/35] Update setup.py (#4832) --- setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index f39558046d5..66474334e32 100755 --- a/setup.py +++ b/setup.py @@ -95,8 +95,8 @@ def __getattr__(self, attr): vinfo = _version_helper.generate_git_version_info() except: vinfo = vdummy() - vinfo.version = '2.4.dev3' - vinfo.release = 'False' + vinfo.version = '2.5.0' + vinfo.release = 'True' version_script = f"""# coding: utf-8 # Generated by setup.py for PyCBC on {vinfo.build_date}. From b3f2d351ed2dbb2bbb6e117c220cc73f4e0fe0d1 Mon Sep 17 00:00:00 2001 From: Gareth S Cabourn Davies Date: Wed, 31 Jul 2024 14:39:53 +0100 Subject: [PATCH 16/35] Set back to development (#4833) * Update setup.py * Update tox.ini * Revert "Update tox.ini" This reverts commit 7cbe4dc1199e7d5202c2aaa675b4034e0c080e64. * try this * Revert "try this" This reverts commit c6fd4a7b53d98ba92ca212eb2b4ad3729f95e0cb. * pip install cython and numpy before tox runs * Revert "pip install cython and numpy before tox runs" This reverts commit 67702f30e819b2e382780b453cbe6ad4505fb29f. 
* Ian's suggestion * try me * Try just not installing BBHx until I work out how to fix this * Turn off lisa examples in teh docs building * Do not try to include image whihc is not longer made --- docs/_include/inference_example_lisa_smbhb_inj.sh | 8 ++++---- docs/_include/inference_example_lisa_smbhb_ldc.sh | 6 +++--- docs/inference/examples/lisa_smbhb_inj_pe.rst | 5 ++++- docs/inference/examples/lisa_smbhb_ldc_pe.rst | 5 ++++- setup.py | 4 ++-- tox.ini | 8 ++++---- 6 files changed, 21 insertions(+), 15 deletions(-) diff --git a/docs/_include/inference_example_lisa_smbhb_inj.sh b/docs/_include/inference_example_lisa_smbhb_inj.sh index d5ae715389b..a0515de3bf8 100644 --- a/docs/_include/inference_example_lisa_smbhb_inj.sh +++ b/docs/_include/inference_example_lisa_smbhb_inj.sh @@ -1,6 +1,6 @@ set -e export OMP_NUM_THREADS=1 -cp ../../examples/inference/lisa_smbhb_inj/injection_smbhb.ini injection_smbhb.ini -sh ../../examples/inference/lisa_smbhb_inj/injection_smbhb.sh -sh ../../examples/inference/lisa_smbhb_inj/run.sh -sh ../../examples/inference/lisa_smbhb_inj/plot.sh +#cp ../../examples/inference/lisa_smbhb_inj/injection_smbhb.ini injection_smbhb.ini +#sh ../../examples/inference/lisa_smbhb_inj/injection_smbhb.sh +#sh ../../examples/inference/lisa_smbhb_inj/run.sh +#sh ../../examples/inference/lisa_smbhb_inj/plot.sh diff --git a/docs/_include/inference_example_lisa_smbhb_ldc.sh b/docs/_include/inference_example_lisa_smbhb_ldc.sh index 29f3a6c8e8b..b83a01c6560 100644 --- a/docs/_include/inference_example_lisa_smbhb_ldc.sh +++ b/docs/_include/inference_example_lisa_smbhb_ldc.sh @@ -1,5 +1,5 @@ set -e export OMP_NUM_THREADS=1 -sh ../../examples/inference/lisa_smbhb_ldc/get.sh -sh ../../examples/inference/lisa_smbhb_ldc/run.sh -python ../../examples/inference/lisa_smbhb_ldc/advanced_plot.py +#sh ../../examples/inference/lisa_smbhb_ldc/get.sh +#sh ../../examples/inference/lisa_smbhb_ldc/run.sh +#python ../../examples/inference/lisa_smbhb_ldc/advanced_plot.py diff --git a/docs/inference/examples/lisa_smbhb_inj_pe.rst b/docs/inference/examples/lisa_smbhb_inj_pe.rst index 7924261c8f9..c74d7b6bfe2 100644 --- a/docs/inference/examples/lisa_smbhb_inj_pe.rst +++ b/docs/inference/examples/lisa_smbhb_inj_pe.rst @@ -57,7 +57,10 @@ To plot the posterior distribution after the last iteration, you can run the fol In this example it will create the following plot: -.. image:: ../../_include/lisa_smbhb_mass_tc.png +.. warning:: + EXAMPLE TURNED OFF FOR NOW +.. + image:: ../../_include/lisa_smbhb_mass_tc.png :scale: 60 :align: center diff --git a/docs/inference/examples/lisa_smbhb_ldc_pe.rst b/docs/inference/examples/lisa_smbhb_ldc_pe.rst index 0e49929d0bc..fbb57f8f134 100644 --- a/docs/inference/examples/lisa_smbhb_ldc_pe.rst +++ b/docs/inference/examples/lisa_smbhb_ldc_pe.rst @@ -61,7 +61,10 @@ Or you can run the advanced one: You can modify this advanced plot script to generate the posterior of any SMBHB signals in the LDC Sangria dataset. In this example it will create the following plot: -.. image:: ../../_include/lisa_smbhb_mass_tc_0.png +.. warning:: + EXAMPLE TURNED OFF FOR NOW +.. 
+ image:: ../../_include/lisa_smbhb_mass_tc_0.png :scale: 60 :align: center diff --git a/setup.py b/setup.py index 66474334e32..33bd8044209 100755 --- a/setup.py +++ b/setup.py @@ -95,8 +95,8 @@ def __getattr__(self, attr): vinfo = _version_helper.generate_git_version_info() except: vinfo = vdummy() - vinfo.version = '2.5.0' - vinfo.release = 'True' + vinfo.version = '2.5.dev1' + vinfo.release = 'False' version_script = f"""# coding: utf-8 # Generated by setup.py for PyCBC on {vinfo.build_date}. diff --git a/tox.ini b/tox.ini index ee0d9866c71..601958256b3 100644 --- a/tox.ini +++ b/tox.ini @@ -26,8 +26,8 @@ deps = {[base]deps} pytest ; Needed for `BBHx` package to work with PyCBC - git+https://github.com/mikekatz04/BBHx.git@4fff509; sys_platform == 'linux' - git+https://github.com/gwastro/BBHX-waveform-model.git; sys_platform == 'linux' + ; git+https://github.com/mikekatz04/BBHx.git@4fff509; sys_platform == 'linux' + ; git+https://github.com/gwastro/BBHX-waveform-model.git; sys_platform == 'linux' conda_deps= mysqlclient lin: gcc_linux-64>=12.2.0 @@ -67,8 +67,8 @@ commands = bash tools/pycbc_test_suite.sh deps = {[base]deps} ; Needed for `BBHx` package to work with PyCBC - git+https://github.com/mikekatz04/BBHx.git@4fff509; sys_platform == 'linux' - git+https://github.com/gwastro/BBHX-waveform-model.git; sys_platform == 'linux' + ; git+https://github.com/mikekatz04/BBHx.git@4fff509; sys_platform == 'linux' + ; git+https://github.com/gwastro/BBHX-waveform-model.git; sys_platform == 'linux' conda_deps= mysqlclient lin: gcc_linux-64>=12.2.0 From 63c0ecfb69696a7272d9df5b17a0f49a14844e65 Mon Sep 17 00:00:00 2001 From: Shichao Wu Date: Wed, 31 Jul 2024 18:01:33 +0100 Subject: [PATCH 17/35] support python3.12 now, so use bbhx master (#4834) * Update tox.ini with Tito's BBHx fork * Update tox.ini * Update tox.ini * Update tox.ini * Update inference_example_lisa_smbhb_inj.sh * Update inference_example_lisa_smbhb_ldc.sh * Update lisa_smbhb_inj_pe.rst * Update lisa_smbhb_ldc_pe.rst --- docs/_include/inference_example_lisa_smbhb_inj.sh | 8 ++++---- docs/_include/inference_example_lisa_smbhb_ldc.sh | 6 +++--- docs/inference/examples/lisa_smbhb_inj_pe.rst | 5 +---- docs/inference/examples/lisa_smbhb_ldc_pe.rst | 5 +---- tox.ini | 8 ++++---- 5 files changed, 13 insertions(+), 19 deletions(-) diff --git a/docs/_include/inference_example_lisa_smbhb_inj.sh b/docs/_include/inference_example_lisa_smbhb_inj.sh index a0515de3bf8..d5ae715389b 100644 --- a/docs/_include/inference_example_lisa_smbhb_inj.sh +++ b/docs/_include/inference_example_lisa_smbhb_inj.sh @@ -1,6 +1,6 @@ set -e export OMP_NUM_THREADS=1 -#cp ../../examples/inference/lisa_smbhb_inj/injection_smbhb.ini injection_smbhb.ini -#sh ../../examples/inference/lisa_smbhb_inj/injection_smbhb.sh -#sh ../../examples/inference/lisa_smbhb_inj/run.sh -#sh ../../examples/inference/lisa_smbhb_inj/plot.sh +cp ../../examples/inference/lisa_smbhb_inj/injection_smbhb.ini injection_smbhb.ini +sh ../../examples/inference/lisa_smbhb_inj/injection_smbhb.sh +sh ../../examples/inference/lisa_smbhb_inj/run.sh +sh ../../examples/inference/lisa_smbhb_inj/plot.sh diff --git a/docs/_include/inference_example_lisa_smbhb_ldc.sh b/docs/_include/inference_example_lisa_smbhb_ldc.sh index b83a01c6560..29f3a6c8e8b 100644 --- a/docs/_include/inference_example_lisa_smbhb_ldc.sh +++ b/docs/_include/inference_example_lisa_smbhb_ldc.sh @@ -1,5 +1,5 @@ set -e export OMP_NUM_THREADS=1 -#sh ../../examples/inference/lisa_smbhb_ldc/get.sh -#sh ../../examples/inference/lisa_smbhb_ldc/run.sh 
-#python ../../examples/inference/lisa_smbhb_ldc/advanced_plot.py +sh ../../examples/inference/lisa_smbhb_ldc/get.sh +sh ../../examples/inference/lisa_smbhb_ldc/run.sh +python ../../examples/inference/lisa_smbhb_ldc/advanced_plot.py diff --git a/docs/inference/examples/lisa_smbhb_inj_pe.rst b/docs/inference/examples/lisa_smbhb_inj_pe.rst index c74d7b6bfe2..7924261c8f9 100644 --- a/docs/inference/examples/lisa_smbhb_inj_pe.rst +++ b/docs/inference/examples/lisa_smbhb_inj_pe.rst @@ -57,10 +57,7 @@ To plot the posterior distribution after the last iteration, you can run the fol In this example it will create the following plot: -.. warning:: - EXAMPLE TURNED OFF FOR NOW -.. - image:: ../../_include/lisa_smbhb_mass_tc.png +.. image:: ../../_include/lisa_smbhb_mass_tc.png :scale: 60 :align: center diff --git a/docs/inference/examples/lisa_smbhb_ldc_pe.rst b/docs/inference/examples/lisa_smbhb_ldc_pe.rst index fbb57f8f134..0e49929d0bc 100644 --- a/docs/inference/examples/lisa_smbhb_ldc_pe.rst +++ b/docs/inference/examples/lisa_smbhb_ldc_pe.rst @@ -61,10 +61,7 @@ Or you can run the advanced one: You can modify this advanced plot script to generate the posterior of any SMBHB signals in the LDC Sangria dataset. In this example it will create the following plot: -.. warning:: - EXAMPLE TURNED OFF FOR NOW -.. - image:: ../../_include/lisa_smbhb_mass_tc_0.png +.. image:: ../../_include/lisa_smbhb_mass_tc_0.png :scale: 60 :align: center diff --git a/tox.ini b/tox.ini index 601958256b3..0baf40ef5cf 100644 --- a/tox.ini +++ b/tox.ini @@ -26,8 +26,8 @@ deps = {[base]deps} pytest ; Needed for `BBHx` package to work with PyCBC - ; git+https://github.com/mikekatz04/BBHx.git@4fff509; sys_platform == 'linux' - ; git+https://github.com/gwastro/BBHX-waveform-model.git; sys_platform == 'linux' + git+https://github.com/titodalcanton/BBHx.git@py39-and-cleanup; sys_platform == 'linux' + git+https://github.com/gwastro/BBHX-waveform-model.git; sys_platform == 'linux' conda_deps= mysqlclient lin: gcc_linux-64>=12.2.0 @@ -67,8 +67,8 @@ commands = bash tools/pycbc_test_suite.sh deps = {[base]deps} ; Needed for `BBHx` package to work with PyCBC - ; git+https://github.com/mikekatz04/BBHx.git@4fff509; sys_platform == 'linux' - ; git+https://github.com/gwastro/BBHX-waveform-model.git; sys_platform == 'linux' + git+https://github.com/titodalcanton/BBHx.git@py39-and-cleanup; sys_platform == 'linux' + git+https://github.com/gwastro/BBHX-waveform-model.git; sys_platform == 'linux' conda_deps= mysqlclient lin: gcc_linux-64>=12.2.0 From 235c03aef02bb5974e4d55046ae4efa51344eb70 Mon Sep 17 00:00:00 2001 From: Gareth S Cabourn Davies Date: Fri, 2 Aug 2024 15:56:58 +0100 Subject: [PATCH 18/35] Add checksums to hdf5 datasets datasets (#4831) * Add a wrapping to h5py groups so that we can set checksumming * use phil, wrap to create_dataset rather than setitem (which uses create_dataset anyway) * Dont run fletcher32 for object dtypes --- pycbc/io/hdf.py | 35 ++++++++++++++++++++++++++++++++++- 1 file changed, 34 insertions(+), 1 deletion(-) diff --git a/pycbc/io/hdf.py b/pycbc/io/hdf.py index a551fe37f1f..15911d545c9 100644 --- a/pycbc/io/hdf.py +++ b/pycbc/io/hdf.py @@ -28,7 +28,40 @@ logger = logging.getLogger('pycbc.io.hdf') -class HFile(h5py.File): +class HGroup(h5py.Group): + """ Low level extensions to the h5py group object + """ + def create_group(self, name, track_order=None): + """ + Wrapper around h5py's create_group in order to redirect to the + manual HGroup object defined here + """ + if track_order is None: + track_order = 
h5py.h5.get_config().track_order + + with h5py._objects.phil: + name, lcpl = self._e(name, lcpl=True) + gcpl = HGroup._gcpl_crt_order if track_order else None + gid = h5py.h5g.create(self.id, name, lcpl=lcpl, gcpl=gcpl) + return HGroup(gid) + + def create_dataset(self, name, shape=None, dtype=None, data=None, **kwds): + """ + Wrapper around h5py's create_dataset so that checksums are used + """ + if hasattr(data, 'dtype') and not data.dtype == object: + kwds['fletcher32'] = True + return h5py.Group.create_dataset( + self, + name, + shape=shape, + dtype=dtype, + data=data, + **kwds + ) + + +class HFile(HGroup, h5py.File): """ Low level extensions to the capabilities of reading an hdf5 File """ def select(self, fcn, *args, chunksize=10**6, derived=None, group='', From 6fd4fdb1e931aa7db127f3913c9e6280c5e222ce Mon Sep 17 00:00:00 2001 From: Gareth S Cabourn Davies Date: Fri, 2 Aug 2024 17:59:30 +0100 Subject: [PATCH 19/35] Update pycbc_live (#4827) --- bin/pycbc_live | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bin/pycbc_live b/bin/pycbc_live index 1b45e22cbb4..7c4eab49ad7 100755 --- a/bin/pycbc_live +++ b/bin/pycbc_live @@ -821,7 +821,7 @@ def check_max_length(args, waveforms): parser = argparse.ArgumentParser(description=__doc__) pycbc.waveform.bank.add_approximant_arg(parser) -parser.add_argument('--verbose', action='store_true') +parser.add_argument('--verbose', action='count') parser.add_argument('--version', action='version', version=version.git_verbose_msg) parser.add_argument('--bank-file', required=True, help="Template bank file in XML or HDF format") From 854fde5e0e9d85f8403c29df01ac815c4187a32f Mon Sep 17 00:00:00 2001 From: Tito Dal Canton Date: Fri, 2 Aug 2024 19:03:29 +0200 Subject: [PATCH 20/35] PyCBC Live: fix errors from stat refresh code when not being used (#4836) * Live: do not fail if statistic refresh is not used * Better UI * Same fixes for singles * Improve help string --- bin/pycbc_live | 3 ++- pycbc/events/coinc.py | 32 ++++++++++++++++---------------- pycbc/events/single.py | 24 +++++++++++------------- 3 files changed, 29 insertions(+), 30 deletions(-) diff --git a/bin/pycbc_live b/bin/pycbc_live index 7c4eab49ad7..7b19eb6faef 100755 --- a/bin/pycbc_live +++ b/bin/pycbc_live @@ -1049,7 +1049,8 @@ parser.add_argument('--psd-variation', action='store_true', "the search. Required when using a single detector " "ranking statistic that includes psd variation.") parser.add_argument("--statistic-refresh-rate", type=float, - help="How often to refresh the statistic object") + help="How often to refresh the statistic object, " + "in seconds. 
If omitted, no refreshing is done.") scheme.insert_processing_option_group(parser) LiveSingle.insert_args(parser) diff --git a/pycbc/events/coinc.py b/pycbc/events/coinc.py index 2fcf4a48a30..ee729a97bcc 100644 --- a/pycbc/events/coinc.py +++ b/pycbc/events/coinc.py @@ -28,7 +28,6 @@ import numpy import logging import copy -from datetime import datetime as dt import time as timemod import threading @@ -317,7 +316,7 @@ def win(ifo1, ifo2): # tested against fixed and pivot are now present for testing with new # dependent ifos for ifo2 in ids: - logger.info('added ifo %s, testing against %s' % (ifo1, ifo2)) + logger.info('added ifo %s, testing against %s', ifo1, ifo2) w = win(ifo1, ifo2) left = time1.searchsorted(ctimes[ifo2] - w) right = time1.searchsorted(ctimes[ifo2] + w) @@ -887,7 +886,7 @@ class (in seconds), default not do do this **kwargs ) - self.time_stat_refreshed = dt.now() + self.time_stat_refreshed = timemod.time() self.stat_calculator_lock = threading.Lock() self.statistic_refresh_rate = statistic_refresh_rate @@ -1406,13 +1405,18 @@ def start_refresh_thread(self): """ Start a thread managing whether the stat_calculator will be updated """ + if self.statistic_refresh_rate is None: + logger.info( + "Statistic refresh disabled for %s", ppdets(self.ifos, "-") + ) + return thread = threading.Thread( target=self.refresh_statistic, - daemon=True + daemon=True, + name="Stat refresh " + ppdets(self.ifos, "-") ) logger.info( - "Starting %s statistic refresh thread", - ''.join(self.ifos), + "Starting %s statistic refresh thread", ppdets(self.ifos, "-") ) thread.start() @@ -1422,29 +1426,25 @@ def refresh_statistic(self): """ while True: # How long since the statistic was last updated? - since_stat_refresh = \ - (dt.now() - self.time_stat_refreshed).total_seconds() + since_stat_refresh = timemod.time() - self.time_stat_refreshed if since_stat_refresh > self.statistic_refresh_rate: - self.time_stat_refreshed = dt.now() + self.time_stat_refreshed = timemod.time() logger.info( "Checking %s statistic for updated files", - ''.join(self.ifos), + ppdets(self.ifos, "-"), ) with self.stat_calculator_lock: self.stat_calculator.check_update_files() # Sleep one second for safety timemod.sleep(1) # Now include the time it took the check / update the statistic - since_stat_refresh = \ - (dt.now() - self.time_stat_refreshed).total_seconds() + since_stat_refresh = timemod.time() - self.time_stat_refreshed logger.debug( "%s statistic: Waiting %.3fs for next refresh", - ''.join(self.ifos), + ppdets(self.ifos, "-"), self.statistic_refresh_rate - since_stat_refresh, ) - timemod.sleep( - self.statistic_refresh_rate - since_stat_refresh + 1 - ) + timemod.sleep(self.statistic_refresh_rate - since_stat_refresh + 1) __all__ = [ diff --git a/pycbc/events/single.py b/pycbc/events/single.py index e62b3d8379d..db22fd08712 100644 --- a/pycbc/events/single.py +++ b/pycbc/events/single.py @@ -3,7 +3,6 @@ import logging import copy import threading -from datetime import datetime as dt import time import numpy as np @@ -76,7 +75,7 @@ class (in seconds), default not do do this self.fixed_ifar = fixed_ifar self.maximum_ifar = maximum_ifar - self.time_stat_refreshed = dt.now() + self.time_stat_refreshed = time.time() self.stat_calculator_lock = threading.Lock() self.statistic_refresh_rate = statistic_refresh_rate @@ -360,9 +359,13 @@ def start_refresh_thread(self): """ Start a thread managing whether the stat_calculator will be updated """ + if self.statistic_refresh_rate is None: + logger.info("Statistic refresh disabled for 
%s", self.ifo) + return thread = threading.Thread( target=self.refresh_statistic, - daemon=True + daemon=True, + name="Stat refresh " + self.ifo ) logger.info("Starting %s statistic refresh thread", self.ifo) thread.start() @@ -373,26 +376,21 @@ def refresh_statistic(self): """ while True: # How long since the statistic was last updated? - since_stat_refresh = \ - (dt.now() - self.time_stat_refreshed).total_seconds() + since_stat_refresh = time.time() - self.time_stat_refreshed if since_stat_refresh > self.statistic_refresh_rate: - self.time_stat_refreshed = dt.now() + self.time_stat_refreshed = time.time() logger.info( - "Checking %s statistic for updated files", - self.ifo, + "Checking %s statistic for updated files", self.ifo ) with self.stat_calculator_lock: self.stat_calculator.check_update_files() # Sleep one second for safety time.sleep(1) # Now use the time it took the check / update the statistic - since_stat_refresh = \ - (dt.now() - self.time_stat_refreshed).total_seconds() + since_stat_refresh = time.time() - self.time_stat_refreshed logger.debug( "%s statistic: Waiting %.3fs for next refresh", self.ifo, self.statistic_refresh_rate - since_stat_refresh ) - time.sleep( - self.statistic_refresh_rate - since_stat_refresh - ) + time.sleep(self.statistic_refresh_rate - since_stat_refresh) From c0b798e4acffc06aa7a9e1138cc94be9c04a4079 Mon Sep 17 00:00:00 2001 From: Tito Dal Canton Date: Sat, 3 Aug 2024 23:22:51 +0200 Subject: [PATCH 21/35] Fix subtle bug when using `LIGOTimeGPS` with `TimeSeries.at_time()` (#4838) * Fix subtle bug when using `LIGOTimeGPS` with `TimeSeries.at_time()` * typos * Tom's comment & expand unittest * comment --------- Co-authored-by: Thomas Dent --- pycbc/types/timeseries.py | 16 +++++++++------- test/test_timeseries.py | 13 +++++++++---- 2 files changed, 18 insertions(+), 11 deletions(-) diff --git a/pycbc/types/timeseries.py b/pycbc/types/timeseries.py index d3d378bf0e2..3405d8d0942 100644 --- a/pycbc/types/timeseries.py +++ b/pycbc/types/timeseries.py @@ -243,10 +243,13 @@ def get_sample_times(self): def at_time(self, time, nearest_sample=False, interpolate=None, extrapolate=None): - """ Return the value at the specified gps time + """Return the value of the TimeSeries at the specified GPS time. Parameters ---------- + time: scalar or array-like + GPS time at which the value is wanted. Note that LIGOTimeGPS + objects count as scalar. nearest_sample: bool Return the sample at the time nearest to the chosen time rather than rounded down. @@ -254,7 +257,7 @@ def at_time(self, time, nearest_sample=False, Return the interpolated value of the time series. Choices are simple linear or quadratic interpolation. extrapolate: str or float, None - Value to return if time is outsidde the range of the vector or + Value to return if time is outside the range of the vector or method of extrapolating the value. 
""" if nearest_sample: @@ -278,9 +281,9 @@ def at_time(self, time, nearest_sample=False, keep_idx = _numpy.where(left & right)[0] vtime = vtime[keep_idx] else: - raise ValueError("Unsuported extrapolate: %s" % extrapolate) + raise ValueError(f"Unsupported extrapolate: {extrapolate}") - fi = (vtime - float(self.start_time))*self.sample_rate + fi = (vtime - float(self.start_time)) * self.sample_rate i = _numpy.asarray(_numpy.floor(fi)).astype(int) di = fi - i @@ -305,10 +308,9 @@ def at_time(self, time, nearest_sample=False, ans[keep_idx] = old ans = _numpy.array(ans, ndmin=1) - if _numpy.isscalar(time): + if _numpy.ndim(time) == 0: return ans[0] - else: - return ans + return ans at_times = at_time diff --git a/test/test_timeseries.py b/test/test_timeseries.py index cfc9488e8b5..82445f3ab90 100644 --- a/test/test_timeseries.py +++ b/test/test_timeseries.py @@ -49,7 +49,7 @@ from numpy import ndarray as CPUArray -class TestTimeSeriesBase(array_base,unittest.TestCase): +class TestTimeSeriesBase(array_base, unittest.TestCase): __test__ = False def setUp(self): self.scheme = _scheme @@ -481,10 +481,10 @@ def test_at_time(self): a = TimeSeries([0, 1, 2, 3, 4, 5, 6, 7], delta_t=1.0) self.assertAlmostEqual(a.at_time(0.5), 0.0) - self.assertAlmostEqual(a.at_time(0.6, nearest_sample=True), 1.0) + self.assertAlmostEqual(a.at_time(0.6, nearest_sample=True), 1.0) self.assertAlmostEqual(a.at_time(0.5, interpolate='linear'), 0.5) - self.assertAlmostEqual(a.at_time([2.5], - interpolate='quadratic'), 2.5) + self.assertAlmostEqual(a.at_time([2.5], interpolate='quadratic'), 2.5) + self.assertAlmostEqual(a.at_time(lal.LIGOTimeGPS(2.1)), 2.0) i = numpy.array([-0.2, 0.5, 1.5, 7.0]) @@ -504,6 +504,11 @@ def test_at_time(self): n = numpy.array([0, 0.0, 1.5, 0.0]) self.assertAlmostEqual((x-n).sum(), 0) + # Check that the output corresponds to input being scalar/array. + self.assertEqual(numpy.ndim(a.at_time(0.5)), 0) + self.assertEqual(numpy.ndim(a.at_time(lal.LIGOTimeGPS(2.1))), 0) + self.assertEqual(numpy.ndim(a.at_time(i)), 1) + def test_inject(self): a = TimeSeries(numpy.zeros(2**20, dtype=numpy.float32), delta_t=1.0) From 1c6ad7e0a11b2d764257f9bd2a8a545750030c56 Mon Sep 17 00:00:00 2001 From: Gareth S Cabourn Davies Date: Mon, 5 Aug 2024 10:02:54 +0100 Subject: [PATCH 22/35] pycbc_test_suite to be able to run from anywhere within the git repo (#4774) * make it so that pycbc_test_suite will run from anywhere within the git repo * Stop using git, change directory to the base in the script --- tools/pycbc_test_suite.sh | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/tools/pycbc_test_suite.sh b/tools/pycbc_test_suite.sh index 56dcdcc30a8..75cc706a49c 100755 --- a/tools/pycbc_test_suite.sh +++ b/tools/pycbc_test_suite.sh @@ -7,6 +7,11 @@ echo -e "\\n>> [`date`] Python Major Version:" $PYTHON_VERSION PYTHON_MINOR_VERSION=`python -c 'import sys; print(sys.version_info.minor)'` echo -e "\\n>> [`date`] Python Minor Version:" $PYTHON_MINOR_VERSION +# This will work from anywhere within the pycbc directory +this_script_dir=`dirname -- "$( readlink -f -- "$0"; )"` +cd $this_script_dir +cd .. 
+ LOG_FILE=$(mktemp -t pycbc-test-log.XXXXXXXXXX) RESULT=0 @@ -29,7 +34,8 @@ function test_result { if [ "$PYCBC_TEST_TYPE" = "unittest" ] || [ -z ${PYCBC_TEST_TYPE+x} ]; then for prog in `find test -name '*.py' -print | egrep -v '(long|lalsim|test_waveform)'` do - echo -e ">> [`date`] running unit test for $prog" + prog_short=`echo $prog | rev | cut -d"/" -f1 | rev` + echo -e ">> [`date`] running unit test for $prog_short" python $prog &> $LOG_FILE test_result done From f2a1e1f9bd07388278ae11a8ecff7cc02f8fc5d7 Mon Sep 17 00:00:00 2001 From: Tito Dal Canton Date: Mon, 5 Aug 2024 16:50:41 +0200 Subject: [PATCH 23/35] Use more f-strings in `pycbc_live` (#4843) --- bin/pycbc_live | 89 ++++++++++++++++++++++++++++---------------------- 1 file changed, 50 insertions(+), 39 deletions(-) diff --git a/bin/pycbc_live b/bin/pycbc_live index 7b19eb6faef..be649e6ae0e 100755 --- a/bin/pycbc_live +++ b/bin/pycbc_live @@ -330,7 +330,7 @@ class LiveEventManager(object): * self.bank.sample_rate) flen = int(tlen / 2 + 1) delta_f = self.bank.sample_rate / float(tlen) - cmd = 'timeout {} '.format(args.snr_opt_timeout) + cmd = f'timeout {args.snr_opt_timeout} ' exepath = which('pycbc_optimize_snr') cmd += exepath + ' ' @@ -342,17 +342,16 @@ class LiveEventManager(object): data_fils_str = '--data-files ' psd_fils_str = '--psd-files ' for ifo in live_ifos: - curr_fname = \ - fname.replace('.xml.gz', - '_{}_data_overwhitened.hdf'.format(ifo)) + curr_fname = fname.replace( + '.xml.gz', f'_{ifo}_data_overwhitened.hdf' + ) curr_data = self.data_readers[ifo].overwhitened_data(delta_f) curr_data.save(curr_fname) - data_fils_str += '{}:{} ' .format(ifo, curr_fname) - curr_fname = fname.replace('.xml.gz', - '_{}_psd.hdf'.format(ifo)) + data_fils_str += f'{ifo}:{curr_fname} ' + curr_fname = fname.replace('.xml.gz', f'_{ifo}_psd.hdf') curr_psd = curr_data.psd curr_psd.save(curr_fname) - psd_fils_str += '{}:{} ' .format(ifo, curr_fname) + psd_fils_str += f'{ifo}:{curr_fname} ' cmd += data_fils_str cmd += psd_fils_str @@ -386,10 +385,10 @@ class LiveEventManager(object): 'mc_area_args/', self.mc_area_args) - cmd += '--params-file {} '.format(curr_fname) - cmd += '--approximant {} '.format(apr) - cmd += '--gracedb-server {} '.format(self.gracedb_server) - cmd += '--gracedb-search {} '.format(self.gracedb_search) + cmd += f'--params-file {curr_fname} ' + cmd += f'--approximant {apr} ' + cmd += f'--gracedb-server {self.gracedb_server} ' + cmd += f'--gracedb-search {self.gracedb_search} ' labels = self.snr_opt_label labels += ' '.join(self.gracedb_labels or []) @@ -408,7 +407,7 @@ class LiveEventManager(object): cmd += '--enable-gracedb-upload ' if self.fu_cores: - cmd += '--cores {} '.format(self.fu_cores) + cmd += f'--cores {self.fu_cores} ' if args.processing_scheme: # we will use the cores for multiple workers of the @@ -420,11 +419,11 @@ class LiveEventManager(object): # unlikely to benefit from a processing scheme with more # than 1 thread anyway. 
opt_scheme = args.processing_scheme.split(':')[0] - cmd += '--processing-scheme {}:1 '.format(opt_scheme) + cmd += f'--processing-scheme {opt_scheme}:1 ' # Save the command which would be used: snroc_fname = os.path.join(out_dir_path, 'snr_optimize_command.txt') - with open(snroc_fname,'w') as snroc_file: + with open(snroc_fname, 'w') as snroc_file: snroc_file.write(cmd) return cmd, out_dir_path @@ -490,10 +489,10 @@ class LiveEventManager(object): if optimize_snr_checks: logging.info('Optimizing SNR for event above threshold ..') self.run_optimize_snr( - cmd, - out_dir_path, - fname.replace('.xml.gz', '_attributes.hdf'), - gid + cmd, + out_dir_path, + fname.replace('.xml.gz', '_attributes.hdf'), + gid ) def check_coincs(self, ifos, coinc_results, psds): @@ -572,8 +571,7 @@ class LiveEventManager(object): # prevent singles being uploaded as well for coinc events self.last_few_coincs_uploaded.append(event.merger_time) # Only need to keep a few (10) events - self.last_few_coincs_uploaded = \ - self.last_few_coincs_uploaded[-10:] + self.last_few_coincs_uploaded = self.last_few_coincs_uploaded[-10:] # Save the event if not upload_checks: @@ -773,8 +771,7 @@ class LiveEventManager(object): for ifo in results: for k in results[ifo]: - f['%s/%s' % (ifo, k)] = \ - h5py_unicode_workaround(results[ifo][k]) + f[f'{ifo}/{k}'] = h5py_unicode_workaround(results[ifo][k]) for key in raw_results: f[key] = h5py_unicode_workaround(raw_results[key]) @@ -797,12 +794,11 @@ class LiveEventManager(object): gate_dtype = [('center_time', float), ('zero_half_width', float), ('taper_width', float)] - f['{}/gates'.format(ifo)] = \ - numpy.array(gates[ifo], dtype=gate_dtype) + f[f'{ifo}/gates'] = numpy.array(gates[ifo], dtype=gate_dtype) for ifo in (store_psd or {}): if store_psd[ifo] is not None: - store_psd[ifo].save(fname, group='%s/psd' % ifo) + store_psd[ifo].save(fname, group=f'{ifo}/psd') def check_max_length(args, waveforms): @@ -1072,9 +1068,14 @@ if not args.enable_gracedb_upload and args.enable_single_detector_upload: parser.error('You are not allowed to enable single ifo upload without the ' '--enable-gracedb-upload option!') -log_format = '%(asctime)s {} {} %(message)s'.format(platform.node(), - mpi.COMM_WORLD.Get_rank()) -pycbc.init_logging(args.verbose, format=log_format) +# Configure the log messages so that they are prefixed by the timestamp, the +# hostname of the originating node and the MPI rank of the originating process +pycbc.init_logging( + args.verbose, + format='%(asctime)s {} {} %(message)s'.format( + platform.node(), mpi.COMM_WORLD.Get_rank() + ) +) ctx = scheme.from_cli(args) fft.from_cli(args) @@ -1084,8 +1085,13 @@ valid_pad = args.analysis_chunk total_pad = args.trim_padding * 2 + valid_pad lfc = None if args.enable_bank_start_frequency else args.low_frequency_cutoff bank = waveform.LiveFilterBank( - args.bank_file, args.sample_rate, total_pad, low_frequency_cutoff=lfc, - approximant=args.approximant, increment=args.increment) + args.bank_file, + args.sample_rate, + total_pad, + low_frequency_cutoff=lfc, + approximant=args.approximant, + increment=args.increment +) if bank.min_f_lower < args.low_frequency_cutoff: parser.error('--low-frequency-cutoff ({} Hz) must not be larger than the ' 'minimum f_lower across all templates ' @@ -1138,12 +1144,16 @@ with ctx: bank.table.sort(order='mchirp') waveforms = list(bank[evnt.rank-1::evnt.size-1]) check_max_length(args, waveforms) - mf = LiveBatchMatchedFilter(waveforms, args.snr_threshold, - args.chisq_bins, sg_chisq, - 
snr_abort_threshold=args.snr_abort_threshold, - newsnr_threshold=args.newsnr_threshold, - max_triggers_in_batch=args.max_triggers_in_batch, - maxelements=args.max_batch_size) + mf = LiveBatchMatchedFilter( + waveforms, + args.snr_threshold, + args.chisq_bins, + sg_chisq, + snr_abort_threshold=args.snr_abort_threshold, + newsnr_threshold=args.newsnr_threshold, + max_triggers_in_batch=args.max_triggers_in_batch, + maxelements=args.max_batch_size + ) # Synchronize start time if not provided on the command line if not args.start_time: @@ -1188,8 +1198,9 @@ with ctx: global my_coinc_id my_coinc_id = i c = estimators[my_coinc_id] - setproctitle('PyCBC Live {} bg estimator'.format( - ppdets(c.ifos, '-'))) + setproctitle( + 'PyCBC Live {} bg estimator'.format(ppdets(c.ifos, '-')) + ) def estimator_refresh_threads(_): c = estimators[my_coinc_id] From e889ae1b28f1e4658a8640f67364d4504b51f081 Mon Sep 17 00:00:00 2001 From: Gareth S Cabourn Davies Date: Tue, 6 Aug 2024 11:25:42 +0100 Subject: [PATCH 24/35] Prep for release (#4835) --- setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index 33bd8044209..0beb9aa7a0b 100755 --- a/setup.py +++ b/setup.py @@ -95,8 +95,8 @@ def __getattr__(self, attr): vinfo = _version_helper.generate_git_version_info() except: vinfo = vdummy() - vinfo.version = '2.5.dev1' - vinfo.release = 'False' + vinfo.version = '2.5.1' + vinfo.release = 'True' version_script = f"""# coding: utf-8 # Generated by setup.py for PyCBC on {vinfo.build_date}. From f0da4e949bf75042229d17eda53e1ffb0096bb37 Mon Sep 17 00:00:00 2001 From: Gareth S Cabourn Davies Date: Tue, 6 Aug 2024 12:57:19 +0100 Subject: [PATCH 25/35] Set back to9 v2.5.dev2 development (#4844) --- setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index 0beb9aa7a0b..652544bbd0e 100755 --- a/setup.py +++ b/setup.py @@ -95,8 +95,8 @@ def __getattr__(self, attr): vinfo = _version_helper.generate_git_version_info() except: vinfo = vdummy() - vinfo.version = '2.5.1' - vinfo.release = 'True' + vinfo.version = '2.5.dev2' + vinfo.release = 'False' version_script = f"""# coding: utf-8 # Generated by setup.py for PyCBC on {vinfo.build_date}. 
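
The two release patches above (24/35 and 25/35) only flip the hard-coded fallback strings in setup.py; the surrounding logic is a try/except that prefers real git metadata and only falls back to a dummy object when it is unavailable. A minimal, self-contained sketch of that pattern follows — the `vdummy` behaviour and the `version`/`release` strings mirror the diff above, while `load_git_version_info` is a hypothetical stand-in for the git-metadata helper and is assumed here to fail so the fallback branch is exercised.

    # Sketch only: the fallback-versioning pattern the two release patches toggle.
    class vdummy:
        def __getattr__(self, attr):
            # Any attribute that was never set explicitly falls back to ''.
            return ''

    def load_git_version_info():
        # Hypothetical stand-in for the real git-metadata helper; assume it may fail.
        raise RuntimeError("no git metadata available")

    try:
        vinfo = load_git_version_info()
    except Exception:
        vinfo = vdummy()
        vinfo.version = '2.5.dev2'   # '2.5.1' / 'True' while the release patch is in effect
        vinfo.release = 'False'

    print(vinfo.version, vinfo.release, vinfo.build_date)  # build_date falls back to ''

When the real helper succeeds its result is used unchanged, which is why only the except-branch values differ between the two patches.
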
From 69929d61a91bbc522eeca2329a91c94d66d76526 Mon Sep 17 00:00:00 2001 From: Gareth S Cabourn Davies Date: Tue, 6 Aug 2024 15:29:50 +0100 Subject: [PATCH 26/35] Fix small typo in pycbc_live_supervise_collated_trigger_fits (#4846) * Fix small typo in pycbc_live_supervise_collated_trigger_fits * Update bin/live/pycbc_live_supervise_collated_trigger_fits --- bin/live/pycbc_live_supervise_collated_trigger_fits | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bin/live/pycbc_live_supervise_collated_trigger_fits b/bin/live/pycbc_live_supervise_collated_trigger_fits index 3206f549c44..71398621405 100755 --- a/bin/live/pycbc_live_supervise_collated_trigger_fits +++ b/bin/live/pycbc_live_supervise_collated_trigger_fits @@ -569,7 +569,7 @@ def get_yesterday_date(): day_dt = datetime.utcnow() - timedelta(days=1) day_dt = datetime.combine(day_dt, datetime.min.time()) day_str = day_dt.strftime('%Y_%m_%d') - return date_dt, date_str + return day_dt, day_str parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) From 144f1c36181e564e0b2d40cbd1c075bf9b0852bb Mon Sep 17 00:00:00 2001 From: Gareth S Cabourn Davies Date: Wed, 7 Aug 2024 13:58:18 +0100 Subject: [PATCH 27/35] Various improvements to minifollowup tables (#4839) * Use extra spij parameters in inj info * Use statistic in coincinfo as well, allow tables to run over multiple lines, provide row names * sngl-ranking first, warn if ranking-xstatistic is supplied * Dont fail if (un-necessary) statistic files are not given * some tidying * specify sngl-ranking for coincinfo * try to fix CC issues * Fix minor bug, allow scrollable tables iin results pages * Dont duplicate info in the case that no DQ summary links are used --- bin/minifollowups/pycbc_page_coincinfo | 56 ++++++++++--- bin/minifollowups/pycbc_page_injinfo | 42 ++++++---- bin/minifollowups/pycbc_page_snglinfo | 34 ++++---- examples/search/plotting.ini | 2 + pycbc/results/dq.py | 6 +- pycbc/results/static/css/pycbc/orange.css | 14 ++++ pycbc/results/table_utils.py | 96 +++++++++++++++++------ 7 files changed, 183 insertions(+), 67 deletions(-) diff --git a/bin/minifollowups/pycbc_page_coincinfo b/bin/minifollowups/pycbc_page_coincinfo index 10f08bc2a01..b2dc9e363b7 100644 --- a/bin/minifollowups/pycbc_page_coincinfo +++ b/bin/minifollowups/pycbc_page_coincinfo @@ -30,7 +30,7 @@ from pycbc import add_common_pycbc_options import pycbc.results import pycbc.pnutils from pycbc.io.hdf import HFile -from pycbc.events import ranking +from pycbc.events import ranking, stat as pystat from pycbc.results import followup parser = argparse.ArgumentParser() @@ -69,11 +69,22 @@ parser.add_argument('--include-summary-page-link', action='store_true', parser.add_argument('--include-gracedb-link', action='store_true', help="If given, will provide a link to search GraceDB for events " "within a 3s window around the coincidence time.") - +parser.add_argument('--max-columns', type=int, + help="Maximum number of columns allowed in the table (not including detector names)") +pystat.insert_statistic_option_group(parser, + default_ranking_statistic='single_ranking_only') args = parser.parse_args() pycbc.init_logging(args.verbose) +if args.ranking_statistic not in ['quadsum', 'single_ranking_only']: + logging.warning( + "For the coincident info table, we only use single ranking, not %s, " + "this option will be ignored", + args.ranking_statistic + ) + args.ranking_statistic = 'quadsum' + # Get the nth loudest trigger from the output of pycbc_coinc_statmap f = 
HFile(args.statmap_file, 'r') d = f[args.statmap_file_subspace_name] @@ -146,12 +157,16 @@ statmapfile = d # table. Each entry in data corresponds to each row in the final table and # should be a list of values. So data is will be a list of lists. data = [] +row_labels = [] +rank_method = pystat.get_statistic_from_opts(args, list(files.keys())) + for ifo in files.keys(): # ignore ifo if coinc didn't participate (only for multi-ifo workflow) if (statmapfile['%s/time' % ifo][n] == -1.0): continue + row_labels.append(ifo) d = files[ifo] i = idx[ifo] tid = d['template_id'][i] @@ -161,7 +176,12 @@ for ifo in files.keys(): time = d['end_time'][i] utc = lal.GPSToUTC(int(time))[0:6] - + trig_dict = { + k: numpy.array([d[k][i]]) + for k in d.keys() + if not k.endswith('_template') + and k not in ['gating', 'search', 'template_boundaries'] + } # Headers will store the headers that will appear in the table. headers = [] data.append([]) @@ -170,9 +190,6 @@ for ifo in files.keys(): if args.include_summary_page_link: data[-1].append(pycbc.results.dq.get_summary_page_link(ifo, utc)) headers.append("Detector status") - else: - data[-1].append(ifo) - headers.append("Ifo") # End times data[-1].append(str(datetime.datetime(*utc))) @@ -180,14 +197,28 @@ for ifo in files.keys(): headers.append("UTC End Time") headers.append("GPS End time") + #headers.append("Stat") + # Determine statistic naming + if args.sngl_ranking == "newsnr": + sngl_stat_name = "Reweighted SNR" + elif args.sngl_ranking == "newsnr_sgveto": + sngl_stat_name = "Reweighted SNR (+sgveto)" + elif args.sngl_ranking == "newsnr_sgveto_psdvar": + sngl_stat_name = "Reweighted SNR (+sgveto+psdvar)" + elif args.sngl_ranking == "snr": + sngl_stat_name = "SNR" + else: + sngl_stat_name = args.sngl_ranking + + stat = rank_method.get_sngl_ranking(trig_dict) + headers.append(sngl_stat_name) + data[-1].append('%5.2f' % stat[0]) + # SNR and phase (not showing any single-det stat here) data[-1].append('%5.2f' % d['snr'][i]) data[-1].append('%5.2f' % d['coa_phase'][i]) - #data[-1].append('%5.2f' % ranking.newsnr(d['snr'][i], rchisq)) headers.append("ρ") headers.append("Phase") - #headers.append("Stat") - # Signal-glitch discrimators data[-1].append('%5.2f' % rchisq) data[-1].append('%i' % d['chisq_dof'][i]) @@ -218,7 +249,12 @@ for ifo in files.keys(): headers.append("s2z") headers.append("Duration") -html += str(pycbc.results.static_table(data, headers)) +html += str(pycbc.results.static_table( + data, + headers, + columns_max=args.max_columns, + row_labels=row_labels +)) ############################################################################### pycbc.results.save_fig_with_metadata(html, args.output_file, {}, diff --git a/bin/minifollowups/pycbc_page_injinfo b/bin/minifollowups/pycbc_page_injinfo index a09292ecc6e..e4bf1cfe8e8 100644 --- a/bin/minifollowups/pycbc_page_injinfo +++ b/bin/minifollowups/pycbc_page_injinfo @@ -20,6 +20,7 @@ import sys import numpy import pycbc.results +from pycbc import conversions as conv import pycbc.pnutils from pycbc import init_logging, add_common_pycbc_options from pycbc.detector import Detector @@ -34,6 +35,8 @@ parser.add_argument('--injection-index', type=int, required=True, help="The index of the injection to print out. 
Required") parser.add_argument('--n-nearest', type=int, help="Optional, used in the title") +parser.add_argument('--max-columns', type=int, + help="Optional, maximum number of columns used for the table") args = parser.parse_args() @@ -65,13 +68,24 @@ labels = { 'spin1z': 's1z', 'spin2x': 's2x', 'spin2y': 's2y', - 'spin2z': 's2z' + 'spin2z': 's2z', + 'chieff': 'χeff', + 'chip': 'χp', } params += ['tc'] m1, m2 = f['injections']['mass1'][iidx], f['injections']['mass2'][iidx] -mchirp, eta = pycbc.pnutils.mass1_mass2_to_mchirp_eta(m1, m2) +s1x, s2x = f['injections']['spin1x'][iidx], f['injections']['spin2x'][iidx] +s1y, s2y = f['injections']['spin1y'][iidx], f['injections']['spin2y'][iidx] +s1z, s2z = f['injections']['spin1z'][iidx], f['injections']['spin2z'][iidx] + +derived = {} +derived['mchirp'], derived['eta'] = \ + pycbc.pnutils.mass1_mass2_to_mchirp_eta(m1, m2) +derived['mtotal'] = conv.mtotal_from_mass1_mass2(m1, m2) +derived['chieff'] = conv.chi_eff(m1, m2, s1z, s2z) +derived['chip'] = conv.chi_p(m1, m2, s1x, s1y, s2x, s2y) if 'optimal_snr' in ' '.join(list(f['injections'].keys())): ifolist = f.attrs['ifos'].split(' ') @@ -82,32 +96,30 @@ else: eff_dist = {} for ifo in ['H1', 'L1', 'V1']: eff_dist[ifo] = Detector(ifo).effective_distance( - f['injections/distance'][iidx], - f['injections/ra'][iidx], - f['injections/dec'][iidx], - f['injections/polarization'][iidx], - f['injections/tc'][iidx], - f['injections/inclination'][iidx]) - + f['injections/distance'][iidx], + f['injections/ra'][iidx], + f['injections/dec'][iidx], + f['injections/polarization'][iidx], + f['injections/tc'][iidx], + f['injections/inclination'][iidx] + ) params += ['dec_chirp_dist', 'eff_dist_h', 'eff_dist_l', 'eff_dist_v'] dec_dist = max(eff_dist['H1'], eff_dist['L1']) dec_chirp_dist = pycbc.pnutils.chirp_distance(dec_dist, mchirp) params += ['mass1', 'mass2', 'mchirp', 'eta', 'ra', 'dec', 'inclination', 'spin1x', 'spin1y', 'spin1z', 'spin2x', 'spin2y', - 'spin2z'] + 'spin2z', 'chieff', 'chip'] for p in params: if p in f['injections']: data += ["%.2f" % f['injections'][p][iidx]] + elif p in derived.keys(): + data += [f'{derived[p]:.2f}'] elif 'eff_dist' in p: ifo = '%s1' % p.split('_')[-1] data += ["%.2f" % eff_dist[ifo.upper()]] - elif p == 'mchirp': - data += ["%.2f" % mchirp] - elif p == 'eta': - data += ["%.2f" % eta] elif p == 'dec_chirp_dist': data += ["%.2f" % dec_chirp_dist] else: @@ -117,7 +129,7 @@ for p in params: headers += [labels[p]] table = numpy.array([data], dtype=str) -html = str(pycbc.results.static_table(table, headers)) +html = str(pycbc.results.static_table(table, headers, columns_max=args.max_columns)) tag = '' if args.n_nearest is not None: diff --git a/bin/minifollowups/pycbc_page_snglinfo b/bin/minifollowups/pycbc_page_snglinfo index 99f3a1629d2..2ce04edbcc1 100644 --- a/bin/minifollowups/pycbc_page_snglinfo +++ b/bin/minifollowups/pycbc_page_snglinfo @@ -68,6 +68,8 @@ parser.add_argument('--include-gracedb-link', action='store_true', parser.add_argument('--significance-file', help="If given, will search for this trigger's id in the file to see if " "stat and p_astro values exists for this trigger.") +parser.add_argument('--max-columns', type=int, + help="Optional. 
Set a maximum number of columns to be used in the output table") pystat.insert_statistic_option_group(parser, default_ranking_statistic='single_ranking_only') @@ -117,7 +119,7 @@ if args.n_loudest is not None: sngl_file.apply_mask(l[0]) # make a table for the single detector information ############################ -time = sngl_file.end_time +time = sngl_file.end_time[0] utc = lal.GPSToUTC(int(time))[0:6] # Headers here will contain the list of headers that will appear in the @@ -129,6 +131,8 @@ headers = [] # single list that will hold the values to go into the table. data = [[]] +row_labels = [args.instrument] + # DQ summary link if args.include_summary_page_link: data[0].append(pycbc.results.dq.get_summary_page_link(args.instrument, utc)) @@ -141,11 +145,10 @@ headers.append("UTC") headers.append("End time") # SNR and statistic -data[0].append('%5.2f' % sngl_file.snr) -data[0].append('%5.2f' % sngl_file.get_column('coa_phase')) -data[0].append('%5.2f' % stat) headers.append("ρ") +data[0].append('%5.2f' % sngl_file.snr[0]) headers.append("Phase") +data[0].append('%5.2f' % sngl_file.get_column('coa_phase')[0]) # Determine statistic naming if args.sngl_ranking == "newsnr": sngl_stat_name = "Reweighted SNR" @@ -169,30 +172,31 @@ else: ) headers.append(stat_name) +data[0].append('%5.2f' % stat[0]) # Signal-glitch discrimators -data[0].append('%5.2f' % sngl_file.rchisq) -data[0].append('%i' % sngl_file.get_column('chisq_dof')) +data[0].append('%5.2f' % sngl_file.rchisq[0]) +data[0].append('%i' % sngl_file.get_column('chisq_dof')[0]) headers.append("χ2r") headers.append("χ2 bins") try: - data[0].append('%5.2f' % sngl_file.sgchisq) + data[0].append('%5.2f' % sngl_file.sgchisq[0]) headers.append("sgχ2") except: pass try: - data[0].append('%5.2f' % sngl_file.psd_var_val) + data[0].append('%5.2f' % sngl_file.psd_var_val[0]) headers.append("PSD var") except: pass # Template parameters -data[0].append('%5.2f' % sngl_file.mass1) -data[0].append('%5.2f' % sngl_file.mass2) -data[0].append('%5.2f' % sngl_file.mchirp) -data[0].append('%5.2f' % sngl_file.spin1z) -data[0].append('%5.2f' % sngl_file.spin2z) -data[0].append('%5.2f' % sngl_file.template_duration) +data[0].append('%5.2f' % sngl_file.mass1[0]) +data[0].append('%5.2f' % sngl_file.mass2[0]) +data[0].append('%5.2f' % sngl_file.mchirp[0]) +data[0].append('%5.2f' % sngl_file.spin1z[0]) +data[0].append('%5.2f' % sngl_file.spin2z[0]) +data[0].append('%5.2f' % sngl_file.template_duration[0]) headers.append("m1") headers.append("m2") headers.append("Mc") @@ -221,7 +225,7 @@ if args.include_gracedb_link: data[0].append(gdb_search_link) html = pycbc.results.dq.redirect_javascript + \ - str(pycbc.results.static_table(data, headers)) + str(pycbc.results.static_table(data, headers, row_labels=row_labels, columns_max=args.max_columns)) ############################################################################### # Set up default titles and the captions for the file diff --git a/examples/search/plotting.ini b/examples/search/plotting.ini index 0b1ab2a2cbe..5a7d4f55837 100644 --- a/examples/search/plotting.ini +++ b/examples/search/plotting.ini @@ -61,6 +61,8 @@ window = 0.1 [html_snippet] [page_coincinfo] +sngl-ranking = newsnr_sgveto_psdvar + [page_coincinfo-background] statmap-file-subspace-name=background_exc diff --git a/pycbc/results/dq.py b/pycbc/results/dq.py index ce3aeb43b8f..7be7bb1d4ff 100644 --- a/pycbc/results/dq.py +++ b/pycbc/results/dq.py @@ -22,8 +22,7 @@ """ -data_h1_string = """H1 -  +data_h1_string = """ Summary   @@ -31,8 +30,7 @@ 
'https://alog.ligo-wa.caltech.edu/aLOG/includes/search.php?adminType=search'); return true;">aLOG""" -data_l1_string="""L1 -  +data_l1_string=""" Summary   diff --git a/pycbc/results/static/css/pycbc/orange.css b/pycbc/results/static/css/pycbc/orange.css index 8b2b44b0aea..1674f8ae1c2 100644 --- a/pycbc/results/static/css/pycbc/orange.css +++ b/pycbc/results/static/css/pycbc/orange.css @@ -92,3 +92,17 @@ font-size:16px; a { color:#000000; } + +table { + display: block; + overflow-x: auto; + white-space: nowrap; +} + +td { + text-align: center; +} + +th { + text-align: center; +} diff --git a/pycbc/results/table_utils.py b/pycbc/results/table_utils.py index dfdaa7297d2..aec7c62ffdb 100644 --- a/pycbc/results/table_utils.py +++ b/pycbc/results/table_utils.py @@ -23,7 +23,10 @@ # """ This module provides functions to generate sortable html tables """ -import mako.template, uuid +import mako.template +import uuid +import copy +import numpy google_table_template = mako.template.Template(""" @@ -103,42 +106,89 @@ def html_table(columns, names, page_size=None, format_strings=None): static_table_template = mako.template.Template(""" - % if titles is not None: - - % for i in range(len(titles)): - - % endfor - - % endif - - % for i in range(len(data)): - - % for j in range(len(data[i])): - + % for row in range(n_rows): + % if titles is not None: + + % if row_labels is not None: + + % endif + % for i in range(n_columns): + + % endfor + + % endif + + % for i in range(len(data)): + + % if row_labels is not None: + + % endif + % for j in range(n_columns): + + % endfor + % endfor - % endfor
- ${titles[i]} -
- ${data[i][j]} -
+ + ${titles[row * n_columns + i]} +
+ ${row_labels[i]} + + ${data[i][row * n_columns + j]} +
""") -def static_table(data, titles=None): - """ Return an html tableo of this data +def static_table(data, titles=None, columns_max=None, row_labels=None): + """ Return an html table of this data Parameters ---------- - data : two-dimensional numpy string array + data : two-dimensional string array Array containing the cell values titles : numpy array - Vector str of titles + Vector str of titles, must be the same length as data + columns_max : integer or None + If given, will restrict the number of columns in the table + row_labels : list of strings + Optional list of row labels to be given as the first cell in + each data row. Does not count towards columns_max Returns ------- html_table : str A string containing the html table. """ - return static_table_template.render(data=data, titles=titles) + data = copy.deepcopy(data) + titles = copy.deepcopy(titles) + row_labels = copy.deepcopy(row_labels) + drows, dcols = numpy.array(data).shape + if titles is not None and not len(titles) == dcols: + raise ValueError("titles and data lengths do not match") + + if row_labels is not None and not len(row_labels) == drows: + raise ValueError( + "row_labels must be the same number of rows supplied to data" + ) + + if columns_max is not None: + n_rows = int(numpy.ceil(len(data[0]) / columns_max)) + n_columns = min(columns_max, len(data[0])) + if len(data[0]) < n_rows * n_columns: + # Pad the data and titles with empty strings + n_missing = int(n_rows * n_columns - len(data[0])) + data = numpy.hstack((data, numpy.zeros((len(data), n_missing), dtype='U1'))) + if titles is not None: + titles += [' '] * n_missing + else: + n_rows = 1 + n_columns = len(data[0]) + + return static_table_template.render( + data=data, + titles=titles, + n_columns=n_columns, + n_rows=n_rows, + row_labels=row_labels, + ) From 85129407cfc6e19a87abf0834a3a4211f4cc941a Mon Sep 17 00:00:00 2001 From: Tito Dal Canton Date: Wed, 7 Aug 2024 18:24:10 +0200 Subject: [PATCH 28/35] Add Cython `.cpp` files to `.gitignore` (#4842) --- .gitignore | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/.gitignore b/.gitignore index be6fd9999fb..405cf23fa4a 100644 --- a/.gitignore +++ b/.gitignore @@ -4,8 +4,18 @@ *.log dist/ html/ -pycbc_inspiralc build/ *.pyc docs/Makefile -PyCBC.egg-info +PyCBC.egg-info/ +pycbc/events/eventmgr_cython.cpp +pycbc/events/simd_threshold_cython.cpp +pycbc/fft/fftw_pruned_cython.cpp +pycbc/filter/matchedfilter_cpu.cpp +pycbc/filter/simd_correlate_cython.cpp +pycbc/inference/models/relbin_cpu.cpp +pycbc/types/array_cpu.cpp +pycbc/vetoes/chisq_cpu.cpp +pycbc/waveform/decompress_cpu_cython.cpp +pycbc/waveform/spa_tmplt_cpu.cpp +pycbc/waveform/utils_cpu.cpp From e9bdd6a7d807715ec06135cff96879257326cd72 Mon Sep 17 00:00:00 2001 From: Gareth S Cabourn Davies Date: Fri, 9 Aug 2024 16:49:50 +0100 Subject: [PATCH 29/35] pycbc_plot_bank_corner improvements for live (#4849) * Add IFO to fits plot title * allow log parameters in pycbc_plot_bank_corner * Fix use of log scales in the case that histograms are being used * Accidentally made some linear parameters log-scaled * Deal with case where not using any log parameters * CC --- ...pycbc_live_supervise_collated_trigger_fits | 5 +- bin/plotting/pycbc_plot_bank_corner | 55 +++++++++++++++++-- pycbc/results/scatter_histograms.py | 35 ++++++++++-- 3 files changed, 84 insertions(+), 11 deletions(-) diff --git a/bin/live/pycbc_live_supervise_collated_trigger_fits b/bin/live/pycbc_live_supervise_collated_trigger_fits index 71398621405..6562d427f3b 100755 
--- a/bin/live/pycbc_live_supervise_collated_trigger_fits +++ b/bin/live/pycbc_live_supervise_collated_trigger_fits @@ -270,7 +270,10 @@ def plot_fits( ] fits_plot_arguments += sv.dict_to_args(plot_fit_options) - title = "Fit parameters for pycbc-live, triggers from " + day_title_str + title = "Fit parameters for pycbc-live, triggers from {}, {}".format( + ifo, + day_title_str + ) if smoothed == True: title += ', smoothed' fits_plot_arguments += ['--title', title] diff --git a/bin/plotting/pycbc_plot_bank_corner b/bin/plotting/pycbc_plot_bank_corner index 01fdf9100cf..6d55b2fbcab 100644 --- a/bin/plotting/pycbc_plot_bank_corner +++ b/bin/plotting/pycbc_plot_bank_corner @@ -72,6 +72,13 @@ parser.add_argument("--parameters", "property of that parameter will be used. If not " "provided, will plot all of the parameters in the " "bank.") +parser.add_argument( + '--log-parameters', + nargs='+', + default=[], + help="Which parameters are to be plotted on a log scale? " + "Must be also given in parameters" +) parser.add_argument('--plot-histogram', action='store_true', help="Plot 1D histograms of parameters on the " @@ -103,6 +110,11 @@ parser.add_argument("--color-parameter", help="Color scatter points according to the parameter given. " "May optionally provide a label in the same way as for " "--parameters. Default=No scatter point coloring.") +parser.add_argument( + '--log-colormap', + action='store_true', + help="Should the colorbar be plotted on a log scale?" +) parser.add_argument('--dpi', type=int, default=200, @@ -117,6 +129,13 @@ parser.add_argument('--title', add_style_opt_to_parser(parser) args = parser.parse_args() +for lp in args.log_parameters: + if not lp in args.parameters: + parser.error( + "--log-parameters should be in --parameters. " + f"{lp} not in [{', '.join(args.parameters)}]" + ) + pycbc.init_logging(args.verbose) set_style_from_cli(args) @@ -146,7 +165,7 @@ if args.fits_file is not None: param = fits_f[p][:].astype(float) # We need to check for the cardinal '-1' value which means # that the fit is invalid - param[param <= 0] = 0 if 'count' in p and 'log' not in p else np.nan + param[param <= 0] = np.nan bank[p] = param logging.info("Got %d templates from the bank", banklen) @@ -227,12 +246,21 @@ if cpar: for p in required_minmax: minval = np.nanmin(bank_fa[p][bank_fa[p] != -np.inf]) maxval = np.nanmax(bank_fa[p][bank_fa[p] != np.inf]) - valrange = maxval - minval + if (p in args.log_parameters) or (p == cpar and args.log_colormap): + # Extend the range by 10% in log-space + logvalrange = np.log(maxval) - np.log(minval) + if p not in mins: + mins[p] = np.exp(np.log(minval) - 0.05 * logvalrange) + if p not in maxs: + maxs[p] = np.exp(np.log(maxval) + 0.05 * logvalrange) + else: + # Extend the range by 10% + valrange = maxval - minval + if p not in mins: + mins[p] = minval - 0.05 * valrange + if p not in maxs: + maxs[p] = maxval + 0.05 * valrange - if p not in mins: - mins[p] = minval - 0.05 * valrange - if p not in maxs: - maxs[p] = maxval + 0.05 * valrange # Deal with non-coloring case: zvals = bank_fa[cpar] if cpar else None @@ -247,6 +275,7 @@ fig, axis_dict = create_multidim_plot( plot_scatter=True, plot_contours=False, scatter_cmap="viridis", + scatter_log_cmap=args.log_colormap, marginal_title=False, marginal_percentiles=[], fill_color='g', @@ -258,6 +287,7 @@ fig, axis_dict = create_multidim_plot( hist_color=hist_color, mins=mins, maxs=maxs, + log_parameters=args.log_parameters, ) title_text = f"{os.path.basename(args.bank_file)}" @@ -293,6 +323,19 @@ for i in 
range(len(args.parameters)): for s0, s1 in zip(sharex_axes[:-1], sharex_axes[1:]): s0.sharex(s1) +for (p1, p2), ax in axis_dict.items(): + if p1 == p2 and p1 in args.log_parameters: + if p1 == args.parameters[-1] and len(args.parameters) == 2: + # This will be turned on its side, so set _y_ axis to log + ax[0].semilogy() + else: + ax[0].semilogx() + else: + if p1 in args.log_parameters: + ax[0].semilogx() + if p2 in args.log_parameters: + ax[0].semilogy() + logging.info("Plot generated") fig.set_dpi(args.dpi) diff --git a/pycbc/results/scatter_histograms.py b/pycbc/results/scatter_histograms.py index f89cc5a563f..dac56ab79cc 100644 --- a/pycbc/results/scatter_histograms.py +++ b/pycbc/results/scatter_histograms.py @@ -43,7 +43,7 @@ if 'matplotlib.backends' not in sys.modules: # nopep8 matplotlib.use('agg') -from matplotlib import (offsetbox, pyplot, gridspec) +from matplotlib import (offsetbox, pyplot, gridspec, colors) from pycbc.results import str_utils from pycbc.io import FieldArray @@ -337,7 +337,7 @@ def create_marginalized_hist(ax, values, label, percentiles=None, linestyle='-', plot_marginal_lines=True, title=True, expected_value=None, expected_color='red', rotated=False, - plot_min=None, plot_max=None): + plot_min=None, plot_max=None, log_scale=False): """Plots a 1D marginalized histogram of the given param from the given samples. @@ -380,6 +380,8 @@ def create_marginalized_hist(ax, values, label, percentiles=None, creates. scalefac : {1., float} Factor to scale the default font sizes by. Default is 1 (no scaling). + log_scale : boolean + Should the histogram bins be logarithmically spaced """ if fillcolor is None: htype = 'step' @@ -389,7 +391,19 @@ def create_marginalized_hist(ax, values, label, percentiles=None, orientation = 'horizontal' else: orientation = 'vertical' - ax.hist(values, bins=50, histtype=htype, orientation=orientation, + if log_scale: + bins = numpy.logspace( + numpy.log10(numpy.nanmin(values)), + numpy.log10(numpy.nanmax(values)), + 50 + ) + else: + bins = numpy.linspace( + numpy.nanmin(values), + numpy.nanmax(values), + 50, + ) + ax.hist(values, bins=bins, histtype=htype, orientation=orientation, facecolor=fillcolor, edgecolor=color, ls=linestyle, lw=2, density=True) if percentiles is None: @@ -545,6 +559,7 @@ def create_multidim_plot(parameters, samples, labels=None, marginal_title=True, marginal_linestyle='-', zvals=None, show_colorbar=True, cbar_label=None, vmin=None, vmax=None, scatter_cmap='plasma', + scatter_log_cmap=False, log_parameters=None, plot_density=False, plot_contours=True, density_cmap='viridis', contour_color=None, label_contours=True, @@ -614,6 +629,10 @@ def create_multidim_plot(parameters, samples, labels=None, zvals. scatter_cmap : {'plasma', string} The color map to use for the scatter points. Default is 'plasma'. + scatter_log_cmap : boolean + Should the scatter point coloring be on a log scale? Default False + log_parameters : list or None + Which parameters should be plotted on a log scale plot_density : {False, bool} Plot the density of points as a color map. 
plot_contours : {True, bool} @@ -649,6 +668,8 @@ def create_multidim_plot(parameters, samples, labels=None, """ if labels is None: labels = {p: p for p in parameters} + if log_parameters is None: + log_parameters = [] # set up the figure with a grid of axes # if only plotting 2 parameters, make the marginal plots smaller nparams = len(parameters) @@ -732,6 +753,7 @@ def create_multidim_plot(parameters, samples, labels=None, create_marginalized_hist( ax, samples[param], label=labels[param], color=hist_color, fillcolor=fill_color, + log_scale=param in log_parameters, plot_marginal_lines=plot_marginal_lines, linestyle=marginal_linestyle, linecolor=line_color, title=marginal_title, expected_value=expected_value, @@ -749,8 +771,13 @@ def create_multidim_plot(parameters, samples, labels=None, alpha = 0.3 else: alpha = 1. + if scatter_log_cmap: + cmap_norm = colors.LogNorm(vmin=vmin, vmax=vmax) + else: + cmap_norm = colors.Normalize(vmin=vmin, vmax=vmax) + plt = ax.scatter(x=samples[px], y=samples[py], c=zvals, s=5, - edgecolors='none', vmin=vmin, vmax=vmax, + edgecolors='none', norm=cmap_norm, cmap=scatter_cmap, alpha=alpha, zorder=2) if plot_contours or plot_density: From ff8e49916b4a53ab8f8e3a442d983a23d9816b89 Mon Sep 17 00:00:00 2001 From: Shichao Wu Date: Mon, 12 Aug 2024 16:56:49 +0100 Subject: [PATCH 30/35] remove readfp, use read_file instead (#4852) --- pycbc/types/config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pycbc/types/config.py b/pycbc/types/config.py index c690c8f4906..cdbf67bff95 100644 --- a/pycbc/types/config.py +++ b/pycbc/types/config.py @@ -49,7 +49,7 @@ def __deepcopy__(self, memo): self.write(config_string) config_string.seek(0) new_config = self.__class__() - new_config.readfp(config_string) + new_config.read_file(config_string) return new_config From 0a7b2edbe034cda34047feb1a06321a19e081624 Mon Sep 17 00:00:00 2001 From: Yumeng Xu Date: Tue, 20 Aug 2024 23:57:11 +0200 Subject: [PATCH 31/35] Add check before setting multiprocessing context to prevent the RuntimeError (#4620) * Add check before setting multiprocessing context to prevent the runtime error: context has already been set When `pycbc` is used with some multiprocessing package such as `dask`, importing `pycbc` will cause setting the multiprocessing context repeatedly. The `RuntimeError: context has already been set` will happen. This fix will check the context before setting it. * Update __init__.py Change raising error to warning * wrap the lines --------- Co-authored-by: Yumeng Xu --- pycbc/__init__.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/pycbc/__init__.py b/pycbc/__init__.py index d6db0026e0a..99f4cc6ef37 100644 --- a/pycbc/__init__.py +++ b/pycbc/__init__.py @@ -212,8 +212,13 @@ def makedir(path): # preserve common state information which we have relied on when using # multiprocessing based pools. 
import multiprocessing - if hasattr(multiprocessing, 'set_start_method'): - multiprocessing.set_start_method('fork') + if multiprocessing.get_start_method(allow_none=True) is None: + if hasattr(multiprocessing, 'set_start_method'): + multiprocessing.set_start_method('fork') + elif multiprocessing.get_start_method() != 'fork': + warnings.warn("PyCBC requires the use of the 'fork' start method" + " for multiprocessing, it is currently set to {}" + .format(multiprocessing.get_start_method())) else: HAVE_OMP = True From 81305b4657836f1eb011ef2b6389772bf62e7aa1 Mon Sep 17 00:00:00 2001 From: Tito Dal Canton Date: Wed, 21 Aug 2024 13:28:41 +0200 Subject: [PATCH 32/35] Make sure BBHx knows how to link locally-installed LAPACK (#4855) * Make sure BBHx knows how to link locally-installed LAPACK * Woops, fix a mistake * That did not work, but this looks better --- tox.ini | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 0baf40ef5cf..d3d96b2039b 100644 --- a/tox.ini +++ b/tox.ini @@ -38,6 +38,10 @@ conda_deps= gsl lapack==3.6.1 conda_channels=conda-forge +setenv = + ; Tell the linker to look for shared libs inside the temporary Conda env. + ; Needed to build BBHx's wheel, whick links to LAPACK. + LIBRARY_PATH={envdir}/lib:{env:LIBRARY_PATH:} commands = pytest # The following are long running or may require @@ -81,5 +85,9 @@ conda_deps= lapack==3.6.1 openmpi conda_channels=conda-forge -setenv = PYCBC_TEST_TYPE=docs +setenv = + PYCBC_TEST_TYPE=docs + ; Tell the linker to look for shared libs inside the temporary Conda env. + ; Needed to build BBHx's wheel, whick links to LAPACK. + LIBRARY_PATH={envdir}/lib:{env:LIBRARY_PATH:} commands = bash tools/pycbc_test_suite.sh From d9f728dc183d788a17f15578d0777f1cb4ff4026 Mon Sep 17 00:00:00 2001 From: Gareth S Cabourn Davies Date: Wed, 21 Aug 2024 13:53:46 +0100 Subject: [PATCH 33/35] Live singles fits plot names (#4856) * Forgot to remove unused test options * Dont report all ifos in significance plots * Update bin/live/pycbc_live_supervise_collated_trigger_fits Co-authored-by: Tito Dal Canton * Update bin/live/pycbc_live_supervise_collated_trigger_fits Co-authored-by: Tito Dal Canton * Update bin/live/pycbc_live_supervise_collated_trigger_fits Co-authored-by: Tito Dal Canton --------- Co-authored-by: Tito Dal Canton --- ...pycbc_live_supervise_collated_trigger_fits | 25 ++++++++++++++----- 1 file changed, 19 insertions(+), 6 deletions(-) diff --git a/bin/live/pycbc_live_supervise_collated_trigger_fits b/bin/live/pycbc_live_supervise_collated_trigger_fits index 6562d427f3b..087ae5d1339 100755 --- a/bin/live/pycbc_live_supervise_collated_trigger_fits +++ b/bin/live/pycbc_live_supervise_collated_trigger_fits @@ -226,6 +226,11 @@ def fit_over_multiparam( "specified parameters", len(daily_files) ) + logging.info( + "Smoothing fits using fit_over_multiparam with %d files and " + "specified parameters", + len(daily_files) + ) file_id_str = f'{first_date}-{end_date}' out_fname = fit_over_controls['fit-over-format'].format( dates=file_id_str, @@ -293,7 +298,6 @@ def single_significance_fits( day_str, day_dt, controls, - test_options, stat_files=None, ): """ @@ -302,7 +306,10 @@ def single_significance_fits( """ daily_options['output'] = os.path.join( output_dir, - daily_controls['sig-daily-format'].format(date=day_str), + daily_controls['sig-daily-format'].format( + ifos=''.join(sorted(controls['ifos'].split())), + date=day_str + ), ) daily_args = ['pycbc_live_single_significance_fits'] @@ -322,7 +329,10 @@ def 
plot_single_significance_fits(daily_output, daily_plot_options, controls): """ Plotting daily significance fits, and link to public directory if wanted """ - daily_plot_output = f'{daily_output[:-4]}_{{ifo}}.png' + daily_plot_output = daily_output[:-4].replace( + ''.join(sorted(controls['ifos'].split())), + '{ifo}' + ) + '.png' logging.info( "Plotting daily significance fits from %s to %s", daily_output, @@ -365,9 +375,14 @@ def combine_significance_fits( Supervise the smoothing of live trigger significance fits using pycbc_live_combine_single_significance_fits """ + # This has a trick to do partial formatting, get the IFOs into the + # string, but not the date daily_files, first_date, end_date = find_daily_fit_files( combined_controls, - combined_controls['daily-format'], + combined_controls['daily-format'].format( + ifos=''.join(sorted(controls['ifos'].split())), + date='{date}' + ), controls['output-directory'], ) logging.info( @@ -466,7 +481,6 @@ def supervise_collation_fits_dq(args, day_dt, day_str): combined_control_options = config_opts['significance_combined_fits_control'] combined_plot_options = config_opts['plot_significance_combined'] combined_plot_control_options = config_opts['plot_significance_combined_control'] - test_options = config_opts['test'] # The main output directory will have a date subdirectory which we # put the output into @@ -541,7 +555,6 @@ def supervise_collation_fits_dq(args, day_dt, day_str): day_str, day_dt, controls, - test_options, stat_files=stat_files, ) plot_single_significance_fits( From cf6447ebd37848b0f7633397b4ea65cd30580c7a Mon Sep 17 00:00:00 2001 From: maxtrevor <65971534+maxtrevor@users.noreply.github.com> Date: Thu, 22 Aug 2024 10:59:00 -0400 Subject: [PATCH 34/35] Idq live (#4850) * Have pycbc live mark flagged triggers instead of removing * Make stat usable in low-latency * Add command line argument for whether to use idq for reweighting * Add logging for iDQ flagged triggers * Fix bug when using ifo with no dq * Improve logic for getting ifo frm trigs * Update for compatibility with Gareth's stat reloading code * Modify how trig ifo is gotten and add debug statements * Use logging not print for debugging * logger not logging * Fix where tnum is set * Get rid of excess logging * Address Gareth's comments * Codeclimate * Apply suggestions from code review Co-authored-by: Gareth S Cabourn Davies --------- Co-authored-by: Gareth S Cabourn Davies --- bin/pycbc_live | 36 ++++++++++---- pycbc/events/stat.py | 112 +++++++++++++++++++++++++++++-------------- pycbc/frame/frame.py | 35 +++++++++----- 3 files changed, 127 insertions(+), 56 deletions(-) diff --git a/bin/pycbc_live b/bin/pycbc_live index be649e6ae0e..52a0de2433a 100755 --- a/bin/pycbc_live +++ b/bin/pycbc_live @@ -848,6 +848,8 @@ parser.add_argument('--idq-state-channel', action=MultiDetMultiColonOptionAction parser.add_argument('--idq-threshold', type=float, help='Threshold used to veto triggers at times of ' 'low iDQ False Alarm Probability') +parser.add_argument('--idq-reweighting', action='store_true',default=False, + help='Reweight triggers based on iDQ False Alarm Probability') parser.add_argument('--data-quality-channel', action=MultiDetMultiColonOptionAction, help="Channel containing data quality information. 
Used " @@ -1311,17 +1313,35 @@ with ctx: if len(results[ifo][key]): results[ifo][key] = results[ifo][key][idx] if data_reader[ifo].idq is not None: - logging.info("Checking %s's iDQ information", ifo) + logging.info("Reading %s's iDQ information", ifo) start = data_reader[ifo].start_time times = results[ifo]['end_time'] - idx = data_reader[ifo].idq.indices_of_flag( + flag_active = data_reader[ifo].idq.flag_at_times( start, valid_pad, times, - padding=data_reader[ifo].dq_padding) - logging.info('Keeping %d/%d %s triggers after iDQ', - len(idx), len(times), ifo) - for key in results[ifo]: - if len(results[ifo][key]): - results[ifo][key] = results[ifo][key][idx] + padding=data_reader[ifo].dq_padding + ) + + if args.idq_reweighting: + logging.info( + 'iDQ flagged %d/%d %s triggers', + numpy.sum(flag_active), + len(times), + ifo + ) + results[ifo]['dq_state'] = flag_active.astype(int) + else: + # use idq as a veto + keep = numpy.logical_not(flag_active) + logging.info( + 'Keeping %d/%d %s triggers after iDQ', + numpy.sum(keep), + len(times), + ifo + ) + for key in results[ifo]: + if len(results[ifo][key]): + results[ifo][key] = \ + results[ifo][key][keep] # Calculate and add the psd variation for the results if args.psd_variation: diff --git a/pycbc/events/stat.py b/pycbc/events/stat.py index f61e7c55b66..ff8da9a2f19 100644 --- a/pycbc/events/stat.py +++ b/pycbc/events/stat.py @@ -1670,6 +1670,12 @@ def single(self, trigs): numpy.ndarray The array of single detector values """ + try: + # exists if accessed via coinc_findtrigs + self.curr_tnum = trigs.template_num + except AttributeError: + # exists for SingleDetTriggers & pycbc_live get_coinc + self.curr_tnum = trigs['template_id'] # single-ifo stat = log of noise rate sngl_stat = self.lognoiserate(trigs) @@ -1681,12 +1687,6 @@ def single(self, trigs): singles['end_time'] = trigs['end_time'][:] singles['sigmasq'] = trigs['sigmasq'][:] singles['snr'] = trigs['snr'][:] - try: - # exists if accessed via coinc_findtrigs - self.curr_tnum = trigs.template_num - except AttributeError: - # exists for SingleDetTriggers & pycbc_live get_coinc - self.curr_tnum = trigs['template_id'] # Store benchmark log volume as single-ifo information since the coinc # method does not have access to template id @@ -2271,14 +2271,46 @@ def __init__(self, sngl_ranking, files=None, ifos=None, ifos=ifos, **kwargs) self.dq_rates_by_state = {} self.dq_bin_by_tid = {} - self.dq_state_segments = {} + self.dq_state_segments = None + self.low_latency = False + self.single_dtype.append(('dq_state', int)) for ifo in self.ifos: key = f'{ifo}-dq_stat_info' if key in self.files.keys(): self.dq_rates_by_state[ifo] = self.assign_dq_rates(key) self.dq_bin_by_tid[ifo] = self.assign_template_bins(key) - self.dq_state_segments[ifo] = self.setup_segments(key) + self.check_low_latency(key) + if not self.low_latency: + if self.dq_state_segments is None: + self.dq_state_segments = {} + self.dq_state_segments[ifo] = self.setup_segments(key) + + def check_low_latency(self, key): + """ + Check if the statistic file indicates low latency mode. + Parameters + ---------- + key: str + Statistic file key string. 
+ Returns + ------- + None + """ + ifo = key.split('-')[0] + with h5py.File(self.files[key], 'r') as dq_file: + ifo_grp = dq_file[ifo] + if 'dq_segments' not in ifo_grp.keys(): + # if segs are not in file, we must be in LL + if self.dq_state_segments is not None: + raise ValueError( + 'Either all dq stat files must have segments or none' + ) + self.low_latency = True + elif self.low_latency: + raise ValueError( + 'Either all dq stat files must have segments or none' + ) def assign_template_bins(self, key): """ @@ -2337,9 +2369,7 @@ def assign_dq_rates(self, key): def setup_segments(self, key): """ - Check if segments definitions are in stat file - If they are, we are running offline and need to store them - If they aren't, we are running online + Store segments from stat file """ ifo = key.split('-')[0] with h5py.File(self.files[key], 'r') as dq_file: @@ -2368,35 +2398,32 @@ def update_file(self, key): return True # We also need to check if the DQ files have updated if key.endswith('dq_stat_info'): + ifo = key.split('-')[0] logger.info( "Updating %s statistic %s file", - ''.join(self.ifos), + ifo, key ) - self.assign_dq_rates(key) - self.assign_template_bins(key) - self.setup_segments(key) + self.dq_rates_by_state[ifo] = self.assign_dq_rates(key) + self.dq_bin_by_tid[ifo] = self.assign_template_bins(key) return True return False - def find_dq_noise_rate(self, trigs, dq_state): + def find_dq_noise_rate(self, trigs): """Get dq values for a specific ifo and dq states""" - try: - tnum = trigs.template_num - except AttributeError: - tnum = trigs['template_id'] - try: ifo = trigs.ifo except AttributeError: - ifo = trigs['ifo'] - assert len(numpy.unique(ifo)) == 1 - # Should be exactly one ifo provided - ifo = ifo[0] + ifo = trigs.get('ifo', None) + if ifo is None: + ifo = self.ifos[0] + assert ifo in self.ifos - dq_val = numpy.zeros(len(dq_state)) + dq_state = trigs['dq_state'] + dq_val = numpy.ones(len(dq_state)) + tnum = self.curr_tnum if ifo in self.dq_rates_by_state: for (i, st) in enumerate(dq_state): if isinstance(tnum, numpy.ndarray): @@ -2437,17 +2464,7 @@ def lognoiserate(self, trigs): Array of log noise rate density for each input trigger. 
""" - # make sure every trig has a dq state - try: - ifo = trigs.ifo - except AttributeError: - ifo = trigs['ifo'] - assert len(numpy.unique(ifo)) == 1 - # Should be exactly one ifo provided - ifo = ifo[0] - - dq_state = self.find_dq_state_by_time(ifo, trigs['end_time'][:]) - dq_rate = self.find_dq_noise_rate(trigs, dq_state) + dq_rate = self.find_dq_noise_rate(trigs) dq_rate = numpy.maximum(dq_rate, 1) logr_n = ExpFitFgBgNormStatistic.lognoiserate( @@ -2455,6 +2472,27 @@ def lognoiserate(self, trigs): logr_n += numpy.log(dq_rate) return logr_n + def single(self, trigs): + # make sure every trig has a dq state + try: + ifo = trigs.ifo + except AttributeError: + ifo = trigs.get('ifo', None) + if ifo is None: + ifo = self.ifos[0] + assert ifo in self.ifos + + singles = ExpFitFgBgNormStatistic.single(self, trigs) + + if self.low_latency: + # trigs should already have a dq state assigned + singles['dq_state'] = trigs['dq_state'][:] + else: + singles['dq_state'] = self.find_dq_state_by_time( + ifo, trigs['end_time'][:] + ) + return singles + class DQExpFitFgBgKDEStatistic(DQExpFitFgBgNormStatistic): """ diff --git a/pycbc/frame/frame.py b/pycbc/frame/frame.py index a67a3d090d9..bea7386418a 100644 --- a/pycbc/frame/frame.py +++ b/pycbc/frame/frame.py @@ -896,8 +896,8 @@ def __init__(self, frame_src, force_update_cache=force_update_cache, increment_update_cache=increment_update_cache) - def indices_of_flag(self, start_time, duration, times, padding=0): - """ Return the indices of the times lying in the flagged region + def flag_at_times(self, start_time, duration, times, padding=0): + """ Check whether the idq flag was on at given times Parameters ---------- @@ -905,32 +905,45 @@ def indices_of_flag(self, start_time, duration, times, padding=0): Beginning time to request for duration: int Number of seconds to check. + times: array of floats + Times to check for an active flag padding: float - Number of seconds to add around flag inactive times to be considered - inactive as well. + Amount of time in seconds to flag around samples + below the iDQ FAP threshold Returns ------- - indices: numpy.ndarray - Array of indices marking the location of triggers within valid - time. 
+ flag_state: numpy.ndarray + Boolean array of whether flag was on at given times """ - from pycbc.events.veto import indices_outside_times + from pycbc.events.veto import indices_within_times + + # convert start and end times to buffer indices sr = self.idq.raw_buffer.sample_rate s = int((start_time - self.idq.raw_buffer.start_time - padding) * sr) - 1 e = s + int((duration + padding) * sr) + 1 + + # find samples when iDQ FAP is below threshold and state is valid idq_fap = self.idq.raw_buffer[s:e] - stamps = idq_fap.sample_times.numpy() low_fap = idq_fap.numpy() <= self.threshold idq_valid = self.idq_state.raw_buffer[s:e] idq_valid = idq_valid.numpy().astype(bool) valid_low_fap = numpy.logical_and(idq_valid, low_fap) + + # find times corresponding to the valid low FAP samples glitch_idx = numpy.flatnonzero(valid_low_fap) + stamps = idq_fap.sample_times.numpy() glitch_times = stamps[glitch_idx] + + # construct start and end times of flag segments starts = glitch_times - padding ends = starts + 1.0 / sr + padding * 2.0 - idx = indices_outside_times(times, starts, ends) - return idx + + # check if times were flagged + idx = indices_within_times(times, starts, ends) + flagged_bool = numpy.zeros(len(times), dtype=bool) + flagged_bool[idx] = True + return flagged_bool def advance(self, blocksize): """ Add blocksize seconds more to the buffer, push blocksize seconds From ec82874e078039cc5dc6bf68a4a7c6cbd4f9c224 Mon Sep 17 00:00:00 2001 From: Ian Harry Date: Fri, 23 Aug 2024 16:21:58 +0100 Subject: [PATCH 35/35] Add pegasus 5.0.8 (#4854) --- .github/workflows/inference-workflow.yml | 2 +- .github/workflows/search-workflow.yml | 2 +- .github/workflows/tmpltbank-workflow.yml | 2 +- .github/workflows/workflow-tests.yml | 2 +- requirements.txt | 12 ++++++++++-- setup.py | 2 +- 6 files changed, 15 insertions(+), 7 deletions(-) diff --git a/.github/workflows/inference-workflow.yml b/.github/workflows/inference-workflow.yml index d31076da5e6..704351cdb7e 100644 --- a/.github/workflows/inference-workflow.yml +++ b/.github/workflows/inference-workflow.yml @@ -25,7 +25,7 @@ jobs: wget -qO - https://download.pegasus.isi.edu/pegasus/gpg.txt | sudo apt-key add - echo "deb https://download.pegasus.isi.edu/pegasus/ubuntu bionic main" | sudo tee -a /etc/apt/sources.list sudo apt-get -o Acquire::Retries=3 update - sudo apt-get -o Acquire::Retries=3 install pegasus=5.0.6-1+ubuntu18 + sudo apt-get -o Acquire::Retries=3 install pegasus=5.0.8-1+ubuntu18 - run: sudo apt-get -o Acquire::Retries=3 install *fftw3* intel-mkl* - name: Install pycbc run: | diff --git a/.github/workflows/search-workflow.yml b/.github/workflows/search-workflow.yml index f7f8c744ad5..d3cd392ebb9 100644 --- a/.github/workflows/search-workflow.yml +++ b/.github/workflows/search-workflow.yml @@ -30,7 +30,7 @@ jobs: wget -qO - https://download.pegasus.isi.edu/pegasus/gpg.txt | sudo apt-key add - echo "deb https://download.pegasus.isi.edu/pegasus/ubuntu bionic main" | sudo tee -a /etc/apt/sources.list sudo apt-get -o Acquire::Retries=3 update - sudo apt-get -o Acquire::Retries=3 install pegasus=5.0.6-1+ubuntu18 + sudo apt-get -o Acquire::Retries=3 install pegasus=5.0.8-1+ubuntu18 - run: sudo apt-get -o Acquire::Retries=3 install *fftw3* intel-mkl* - name: Install pycbc run: | diff --git a/.github/workflows/tmpltbank-workflow.yml b/.github/workflows/tmpltbank-workflow.yml index e971e97b2b7..c460bcc0637 100644 --- a/.github/workflows/tmpltbank-workflow.yml +++ b/.github/workflows/tmpltbank-workflow.yml @@ -29,7 +29,7 @@ jobs: wget -qO - 
https://download.pegasus.isi.edu/pegasus/gpg.txt | sudo apt-key add - echo "deb https://download.pegasus.isi.edu/pegasus/ubuntu bionic main" | sudo tee -a /etc/apt/sources.list sudo apt-get -o Acquire::Retries=3 update - sudo apt-get -o Acquire::Retries=3 install pegasus=5.0.6-1+ubuntu18 + sudo apt-get -o Acquire::Retries=3 install pegasus=5.0.8-1+ubuntu18 - run: sudo apt-get -o Acquire::Retries=3 install *fftw3* intel-mkl* - name: Install pycbc run: | diff --git a/.github/workflows/workflow-tests.yml b/.github/workflows/workflow-tests.yml index 79643cc8793..1b49d7427a5 100644 --- a/.github/workflows/workflow-tests.yml +++ b/.github/workflows/workflow-tests.yml @@ -34,7 +34,7 @@ jobs: wget -qO - https://download.pegasus.isi.edu/pegasus/gpg.txt | sudo apt-key add - echo "deb https://download.pegasus.isi.edu/pegasus/ubuntu bionic main" | sudo tee -a /etc/apt/sources.list sudo apt-get -o Acquire::Retries=3 update - sudo apt-get -o Acquire::Retries=3 install pegasus=5.0.6-1+ubuntu18 + sudo apt-get -o Acquire::Retries=3 install pegasus=5.0.8-1+ubuntu18 - run: sudo apt-get -o Acquire::Retries=3 install *fftw3* intel-mkl* - name: Install pycbc run: | diff --git a/requirements.txt b/requirements.txt index 25f99b30322..3128f18f0a3 100644 --- a/requirements.txt +++ b/requirements.txt @@ -17,9 +17,17 @@ tqdm gwdatafind>=1.1.3 # Requirements for full pegasus env -pegasus-wms.api >= 5.0.6 -# Need GitPython: See discussion in https://github.com/gwastro/pycbc/pull/4454 +# https://pegasus.isi.edu/documentation/user-guide/installation.html#mixing-environments-system-venv-conda +# six is listed, but is now not needed. +pegasus-wms.api >= 5.0.8 +boto3 +certifi GitPython +pyjwt +pyyaml +s3transfer +urllib3 + # need to pin until pegasus for further upstream # addresses incompatibility between old flask/jinja2 and latest markupsafe markupsafe <= 2.0.1 diff --git a/setup.py b/setup.py index 652544bbd0e..03e2dbf8e9b 100755 --- a/setup.py +++ b/setup.py @@ -45,7 +45,7 @@ 'tqdm', 'setuptools', 'gwdatafind', - 'pegasus-wms.api >= 5.0.6', + 'pegasus-wms.api >= 5.0.8', 'python-ligo-lw >= 1.7.0', 'ligo-segments', 'lalsuite!=7.2',
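
Note on the frame.py change above: indices_of_flag returned an array of trigger indices, whereas the new flag_at_times returns one boolean per query time, True wherever the iDQ flag was considered active (FAP at or below the threshold, iDQ state valid, plus the requested padding). The sketch below illustrates that return convention with plain numpy only; the helper name times_to_flag_mask and the segment/trigger values are hypothetical and not part of PyCBC, which builds its flag segments from internal iDQ buffers and uses pycbc.events.veto.indices_within_times rather than this loop.

import numpy as np

def times_to_flag_mask(times, flag_starts, flag_ends):
    # Illustrative only: mimic the flag_at_times return convention by
    # marking True for every time that falls inside any [start, end)
    # flag segment, and False otherwise.
    times = np.asarray(times)
    mask = np.zeros(len(times), dtype=bool)
    for start, end in zip(flag_starts, flag_ends):
        mask |= (times >= start) & (times < end)
    return mask

# Hypothetical flag segments and candidate trigger times
starts = np.array([100.0, 110.0])
ends = np.array([101.0, 112.0])
trigger_times = np.array([99.5, 100.5, 105.0, 111.0, 112.5])
print(times_to_flag_mask(trigger_times, starts, ends))
# [False  True False  True False]

Returning one boolean per query time lets a caller store the flag state alongside each trigger directly, instead of having to re-derive it from a list of surviving indices.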