pycbc_inference
import os
run_dir = '/home/prayush/research/test_pycbc_inj'
try:
    os.makedirs(run_dir)
except OSError:
    pass
os.chdir(run_dir)
!pwd
/home/prayush/research/test_pycbc_inj
!rm -rf *
!ls
!gwnr_write_pycbc_inference_configs -h
/home/prayush/miniconda3/envs/lalsuite-dev/lib/python3.9/site-packages/pandas/core/common.py:208: VisibleDeprecationWarning: Creating an ndarray from ragged nested sequences (which is a list-or-tuple of lists-or-tuples-or ndarrays with different lengths or shapes) is deprecated. If you meant to do this, you must specify 'dtype=object' when creating the ndarray.
  result = np.asarray(values, dtype=dtype)
usage: /home/prayush/miniconda3/envs/lalsuite-dev/lib/python3.9/site-packages/gwnrtools-2020.10.30-py3.9.egg/EGG-INFO/scripts/gwnrtools_write_pycbc_inference_configs [--options]

Get and write configuration files for generating a workflow to perform
Bayesian parameter estimation runs on a set of signals with Pycbc inference

optional arguments:
  -h, --help            show this help message and exit
  --version             Prints version information.
  --verbose             Print logging messages.
  --write-data-config WRITE_DATA_CONFIG
                        Write data config files and exit.
  --write-sampler-config WRITE_SAMPLER_CONFIG
                        Write sampler config files and exit.
  --write-inference-config WRITE_INFERENCE_CONFIG
                        Write inference config files and exit.
  --n-cpus N_CPUS
  --checkpoint-interval CHECKPOINT_INTERVAL
  --n-live N_LIVE
  --n-maxmcmc N_MAXMCMC
  --dlogz DLOGZ
  --n-walkers N_WALKERS
  --n-temperatures N_TEMPERATURES
  --n-maxsamps-per-walker N_MAXSAMPS_PER_WALKER
  --n-eff-samples N_EFF_SAMPLES
  --show-available-configs
                        Show available options for all configurations.
  --output-dir OUTPUT_DIR
                        Output directory path.
!gwnr_write_pycbc_inference_configs --show-available-configs
data: ['gw150914-like-gaussian', 'gw150914-like-zeronoise', 'GW150914-v3', 'GW151012-v3', 'GW151226-v2', 'GW170104-v2', 'GW170608-v3', 'GW170729-v1', 'GW170809-v1', 'GW170814-v3', 'GW170817-v3', 'GW170818-v1', 'GW170823-v1']
sampler: ['emcee', 'emcee_pt', 'epsie', 'dynesty', 'ultranest', 'multinest', 'cpnest']
inference: ['bbh_precessing', 'bbh_alignedspin']
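If you want configuration sets for several samplers at once, the options shown in the help above can simply be looped over. A hedged sketch, not part of the original session; it assumes the tool accepts the same three --write-* options together with --output-dir, as in the next cell:
# Sketch: generate config sets for a few samplers into separate directories
import os, subprocess
for sampler in ["emcee_pt", "dynesty", "epsie"]:
    outdir = f"configs_{sampler}"
    os.makedirs(outdir, exist_ok=True)
    subprocess.run([
        "gwnr_write_pycbc_inference_configs",
        "--write-sampler-config", sampler,
        "--write-inference-config", "bbh_precessing",
        "--write-data-config", "gw150914-like-zeronoise",
        "--output-dir", outdir,
    ], check=True)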
!gwnr_write_pycbc_inference_configs --verbose \
    --write-sampler-config emcee_pt \
    --write-inference-config bbh_precessing \
    --write-data-config gw150914-like-zeronoise
2021-09-20 12:04:47,882 Writing config file for data settings..
2021-09-20 12:04:47,883 Writing config file for sampler settings..
2021-09-20 12:04:47,883 Writing config file for inference settings..
2021-09-20 12:04:47,883 Done
!ls
bbh_precessing.ini emcee_pt.ini gw150914-like-zeronoise.ini
!cat bbh_precessing.ini
[model]
name = gaussian_noise
low-frequency-cutoff = 20.0

[variable_params]
; waveform parameters that will vary in MCMC
delta_tc =
mass1 =
mass2 =
spin1_a =
spin1_azimuthal =
spin1_polar =
spin2_a =
spin2_azimuthal =
spin2_polar =
distance =
coa_phase =
inclination =
polarization =
ra =
dec =

[static_params]
; waveform parameters that will not change in MCMC
approximant = IMRPhenomPv2
f_lower = 20
f_ref = 20
; we'll set the tc by using the trigger time in the data
; section of the config file + delta_tc
trigger_time = ${data|trigger-time}

[prior-delta_tc]
; coalescence time prior
name = uniform
min-delta_tc = -0.1
max-delta_tc = 0.1

[waveform_transforms-tc]
; we need to provide tc to the waveform generator
name = custom
inputs = delta_tc
tc = ${data|trigger-time} + delta_tc

;Mass1 of GW151012 $\in$ [28.7, 38.1]
;Mass1 of GW170608 $\in$ [12.7, 16.5]
;Mass1 of GW170729 $\in$ [60.4, 66.4]
;Mass1 of GW150914 $\in$ [38.7, 40.3]
;Mass1 of GW151226 $\in$ [16.9, 22.5]
;Mass1 of GW170814 $\in$ [33.6, 36.2]
;Mass1 of GW170817 $\in$ [1.56, 1.58]
;Mass1 of GW170104 $\in$ [36.4, 38.1]
;Mass1 of GW170809 $\in$ [40.9, 43.3]
;Mass1 of GW170818 $\in$ [40.1, 42.9]
;Mass1 of GW170823 $\in$ [46.2, 50.7]
[prior-mass1]
name = uniform
min-mass1 = 10.
max-mass1 = 80.

;Mass2 of GW151012 $\in$ [18.4, 17.7]
;Mass2 of GW170608 $\in$ [9.8, 9.0]
;Mass2 of GW170729 $\in$ [44.1, 43.1]
;Mass2 of GW150914 $\in$ [35.0, 33.6]
;Mass2 of GW151226 $\in$ [10.2, 9.9]
;Mass2 of GW170814 $\in$ [29.2, 28.0]
;Mass2 of GW170817 $\in$ [1.36, 1.36]
;Mass2 of GW170104 $\in$ [24.6, 24.9]
;Mass2 of GW170809 $\in$ [29.0, 28.9]
;Mass2 of GW170818 $\in$ [31.9, 31.0]
;Mass2 of GW170823 $\in$ [36.8, 35.7]
[prior-mass2]
name = uniform
min-mass2 = 10.
max-mass2 = 80.

[prior-spin1_a]
name = uniform
min-spin1_a = 0.0
max-spin1_a = 0.99

[prior-spin1_polar+spin1_azimuthal]
name = uniform_solidangle
polar-angle = spin1_polar
azimuthal-angle = spin1_azimuthal

[prior-spin2_a]
name = uniform
min-spin2_a = 0.0
max-spin2_a = 0.99

[prior-spin2_polar+spin2_azimuthal]
name = uniform_solidangle
polar-angle = spin2_polar
azimuthal-angle = spin2_azimuthal

[prior-distance]
; following gives a uniform volume prior
name = uniform_radius
min-distance = 10
max-distance = 1000

[prior-coa_phase]
; coalescence phase prior
name = uniform_angle

[prior-inclination]
; inclination prior
name = sin_angle

[prior-ra+dec]
; sky position prior
name = uniform_sky

[prior-polarization]
; polarization prior
name = uniform_angle
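A quick way to sanity-check the file just written is to parse it back and list the variable parameters and their prior sections. A minimal sketch using only the standard library (the ${data|trigger-time} macro is simply left as a literal string here):
# Sketch: parse bbh_precessing.ini and list variable params and prior sections
from configparser import ConfigParser
cp = ConfigParser()
cp.read("bbh_precessing.ini")
print("variable params:", list(cp["variable_params"].keys()))
print("prior sections :", [s for s in cp.sections() if s.startswith("prior-")])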
!cat emcee_pt.ini
[sampler]
name = emcee_pt
nwalkers = 500
ntemps = 20
;##### Other possible options
effective-nsamples = 4000
checkpoint-interval = 2000
max-samples-per-chain = 1000

[sampler-burn_in]
burn-in-test = nacl & max_posterior

;
; Sampling transforms
;
[sampling_params]
; parameters on the left will be sampled in
; parametes on the right
mass1, mass2 : mchirp, q

[sampling_transforms-mchirp+q]
; inputs mass1, mass2
; outputs mchirp, q
name = mass1_mass2_to_mchirp_q
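The [sampling_params] block above tells the sampler to move in chirp mass and mass ratio rather than in component masses. For reference, a minimal sketch of the forward map with the standard definitions (here using the convention that mass1 >= mass2, so q >= 1):
# Sketch: chirp mass and mass ratio from component masses
def mchirp_q_from_masses(mass1, mass2):
    mchirp = (mass1 * mass2) ** 0.6 / (mass1 + mass2) ** 0.2
    q = mass1 / mass2  # assumes mass1 >= mass2
    return mchirp, q

print(mchirp_q_from_masses(38.0, 33.0))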
!cat gw150914-like-zeronoise.ini
[data]
instruments = H1 L1
trigger-time = 1126259462.42
analysis-start-time = -6
analysis-end-time = 2
; strain settings
sample-rate = 2048
fake-strain = H1:zeroNoise L1:zeroNoise
; psd settings
psd-model = aLIGOZeroDetHighPower
psd-inverse-length = 0
; even though we're making fake strain, the strain
; module requires a channel to be provided, so we'll
; just make one up
channel-name = H1:STRAIN L1:STRAIN
; Providing an injection file will cause a simulated
; signal to be added to the data
injection-file = injection.hdf
; We'll use a high-pass filter so as not to get numerical errors from the large
; amplitude low frequency noise. Here we use 15 Hz, which is safely below the
; low frequency cutoff of our likelihood integral (20 Hz)
strain-high-pass = 15
; The pad-data argument is for the high-pass filter: 8s are added to the
; beginning/end of the analysis/psd times when the data is loaded. After the
; high pass filter is applied, the additional time is discarded. This pad is
; *in addition to* the time added to the analysis start/end time for the PSD
; inverse length. Since it is discarded before the data is transformed for the
; likelihood integral, it has little affect on the run time.
pad-data = 8
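Note that analysis-start-time and analysis-end-time are offsets in seconds from the trigger time, so the likelihood is evaluated on an 8 s segment around GW150914's GPS time. A one-cell sketch of the arithmetic:
# Sketch: the GPS segment implied by the data settings above
trigger_time = 1126259462.42
analysis_start, analysis_end = -6, 2
segment = (trigger_time + analysis_start, trigger_time + analysis_end)
print(segment, "duration =", segment[1] - segment[0], "s")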
# Write CONFIGS
with open("injection.ini", "w") as fout:
    fout.write("""\
[static_params]
tc = 1126259462.420
;mass1 = 37
mass2 = 32
ra = 2.2
dec = -1.25
inclination = 2.5
coa_phase = 1.5
polarization = 1.75
distance = 100
f_ref = 20
f_lower = 18
approximant = SEOBNRv4
taper = start
[variable_params]
mass1 =
eccentricity =
mean_per_ano =
[prior-mass1]
name = uniform
min-mass1 = 10.
max-mass1 = 80.
[prior-eccentricity]
name = uniform
min-eccentricity = 0.
max-eccentricity = 0.2
[prior-mean_per_ano]
name = uniform
min-mean_per_ano = 0.
max-mean_per_ano = 3.1416
""")
!ls
bbh_precessing.ini emcee_pt.ini gw150914-like-zeronoise.ini injection.ini
# Write the workflow config (config.ini)
with open("config.ini", "w") as fout:
    fout.write("""\
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
; Executables
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
[executables]
inspinj = ${which:pycbc_create_injections}
inference = ${which:pycbc_inference}
plot = ${which:pycbc_inference_plot_posterior}
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
; Workflow
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
[workflow]
accounting-group = ligo.dev.o3.cbc.explore.test
templates-per-job = 100
log-path = log
banksim-request-memory = 8G
data = gw150914-like-zeronoise.ini
sampler = emcee_pt.ini
inference = bbh_precessing.ini
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
; Injections
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
[inspinj]
config-files = injection.ini
ninjections = 10
seed = 10
output-file = injection.hdf
variable-params-section = variable_params
static-params-section = static_params
dist-section = prior
force =
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
; Inference
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
[inference]
verbose =
seed = 12
config-files = inference.ini data.ini sampler.ini
output-file = inference.hdf
nprocesses = 10
force =
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
; Visualize
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
[plot]
input-file = inference.hdf
output-file = plots/posteriors.png
plot-scatter =
plot-marginal =
plot-prior = inference.ini data.ini
""")
!cat injection.ini
[static_params]
tc = 1126259462.420
;mass1 = 37
mass2 = 32
ra = 2.2
dec = -1.25
inclination = 2.5
coa_phase = 1.5
polarization = 1.75
distance = 100
f_ref = 20
f_lower = 18
approximant = SEOBNRv4
taper = start

[variable_params]
mass1 =
eccentricity =
mean_per_ano =

[prior-mass1]
name = uniform
min-mass1 = 10.
max-mass1 = 80.

[prior-eccentricity]
name = uniform
min-eccentricity = 0.
max-eccentricity = 0.2

[prior-mean_per_ano]
name = uniform
min-mean_per_ano = 0.
max-mean_per_ano = 3.1416
!cat config.ini
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
; Executables
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
[executables]
inspinj = ${which:pycbc_create_injections}
inference = ${which:pycbc_inference}
plot = ${which:pycbc_inference_plot_posterior}

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
; Workflow
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
[workflow]
accounting-group = ligo.dev.o3.cbc.explore.test
templates-per-job = 100
log-path = log
banksim-request-memory = 8G
data = gw150914-like-zeronoise.ini
sampler = emcee_pt.ini
inference = bbh_precessing.ini

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
; Injections
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
[inspinj]
config-files = injection.ini
ninjections = 10
seed = 10
output-file = injection.hdf
variable-params-section = variable_params
static-params-section = static_params
dist-section = prior
force =

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
; Inference
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
[inference]
verbose =
seed = 12
config-files = inference.ini data.ini sampler.ini
output-file = inference.hdf
nprocesses = 10
force =

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
; Visualize
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
[plot]
input-file = inference.hdf
output-file = plots/posteriors.png
plot-scatter =
plot-marginal =
plot-prior = inference.ini data.ini
!gwnr_create_injections_pycbc_inference_workflow -h
usage: /home/prayush/miniconda3/envs/lalsuite-dev/lib/python3.9/site-packages/gwnrtools-2020.10.30-py3.9.egg/EGG-INFO/scripts/gwnrtools_create_injections_pycbc_inference_workflow [--options]

Setup workflow to perform Bayesian parameter estimation runs on a custom set
of simulated signals

optional arguments:
  -h, --help            show this help message and exit
  --version             Prints version information.
  --verbose             Print logging messages.
  --skip-creating-injections
                        Skip calling lalapps_inspinj and assume injections already exist
  --output-dir OUTPUT_DIR
                        Output directory path.
  --force               If the output-dir already exists, overwrite it. Otherwise, an OSError is raised.
  --save-backup         Don't delete the backup file after the run has completed.
  --nprocesses NPROCESSES
                        Number of processes to use. If not given then only a single core will be used.
  --use-mpi             Use MPI to parallelize the sampler
  --samples-file SAMPLES_FILE
                        Use an iteration from an InferenceFile as the initial proposal distribution. The same number of walkers and the same [variable_params] section in the configuration file should be used. The priors must allow encompass the initial positions from the InferenceFile being read.
  --seed SEED           Seed to use for the random number generator that initially distributes the walkers. Default is 0.

Configuration:
  Options needed for parsing config file(s).

  --config-files CONFIGFILE [CONFIGFILE ...]
                        List of config files to be used in analysis.
  --config-overrides [SECTION:OPTION:VALUE ...]
                        List of section,option,value combinations to add into the configuration file. Normally the gps start and end times might be provided this way, and user specific locations (ie. output directories). This can also be provided as SECTION:OPTION or SECTION:OPTION: both of which indicate that the corresponding value is left blank.
  --config-delete [SECTION:OPTION ...]
                        List of section,option combinations to delete from the configuration file. This can also be provided as SECTION which deletes the enture section from the configuration file or SECTION:OPTION which deletes a specific option from a given section.

Options for selecting the FFT backend and controlling its performance in this program.:
  --fft-backends [FFT_BACKENDS ...]
                        Preference list of the FFT backends. Choices are: ['fftw', 'numpy']
  --fftw-measure-level FFTW_MEASURE_LEVEL
                        Determines the measure level used in planning FFTW FFTs; allowed values are: [0, 1, 2, 3]
  --fftw-threads-backend FFTW_THREADS_BACKEND
                        Give 'openmp', 'pthreads' or 'unthreaded' to specify which threaded FFTW to use
  --fftw-input-float-wisdom-file FFTW_INPUT_FLOAT_WISDOM_FILE
                        Filename from which to read single-precision wisdom
  --fftw-input-double-wisdom-file FFTW_INPUT_DOUBLE_WISDOM_FILE
                        Filename from which to read double-precision wisdom
  --fftw-output-float-wisdom-file FFTW_OUTPUT_FLOAT_WISDOM_FILE
                        Filename to which to write single-precision wisdom
  --fftw-output-double-wisdom-file FFTW_OUTPUT_DOUBLE_WISDOM_FILE
                        Filename to which to write double-precision wisdom
  --fftw-import-system-wisdom
                        If given, call fftw[f]_import_system_wisdom()

Options for selecting optimization-specific settings:
  --cpu-affinity CPU_AFFINITY
                        A set of CPUs on which to run, specified in a format suitable to pass to taskset.
  --cpu-affinity-from-env CPU_AFFINITY_FROM_ENV
                        The name of an enivornment variable containing a set of CPUs on which to run, specified in a format suitable to pass to taskset.

Options for selecting the processing scheme in this program.:
  --processing-scheme PROCESSING_SCHEME
                        The choice of processing scheme. Choices are ['cpu', 'numpy', 'cuda', 'mkl']. (optional for CPU scheme) The number of execution threads can be indicated by cpu:NUM_THREADS, where NUM_THREADS is an integer. The default is a single thread. If the scheme is provided as cpu:env, the number of threads can be provided by the PYCBC_NUM_THREADS environment variable. If the environment variable is not set, the number of threads matches the number of logical cores.
  --processing-device-id PROCESSING_DEVICE_ID
                        (optional) ID of GPU to use for accelerated processing
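The --config-overrides option listed above is convenient for quick smoke tests. As a hedged example (assuming the [inspinj] ninjections option can be overridden this way), a reduced two-injection workflow could be generated in a scratch directory:
!gwnr_create_injections_pycbc_inference_workflow --config-files config.ini \
    --config-overrides inspinj:ninjections:2 \
    --output-dir ./smoke_test --force --verbose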
!gwnr_create_injections_pycbc_inference_workflow --config-files config.ini \
    --output-dir . --force --verbose
Could not import ligolw in /home/prayush/src/GWNRTools/GWNRTools/Stats/FisherMatrixUtilities.pyc, LIGO XML tables wont be read
2020-03-04 18:50:57,970 Using seed 0
2020-03-04 18:50:57,971 Will setup analyses in .
2020-03-04 18:50:57,971 Running with CPU support: 1 threads
2020-03-04 18:50:58,056 Reading configuration file
2020-03-04 18:50:58,057 Making workspace directories
2020-03-04 18:50:58,073 Creating DAG
2020-03-04 18:50:58,074 Making injection002/gw150914-like-zeronoise/emcee_pt/bbh_precessing in /home/prayush/research/test_pycbc_inj
2020-03-04 18:50:58,101 Copying config files to injection002/gw150914-like-zeronoise/emcee_pt/bbh_precessing
2020-03-04 18:50:58,102 Copying executables to injection002/gw150914-like-zeronoise/emcee_pt/bbh_precessing/scripts/
2020-03-04 18:50:58,104 Making injection003/gw150914-like-zeronoise/emcee_pt/bbh_precessing in /home/prayush/research/test_pycbc_inj
2020-03-04 18:50:58,131 Copying config files to injection003/gw150914-like-zeronoise/emcee_pt/bbh_precessing
2020-03-04 18:50:58,132 Copying executables to injection003/gw150914-like-zeronoise/emcee_pt/bbh_precessing/scripts/
2020-03-04 18:50:58,133 Making injection004/gw150914-like-zeronoise/emcee_pt/bbh_precessing in /home/prayush/research/test_pycbc_inj
2020-03-04 18:50:58,160 Copying config files to injection004/gw150914-like-zeronoise/emcee_pt/bbh_precessing
2020-03-04 18:50:58,161 Copying executables to injection004/gw150914-like-zeronoise/emcee_pt/bbh_precessing/scripts/
2020-03-04 18:50:58,162 Making injection005/gw150914-like-zeronoise/emcee_pt/bbh_precessing in /home/prayush/research/test_pycbc_inj
2020-03-04 18:50:58,192 Copying config files to injection005/gw150914-like-zeronoise/emcee_pt/bbh_precessing
2020-03-04 18:50:58,193 Copying executables to injection005/gw150914-like-zeronoise/emcee_pt/bbh_precessing/scripts/
2020-03-04 18:50:58,194 Making injection006/gw150914-like-zeronoise/emcee_pt/bbh_precessing in /home/prayush/research/test_pycbc_inj
2020-03-04 18:50:58,220 Copying config files to injection006/gw150914-like-zeronoise/emcee_pt/bbh_precessing
2020-03-04 18:50:58,220 Copying executables to injection006/gw150914-like-zeronoise/emcee_pt/bbh_precessing/scripts/
2020-03-04 18:50:58,221 Making injection009/gw150914-like-zeronoise/emcee_pt/bbh_precessing in /home/prayush/research/test_pycbc_inj
2020-03-04 18:50:58,248 Copying config files to injection009/gw150914-like-zeronoise/emcee_pt/bbh_precessing
2020-03-04 18:50:58,248 Copying executables to injection009/gw150914-like-zeronoise/emcee_pt/bbh_precessing/scripts/
2020-03-04 18:50:58,249 Making injection001/gw150914-like-zeronoise/emcee_pt/bbh_precessing in /home/prayush/research/test_pycbc_inj
2020-03-04 18:50:58,275 Copying config files to injection001/gw150914-like-zeronoise/emcee_pt/bbh_precessing
2020-03-04 18:50:58,276 Copying executables to injection001/gw150914-like-zeronoise/emcee_pt/bbh_precessing/scripts/
2020-03-04 18:50:58,277 Making injection007/gw150914-like-zeronoise/emcee_pt/bbh_precessing in /home/prayush/research/test_pycbc_inj
2020-03-04 18:50:58,304 Copying config files to injection007/gw150914-like-zeronoise/emcee_pt/bbh_precessing
2020-03-04 18:50:58,305 Copying executables to injection007/gw150914-like-zeronoise/emcee_pt/bbh_precessing/scripts/
2020-03-04 18:50:58,306 Making injection008/gw150914-like-zeronoise/emcee_pt/bbh_precessing in /home/prayush/research/test_pycbc_inj
2020-03-04 18:50:58,336 Copying config files to injection008/gw150914-like-zeronoise/emcee_pt/bbh_precessing
2020-03-04 18:50:58,336 Copying executables to injection008/gw150914-like-zeronoise/emcee_pt/bbh_precessing/scripts/
2020-03-04 18:50:58,337 Making injection000/gw150914-like-zeronoise/emcee_pt/bbh_precessing in /home/prayush/research/test_pycbc_inj
2020-03-04 18:50:58,361 Copying config files to injection000/gw150914-like-zeronoise/emcee_pt/bbh_precessing
2020-03-04 18:50:58,362 Copying executables to injection000/gw150914-like-zeronoise/emcee_pt/bbh_precessing/scripts/
2020-03-04 18:50:58,365 Done
!tree
.
├── bbh_precessing.ini
├── config.ini
├── emcee_pt.ini
├── gw150914-like-zeronoise.ini
├── injection000
│   └── gw150914-like-zeronoise
│       └── emcee_pt
│           └── bbh_precessing
│               ├── data.ini
│               ├── inference.ini
│               ├── injection.ini
│               ├── log
│               ├── make_injection
│               ├── make_injection.sub
│               ├── make_plot
│               ├── make_plot.sub
│               ├── plots
│               ├── run_inference
│               ├── run_inference.sub
│               ├── sampler.ini
│               └── scripts
│                   ├── pycbc_create_injections
│                   ├── pycbc_inference
│                   └── pycbc_inference_plot_posterior
├── injection001
│   └── gw150914-like-zeronoise
│       └── emcee_pt
│           └── bbh_precessing
│               ├── data.ini
│               ├── inference.ini
│               ├── injection.ini
│               ├── log
│               ├── make_injection
│               ├── make_injection.sub
│               ├── make_plot
│               ├── make_plot.sub
│               ├── plots
│               ├── run_inference
│               ├── run_inference.sub
│               ├── sampler.ini
│               └── scripts
│                   ├── pycbc_create_injections
│                   ├── pycbc_inference
│                   └── pycbc_inference_plot_posterior
├── injection002
│   └── gw150914-like-zeronoise
│       └── emcee_pt
│           └── bbh_precessing
│               ├── data.ini
│               ├── inference.ini
│               ├── injection.ini
│               ├── log
│               ├── make_injection
│               ├── make_injection.sub
│               ├── make_plot
│               ├── make_plot.sub
│               ├── plots
│               ├── run_inference
│               ├── run_inference.sub
│               ├── sampler.ini
│               └── scripts
│                   ├── pycbc_create_injections
│                   ├── pycbc_inference
│                   └── pycbc_inference_plot_posterior
├── injection003
│   └── gw150914-like-zeronoise
│       └── emcee_pt
│           └── bbh_precessing
│               ├── data.ini
│               ├── inference.ini
│               ├── injection.ini
│               ├── log
│               ├── make_injection
│               ├── make_injection.sub
│               ├── make_plot
│               ├── make_plot.sub
│               ├── plots
│               ├── run_inference
│               ├── run_inference.sub
│               ├── sampler.ini
│               └── scripts
│                   ├── pycbc_create_injections
│                   ├── pycbc_inference
│                   └── pycbc_inference_plot_posterior
├── injection004
│   └── gw150914-like-zeronoise
│       └── emcee_pt
│           └── bbh_precessing
│               ├── data.ini
│               ├── inference.ini
│               ├── injection.ini
│               ├── log
│               ├── make_injection
│               ├── make_injection.sub
│               ├── make_plot
│               ├── make_plot.sub
│               ├── plots
│               ├── run_inference
│               ├── run_inference.sub
│               ├── sampler.ini
│               └── scripts
│                   ├── pycbc_create_injections
│                   ├── pycbc_inference
│                   └── pycbc_inference_plot_posterior
├── injection005
│   └── gw150914-like-zeronoise
│       └── emcee_pt
│           └── bbh_precessing
│               ├── data.ini
│               ├── inference.ini
│               ├── injection.ini
│               ├── log
│               ├── make_injection
│               ├── make_injection.sub
│               ├── make_plot
│               ├── make_plot.sub
│               ├── plots
│               ├── run_inference
│               ├── run_inference.sub
│               ├── sampler.ini
│               └── scripts
│                   ├── pycbc_create_injections
│                   ├── pycbc_inference
│                   └── pycbc_inference_plot_posterior
├── injection006
│   └── gw150914-like-zeronoise
│       └── emcee_pt
│           └── bbh_precessing
│               ├── data.ini
│               ├── inference.ini
│               ├── injection.ini
│               ├── log
│               ├── make_injection
│               ├── make_injection.sub
│               ├── make_plot
│               ├── make_plot.sub
│               ├── plots
│               ├── run_inference
│               ├── run_inference.sub
│               ├── sampler.ini
│               └── scripts
│                   ├── pycbc_create_injections
│                   ├── pycbc_inference
│                   └── pycbc_inference_plot_posterior
├── injection007
│   └── gw150914-like-zeronoise
│       └── emcee_pt
│           └── bbh_precessing
│               ├── data.ini
│               ├── inference.ini
│               ├── injection.ini
│               ├── log
│               ├── make_injection
│               ├── make_injection.sub
│               ├── make_plot
│               ├── make_plot.sub
│               ├── plots
│               ├── run_inference
│               ├── run_inference.sub
│               ├── sampler.ini
│               └── scripts
│                   ├── pycbc_create_injections
│                   ├── pycbc_inference
│                   └── pycbc_inference_plot_posterior
├── injection008
│   └── gw150914-like-zeronoise
│       └── emcee_pt
│           └── bbh_precessing
│               ├── data.ini
│               ├── inference.ini
│               ├── injection.ini
│               ├── log
│               ├── make_injection
│               ├── make_injection.sub
│               ├── make_plot
│               ├── make_plot.sub
│               ├── plots
│               ├── run_inference
│               ├── run_inference.sub
│               ├── sampler.ini
│               └── scripts
│                   ├── pycbc_create_injections
│                   ├── pycbc_inference
│                   └── pycbc_inference_plot_posterior
├── injection009
│   └── gw150914-like-zeronoise
│       └── emcee_pt
│           └── bbh_precessing
│               ├── data.ini
│               ├── inference.ini
│               ├── injection.ini
│               ├── log
│               ├── make_injection
│               ├── make_injection.sub
│               ├── make_plot
│               ├── make_plot.sub
│               ├── plots
│               ├── run_inference
│               ├── run_inference.sub
│               ├── sampler.ini
│               └── scripts
│                   ├── pycbc_create_injections
│                   ├── pycbc_inference
│                   └── pycbc_inference_plot_posterior
├── injection.ini
├── log
├── plots
├── pycbc_inference_injections.dag
├── pycbc_inference_injections.sh
└── scripts

73 directories, 137 files
!condor_submit_dag pycbc_inference_injections.dag >> dag.out
!tail *dagman.out
03/04/20 18:51:03 Number of idle job procs: 4
03/04/20 18:51:03 Reassigning the id of job bef8a4cbe1f89655c581f3272242dc6f from (148.0.0) to (148.0.0)
03/04/20 18:51:03 Event: ULOG_SUBMIT for HTCondor Node bef8a4cbe1f89655c581f3272242dc6f (148.0.0) {03/04/20 18:51:03}
03/04/20 18:51:03 Number of idle job procs: 5
03/04/20 18:51:03 DAG status: 0 (DAG_STATUS_OK)
03/04/20 18:51:03 Of 30 nodes total:
03/04/20 18:51:03  Done     Pre   Queued    Post   Ready   Un-Ready   Failed
03/04/20 18:51:03   ===     ===      ===     ===     ===        ===      ===
03/04/20 18:51:03     0       0        5       0       5         20        0
03/04/20 18:51:03 0 job proc(s) currently held
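Rather than eyeballing the tail, the most recent node-status summary can be pulled out of the DAGMan log programmatically. A rough sketch, assuming the block format shown above:
# Sketch: print the latest node-status summary from the dagman.out log
with open("pycbc_inference_injections.dag.dagman.out") as f:
    lines = f.readlines()
starts = [i for i, line in enumerate(lines) if "nodes total:" in line]
if starts:
    print("".join(lines[starts[-1]:starts[-1] + 4]))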
!ls
bbh_precessing.ini           injection009
config.ini                   injection.ini
dag.out                      log
emcee_pt.ini                 plots
gw150914-like-zeronoise.ini  pycbc_inference_injections.dag
injection000                 pycbc_inference_injections.dag.condor.sub
injection001                 pycbc_inference_injections.dag.dagman.log
injection002                 pycbc_inference_injections.dag.dagman.out
injection003                 pycbc_inference_injections.dag.lib.err
injection004                 pycbc_inference_injections.dag.lib.out
injection005                 pycbc_inference_injections.dag.lock
injection006                 pycbc_inference_injections.sh
injection007                 scripts
injection008
bilby
import os
run_dir = '/home/prayush/research/test_bilby_inj'
try:
    os.makedirs(run_dir)
except OSError:
    pass
os.chdir(run_dir)
!pwd
/home/prayush/research/test_bilby_inj
!rm -fr *
!ls
!gwnr_write_bilby_configs --help
usage: /home/prayush/miniconda3/envs/lalsuite-dev/lib/python3.9/site-packages/gwnrtools-2020.10.30-py3.9.egg/EGG-INFO/scripts/gwnrtools_write_bilby_configs [--options]

Get and write configuration files for generating a workflow to perform
Bayesian parameter estimation runs on a custom set of signals with Bilby

optional arguments:
  -h, --help            show this help message and exit
  --version             Prints version information.
  --verbose             Print logging messages.
  --write-config WRITE_CONFIG
                        Comma-separated name of config-type,config-subtype, e.g. 'prior,precessing_spins_bbh'
  --write-injection-config WRITE_INJECTION_CONFIG
                        Write config files for injections and exit.
  --write-event-config WRITE_EVENT_CONFIG
                        Write config files for events and exit.
  --show-available-configs
                        Show available options for all configurations.
  --output-dir OUTPUT_DIR
                        Output directory path.
!gwnr_write_bilby_configs --show-available-configs
prior: ['default', 'eccentric-nonspin', 'precessing_spins_bns', 'GW170817', 'GW150914', 'precessing_spins_bbh_tides_on', 'aligned_spins_bbh_tides_on', 'precessing_spins_bns_tides_on', 'aligned_spins_bbh', 'precessing_spins_bbh', 'aligned_spins_bns', 'aligned_spins_bns_tides_on']
injection: ['bbh-prior-default', 'bbh-alignedspin-prior-default', 'bbh-eccentric-nonspin-prior-default', 'bbh-prior-eccentric-nonspin', 'bbh-alignedspin-prior-eccentric-nonspin', 'bbh-eccentric-nonspin-prior-eccentric-nonspin', 'bbh-prior-precessing_spins_bns', 'bbh-alignedspin-prior-precessing_spins_bns', 'bbh-eccentric-nonspin-prior-precessing_spins_bns', 'bbh-prior-GW170817', 'bbh-alignedspin-prior-GW170817', 'bbh-eccentric-nonspin-prior-GW170817', 'bbh-prior-GW150914', 'bbh-alignedspin-prior-GW150914', 'bbh-eccentric-nonspin-prior-GW150914', 'bbh-prior-precessing_spins_bbh_tides_on', 'bbh-alignedspin-prior-precessing_spins_bbh_tides_on', 'bbh-eccentric-nonspin-prior-precessing_spins_bbh_tides_on', 'bbh-prior-aligned_spins_bbh_tides_on', 'bbh-alignedspin-prior-aligned_spins_bbh_tides_on', 'bbh-eccentric-nonspin-prior-aligned_spins_bbh_tides_on', 'bbh-prior-precessing_spins_bns_tides_on', 'bbh-alignedspin-prior-precessing_spins_bns_tides_on', 'bbh-eccentric-nonspin-prior-precessing_spins_bns_tides_on', 'bbh-prior-aligned_spins_bbh', 'bbh-alignedspin-prior-aligned_spins_bbh', 'bbh-eccentric-nonspin-prior-aligned_spins_bbh', 'bbh-prior-precessing_spins_bbh', 'bbh-alignedspin-prior-precessing_spins_bbh', 'bbh-eccentric-nonspin-prior-precessing_spins_bbh', 'bbh-prior-aligned_spins_bns', 'bbh-alignedspin-prior-aligned_spins_bns', 'bbh-eccentric-nonspin-prior-aligned_spins_bns', 'bbh-prior-aligned_spins_bns_tides_on', 'bbh-alignedspin-prior-aligned_spins_bns_tides_on', 'bbh-eccentric-nonspin-prior-aligned_spins_bns_tides_on']
event: ['bbh-event-prior-default', 'bbh-event-prior-eccentric-nonspin', 'bbh-event-prior-precessing_spins_bns', 'bbh-event-prior-GW170817', 'bbh-event-prior-GW150914', 'bbh-event-prior-precessing_spins_bbh_tides_on', 'bbh-event-prior-aligned_spins_bbh_tides_on', 'bbh-event-prior-precessing_spins_bns_tides_on', 'bbh-event-prior-aligned_spins_bbh', 'bbh-event-prior-precessing_spins_bbh', 'bbh-event-prior-aligned_spins_bns', 'bbh-event-prior-aligned_spins_bns_tides_on']
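Several of these configurations can be written in one go by looping over the comma-separated type,subtype names described in the help above. A hedged sketch, not part of the original session:
# Sketch: write a few of the available bilby workflow configs
import subprocess
for cfg in ["injection,bbh-alignedspin-prior-aligned_spins_bbh",
            "injection,bbh-eccentric-nonspin-prior-default",
            "prior,aligned_spins_bbh"]:
    subprocess.run(["gwnr_write_bilby_configs", "--write-config", cfg], check=True)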
!gwnr_write_bilby_configs --write-config injection,bbh-alignedspin-prior-aligned_spins_bbh
!cat bbh-alignedspin-prior-aligned_spins_bbh.ini
[workflow]
tag = test
accounting-group = ligo.dev.o3.cbc.explore.test
log-path = log
request-memory = 2G
request-cpus = 1

[inference]
duration = 4
sample_rate = 2048
lower_frequency_cutoff = 30
upper_frequency_cutoff = 1024
reference_frequency = 30
phase_marginalization =
time_marginalization =
;distance_marginalization =

[sampler]
name = dynesty
npoints = 2000
maxmcmc = 2000
n_check_point = 1000

[data]
analysis_type = event
source_type = bbh
event_name = GW150914
interferometers = H1,L1

[injection]
noise_type = gaussian
asd-file = H1:PATH,L1:PATH
approximant = IMRPhenomPv2
taper = start
num_injections = 10

[injection-static_params]
geocent_time = 1126259462.420
;mass_1 = 37
mass_2 = 32
chi_1 = 0
;chi_2 = 0
ra = 2.2
dec = -1.25
theta_jn = 2.5
phase = 1.5
psi = 1.75
luminosity_distance = 1000

[injection-variable_params]
mass_1 =
chi_2 =

[injection-prior_mass_1]
name = uniform
min-mass_1 = 10.
max-mass_1 = 80.

[injection-prior_chi_2]
name = uniform
min-chi_2 = -0.2
max-chi_2 = 0.2

[template]
source_model = bilby.gw.source.lal_binary_black_hole
approximant = IMRPhenomPv2
sample_rate = 2048
lower_frequency_cutoff = 30
upper_frequency_cutoff = 1024
reference_frequency = 30

[prior]
# These are the default priors we use for BBH systems.
# Note that you may wish to use more specific mass and distance parameters.
# These commands are all known to bilby.gw.prior.
# Lines beginning "#" are ignored.
mass_1 = Constraint(name='mass_1', minimum=5, maximum=100)
mass_2 = Constraint(name='mass_2', minimum=5, maximum=100)
mass_ratio = Uniform(name='mass_ratio', minimum=0.125, maximum=1)
chirp_mass = Uniform(name='chirp_mass', minimum=25, maximum=100)
luminosity_distance = bilby.gw.prior.UniformSourceFrame(name='luminosity_distance', minimum=1e2, maximum=5e3)
dec = Cosine(name='dec')
ra = Uniform(name='ra', minimum=0, maximum=2 * np.pi, boundary='periodic')
theta_jn = Sine(name='theta_jn')
psi = Uniform(name='psi', minimum=0, maximum=np.pi, boundary='periodic')
phase = Uniform(name='phase', minimum=0, maximum=2 * np.pi, boundary='periodic')
chi_1 = bilby.gw.prior.AlignedSpin(name='chi_1', a_prior=Uniform(minimum=0, maximum=0.99))
chi_2 = bilby.gw.prior.AlignedSpin(name='chi_2', a_prior=Uniform(minimum=0, maximum=0.99))
# Now edit this workflow config file to your desired settings.
# All options illustrated in this file should be filled in explicitly:
# the pipeline wrapper deliberately does not fall back on "reasonable defaults".
!gwnr_create_injections_bilby_workflow --help
usage: /home/prayush/miniconda3/envs/lalsuite-dev/lib/python3.9/site-packages/gwnrtools-2020.10.30-py3.9.egg/EGG-INFO/scripts/gwnrtools_create_injections_bilby_workflow [--options]

Setup workflow to perform Bayesian parameter estimation runs on a custom set
of simulated signals

optional arguments:
  -h, --help            show this help message and exit
  --version             Prints version information.
  --verbose             Print logging messages.
  --config-file CONFIG_FILE
                        Configuration file with details of analyses. See `gwnrtools_write_bilby_inference_configs` for help.
  --skip-creating-injections
                        Skip calling lalapps_inspinj and assume injections already exist
  --output-dir OUTPUT_DIR
                        Output directory path.
  --force               If the output-dir already exists, overwrite it. Otherwise, an OSError is raised.
  --save-backup         Don't delete the backup file after the run has completed.
  --nprocesses NPROCESSES
                        Number of processes to use. If not given then only a single core will be used.
  --use-mpi             Use MPI to parallelize the sampler
  --seed SEED           Seed to use for the random number generator that initially distributes the walkers. Default is 0.
!ls
bbh-alignedspin-prior-aligned_spins_bbh.ini
!gwnr_create_injections_bilby_workflow --config-file bbh-alignedspin-prior-aligned_spins_bbh.ini \
    --output-dir . --force --verbose
2021-09-20 13:03:07,754 Using seed 0
cp: 'bbh-alignedspin-prior-aligned_spins_bbh.ini' and './bbh-alignedspin-prior-aligned_spins_bbh.ini' are the same file
2021-09-20 13:03:07,763 Will setup analyses in .
2021-09-20 13:03:07,764 Creating DAG
2021-09-20 13:03:07,764 --- verifying injection params config
2021-09-20 13:03:07,764 --- injection params config verified
2021-09-20 13:03:07,764 --- sampling 10 injection params
2021-09-20 13:03:09,210 --- injection params sampled
2021-09-20 13:03:09,210 --- reading injection params
2021-09-20 13:03:09,217 --- 10 injection params read
2021-09-20 13:03:09,217 --- creating script writer objects for injections
2021-09-20 13:03:09,218 ----- borrowing source_model for injections from [template]
2021-09-20 13:03:09,218 --- script writer object created for injection 0
2021-09-20 13:03:09,218 --- analysis objects created for injection 0
2021-09-20 13:03:09,219 --- script writer object created for injection 1
2021-09-20 13:03:09,219 --- analysis objects created for injection 1
2021-09-20 13:03:09,220 --- script writer object created for injection 2
2021-09-20 13:03:09,220 --- analysis objects created for injection 2
2021-09-20 13:03:09,220 --- script writer object created for injection 3
2021-09-20 13:03:09,221 --- analysis objects created for injection 3
2021-09-20 13:03:09,221 --- script writer object created for injection 4
2021-09-20 13:03:09,221 --- analysis objects created for injection 4
2021-09-20 13:03:09,221 --- script writer object created for injection 5
2021-09-20 13:03:09,221 --- analysis objects created for injection 5
2021-09-20 13:03:09,222 --- script writer object created for injection 6
2021-09-20 13:03:09,222 --- analysis objects created for injection 6
2021-09-20 13:03:09,222 --- script writer object created for injection 7
2021-09-20 13:03:09,222 --- analysis objects created for injection 7
2021-09-20 13:03:09,223 --- script writer object created for injection 8
2021-09-20 13:03:09,223 --- analysis objects created for injection 8
2021-09-20 13:03:09,223 --- script writer object created for injection 9
2021-09-20 13:03:09,223 --- analysis objects created for injection 9
2021-09-20 13:03:09,223 Making ./injection000 in /home/prayush/research/test_bilby_inj
Writing script for injection data
Please do not forget to write 'priors.prior'
2021-09-20 13:03:09,240 Making ./injection001 in /home/prayush/research/test_bilby_inj
Writing script for injection data
Please do not forget to write 'priors.prior'
2021-09-20 13:03:09,256 Making ./injection002 in /home/prayush/research/test_bilby_inj
Writing script for injection data
Please do not forget to write 'priors.prior'
2021-09-20 13:03:09,273 Making ./injection003 in /home/prayush/research/test_bilby_inj
Writing script for injection data
Please do not forget to write 'priors.prior'
2021-09-20 13:03:09,290 Making ./injection004 in /home/prayush/research/test_bilby_inj
Writing script for injection data
Please do not forget to write 'priors.prior'
2021-09-20 13:03:09,307 Making ./injection005 in /home/prayush/research/test_bilby_inj
Writing script for injection data
Please do not forget to write 'priors.prior'
2021-09-20 13:03:09,325 Making ./injection006 in /home/prayush/research/test_bilby_inj
Writing script for injection data
Please do not forget to write 'priors.prior'
2021-09-20 13:03:09,341 Making ./injection007 in /home/prayush/research/test_bilby_inj
Writing script for injection data
Please do not forget to write 'priors.prior'
2021-09-20 13:03:09,360 Making ./injection008 in /home/prayush/research/test_bilby_inj
Writing script for injection data
Please do not forget to write 'priors.prior'
2021-09-20 13:03:09,382 Making ./injection009 in /home/prayush/research/test_bilby_inj
Writing script for injection data
Please do not forget to write 'priors.prior'
2021-09-20 13:03:09,404 Done
!tree
.
├── bbh-alignedspin-prior-aligned_spins_bbh.ini
├── bilby_injections.dag
├── bilby_injections.sh
├── injection000
│   ├── log
│   ├── priors.prior
│   ├── run_inference
│   └── run_inference.sub
├── injection001
│   ├── log
│   ├── priors.prior
│   ├── run_inference
│   └── run_inference.sub
├── injection002
│   ├── log
│   ├── priors.prior
│   ├── run_inference
│   └── run_inference.sub
├── injection003
│   ├── log
│   ├── priors.prior
│   ├── run_inference
│   └── run_inference.sub
├── injection004
│   ├── log
│   ├── priors.prior
│   ├── run_inference
│   └── run_inference.sub
├── injection005
│   ├── log
│   ├── priors.prior
│   ├── run_inference
│   └── run_inference.sub
├── injection006
│   ├── log
│   ├── priors.prior
│   ├── run_inference
│   └── run_inference.sub
├── injection007
│   ├── log
│   ├── priors.prior
│   ├── run_inference
│   └── run_inference.sub
├── injection008
│   ├── log
│   ├── priors.prior
│   ├── run_inference
│   └── run_inference.sub
├── injection009
│   ├── log
│   ├── priors.prior
│   ├── run_inference
│   └── run_inference.sub
├── injection.hdf
└── injection.ini

20 directories, 35 files
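The workflow log above reminds us to write a priors.prior in each injection directory. One plausible way to do that (an assumption, not something the tool prescribes) is to copy the [prior] section of the workflow config into each directory as a bilby-style prior file:
# Sketch: populate priors.prior in each injection directory from [prior]
import glob
from configparser import ConfigParser
cp = ConfigParser()
cp.optionxform = str  # keep parameter names exactly as written
cp.read("bbh-alignedspin-prior-aligned_spins_bbh.ini")
prior_lines = [f"{key} = {val}" for key, val in cp["prior"].items()]
for d in sorted(glob.glob("injection0*")):
    with open(f"{d}/priors.prior", "w") as f:
        f.write("\n".join(prior_lines) + "\n")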
!ls
bbh-alignedspin-prior-aligned_spins_bbh.ini  injection002  injection007
bilby_injections.dag                         injection003  injection008
bilby_injections.sh                          injection004  injection009
injection000                                 injection005  injection.hdf
injection001                                 injection006  injection.ini
!condor_submit_dag bilby_injections.dag >> dag.out
!tail *dagman.out
09/20/21 17:51:28 Number of idle job procs: 9
09/20/21 17:51:28 Reassigning the id of job 7da849fe15e4ede711c49fb84e5e9cf2 from (109.0.0) to (109.0.0)
09/20/21 17:51:28 Event: ULOG_SUBMIT for HTCondor Node 7da849fe15e4ede711c49fb84e5e9cf2 (109.0.0) {09/20/21 17:51:28}
09/20/21 17:51:28 Number of idle job procs: 10
09/20/21 17:51:28 DAG status: 0 (DAG_STATUS_OK)
09/20/21 17:51:28 Of 10 nodes total:
09/20/21 17:51:28  Done     Pre   Queued    Post   Ready   Un-Ready   Failed
09/20/21 17:51:28   ===     ===      ===     ===     ===        ===      ===
09/20/21 17:51:28     0       0       10       0       0          0        0
09/20/21 17:51:28 0 job proc(s) currently held
!ls
bbh-alignedspin-prior-aligned_spins_bbh.ini  injection001
bilby_injections.dag                         injection002
bilby_injections.dag.condor.sub              injection003
bilby_injections.dag.dagman.log              injection004
bilby_injections.dag.dagman.out              injection005
bilby_injections.dag.lib.err                 injection006
bilby_injections.dag.lib.out                 injection007
bilby_injections.dag.lock                    injection008
bilby_injections.dag.nodes.log               injection009
bilby_injections.sh                          injection.hdf
dag.out                                      injection.ini
injection000