pycbc_inference¶
import os
# Working directory for the PyCBC Inference GW150914 test run.
run_dir = '/home/prayush/research/test_pycbc_gw150914'
# exist_ok=True makes this idempotent without the original bare
# `try/except: pass`, which would also have hidden real failures
# (e.g. permission errors) instead of only "directory exists".
os.makedirs(run_dir, exist_ok=True)
os.chdir(run_dir)
!pwd
/home/prayush/research/test_pycbc_gw150914
!rm -rf *
!ls
!gwnr_write_pycbc_inference_configs -h
/home/prayush/miniconda3/envs/lalsuite-dev/lib/python3.9/site-packages/pandas/core/common.py:208: VisibleDeprecationWarning: Creating an ndarray from ragged nested sequences (which is a list-or-tuple of lists-or-tuples-or ndarrays with different lengths or shapes) is deprecated. If you meant to do this, you must specify 'dtype=object' when creating the ndarray.
result = np.asarray(values, dtype=dtype)
usage: /home/prayush/miniconda3/envs/lalsuite-dev/lib/python3.9/site-packages/gwnrtools-2020.10.30-py3.9.egg/EGG-INFO/scripts/gwnrtools_write_pycbc_inference_configs [--options]
Get and write configuration files for generating a workflow to perform
Bayesian parameter estimation runs on a set of signals with Pycbc inference
optional arguments:
-h, --help show this help message and exit
--version Prints version information.
--verbose Print logging messages.
--write-data-config WRITE_DATA_CONFIG
Write data config files and exit.
--write-sampler-config WRITE_SAMPLER_CONFIG
Write sampler config files and exit.
--write-inference-config WRITE_INFERENCE_CONFIG
Write inference config files and exit.
--n-cpus N_CPUS
--checkpoint-interval CHECKPOINT_INTERVAL
--n-live N_LIVE
--n-maxmcmc N_MAXMCMC
--dlogz DLOGZ
--n-walkers N_WALKERS
--n-temperatures N_TEMPERATURES
--n-maxsamps-per-walker N_MAXSAMPS_PER_WALKER
--n-eff-samples N_EFF_SAMPLES
--show-available-configs
Show available options for all configurations.
--output-dir OUTPUT_DIR
Output directory path.
!gwnr_write_pycbc_inference_configs --show-available-configs
/home/prayush/miniconda3/envs/lalsuite-dev/lib/python3.9/site-packages/pandas/core/common.py:208: VisibleDeprecationWarning: Creating an ndarray from ragged nested sequences (which is a list-or-tuple of lists-or-tuples-or ndarrays with different lengths or shapes) is deprecated. If you meant to do this, you must specify 'dtype=object' when creating the ndarray. result = np.asarray(values, dtype=dtype) data: ['gw150914-like-gaussian', 'gw150914-like-zeronoise', 'GW150914-v3', 'GW151012-v3', 'GW151226-v2', 'GW170104-v2', 'GW170608-v3', 'GW170729-v1', 'GW170809-v1', 'GW170814-v3', 'GW170817-v3', 'GW170818-v1', 'GW170823-v1'] sampler: ['emcee', 'emcee_pt', 'epsie', 'dynesty', 'ultranest', 'multinest', 'cpnest'] inference: ['bbh_precessing', 'bbh_alignedspin']
!gwnr_write_pycbc_inference_configs --verbose\
--write-sampler-config emcee_pt --write-inference-config bbh_precessing
/home/prayush/miniconda3/envs/lalsuite-dev/lib/python3.9/site-packages/pandas/core/common.py:208: VisibleDeprecationWarning: Creating an ndarray from ragged nested sequences (which is a list-or-tuple of lists-or-tuples-or ndarrays with different lengths or shapes) is deprecated. If you meant to do this, you must specify 'dtype=object' when creating the ndarray. result = np.asarray(values, dtype=dtype) 2021-09-20 19:37:49,110 Writing config file for sampler settings.. 2021-09-20 19:37:49,111 Writing config file for inference settings.. 2021-09-20 19:37:49,111 Done
!ls
bbh_precessing.ini emcee_pt.ini
!cat emcee_pt.ini bbh_precessing.ini
[sampler]
name = emcee_pt
nprocesses = 10
nwalkers = 1000
ntemps = 20
effective-nsamples = 4000
max-samples-per-chain = 1000
checkpoint-interval = 2000
[sampler-burn_in]
burn-in-test = nacl & max_posterior
;
; Sampling transforms
;
[sampling_params]
; parameters on the left will be sampled in
; parameters on the right
mass1, mass2 : mchirp, q
[sampling_transforms-mchirp+q]
; inputs mass1, mass2
; outputs mchirp, q
name = mass1_mass2_to_mchirp_q
[model]
name = gaussian_noise
low-frequency-cutoff = 20.0
[variable_params]
; waveform parameters that will vary in MCMC
delta_tc =
mass1 =
mass2 =
spin1_a =
spin1_azimuthal =
spin1_polar =
spin2_a =
spin2_azimuthal =
spin2_polar =
distance =
coa_phase =
inclination =
polarization =
ra =
dec =
[static_params]
; waveform parameters that will not change in MCMC
approximant = IMRPhenomPv2
f_lower = 20
f_ref = 20
; we'll set the tc by using the trigger time in the data
; section of the config file + delta_tc
trigger_time = ${data|trigger-time}
[prior-delta_tc]
; coalescence time prior
name = uniform
min-delta_tc = -0.1
max-delta_tc = 0.1
[waveform_transforms-tc]
; we need to provide tc to the waveform generator
name = custom
inputs = delta_tc
tc = ${data|trigger-time} + delta_tc
;Mass1 of GW151012 $\in$ [28.7, 38.1]
;Mass1 of GW170608 $\in$ [12.7, 16.5]
;Mass1 of GW170729 $\in$ [60.4, 66.4]
;Mass1 of GW150914 $\in$ [38.7, 40.3]
;Mass1 of GW151226 $\in$ [16.9, 22.5]
;Mass1 of GW170814 $\in$ [33.6, 36.2]
;Mass1 of GW170817 $\in$ [1.56, 1.58]
;Mass1 of GW170104 $\in$ [36.4, 38.1]
;Mass1 of GW170809 $\in$ [40.9, 43.3]
;Mass1 of GW170818 $\in$ [40.1, 42.9]
;Mass1 of GW170823 $\in$ [46.2, 50.7]
[prior-mass1]
name = uniform
min-mass1 = 10.
max-mass1 = 80.
;Mass2 of GW151012 $\in$ [18.4, 17.7]
;Mass2 of GW170608 $\in$ [9.8, 9.0]
;Mass2 of GW170729 $\in$ [44.1, 43.1]
;Mass2 of GW150914 $\in$ [35.0, 33.6]
;Mass2 of GW151226 $\in$ [10.2, 9.9]
;Mass2 of GW170814 $\in$ [29.2, 28.0]
;Mass2 of GW170817 $\in$ [1.36, 1.36]
;Mass2 of GW170104 $\in$ [24.6, 24.9]
;Mass2 of GW170809 $\in$ [29.0, 28.9]
;Mass2 of GW170818 $\in$ [31.9, 31.0]
;Mass2 of GW170823 $\in$ [36.8, 35.7]
[prior-mass2]
name = uniform
min-mass2 = 10.
max-mass2 = 80.
[prior-spin1_a]
name = uniform
min-spin1_a = 0.0
max-spin1_a = 0.99
[prior-spin1_polar+spin1_azimuthal]
name = uniform_solidangle
polar-angle = spin1_polar
azimuthal-angle = spin1_azimuthal
[prior-spin2_a]
name = uniform
min-spin2_a = 0.0
max-spin2_a = 0.99
[prior-spin2_polar+spin2_azimuthal]
name = uniform_solidangle
polar-angle = spin2_polar
azimuthal-angle = spin2_azimuthal
[prior-distance]
; following gives a uniform volume prior
name = uniform_radius
min-distance = 10
max-distance = 1000
[prior-coa_phase]
; coalescence phase prior
name = uniform_angle
[prior-inclination]
; inclination prior
name = sin_angle
[prior-ra+dec]
; sky position prior
name = uniform_sky
[prior-polarization]
; polarization prior
name = uniform_angle
# Write the top-level workflow configuration consumed by
# gwnr_create_public_events_pycbc_inference_workflow (run below).
# It names the executables, points at the sampler (emcee_pt.ini) and
# inference (bbh_precessing.ini) files written above, selects the
# events to analyse, and defines the inference and plotting jobs.
# NOTE: the notebook export had dropped the indentation of the
# fout.write(...) line; restored here so the cell is valid Python.
# The written string itself is unchanged.
with open('config.ini', 'w') as fout:
    fout.write('''\
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
; Executables
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
[executables]
inference = ${which:pycbc_inference}
plot = ${which:pycbc_inference_plot_posterior}
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
; Workflow
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
[workflow]
accounting-group = ligo.dev.o3.cbc.explore.test
log-path = log
sampler = emcee_pt.ini
inference = bbh_precessing.ini
events = GW150914 GW170104
sample-rate = 2048
data-sample-rate = 4096
data-duration = 4096
psd-estimation = download ; or data-standard
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
; Inference
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
[inference]
verbose =
seed = 12
config-files = inference.ini data.ini sampler.ini
output-file = inference.hdf
nprocesses = 10
force =
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
; Visualize
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
[plot]
input-file = inference.hdf
plot-scatter =
plot-marginal =
plot-prior = inference.ini data.ini
[plot-mass1mass2]
output-file = plots/posteriors.png
parameters = 'mass1 mass2'
''')
!cat config.ini
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
; Executables
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
[executables]
inference = ${which:pycbc_inference}
plot = ${which:pycbc_inference_plot_posterior}
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
; Workflow
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
[workflow]
accounting-group = ligo.dev.o3.cbc.explore.test
log-path = log
sampler = emcee_pt.ini
inference = bbh_precessing.ini
events = GW150914 GW170104
sample-rate = 2048
data-sample-rate = 4096
data-duration = 4096
psd-estimation = download ; or data-standard
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
; Inference
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
[inference]
verbose =
seed = 12
config-files = inference.ini data.ini sampler.ini
output-file = inference.hdf
nprocesses = 10
force =
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
; Visualize
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
[plot]
input-file = inference.hdf
plot-scatter =
plot-marginal =
plot-prior = inference.ini data.ini
[plot-mass1mass2]
output-file = plots/posteriors.png
parameters = 'mass1 mass2'
!gwnr_create_public_events_pycbc_inference_workflow -h
usage: /home/prayush/src/GWNRTools/bin//gwnrtools_create_public_events_inference_workflow [--options]
Setup workflow to perform Bayesian parameter estimation runs on a custom set
of public gravitational-wave events using open data
optional arguments:
-h, --help show this help message and exit
--version Prints version information.
--verbose Print logging messages.
--output-dir OUTPUT_DIR
Output directory path.
--force If the output-dir already exists, overwrite it.
Otherwise, an OSError is raised.
--do-not-fetch-data Don't fetch GWOSC data.
--nprocesses NPROCESSES
Number of processes to use. If not given then only a
single core will be used.
--use-mpi Use MPI to parallelize the sampler
--seed SEED Seed to use for the random number generator that
initially distributes the walkers. Default is 0.
Configuration:
Options needed for parsing config file(s).
--config-files CONFIGFILE [CONFIGFILE ...]
List of config files to be used in analysis.
--config-overrides [SECTION:OPTION:VALUE [SECTION:OPTION:VALUE ...]]
List of section,option,value combinations to add into
the configuration file. Normally the gps start and end
times might be provided this way, and user specific
locations (ie. output directories). This can also be
provided as SECTION:OPTION or SECTION:OPTION: both of
which indicate that the corresponding value is left
blank.
--config-delete [SECTION:OPTION [SECTION:OPTION ...]]
List of section,option combinations to delete from the
configuration file. This can also be provided as
SECTION which deletes the entire section from the
configuration file or SECTION:OPTION which deletes a
specific option from a given section.
Options for selecting the FFT backend and controlling its performance in this program.:
--fft-backends [FFT_BACKENDS [FFT_BACKENDS ...]]
Preference list of the FFT backends. Choices are:
['fftw', 'numpy']
--fftw-measure-level FFTW_MEASURE_LEVEL
Determines the measure level used in planning FFTW
FFTs; allowed values are: [0, 1, 2, 3]
--fftw-threads-backend FFTW_THREADS_BACKEND
Give 'openmp', 'pthreads' or 'unthreaded' to specify
which threaded FFTW to use
--fftw-input-float-wisdom-file FFTW_INPUT_FLOAT_WISDOM_FILE
Filename from which to read single-precision wisdom
--fftw-input-double-wisdom-file FFTW_INPUT_DOUBLE_WISDOM_FILE
Filename from which to read double-precision wisdom
--fftw-output-float-wisdom-file FFTW_OUTPUT_FLOAT_WISDOM_FILE
Filename to which to write single-precision wisdom
--fftw-output-double-wisdom-file FFTW_OUTPUT_DOUBLE_WISDOM_FILE
Filename to which to write double-precision wisdom
--fftw-import-system-wisdom
If given, call fftw[f]_import_system_wisdom()
Options for selecting optimization-specific settings:
--cpu-affinity CPU_AFFINITY
A set of CPUs on which to run, specified in a format
suitable to pass to taskset.
--cpu-affinity-from-env CPU_AFFINITY_FROM_ENV
The name of an environment variable containing a set
of CPUs on which to run, specified in a format
suitable to pass to taskset.
Options for selecting the processing scheme in this program.:
--processing-scheme PROCESSING_SCHEME
The choice of processing scheme. Choices are ['mkl',
'numpy', 'cpu', 'cuda']. (optional for CPU scheme) The
number of execution threads can be indicated by
cpu:NUM_THREADS, where NUM_THREADS is an integer. The
default is a single thread. If the scheme is provided
as cpu:env, the number of threads can be provided by
the PYCBC_NUM_THREADS environment variable. If the
environment variable is not set, the number of threads
matches the number of logical cores.
--processing-device-id PROCESSING_DEVICE_ID
(optional) ID of GPU to use for accelerated processing
!gwnr_create_public_events_pycbc_inference_workflow --config-files config.ini --output-dir . --force --verbose
2020-03-17 17:49:18,790 Reading configuration file 2020-03-17 17:49:18,792 Using seed 0 2020-03-17 17:49:18,794 Running with CPU support: 1 threads 2020-03-17 17:49:18,904 Will setup analyses in . 2020-03-17 17:49:18,906 Making workspace directories 2020-03-17 17:49:18,907 Creating DAG 2020-03-17 17:49:18,914 Making eventGW170104/emcee_pt/bbh_precessing in /home/prayush/research/test_pycbc_gw150914 2020-03-17 17:49:18,984 Copying config files to eventGW170104/emcee_pt/bbh_precessing 2020-03-17 17:49:18,986 Copying executables to eventGW170104/emcee_pt/bbh_precessing/scripts/ 2020-03-17 17:49:18,988 Fetching GWOSC frame data
Downloading https://www.gw-openscience.org/catalog/GWTC-1-confident/data/GW170104/H-H1_GWOSC_4KHZ_R1-1167557889-4096.gwf [Done] Downloading https://www.gw-openscience.org/catalog/GWTC-1-confident/data/GW170104/L-L1_GWOSC_4KHZ_R1-1167557889-4096.gwf [Done]
2020-03-17 17:50:43,606 Fetching PSD files
Downloading https://dcc.ligo.org/public/0158/P1900011/001/GWTC1_GW170104_PSDs.dat [Done]
2020-03-17 17:51:37,611 Making eventGW150914/emcee_pt/bbh_precessing in /home/prayush/research/test_pycbc_gw150914 2020-03-17 17:51:37,701 Copying config files to eventGW150914/emcee_pt/bbh_precessing 2020-03-17 17:51:37,704 Copying executables to eventGW150914/emcee_pt/bbh_precessing/scripts/ 2020-03-17 17:51:37,706 Fetching GWOSC frame data
Downloading https://www.gw-openscience.org/catalog/GWTC-1-confident/data/GW150914/H-H1_GWOSC_4KHZ_R1-1126257415-4096.gwf [Done] Downloading https://www.gw-openscience.org/catalog/GWTC-1-confident/data/GW150914/L-L1_GWOSC_4KHZ_R1-1126257415-4096.gwf [Done]
2020-03-17 17:52:54,887 Fetching PSD files
Downloading https://dcc.ligo.org/public/0158/P1900011/001/GWTC1_GW150914_PSDs.dat [Done]
2020-03-17 17:53:50,550 Done
!tree
. ├── bbh_precessing.ini ├── config.ini ├── emcee_pt.ini ├── eventGW150914 │ ├── data │ │ ├── H-H1_GWOSC_4KHZ_R1-1126257415-4096.gwf │ │ ├── L-L1_GWOSC_4KHZ_R1-1126257415-4096.gwf │ │ ├── psd_H1.dat │ │ └── psd_L1.dat │ └── emcee_pt │ └── bbh_precessing │ ├── data.ini │ ├── inference.hdf.bkup │ ├── inference.hdf.checkpoint │ ├── inference.ini │ ├── log │ │ ├── run_inference-166-0.err │ │ ├── run_inference-166-0.out │ │ ├── run_inference-169-0.err │ │ ├── run_inference-169-0.out │ │ ├── run_inference-173-0.err │ │ ├── run_inference-173-0.out │ │ └── tmpFgLflx │ ├── make_plot_mass1mass2 │ ├── make_plot_mass1mass2.sub │ ├── plots │ ├── run_inference │ ├── run_inference.sub │ ├── sampler.ini │ └── scripts │ ├── pycbc_inference │ └── pycbc_inference_plot_posterior ├── eventGW170104 │ ├── data │ │ ├── H-H1_GWOSC_4KHZ_R1-1167557889-4096.gwf │ │ ├── L-L1_GWOSC_4KHZ_R1-1167557889-4096.gwf │ │ ├── psd_H1.dat │ │ └── psd_L1.dat │ └── emcee_pt │ └── bbh_precessing │ ├── data.ini │ ├── inference.ini │ ├── log │ │ ├── run_inference-165-0.err │ │ ├── run_inference-165-0.out │ │ ├── run_inference-168-0.err │ │ ├── run_inference-168-0.out │ │ ├── run_inference-172-0.err │ │ ├── run_inference-172-0.out │ │ └── tmpFgLflx │ ├── make_plot_mass1mass2 │ ├── make_plot_mass1mass2.sub │ ├── plots │ ├── run_inference │ ├── run_inference.sub │ ├── sampler.ini │ └── scripts │ ├── pycbc_inference │ └── pycbc_inference_plot_posterior ├── log ├── pycbc_inference_events.dag ├── pycbc_inference_events.dag.condor.sub ├── pycbc_inference_events.dag.dagman.log ├── pycbc_inference_events.dag.dagman.out ├── pycbc_inference_events.dag.lib.err ├── pycbc_inference_events.dag.lib.out ├── pycbc_inference_events.dag.lock ├── pycbc_inference_events.dag.metrics ├── pycbc_inference_events.dag.nodes.log ├── pycbc_inference_events.dag.rescue001 ├── pycbc_inference_events.dag.rescue002 └── pycbc_inference_events.sh 15 directories, 57 files
!condor_submit_dag pycbc_inference_events.dag >> dag.out
!tail *dagman.out
03/04/20 14:44:41 Workflow accounting_group_user: <> 03/04/20 14:44:41 Warning: failed to get attribute DAGNodeName 03/04/20 14:44:41 DAGMAN_LOG_ON_NFS_IS_ERROR setting: False 03/04/20 14:44:41 Default node log file is: </home/prayush/research/test_pycbc_gw150914/./pycbc_inference_events.dag.nodes.log> 03/04/20 14:44:41 DAG Lockfile will be written to pycbc_inference_events.dag.lock 03/04/20 14:44:41 DAG Input file is pycbc_inference_events.dag 03/04/20 14:44:41 Parsing 1 dagfiles 03/04/20 14:44:41 Parsing pycbc_inference_events.dag ... 03/04/20 14:44:41 Dag contains 4 total jobs 03/04/20 14:44:41 Sleeping for 3 seconds to ensure ProcessId uniqueness
bilby¶
import os
# Working directory for the Bilby events test run.
run_dir = '/home/prayush/research/test_bilby_events'
# exist_ok=True replaces the original bare `try/except: pass`, which
# would also have masked real failures such as permission errors.
os.makedirs(run_dir, exist_ok=True)
os.chdir(run_dir)
!pwd
/home/prayush/research/test_bilby_events
!rm -rf *
!ls
!gwnr_write_bilby_configs --help
/home/prayush/miniconda3/envs/lalsuite-dev/lib/python3.9/site-packages/pandas/core/common.py:208: VisibleDeprecationWarning: Creating an ndarray from ragged nested sequences (which is a list-or-tuple of lists-or-tuples-or ndarrays with different lengths or shapes) is deprecated. If you meant to do this, you must specify 'dtype=object' when creating the ndarray.
result = np.asarray(values, dtype=dtype)
usage: /home/prayush/miniconda3/envs/lalsuite-dev/lib/python3.9/site-packages/gwnrtools-2020.10.30-py3.9.egg/EGG-INFO/scripts/gwnrtools_write_bilby_configs [--options]
Get and write configuration files for generating a workflow to perform
Bayesian parameter estimation runs on a custom set of signals with Bilby
optional arguments:
-h, --help show this help message and exit
--version Prints version information.
--verbose Print logging messages.
--write-config WRITE_CONFIG
Comma-separated name of config-type,config-subtype,
e.g.'prior,precessing_spins_bbh'
--write-injection-config WRITE_INJECTION_CONFIG
Write config files for injections and exit.
--write-event-config WRITE_EVENT_CONFIG
Write config files for events and exit.
--show-available-configs
Show available options for all configurations.
--output-dir OUTPUT_DIR
Output directory path.
!gwnr_write_bilby_configs --show-available-configs
/home/prayush/miniconda3/envs/lalsuite-dev/lib/python3.9/site-packages/pandas/core/common.py:208: VisibleDeprecationWarning: Creating an ndarray from ragged nested sequences (which is a list-or-tuple of lists-or-tuples-or ndarrays with different lengths or shapes) is deprecated. If you meant to do this, you must specify 'dtype=object' when creating the ndarray. result = np.asarray(values, dtype=dtype) prior: ['default', 'eccentric-nonspin', 'precessing_spins_bns', 'GW170817', 'GW150914', 'precessing_spins_bbh_tides_on', 'aligned_spins_bbh_tides_on', 'precessing_spins_bns_tides_on', 'aligned_spins_bbh', 'precessing_spins_bbh', 'aligned_spins_bns', 'aligned_spins_bns_tides_on'] injection: ['bbh-prior-default', 'bbh-alignedspin-prior-default', 'bbh-eccentric-nonspin-prior-default', 'bbh-prior-eccentric-nonspin', 'bbh-alignedspin-prior-eccentric-nonspin', 'bbh-eccentric-nonspin-prior-eccentric-nonspin', 'bbh-prior-precessing_spins_bns', 'bbh-alignedspin-prior-precessing_spins_bns', 'bbh-eccentric-nonspin-prior-precessing_spins_bns', 'bbh-prior-GW170817', 'bbh-alignedspin-prior-GW170817', 'bbh-eccentric-nonspin-prior-GW170817', 'bbh-prior-GW150914', 'bbh-alignedspin-prior-GW150914', 'bbh-eccentric-nonspin-prior-GW150914', 'bbh-prior-precessing_spins_bbh_tides_on', 'bbh-alignedspin-prior-precessing_spins_bbh_tides_on', 'bbh-eccentric-nonspin-prior-precessing_spins_bbh_tides_on', 'bbh-prior-aligned_spins_bbh_tides_on', 'bbh-alignedspin-prior-aligned_spins_bbh_tides_on', 'bbh-eccentric-nonspin-prior-aligned_spins_bbh_tides_on', 'bbh-prior-precessing_spins_bns_tides_on', 'bbh-alignedspin-prior-precessing_spins_bns_tides_on', 'bbh-eccentric-nonspin-prior-precessing_spins_bns_tides_on', 'bbh-prior-aligned_spins_bbh', 'bbh-alignedspin-prior-aligned_spins_bbh', 'bbh-eccentric-nonspin-prior-aligned_spins_bbh', 'bbh-prior-precessing_spins_bbh', 'bbh-alignedspin-prior-precessing_spins_bbh', 'bbh-eccentric-nonspin-prior-precessing_spins_bbh', 'bbh-prior-aligned_spins_bns', 
'bbh-alignedspin-prior-aligned_spins_bns', 'bbh-eccentric-nonspin-prior-aligned_spins_bns', 'bbh-prior-aligned_spins_bns_tides_on', 'bbh-alignedspin-prior-aligned_spins_bns_tides_on', 'bbh-eccentric-nonspin-prior-aligned_spins_bns_tides_on'] event: ['bbh-event-prior-default', 'bbh-event-prior-eccentric-nonspin', 'bbh-event-prior-precessing_spins_bns', 'bbh-event-prior-GW170817', 'bbh-event-prior-GW150914', 'bbh-event-prior-precessing_spins_bbh_tides_on', 'bbh-event-prior-aligned_spins_bbh_tides_on', 'bbh-event-prior-precessing_spins_bns_tides_on', 'bbh-event-prior-aligned_spins_bbh', 'bbh-event-prior-precessing_spins_bbh', 'bbh-event-prior-aligned_spins_bns', 'bbh-event-prior-aligned_spins_bns_tides_on']
!gwnr_write_bilby_configs --write-config event,bbh-event-prior-GW150914 --verbose
/home/prayush/miniconda3/envs/lalsuite-dev/lib/python3.9/site-packages/pandas/core/common.py:208: VisibleDeprecationWarning: Creating an ndarray from ragged nested sequences (which is a list-or-tuple of lists-or-tuples-or ndarrays with different lengths or shapes) is deprecated. If you meant to do this, you must specify 'dtype=object' when creating the ndarray. result = np.asarray(values, dtype=dtype) 2021-09-20 19:37:00,651 Done
!ls
bbh-event-prior-GW150914.ini
!cat bbh-event-prior-GW150914.ini
[workflow] tag = test accounting-group = ligo.dev.o3.cbc.explore.test log-path = log request-memory = 2G request-cpus = 1 [inference] duration = 4 sample_rate = 2048 lower_frequency_cutoff = 30 upper_frequency_cutoff = 1024 reference_frequency = 30 phase_marginalization = time_marginalization = ;distance_marginalization = [sampler] name = dynesty npoints = 2000 maxmcmc = 2000 n_check_point = 1000 [data] analysis_type = event source_type = bbh event_names = GW150914,GW170104 interferometers = H1,L1 [template] source_model = bilby.gw.source.lal_binary_black_hole approximant = IMRPhenomPv2 sample_rate = 2048 lower_frequency_cutoff = 30 upper_frequency_cutoff = 1024 reference_frequency = 30 [prior] mass_ratio = Uniform(name='mass_ratio', minimum=0.125, maximum=1) chirp_mass = Uniform(name='chirp_mass', minimum=25, maximum=31) mass_1 = Constraint(name='mass_1', minimum=10, maximum=80) mass_2 = Constraint(name='mass_2', minimum=10, maximum=80) a_1 = Uniform(name='a_1', minimum=0, maximum=0.99) a_2 = Uniform(name='a_2', minimum=0, maximum=0.99) tilt_1 = Sine(name='tilt_1', boundary='reflective') tilt_2 = Sine(name='tilt_2', boundary='reflective') phi_12 = Uniform(name='phi_12', minimum=0, maximum=2 * np.pi, boundary='periodic') phi_jl = Uniform(name='phi_jl', minimum=0, maximum=2 * np.pi, boundary='periodic') luminosity_distance = PowerLaw(alpha=2, name='luminosity_distance', minimum=50, maximum=2000, unit='Mpc', latex_label='$d_L$') dec = Cosine(name='dec') ra = Uniform(name='ra', minimum=0, maximum=2 * np.pi, boundary='periodic') theta_jn = Sine(name='theta_jn', boundary='reflective') psi = Uniform(name='psi', minimum=0, maximum=np.pi, boundary='periodic') phase = Uniform(name='phase', minimum=0, maximum=2 * np.pi, boundary='periodic')
!gwnr_create_public_events_bilby_workflow --help
/home/prayush/miniconda3/envs/lalsuite-dev/lib/python3.9/site-packages/pandas/core/common.py:208: VisibleDeprecationWarning: Creating an ndarray from ragged nested sequences (which is a list-or-tuple of lists-or-tuples-or ndarrays with different lengths or shapes) is deprecated. If you meant to do this, you must specify 'dtype=object' when creating the ndarray.
result = np.asarray(values, dtype=dtype)
usage: /home/prayush/miniconda3/envs/lalsuite-dev/lib/python3.9/site-packages/gwnrtools-2020.10.30-py3.9.egg/EGG-INFO/scripts/gwnrtools_create_public_events_bilby_workflow [--options]
Setup workflow to perform Bayesian parameter estimation runs on a custom set
of simulated signals
optional arguments:
-h, --help show this help message and exit
--version Prints version information.
--verbose Print logging messages.
--config-file CONFIG_FILE
Configuration file with details of analyses. See
`gwnrtools_write_bilby_inference_configs` for help.
--output-dir OUTPUT_DIR
Output directory path.
--force If the output-dir already exists, overwrite it.
Otherwise, an OSError is raised.
--save-backup Don't delete the backup file after the run has
completed.
--nprocesses NPROCESSES
Number of processes to use. If not given then only a
single core will be used.
--use-mpi Use MPI to parallelize the sampler
--seed SEED Seed to use for the random number generator that
initially distributes the walkers. Default is 0.
!gwnr_create_public_events_bilby_workflow --config-file bbh-event-prior-GW150914.ini --output-dir bbh-events --verbose
/home/prayush/miniconda3/envs/lalsuite-dev/lib/python3.9/site-packages/pandas/core/common.py:208: VisibleDeprecationWarning: Creating an ndarray from ragged nested sequences (which is a list-or-tuple of lists-or-tuples-or ndarrays with different lengths or shapes) is deprecated. If you meant to do this, you must specify 'dtype=object' when creating the ndarray. result = np.asarray(values, dtype=dtype) 2021-09-20 19:42:43,113 Using seed 0 2021-09-20 19:42:43,130 Will setup analyses in bbh-events 2021-09-20 19:42:43,130 Creating DAG 2021-09-20 19:42:43,130 --- creating script writer objects for events 2021-09-20 19:42:43,131 --- script writer object created for event GW150914 2021-09-20 19:42:43,131 --- analysis objects created for event 0 2021-09-20 19:42:43,131 --- script writer object created for event GW170104 2021-09-20 19:42:43,131 --- analysis objects created for event 1 2021-09-20 19:42:43,131 Making ./GW150914 in /home/prayush/research/test_pycbc_gw150914/bbh-events Writing script for event data Please do not forget to write 'priors.prior' 2021-09-20 19:42:43,147 Making ./GW170104 in /home/prayush/research/test_pycbc_gw150914/bbh-events Writing script for event data Please do not forget to write 'priors.prior' 2021-09-20 19:42:43,162 Done
!tree
. ├── bbh-event-prior-GW150914.ini ├── bbh-events │ ├── bbh-event-prior-GW150914.ini │ ├── bilby_events.dag │ ├── bilby_events.sh │ ├── GW150914 │ │ ├── log │ │ ├── priors.prior │ │ ├── run_inference │ │ └── run_inference.sub │ └── GW170104 │ ├── log │ ├── priors.prior │ ├── run_inference │ └── run_inference.sub ├── bbh_precessing.ini └── emcee_pt.ini 5 directories, 12 files
# Move into the generated Bilby workflow output directory
# (created by the --output-dir bbh-events run above).
os.chdir('bbh-events')
!ls
bbh-event-prior-GW150914.ini bilby_events.sh GW170104 bilby_events.dag GW150914
!condor_submit_dag bilby_events.dag >> dag.out
!tail *dagman.out
09/20/21 19:46:08 Number of idle job procs: 1
09/20/21 19:46:08 Reassigning the id of job 23e9b16444d8cf36712e2be1b5c18c7d from (112.0.0) to (112.0.0)
09/20/21 19:46:08 Event: ULOG_SUBMIT for HTCondor Node 23e9b16444d8cf36712e2be1b5c18c7d (112.0.0) {09/20/21 19:46:08}
09/20/21 19:46:08 Number of idle job procs: 2
09/20/21 19:46:08 DAG status: 0 (DAG_STATUS_OK)
09/20/21 19:46:08 Of 2 nodes total:
09/20/21 19:46:08 Done Pre Queued Post Ready Un-Ready Failed
09/20/21 19:46:08 === === === === === === ===
09/20/21 19:46:08 0 0 2 0 0 0 0
09/20/21 19:46:08 0 job proc(s) currently held