Skip to content

Commit

Permalink
Merge branch 'gwastro:master' into master
Browse files Browse the repository at this point in the history
  • Loading branch information
SamuelH-97 authored Nov 29, 2021
2 parents c192ad0 + e5a6513 commit 865416e
Show file tree
Hide file tree
Showing 109 changed files with 1,554 additions and 988 deletions.
56 changes: 56 additions & 0 deletions .github/workflows/inference-workflow.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
name: run small inference workflow using pegasus + condor

on: [push, pull_request]

jobs:
build:
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v1
- name: Set up Python
uses: actions/setup-python@v1
with:
python-version: 3.8
- name: install condor
run: |
wget -qO - https://research.cs.wisc.edu/htcondor/ubuntu/HTCondor-Release.gpg.key | sudo apt-key add -
echo "deb http://research.cs.wisc.edu/htcondor/ubuntu/8.9/focal focal contrib" | sudo tee -a /etc/apt/sources.list
echo "deb-src http://research.cs.wisc.edu/htcondor/ubuntu/8.9/focal focal contrib" | sudo tee -a /etc/apt/sources.list
sudo apt-get update
sudo apt-get install minihtcondor
sudo systemctl start condor
sudo systemctl enable condor
- name: install pegasus
run: |
wget https://download.pegasus.isi.edu/pegasus/ubuntu/dists/bionic/main/binary-amd64/pegasus_5.0.1-1+ubuntu18_amd64.deb
sudo apt install ./pegasus_5.0.1-1+ubuntu18_amd64.deb
- run: sudo apt-get install *fftw3* intel-mkl*
- name: Install pycbc
run: |
python -m pip install --upgrade pip setuptools
pip install -r requirements.txt
pip install .
- name: retrieving data
run: bash -e examples/inference/single/get.sh
- name: generating, submitting and running workflow
run: |
cp examples/inference/single/single.ini ./
cp examples/workflow/inference/small_test/*.ini ./
bash -e examples/workflow/inference/small_test/gen.sh
condor_status
cd gw_output
bash -e ../examples/search/submit.sh
./status
python ../examples/search/check_job.py
find submitdir/work/ -type f -name '*.tar.gz' -delete
- name: store log files
if: always()
uses: actions/upload-artifact@v2
with:
name: logs
path: gw_output/submitdir/work
- name: store result page
uses: actions/upload-artifact@v2
with:
name: results
path: html
6 changes: 1 addition & 5 deletions .github/workflows/search-workflow.yml
Original file line number Diff line number Diff line change
Expand Up @@ -36,17 +36,13 @@ jobs:
run: bash -e examples/search/bank.sh
- name: generating statistic files
run: bash -e examples/search/stats.sh
- name: generating workflow
- name: running workflow
run: |
cp examples/search/*.ini ./
bash -e examples/search/gen.sh
cp *.gwf output/
- name: running workflow
run: |
condor_status
cd output
bash -e ../examples/search/submit.sh
./status
python ../examples/search/check_job.py
find submitdir/work/ -type f -name '*.tar.gz' -delete
- name: store log files
Expand Down
1 change: 1 addition & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -45,3 +45,4 @@ A bibtex key and DOI for each release is available from [Zenodo](http://zenodo.o

[![DOI](https://zenodo.org/badge/31596861.svg)](https://zenodo.org/badge/latestdoi/31596861) [![Build Status](https://travis-ci.org/gwastro/pycbc.svg?branch=master)](https://travis-ci.org/gwastro/pycbc)
[![PyPI version](https://badge.fury.io/py/PyCBC.svg)](https://badge.fury.io/py/PyCBC) ![PyPI - Downloads](https://img.shields.io/pypi/dm/pycbc) [![Anaconda-Server Badge](https://anaconda.org/conda-forge/pycbc/badges/version.svg)](https://anaconda.org/conda-forge/pycbc) [![Anaconda-Server Badge](https://anaconda.org/conda-forge/pycbc/badges/downloads.svg)](https://anaconda.org/conda-forge/pycbc)
[![astropy](http://img.shields.io/badge/powered%20by-AstroPy-orange.svg?style=flat)](http://www.astropy.org/)
16 changes: 16 additions & 0 deletions bin/all_sky_search/pycbc_coinc_findtrigs
Original file line number Diff line number Diff line change
Expand Up @@ -109,6 +109,10 @@ def exit_cleaning():
pass
atexit.register(exit_cleaning)

# If some templates have no triggers in one or more ifos, there can be no
# coincs for them: so, track which templates contain triggers in all ifos
tids_with_trigs = None

for i in range(len(args.trigger_files)):
if args.stage_input:
dest = os.path.join(args.stage_input_dir, str(uuid.uuid4()) + '.hdf')
Expand All @@ -127,6 +131,14 @@ for i in range(len(args.trigger_files)):
args.gating_veto_windows)
ifo = reader.ifo
trigs.ifos.append(ifo)

# If we don't have that many triggers, see if we can skip some templates
if len(reader.file[ifo]['template_id']) < 2**27:
uniq = numpy.unique(reader.file[ifo]['template_id'][:])
if tids_with_trigs is None:
tids_with_trigs = numpy.arange(0, num_templates)
tids_with_trigs = numpy.intersect1d(tids_with_trigs, uniq)

# time shift is subtracted from pivot ifo time
trigs.to_shift.append(-1 if ifo == args.pivot_ifo else 0)
logging.info('Applying time shift multiple %i to ifo %s' %
Expand Down Expand Up @@ -163,6 +175,10 @@ if args.randomize_template_order:
else:
template_ids = range(tmin, tmax)

# Only analyze templates which might have coincs
if tids_with_trigs is not None:
template_ids = numpy.intersect1d(tids_with_trigs, template_ids)

# 'data' will store output of coinc finding
# in addition to these lists of coinc info, will also store trigger times and
# ids in each ifo
Expand Down
8 changes: 2 additions & 6 deletions bin/all_sky_search/pycbc_coinc_hdfinjfind
Original file line number Diff line number Diff line change
Expand Up @@ -4,20 +4,16 @@ files.
"""

import argparse, h5py, logging, types, numpy, os.path
from glue.ligolw import ligolw, table, lsctables, utils as ligolw_utils
from ligo.lw import table, lsctables, utils as ligolw_utils
from ligo import segments
from pycbc import events
from pycbc.events import indices_within_segments
from pycbc.types import MultiDetOptionAction
from pycbc.inject import CBCHDFInjectionSet
import pycbc.version
from pycbc.io.ligolw import LIGOLWContentHandler


# dummy class needed for loading LIGOLW files
class LIGOLWContentHandler(ligolw.LIGOLWContentHandler):
pass
lsctables.use_in(LIGOLWContentHandler)

def hdf_append(f, key, value):
if key in f:
tmp = numpy.concatenate([f[key][:], value])
Expand Down
8 changes: 4 additions & 4 deletions bin/all_sky_search/pycbc_fit_sngls_over_multiparam
Original file line number Diff line number Diff line change
Expand Up @@ -101,7 +101,6 @@ else:
# for an exponential fit 1/alpha is linear in the trigger statistic values
# so calculating weighted sums or averages of 1/alpha is appropriate
nabove = fits['count_above_thresh'][:]
nabove = nabove / numpy.mean(nabove)
if tcount: ntotal = fits['count_in_template'][:]

invalpha = 1. / fits['fit_coeff'][:]
Expand All @@ -123,16 +122,17 @@ logging.info("Smoothing ...")
# optimize computational performance.
if len(parvals) == 1:
sort = parvals[0].argsort()
parvals[0] = parvals[0][sort]
parvals_0 = parvals[0][sort]
ntotal = ntotal[sort]
nabove = nabove[sort]
invalphan = invalphan[sort]

# For each template, find the range of nearby templates which fall within
# the chosen window.
left = numpy.searchsorted(parvals[0], parvals[0] - args.smoothing_width[0])
right = numpy.searchsorted(parvals[0], parvals[0] + args.smoothing_width[0]) - 1
left = numpy.searchsorted(parvals_0, parvals[0] - args.smoothing_width[0])
right = numpy.searchsorted(parvals_0, parvals[0] + args.smoothing_width[0]) - 1

del parvals_0
# Precompute the sums so we can quickly look up differences between
# templates
ntsum = ntotal.cumsum()
Expand Down
1 change: 0 additions & 1 deletion bin/all_sky_search/pycbc_fit_sngls_over_param
Original file line number Diff line number Diff line change
Expand Up @@ -111,7 +111,6 @@ else:
tcount = False

nabove = fits['count_above_thresh'][:]
nabove = nabove/np.mean(nabove)
if tcount: ntotal = fits['count_in_template'][:]
# for an exponential fit 1/alpha is linear in the trigger statistic values
# so taking weighted sums/averages of 1/alpha is appropriate
Expand Down
7 changes: 2 additions & 5 deletions bin/all_sky_search/pycbc_sngls_pastro
Original file line number Diff line number Diff line change
Expand Up @@ -12,20 +12,17 @@ coincidences.

import pycbc, pycbc.io, copy
import argparse, logging, numpy as np
from glue.ligolw import ligolw, table, lsctables, utils as ligolw_utils
from ligo.lw import table, lsctables, utils as ligolw_utils
from pycbc import conversions as conv
from pycbc.events import veto, stat, ranking, coinc, single as sngl
from pycbc.io.ligolw import LIGOLWContentHandler
from ligo.segments import segment, segmentlist
import pycbc.version
import matplotlib
matplotlib.use('agg')
from matplotlib import pyplot as plt
from scipy.stats import gaussian_kde as gk

# dummy class for loading LIGOLW files
class LIGOLWContentHandler(ligolw.LIGOLWContentHandler):
pass
lsctables.use_in(LIGOLWContentHandler)

d_power = {
'log': 3.,
Expand Down
9 changes: 3 additions & 6 deletions bin/all_sky_search/pycbc_strip_injections
Original file line number Diff line number Diff line change
@@ -1,19 +1,16 @@
#!/bin/env python
import numpy, argparse, pycbc.version, pycbc.pnutils, logging
from pycbc.events import veto
from glue.ligolw import ligolw, table, lsctables, utils as ligolw_utils
from pycbc.io.ligolw import LIGOLWContentHandler
from ligo.lw import ligolw, table, utils as ligolw_utils


effd = {"H1":"eff_dist_h", "L1":"eff_dist_l", "V1":"eff_dist_v"}
def remove(l, i):
    """Delete from list *l* the elements at the index positions in *i*, in place.

    Parameters
    ----------
    l : list
        List to be modified in place.
    i : iterable of int
        Index positions of the elements to delete.

    Deleting by index (highest first) avoids the bug in the value-based
    ``l.remove(obj)`` approach, which removes the *first* element equal to
    ``obj`` and therefore can delete the wrong entry when the list contains
    duplicate values. Sorting descending keeps earlier indices valid while
    later ones are deleted; ``set`` tolerates repeated indices.
    """
    for idx in sorted(set(i), reverse=True):
        del l[idx]

# dummy class needed for loading LIGOLW files
class LIGOLWContentHandler(ligolw.LIGOLWContentHandler):
pass
lsctables.use_in(LIGOLWContentHandler)

parser = argparse.ArgumentParser()
parser.add_argument('--version', action='version', version=pycbc.version.git_verbose_msg)
parser.add_argument('--verbose', action='store_true')
Expand Down
24 changes: 8 additions & 16 deletions bin/bank/pycbc_aligned_bank_cat
Original file line number Diff line number Diff line change
Expand Up @@ -17,38 +17,33 @@
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.

"""
Programe for concatenating the output of the geometric aligned bank dagman.
Program for concatenating the output of the geometric aligned bank dagman.
This will gather all the meta-output files and create a valid template bank
xml file.
"""
import logging
import fileinput
import glob
import argparse
import numpy
import pycbc.version
import h5py
from glue.ligolw import ligolw
from glue.ligolw import lsctables
from glue.ligolw import utils
from ligo.lw import utils
from pycbc import tmpltbank
from numpy import loadtxt
import pycbc
import pycbc.psd
import pycbc.strain
import pycbc.version
import pycbc.tmpltbank
from pycbc.waveform import get_waveform_filter_length_in_time
from pycbc.io.ligolw import LIGOLWContentHandler


__author__ = "Ian Harry <[email protected]>"
__version__ = pycbc.version.git_verbose_msg
__date__ = pycbc.version.date
__program__ = "pycbc_aligned_bank_cat"

# Read command line options
usage = """usage: %prog [options]"""
_desc = __doc__[1:]
parser = argparse.ArgumentParser(description=_desc,
parser = argparse.ArgumentParser(description=__doc__,
formatter_class=tmpltbank.IndentedHelpFormatterWithNL)

parser.add_argument("--version", action="version", version=__version__)
Expand Down Expand Up @@ -163,12 +158,9 @@ temp_bank = numpy.array([mass1,mass2,spin1z,spin2z]).T
# needed for any reason, this code would have to be able to recalculate the
# moments (or read them in) and use the correct value of f0 and pn-order
if options.metadata_file:
class LIGOLWContentHandler(ligolw.LIGOLWContentHandler):
pass
lsctables.use_in(LIGOLWContentHandler)
outdoc = utils.load_filename(options.metadata_file,\
gz = options.metadata_file.endswith("gz"),
contenthandler=LIGOLWContentHandler)
outdoc = utils.load_filename(options.metadata_file,
compress='auto',
contenthandler=LIGOLWContentHandler)
else:
outdoc = None
if options.output_file.endswith(('.xml','.xml.gz','.xmlgz')):
Expand Down
31 changes: 12 additions & 19 deletions bin/bank/pycbc_bank_verification
Original file line number Diff line number Diff line change
Expand Up @@ -23,29 +23,28 @@ metric approximation to the parameter space.

from __future__ import division

import argparse
import logging
import numpy
import h5py
from ligo.lw import table, lsctables, utils as ligolw_utils
import pycbc
import pycbc.version
from pycbc import tmpltbank, psd, strain
from pycbc.io.ligolw import LIGOLWContentHandler
import matplotlib
matplotlib.use('Agg')
import pylab


__author__ = "Ian Harry <[email protected]>"
__version__ = pycbc.version.git_verbose_msg
__date__ = pycbc.version.date
__program__ = "pycbc_bank_verification"

import argparse
import os, sys
import copy
import logging
import numpy
import h5py
from glue.ligolw import ligolw, table, lsctables, utils as ligolw_utils
from pycbc import tmpltbank, psd, strain, pnutils
import matplotlib
matplotlib.use('Agg')
import pylab

# Read command line option
_desc = __doc__[1:]
parser = argparse.ArgumentParser(description=_desc,
parser = argparse.ArgumentParser(description=__doc__,
formatter_class=tmpltbank.IndentedHelpFormatterWithNL)

# Begin with code specific options
Expand Down Expand Up @@ -195,12 +194,6 @@ logging.info("Reading template bank.")


if opts.input_bank.endswith(('.xml','.xml.gz','.xmlgz')):
# dummy class needed for loading LIGOLW files
class LIGOLWContentHandler(ligolw.LIGOLWContentHandler):
pass

lsctables.use_in(LIGOLWContentHandler)

indoc = ligolw_utils.load_filename(opts.input_bank,
contenthandler=LIGOLWContentHandler)
template_list = table.get_table(indoc,
Expand Down
30 changes: 10 additions & 20 deletions bin/bank/pycbc_geom_aligned_bank
Original file line number Diff line number Diff line change
Expand Up @@ -34,12 +34,12 @@ import distutils.spawn
import h5py
import configparser
from scipy import spatial
from glue.ligolw import ligolw
from glue.ligolw import table
from glue.ligolw import lsctables
from glue.ligolw import ilwd
from glue.ligolw import utils as ligolw_utils
from glue.ligolw.utils import process as ligolw_process
from ligo.lw import ligolw
from ligo.lw import table
from ligo.lw import lsctables
from ligo.lw import ilwd
from ligo.lw import utils as ligolw_utils
from ligo.lw.utils import process as ligolw_process
import pycbc
import pycbc.psd
import pycbc.strain
Expand Down Expand Up @@ -236,7 +236,7 @@ cmds_file_name = opts.metadata_file
outdoc = ligolw.Document()
outdoc.appendChild(ligolw.LIGO_LW())
ligolw_process.register_to_xmldoc(outdoc, __program__, vars(opts), comment="",
ifos="", version=pycbc.version.git_hash,
instruments="", version=pycbc.version.git_hash,
cvs_repository='pycbc/'+pycbc.version.git_branch,
cvs_entry_time=pycbc.version.date)
ligolw_utils.write_filename(outdoc, cmds_file_name)
Expand Down Expand Up @@ -563,14 +563,8 @@ else:
stack_exe = GeomAligned2DStackExecutable(workflow.cp, 'aligned2dstack',
ifos=workflow.ifos, out_dir=curr_dir)
num_banks = int((len(newV1s) - 0.5)//opts.split_bank_num) + 1
if not opts.is_sub_workflow:
input_h5file = wf.File.from_path(opts.intermediate_data_file)
else:
# Basically, if this is a sub-workflow do not store a PFN, as it will be
# some meaningless temporary path, but instead tell the uber-workflow
# that these files are outputs and used within the sub-dax and let it
# handle the rest.
input_h5file = pwf.File(os.path.basename(opts.intermediate_data_file))
input_h5file = wf.File.from_path(opts.intermediate_data_file)



all_outs = wf.FileList([])
Expand All @@ -593,11 +587,7 @@ else:

combine_exe = AlignedBankCatExecutable(workflow.cp, 'alignedbankcat',
ifos=workflow.ifos, out_dir=curr_dir)
if not opts.is_sub_workflow:
metadata_file = wf.File.from_path(cmds_file_name)
else:
metadata_file = pwf.File(os.path.basename(cmds_file_name))

metadata_file = wf.File.from_path(cmds_file_name)
combine_node = combine_exe.create_node(all_outs, metadata_file,
workflow.analysis_time,
output_file_path=opts.output_file)
Expand Down
Loading

0 comments on commit 865416e

Please sign in to comment.