Diffstat (limited to 'utils/dm-search')
-rwxr-xr-x  utils/dm-search  7
1 file changed, 3 insertions, 4 deletions
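
For readers skimming the patch: it drops the testing-only --nhit-thresh option and stops forwarding it to get_events(). Below is a minimal, self-contained sketch of that calling pattern, not code from this repository: get_events here is a hypothetical stand-in for the function dm-search imports, and the filename is a placeholder.

import argparse

def get_events(filenames, merge_fits=False, nhit_thresh=None, mc=False):
    # Hypothetical stand-in for the get_events() used by dm-search.
    # Before this commit, a non-None nhit_thresh was forwarded here so that
    # low-nhit events could be dropped at load time (a testing-only speedup,
    # per the removed --help text); after it, the keyword is simply not passed.
    print("loading %s (merge_fits=%s, nhit_thresh=%s, mc=%s)" %
          (filenames, merge_fits, nhit_thresh, mc))
    return []

if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    # Removed by this commit:
    #   parser.add_argument("--nhit-thresh", type=int, default=None, ...)
    args = parser.parse_args()

    # Old call site: get_events(filenames, merge_fits=True, nhit_thresh=args.nhit_thresh)
    # New call site (after this commit):
    get_events(["placeholder.hdf5"], merge_fits=True)
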
diff --git a/utils/dm-search b/utils/dm-search
index d5d3ec7..533d2f7 100755
--- a/utils/dm-search
+++ b/utils/dm-search
@@ -544,7 +544,6 @@ if __name__ == '__main__':
parser.add_argument("--save", action='store_true', default=False, help="save corner plots for backgrounds")
parser.add_argument("--mc", nargs='+', required=True, help="atmospheric MC files")
parser.add_argument("--muon-mc", nargs='+', required=True, help="muon MC files")
- parser.add_argument("--nhit-thresh", type=int, default=None, help="nhit threshold to apply to events before processing (should only be used for testing to speed things up)")
parser.add_argument("--steps", type=int, default=1000, help="number of steps in the MCMC chain")
parser.add_argument("--pull", type=int, default=0, help="plot pull plots")
parser.add_argument("--weights", nargs='+', required=True, help="GENIE reweight HDF5 files")
@@ -571,7 +570,7 @@ if __name__ == '__main__':
     # Loop over runs to prevent using too much memory
     evs = []
     for run, df in rhdr.groupby('run'):
-        evs.append(get_events(df.filename.values, merge_fits=True, nhit_thresh=args.nhit_thresh))
+        evs.append(get_events(df.filename.values, merge_fits=True))
     ev = pd.concat(evs).reset_index()
     livetime = 0.0
@@ -603,14 +602,14 @@ if __name__ == '__main__':
     # and otherwise the GTIDs will clash
     ev_mcs = []
     for filename in args.mc:
-        ev_mcs.append(get_events([filename], merge_fits=True, nhit_thresh=args.nhit_thresh, mc=True))
+        ev_mcs.append(get_events([filename], merge_fits=True, mc=True))
     ev_mc = pd.concat([ev_mc for ev_mc in ev_mcs if len(ev_mc) > 0]).reset_index()
     if (~rhdr.run.isin(ev_mc.run)).any():
         print_warning("Error! The following runs have no Monte Carlo: %s" % \
             np.unique(rhdr.run[~rhdr.run.isin(ev_mc.run)].values))
-    muon_mc = get_events(args.muon_mc, merge_fits=True, nhit_thresh=args.nhit_thresh, mc=True)
+    muon_mc = get_events(args.muon_mc, merge_fits=True, mc=True)
     weights = pd.concat([read_hdf(filename, "weights") for filename in args.weights],ignore_index=True)
     # Add the "flux_weight" column to the ev_mc data since I stupidly simulated