author     tlatorre <tlatorre@uchicago.edu>    2021-01-05 12:35:12 -0600
committer  tlatorre <tlatorre@uchicago.edu>    2021-01-05 12:35:12 -0600
commit     b719a46d7502cd8e7cf38fa6c4dceeb04456f801 (patch)
tree       73920ecfb9659a756b5d4fb7d2302f5c772abd6d
parent     32297413483bb0dec96349996706dc025a9f29bf (diff)
add --fast argument to dm-search
-rwxr-xr-x  utils/dc-closure-test    2
-rwxr-xr-x  utils/dm-search         29
2 files changed, 17 insertions(+), 14 deletions(-)
diff --git a/utils/dc-closure-test b/utils/dc-closure-test
index 2628edb..c09e5f9 100755
--- a/utils/dc-closure-test
+++ b/utils/dc-closure-test
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright (c) 2019, Anthony Latorre <tlatorre at uchicago>
#
# This program is free software: you can redistribute it and/or modify it
diff --git a/utils/dm-search b/utils/dm-search
index 8a42c87..f8a8ff1 100755
--- a/utils/dm-search
+++ b/utils/dm-search
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright (c) 2019, Anthony Latorre <tlatorre at uchicago>
#
# This program is free software: you can redistribute it and/or modify it
@@ -247,20 +247,23 @@ def make_nll(dm_particle_id, dm_mass, dm_energy, data, muons, mc, atmo_scale_fac
     df_dict_dm = {}
     for id in (20,22,2020,2022,2222):
         df_dict_dm[id] = dm_sample[dm_sample.id == id]
-
     if fast:
         x = np.array(PRIORS)
-        mc_hists = get_mc_hists_fast(df_dict,x,bins,scale=1/atmo_scale_factor,reweight=reweight)
-        muon_hists = get_mc_hists_fast(df_dict_muon,x,bins,scale=1/muon_scale_factor)
-        dm_hists = get_mc_hists_fast(df_dict_dm,x,bins,scale=1/len(dm_sample))
+        fast_mc_hists = get_mc_hists_fast(df_dict,x,bins,scale=1/atmo_scale_factor,reweight=reweight)
+        fast_muon_hists = get_mc_hists_fast(df_dict_muon,x,bins,scale=1/muon_scale_factor)
+        fast_dm_hists = get_mc_hists_fast(df_dict_dm,x,bins,scale=1/len(dm_sample))
     def nll(x, grad=None):
         if (x < PRIORS_LOW).any() or (x > PRIORS_HIGH).any():
             return np.inf
-        if not fast:
+        if fast:
+            mc_hists = fast_mc_hists
+            muon_hists = fast_muon_hists
+            dm_hists = fast_dm_hists
+        else:
             # Get the Monte Carlo histograms. We need to do this within the
             # likelihood function since we apply the energy resolution
             # parameters to the Monte Carlo.
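For reference, the hunk above is the heart of the --fast option: when fast is set, make_nll builds the atmospheric, muon, and dark-matter histograms once, at the prior central values, and the returned nll closure reuses those cached histograms instead of rebuilding them (with the energy bias and resolution parameters applied) on every evaluation, which is what the --fast help text means by "without energy bias and resolution". A minimal, self-contained sketch of that precompute-and-capture pattern, with toy data and simplified names rather than the actual dm-search code:

    import numpy as np

    def make_nll_sketch(data, mc, bins, fast=False):
        """Return a Poisson negative log likelihood. With fast=True the MC
        histogram is built once here and captured by the closure; otherwise
        it is rebuilt, with the current energy-scale parameter applied, on
        every call."""
        data_hist, _ = np.histogram(data, bins=bins)
        if fast:
            fast_mc_hist, _ = np.histogram(mc, bins=bins)

        def nll(x, grad=None):
            if fast:
                mc_hist = fast_mc_hist  # reuse the cached histogram
            else:
                # rebuild per call, applying the energy-scale parameter x[0]
                mc_hist, _ = np.histogram(mc * x[0], bins=bins)
            expected = np.clip(x[1] * mc_hist, 1e-10, None)
            # Poisson negative log likelihood, up to a constant
            return np.sum(expected - data_hist * np.log(expected))

        return nll

    # Usage: the fast version skips the per-call histogramming entirely.
    rng = np.random.default_rng(0)
    data, mc = rng.exponential(1.0, 1000), rng.exponential(1.0, 10000)
    bins = np.linspace(0, 5, 21)
    print(make_nll_sketch(data, mc, bins, fast=True)([1.0, 0.1]))
    print(make_nll_sketch(data, mc, bins, fast=False)([1.0, 0.1]))

The trade-off is the same as in the commit: the cached histograms are only correct at the parameter values they were built with, so the fast likelihood ignores the per-call energy systematics in exchange for speed.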
@@ -306,9 +309,9 @@ def do_fit(dm_particle_id,dm_mass,dm_energy,data,muon,data_mc,weights,atmo_scale
     Returns a tuple (xopt, samples) where samples is an array of shape (steps,
     number of parameters).
     """
-    if universe is None:
-        dm_sample = get_dm_sample(DM_SAMPLES,dm_particle_id,dm_mass,dm_energy)
+    dm_sample = get_dm_sample(DM_SAMPLES,dm_particle_id,dm_mass,dm_energy)
+    if universe is None:
         nll = make_nll(dm_particle_id,dm_mass,dm_energy,data,muon,data_mc,atmo_scale_factor,muon_scale_factor,bins,print_nll,dm_sample=dm_sample,fast=fast)
         pos = np.empty((walkers, len(PRIORS)),dtype=np.double)
@@ -364,7 +367,7 @@ def do_fit(dm_particle_id,dm_mass,dm_energy,data,muon,data_mc,weights,atmo_scale
             data_mc_with_weights = pd.merge(data_mc,weights[universe],how='left',on=['run','unique_id'])
             data_mc_with_weights.weight = data_mc_with_weights.weight.fillna(1.0)
-            nll = make_nll(dm_particle_id,dm_mass,dm_energy,data,muon,data_mc_with_weights,atmo_scale_factor,muon_scale_factor,bins,reweight=True,print_nll=print_nll,dm_sample=dm_sample)
+            nll = make_nll(dm_particle_id,dm_mass,dm_energy,data,muon,data_mc_with_weights,atmo_scale_factor,muon_scale_factor,bins,reweight=True,print_nll=print_nll,dm_sample=dm_sample,fast=fast)
             nlls.append(nll(xopt))
         universe = np.argmin(nlls)
@@ -374,7 +377,7 @@ def do_fit(dm_particle_id,dm_mass,dm_energy,data,muon,data_mc,weights,atmo_scale
         data_mc_with_weights.weight = data_mc_with_weights.weight.fillna(1.0)
         # Create a new negative log likelihood function with the weighted Monte Carlo.
-        nll = make_nll(dm_particle_id,dm_mass,dm_energy,data,muon,data_mc_with_weights,atmo_scale_factor,muon_scale_factor,bins,reweight=True,print_nll=print_nll,dm_sample=dm_sample)
+        nll = make_nll(dm_particle_id,dm_mass,dm_energy,data,muon,data_mc_with_weights,atmo_scale_factor,muon_scale_factor,bins,reweight=True,print_nll=print_nll,dm_sample=dm_sample,fast=fast)
         # Now, we refit with the Monte Carlo weighted by the most likely GENIE
         # systematics.
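The two hunks above only forward fast=fast into the existing make_nll calls, but they sit inside the GENIE-systematics scan: each weight "universe" is scored by evaluating its reweighted likelihood at the current best fit xopt, the most likely universe is chosen with argmin, and the fit is then redone with that universe's weights. A hedged sketch of that select-then-refit control flow, where nll_factory and fit are hypothetical stand-ins for make_nll and the sampler, not the commit's own functions:

    import numpy as np

    def pick_universe_and_refit(nll_factory, fit, xopt, weight_universes):
        """Score each systematic universe at the existing best fit, pick the
        most likely one, then refit with that universe's weights."""
        nlls = []
        for weights in weight_universes:
            nll = nll_factory(weights)   # likelihood reweighted by this universe
            nlls.append(nll(xopt))       # evaluate at the current best fit
        best = int(np.argmin(nlls))
        # Refit with the Monte Carlo weighted by the most likely universe.
        return best, fit(nll_factory(weight_universes[best]), xopt)

    # Toy usage: quadratic "likelihoods" whose minimum shifts with the weight.
    factory = lambda w: (lambda x: (x - w) ** 2)
    grid_fit = lambda nll, x0: min(np.linspace(0, 2, 201), key=nll)
    print(pick_universe_and_refit(factory, grid_fit, 1.05, [0.8, 1.0, 1.2]))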
@@ -476,7 +479,7 @@ def get_limits(dm_masses,data,muon,data_mc,atmo_scale_factor,muon_scale_factor,b
             m1 = SNOMAN_MASS[id1]
             m2 = SNOMAN_MASS[id2]
             dm_energy = dm_mass
-            xopt, universe, samples = do_fit(dm_particle_id,dm_mass,dm_energy,data,muon,data_mc,weights,atmo_scale_factor,muon_scale_factor,bins,steps,print_nll,walkers,thin,universe,fast)
+            xopt, universe, samples = do_fit(dm_particle_id,dm_mass,dm_energy,data,muon,data_mc,weights,atmo_scale_factor,muon_scale_factor,bins,steps,print_nll,walkers,thin,refit=True,universe=universe,fast=fast)
             data_mc_with_weights = pd.merge(data_mc,weights[universe],how='left',on=['run','unique_id'])
             data_mc_with_weights.weight = data_mc_with_weights.weight.fillna(1.0)
@@ -563,7 +566,7 @@ if __name__ == '__main__':
     parser.add_argument("--run-list", default=None, help="run list")
     parser.add_argument("--mcpl", nargs='+', required=True, help="GENIE MCPL files")
     parser.add_argument("--run-info", required=True, help="run_info.log autosno file")
-    parser.add_argument("--universe", default=None, help="GENIE universe for systematics")
+    parser.add_argument("--universe", type=int, default=None, help="GENIE universe for systematics")
     parser.add_argument("--fast", action='store_true', default=False, help="run fast version of likelihood without energy bias and resolution")
     args = parser.parse_args()
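The argparse change above adds type=int so --universe arrives as an integer rather than a string (it is later used to index the GENIE weights), while --fast relies on action='store_true' to act as a plain on/off switch. A small standalone illustration of those two patterns; this toy parser is not dm-search's full interface:

    import argparse

    parser = argparse.ArgumentParser(description="toy parser illustrating the two patterns above")
    # Without type=int the value would be the string "3" instead of the int 3.
    parser.add_argument("--universe", type=int, default=None, help="GENIE universe for systematics")
    # store_true makes --fast a flag: absent -> False, present -> True.
    parser.add_argument("--fast", action='store_true', default=False, help="run fast version of likelihood")

    args = parser.parse_args(["--universe", "3", "--fast"])
    print(args.universe, type(args.universe))  # 3 <class 'int'>
    print(args.fast)                           # True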
@@ -836,7 +839,7 @@ if __name__ == '__main__':
         data_atm.loc[data_atm.id2 == 22,'energy2'] *= (1+xtrue[3]+np.random.randn(np.count_nonzero(data_atm.id2 == 22))*xtrue[4])
         data_atm['ke'] = data_atm['energy1'].fillna(0) + data_atm['energy2'].fillna(0) + data_atm['energy3'].fillna(0)
-    limits, best_fit, discovery_array = get_limits(DM_MASSES,data,muon,data_mc,atmo_scale_factor,muon_scale_factor,bins,args.steps,args.print_nll,args.walkers,args.thin)
+    limits, best_fit, discovery_array = get_limits(DM_MASSES,data,muon,data_mc,atmo_scale_factor,muon_scale_factor,bins,args.steps,args.print_nll,args.walkers,args.thin,args.fast)
     for id in (2020,2222):
         if (best_fit[id] > discovery_array[id]).any():