author    | tlatorre <tlatorre@uchicago.edu> | 2019-08-05 14:02:09 -0500
committer | tlatorre <tlatorre@uchicago.edu> | 2019-08-05 14:02:09 -0500
commit    | f90d4d6ba905d3247942442c4349a30378c3e614 (patch)
tree      | cdf6faefa0606c0afdc8c39a095a6a013ee15ea8 /utils/submit-grid-jobs
parent    | 2c3d4cd7ef42e878f0e981cf91295ff8ad051877 (diff)
add ability to specify a particle combo on the command line
This commit updates the fit program to accept a particle combo from the
command line so you can fit for a single particle combination hypothesis. For
example, running:

    $ ./fit ~/zdabs/mu_minus_700_1000.hdf5 -p 2020

would fit only the two-electron hypothesis.
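For reference, the -p argument packs one two-digit particle code per particle
in base 100, so 2020 is two particles with code 20; per the diff below, the
codes used are 20 (electron) and 22 (muon). A minimal Python sketch of how
such a combo unpacks (decode_particle_combo is a hypothetical helper for
illustration, not part of the fit program):

```python
def decode_particle_combo(combo):
    """Hypothetical helper: split a packed combo such as 2020 back into
    its two-digit particle codes (the fit program does its own parsing)."""
    ids = []
    while combo:
        ids.append(combo % 100)   # low two digits = last particle code
        combo //= 100
    return ids[::-1]

print(decode_particle_combo(2020))  # [20, 20] -> two electrons
print(decode_particle_combo(2022))  # [20, 22] -> one electron + one muon
```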
The reason for adding this ability is that my grid jobs were getting evicted
when fitting muons in run 10,000, since it takes tens of hours to fit all of
the particle hypotheses. With this change, and a small update to the
submit-grid-jobs script, we now submit a single grid job per particle
combination hypothesis, which should make each grid job run approximately 4
times faster.
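As a rough sketch of the resulting fan-out, assuming max_particles is 3 (an
assumed value for illustration; the actual setting is passed on the command
line) and the two particle codes [20, 22] used in the diff:

```python
from itertools import combinations_with_replacement

max_particles = 3  # assumed value for illustration
combos = [c for i in range(1, max_particles + 1)
            for c in combinations_with_replacement([20, 22], i)]
print(len(combos))  # 9 short jobs per event instead of 1 long one
```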
Diffstat (limited to 'utils/submit-grid-jobs')
-rwxr-xr-x | utils/submit-grid-jobs | 20
1 file changed, 17 insertions, 3 deletions
```diff
diff --git a/utils/submit-grid-jobs b/utils/submit-grid-jobs
index 63cbcb7..a55a723 100755
--- a/utils/submit-grid-jobs
+++ b/utils/submit-grid-jobs
@@ -140,13 +140,16 @@ ID = uuid.uuid1()
 class MyTemplate(string.Template):
     delimiter = '@'
 
-def submit_job(filename, run, gtid, dir, dqxx_dir, min_nhit, max_particles):
+def submit_job(filename, run, gtid, dir, dqxx_dir, min_nhit, max_particles, particle_combo=None):
     print("submitting job for %s gtid %i" % (filename, gtid))
     head, tail = split(filename)
     root, ext = splitext(tail)
 
     # all output files are prefixed with FILENAME_GTID_UUID
-    prefix = "%s_%08i_%s" % (root,gtid,ID.hex)
+    if particle_combo:
+        prefix = "%s_%08i_%i_%s" % (root,gtid,particle_combo,ID.hex)
+    else:
+        prefix = "%s_%08i_%s" % (root,gtid,ID.hex)
 
     # fit output filename
     output = "%s.hdf5" % prefix
@@ -166,6 +169,8 @@ def submit_job(filename, run, gtid, dir, dqxx_dir, min_nhit, max_particles):
         sys.exit(1)
 
     args = [tail,"-o",output,"--gtid",gtid,"--min-nhit",min_nhit,"--max-particles",max_particles]
+    if particle_combo:
+        args += ["-p",particle_combo]
     transfer_input_files = ",".join([executable,filename,join(dqxx_dir,"DQXX_%010i.dat" % run)] + [join(dir,filename) for filename in INPUT_FILES])
     transfer_output_files = ",".join([output])
 
@@ -191,12 +196,19 @@ def submit_job(filename, run, gtid, dir, dqxx_dir, min_nhit, max_particles):
     # submit the job
     check_call(["condor_submit",condor_submit])
 
+def array_to_particle_combo(combo):
+    particle_combo = 0
+    for i, id in enumerate(combo[::-1]):
+        particle_combo += id*100**i
+    return particle_combo
+
 if __name__ == '__main__':
     import argparse
     from subprocess import check_call
     import os
     import tempfile
     import h5py
+    from itertools import combinations_with_replacement
 
     parser = argparse.ArgumentParser("submit grid jobs")
     parser.add_argument("filenames", nargs='+', help="input files")
@@ -275,7 +287,9 @@ if __name__ == '__main__':
         with h5py.File(output.name) as f:
             for ev in f['ev']:
                 if ev['nhit'] >= args.min_nhit:
-                    submit_job(filename, ev['run'], ev['gtid'], dir, dqxx_dir, args.min_nhit, args.max_particles)
+                    for i in range(1,args.max_particles+1):
+                        for particle_combo in map(array_to_particle_combo,combinations_with_replacement([20,22],i)):
+                            submit_job(filename, ev['run'], ev['gtid'], dir, dqxx_dir, args.min_nhit, args.max_particles, particle_combo)
 
         # Delete temporary HDF5 file
         os.unlink(output.name)
```
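For reference, the new array_to_particle_combo helper from the diff can be
exercised against the same enumeration the script now performs (shown here up
to two particles for brevity):

```python
from itertools import combinations_with_replacement

def array_to_particle_combo(combo):
    # pack the two-digit particle codes in base 100: (20, 22) -> 2022
    particle_combo = 0
    for i, id in enumerate(combo[::-1]):
        particle_combo += id*100**i
    return particle_combo

for i in range(1, 3):
    for combo in combinations_with_replacement([20, 22], i):
        print(combo, "->", array_to_particle_combo(combo))
# (20,) -> 20
# (22,) -> 22
# (20, 20) -> 2020
# (20, 22) -> 2022
# (22, 22) -> 2222
```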