path: root/src/run-fit
blob: d309cd3269c8cc5d9f6317b3c139f19edacae4e6
#!/usr/bin/env python
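"""Run the fit program on multiple zdab files in parallel, one worker
process per file:

    run-fit [-j JOBS] FILENAME [FILENAME ...]
"""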
from __future__ import print_function, division
import subprocess
from os.path import splitext, split

def run_fit(filename):
    """Run the fit program on a single zdab file. The output is written to
    FILENAME.hdf5 in the current working directory."""
    head, tail = split(filename)
    root, ext = splitext(tail)
    output = root + '.hdf5'
    cmd = ["./fit", filename, "-o", output]
    return subprocess.call(cmd)

if __name__ == '__main__':
    import argparse
    from multiprocessing import Pool, cpu_count
    import signal
    import os

    parser = argparse.ArgumentParser(description="fit multiple zdab files")
    parser.add_argument("-j", "--jobs", type=int, default=None,
                        help="number of jobs")
    parser.add_argument("filenames", nargs="+",
                        help="zdab files")
    args = parser.parse_args()

    jobs = args.jobs

    if jobs is None:
        jobs = cpu_count()

    # Temporarily ignore SIGINT while the pool is created so that the
    # workers inherit SIG_IGN and ctrl-c is delivered only to this parent.
    # see https://stackoverflow.com/questions/11312525/catch-ctrlc-sigint-and-exit-multiprocesses-gracefully-in-python
    handler = signal.signal(signal.SIGINT, signal.SIG_IGN)
    p = Pool(jobs)
    signal.signal(signal.SIGINT, handler)
    try:
        result = p.map_async(run_fit, args.filenames)
        # on Python 2, get() only responds to KeyboardInterrupt if it is
        # given a timeout, so we pass an arbitrarily long one
        result.get(1000000000)
    except KeyboardInterrupt:
        print("ctrl-c caught. quitting...")
        p.terminate()
        # kill our whole process group with SIGKILL in case any child fit
        # processes are still running
        os.killpg(os.getpgid(0), signal.SIGKILL)
    else:
        p.close()

    p.join()
TODO:

- update find peaks algorithm to do single particle quick fits
    - I did update the find peaks algorithm to work *much* better, but it
      still doesn't do single particle quick fits. I think the next
      improvement would be to add the ability to actually determine the
      number of rings.
^ figure out how to combine SNO fitter data with my fitter for the final
  analysis. For example, how do we apply the neutron follower cut?
- double check that zdab-reprocess is correct for the D2O and salt phases,
  since it appears to be from the NCD phase
? add code to compute the expected deviation from nll_best to normalize psi
    - tried several different versions of this and nothing seemed to
      perform as well as psi/nhit.
^ add a term to the likelihood for the probability that a channel is
  miscalibrated or didn't make it into the event
- when calculating the first order statistic for a Gaussian, is
  alpha = pi/8 a better choice than 0.375? (see the order-statistic sketch
  below)
- speed up charge initialization
- determine the *real* mean and standard deviation of the single PE charge
  distribution. TELLIE SNO+ data?
^ extend the electron range table
    - extended the electron range table up to 1 TeV
? fix the time PDF. Currently we take the first order statistic of dark
  noise + indirect light + direct light all together, but this isn't
  correct. (see the time-PDF sketch below)
    - thought more about this and it's not actually obvious that what I'm
      doing is wrong. Need to think about it more.

# fragment of a separate results-moving script; the opening
# "if args.db is None:" line and the following context are assumed:
#     import sqlite3, glob
#     from os.path import join, split, splitext
#     from shutil import move as mv
#     home = os.path.expanduser("~")
if args.db is None:
    args.db = join(home,'state.db')

if args.new_dir is None:
    args.new_dir = join(home,"fit_results_to_move")

conn = sqlite3.connect(args.db)

c = conn.cursor()

results = c.execute('SELECT filename, uuid, gtid, particle_id, state FROM state WHERE state = "SUCCESS" ORDER BY timestamp ASC')

for filename, uuid, gtid, particle_id, state in results.fetchall():
    head, tail = split(filename)
    root, ext = splitext(tail)

    # all output files are prefixed with FILENAME_GTID_UUID
    prefix = "%s_%08i_%i_%s" % (root,gtid,particle_id,uuid)
    new_dir = "%s_%s" % (root,uuid)

    if state == 'SUCCESS':
        # If it successfully fit, then we move all the associated files to
        # a new directory. From there, they can be copied back.
        for filename in glob.glob("%s/%s.*" % (new_dir,prefix)):
            mv(filename,join(args.new_dir,filename))

conn.close()
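On the alpha = pi/8 vs. 0.375 question above: both are plotting-position
constants in the approximation E[X_(1)] ~ Phi^-1((1 - alpha)/(n - 2*alpha + 1))
for the expected minimum of n standard normals (alpha = 0.375 is Blom's
constant). Below is a minimal sketch, not part of the fitter, that checks
both constants against Monte Carlo; approx_first_order is a hypothetical
helper name.

# sketch: compare plotting-position constants for E[X_(1)] of n standard
# normals against Monte Carlo (approx_first_order is hypothetical, not
# part of this repository)
from __future__ import print_function, division
import numpy as np
from scipy.stats import norm

def approx_first_order(n, alpha):
    # Blom-style approximation E[X_(1)] ~ Phi^-1((1-alpha)/(n-2*alpha+1))
    return norm.ppf((1 - alpha)/(n - 2*alpha + 1))

if __name__ == '__main__':
    rng = np.random.RandomState(0)
    for n in (10, 100, 1000):
        # empirical mean of the minimum of n standard normals
        mc = rng.randn(10000, n).min(axis=-1).mean()
        print("n=%4i mc=%.4f alpha=0.375: %.4f alpha=pi/8: %.4f" %
              (n, mc, approx_first_order(n, 0.375),
               approx_first_order(n, np.pi/8)))

The same formula generalizes to the i-th order statistic with
(i - alpha)/(n - 2*alpha + 1) in place of (1 - alpha)/(n - 2*alpha + 1).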
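And for the time PDF item: if all n hit times on a channel are treated as
i.i.d. draws from one mixture of dark noise, indirect light, and direct
light, the earliest hit has PDF f_(1)(t) = n f(t) (1 - F(t))**(n-1). Here
is a minimal sketch of that "all together" construction; the function and
argument names are illustrative, not the fitter's actual API.

# sketch of the "all together" first order statistic the TODO questions;
# pdfs/cdfs are callables for the dark noise, indirect, and direct light
# components (names illustrative, not the fitter's actual API)
import numpy as np

def first_order_statistic_pdf(t, pdfs, cdfs, weights, n):
    # mixture density and CDF of a single photoelectron time
    w = np.asarray(weights, dtype=float)
    w /= w.sum()
    f = sum(wi*pdf(t) for wi, pdf in zip(w, pdfs))
    F = sum(wi*cdf(t) for wi, cdf in zip(w, cdfs))
    # PDF of the earliest of n i.i.d. hit times
    return n*f*(1 - F)**(n - 1)

The open question in the note is whether lumping the three components into
a single i.i.d. sample like this is actually correct.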