aboutsummaryrefslogtreecommitdiff
path: root/utils/submit-grid-jobs
blob: 18c6e37817fc4cca4cee5fb728bba5fa179a89d0 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
#!/usr/bin/env python
# Copyright (c) 2019, Anthony Latorre <tlatorre at uchicago>
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see <https://www.gnu.org/licenses/>.

from __future__ import print_function, division
import yaml
import string
from os.path import split, splitext, join
import uuid
from subprocess import check_call

# HTCondor submit-description template. The @-prefixed tokens
# (@executable, @args, ...) are placeholders filled in by
# MyTemplate.safe_substitute() inside submit_job(); everything else is
# passed to condor_submit verbatim.
CONDOR_TEMPLATE = \
"""
# We need the job to run our executable script, with the
#  input.txt filename as an argument, and to transfer the
#  relevant input and output files:
executable = @executable
arguments = @args
transfer_input_files = @transfer_input_files

transfer_output_files = @transfer_output_files

error = @error
output = @output
log = @log

# The below are good base requirements for first testing jobs on OSG, 
#  if you don't have a good idea of memory and disk usage.
requirements = (OSGVO_OS_STRING == "RHEL 7") && (OpSys == "LINUX")
request_cpus = 1
request_memory = 1 GB
request_disk = 1 GB

# Queue one job with the above specifications.
queue 1

+ProjectName = "SNOplus"
""".strip()

# Static data files the fitter needs at runtime; they are shipped to the
# worker node alongside the zdab file and the run's DQXX file.
# (The DQXX file is run-dependent, so it is added per job in submit_job.)
INPUT_FILES = [
    "muE_water_liquid.txt",
    "pmt_response_qoca_d2o_20060216.dat",
    "rsp_rayleigh.dat",
    "e_water_liquid.txt",
    "pmt_pcath_response.dat",
    "pmt.txt",
    "muE_deuterium_oxide_liquid.txt",
    "pmt_response.dat",
    "proton_water_liquid.txt",
]

# A per-invocation UUID appended to every output filename, so resubmitting
# the same job never overwrites an earlier run's files.
ID = uuid.uuid1()

class MyTemplate(string.Template):
    """string.Template that uses '@' instead of the default '$' as the
    placeholder delimiter, matching the @executable, @args, ... tokens
    in CONDOR_TEMPLATE."""
    delimiter = '@'

def submit_job(filename, run, gtid, dir, dqxx_dir, min_nhit, max_particles):
    """Write an HTCondor submit file for fitting a single event and submit it.

    Arguments:
        filename      -- path to the zdab file containing the event
        run           -- run number, used to select the DQXX_<run>.dat file
        gtid          -- global trigger id of the event to fit
        dir           -- fitter directory holding the `fit` executable and
                         the static INPUT_FILES
        dqxx_dir      -- directory holding the DQXX_*.dat files
        min_nhit      -- forwarded to the fitter's --min-nhit option
        max_particles -- forwarded to the fitter's --max-particles option

    Side effects: writes <prefix>.submit in the current directory and runs
    condor_submit on it. Raises subprocess.CalledProcessError if
    condor_submit exits nonzero.
    """
    print("submitting job for %s gtid %i" % (filename, gtid))
    head, tail = split(filename)
    root, ext = splitext(tail)

    # all output files are prefixed with FILENAME_GTID_UUID
    prefix = "%s_%08i_%s" % (root, gtid, ID.hex)

    # fit output filename
    output = "%s.txt" % prefix
    # condor submit filename
    condor_submit = "%s.submit" % prefix

    # set up the arguments for the template
    executable = join(dir, "fit")
    args = [tail, "-o", output, "--gtid", gtid,
            "--min-nhit", min_nhit, "--max-particles", max_particles]
    # Use a distinct loop variable here: the original reused `filename`,
    # which under Python 2 list-comprehension scoping clobbers the
    # function argument of the same name.
    transfer_input_files = ",".join(
        [filename, join(dqxx_dir, "DQXX_%010i.dat" % run)] +
        [join(dir, input_file) for input_file in INPUT_FILES])
    transfer_output_files = output

    condor_error = "%s.error" % prefix
    condor_output = "%s.output" % prefix
    condor_log = "%s.log" % prefix

    template = MyTemplate(CONDOR_TEMPLATE)

    submit_string = template.safe_substitute(
            executable=executable,
            args=" ".join(map(str, args)),
            transfer_input_files=transfer_input_files,
            transfer_output_files=transfer_output_files,
            error=condor_error,
            output=condor_output,
            log=condor_log)

    # write out the formatted template
    with open(condor_submit, "w") as f:
        f.write(submit_string)

    # submit the job
    check_call(["condor_submit", condor_submit])

if __name__ == '__main__':
    import argparse
    from subprocess import check_output

    parser = argparse.ArgumentParser("submit grid jobs")
    parser.add_argument("filenames", nargs='+', help="input files")
    parser.add_argument("--dir", type=str, help="fitter directory", required=True)
    parser.add_argument("--dqxx-dir", type=str, help="dqxx directory", required=True)
    parser.add_argument("--min-nhit", type=int, help="minimum nhit to fit an event", default=100)
    parser.add_argument("--max-particles", type=int, help="maximum number of particles to fit for", default=3)
    args = parser.parse_args()

    for filename in args.filenames:
        # Dump per-event metadata (run, gtid, nhit, ...) as YAML using the
        # zdab-cat utility shipped alongside the fitter.
        output = check_output([join(args.dir, "zdab-cat"), filename])
        # safe_load instead of yaml.load: a bare yaml.load is deprecated in
        # PyYAML >= 5.1 and allows arbitrary object construction, which we
        # neither need nor want here.
        data = yaml.safe_load(output)

        for event in data['data']:
            for ev in event['ev']:
                run = ev['run']
                gtid = ev['gtid']
                nhit = ev['nhit']

                # only submit a fit job for events above the nhit threshold
                if nhit >= args.min_nhit:
                    submit_job(filename, run, gtid, args.dir, args.dqxx_dir, args.min_nhit, args.max_particles)