path: root/threadtest.py
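"""Scan a point-source position likelihood using GPU photon propagation.

A test event is generated at the detector center, then the negative log
likelihood of its PMT hit times is evaluated at trial source positions along
one axis, with the photon propagation farmed out to one or more CUDA devices
by GPU worker threads communicating over job/output queues.
"""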
from gputhread import *
from Queue import Queue
from sample import uniform_sphere
import numpy as np
from pycuda import gpuarray
import pycuda.driver as cuda
from uncertainties import ufloat
from histogram import Histogram
import math

jobs = Queue()
output = Queue()

def generate_event(detector, position=(0,0,0), nphotons=5000):
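    """Propagate nphotons from position on the GPU and return a list of
    (pmtid, earliest hit time) pairs for the PMTs that registered a hit
    (times at or above 1e8 are treated as the no-hit sentinel)."""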
    jobs.put((gpuarray.vec.make_float3(*position), nphotons))
    jobs.join()

    earliest_times = output.get()

    pmt_times = earliest_times[detector.pmtids]
    event_times = [ (i, t) for i, t in zip(detector.pmtids, pmt_times) if t < 1e8 ]
    print '%i hit pmts' % len(event_times)
    return event_times

def likelihood(detector, event_times, position=(0,0,0), nphotons=5000, neval=100):
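    """Estimate the negative log likelihood of the observed event_times for a
    photon source at position.  The earliest hit time of each PMT is
    histogrammed over neval Monte Carlo propagations of nphotons each, and the
    normalized histogram is evaluated at the observed hit time.  Returns a
    ufloat carrying the statistical uncertainty of the estimate."""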
    for i in range(neval):
        jobs.put((gpuarray.vec.make_float3(*position), nphotons))
    jobs.join()

    t = np.zeros(shape=(neval, max(detector.pmtids)+1), dtype=np.float32)
    for i in range(neval):
        t[i] = output.get()

    log_likelihood = 0.0
    log_likelihood_variance = 0.0
    for i, time in event_times:
        h = Histogram(500, (-0.5e-9, 99.5e-9))
        h.fill(t[:,i])

        if h.nentries > 0:
            h.normalize()

        probability = h.ueval(time)

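        # Avoid taking log(0): if the histogram assigns zero probability to
        # the observed time, fall back to a small nonzero floor derived from
        # the number of entries (or the number of bins if the histogram is
        # empty).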
        if probability.nominal_value == 0.0:
            if h.nentries > 0:
                probability = ufloat((0.5/h.nentries, 0.5/h.nentries))
            else:
                probability = \
                    ufloat((1.0/len(h.bincenters), 1.0/len(h.bincenters)))

        log_likelihood += math.log(probability.nominal_value)
        log_likelihood_variance += (probability.std_dev()/probability.nominal_value)**2

    return ufloat((-log_likelihood, log_likelihood_variance**0.5))

if __name__ == '__main__':
    import sys
    import optparse
    import time

    parser = optparse.OptionParser('%prog')
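    # -b: bits passed to detector.build(), -j: comma-separated list of CUDA
    # device indices to use, -n: number of CUDA blocks handed to each GPU
    # worker thread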
    parser.add_option('-b', type='int', dest='nbits', default=8)
    parser.add_option('-j', type='string', dest='devices', default='0')
    parser.add_option('-n', type='int', dest='nblocks', default=64)
    options, args = parser.parse_args()

    from detectors import build_minilbne

    detector = build_minilbne()

    detector.build(bits=options.nbits)

    cuda.init()

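    # Start one worker thread per requested CUDA device; each pulls
    # (position, nphotons) jobs from the shared queue and puts an array of
    # per-channel earliest hit times on the output queue.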
    gputhreads = []
    for i in [int(s) for s in options.devices.split(',')]:
        gputhreads.append(GPUThread(i, detector, jobs, output, options.nblocks))
        gputhreads[-1].start()

    try:
        event_times = generate_event(detector)

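        # Scan the negative log likelihood returned by likelihood() at trial
        # source positions along one axis through the detector.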
        for z in np.linspace(-1.0, 1.0, 100):
            t0 = time.time()
            log_likelihood = likelihood(detector, event_times, (z,0,0))
            elapsed = time.time() - t0
            print 'z = %5.2f, likelihood = %s, elapsed %.2f sec' % (z, log_likelihood, elapsed)
    finally:
        for gputhread in gputhreads:
            gputhread.stop()

        for gputhread in gputhreads:
            gputhread.join()