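# threadtest.py -- drive the GPUThread photon propagation workers: simulate a
# photon bomb in the LBNE geometry, then scan the negative log likelihood of
# its PMT hit times as the assumed source position is moved.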
from gputhread import *
from Queue import Queue
from detectors import LBNE
from sample import uniform_sphere
import numpy as np
from pycuda import gpuarray
import pycuda.driver as cuda
from uncertainties import ufloat, umath
from histogram import Histogram

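# Work queues shared with the GPU worker threads: the main thread puts Job
# objects on `jobs`, and each GPUThread is expected to propagate the photons
# and put the finished job on `output` (see the gputhread module).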
jobs = Queue()
output = Queue()

def create_job(position=(0,0,0), nphotons=1000, max_steps=10):
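    """Build a photon propagation Job: `nphotons` photons all starting at
    `position` with isotropic directions, random polarization vectors,
    wavelengths drawn uniformly from 200 to 800 (presumably nm), zero
    start times, and states/last-hit triangles initialized to -1."""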
    positions = np.tile(position, nphotons).reshape((nphotons, 3))
    directions = uniform_sphere(nphotons)
    polarizations = uniform_sphere(nphotons)
    wavelengths = np.random.uniform(200, 800, size=nphotons)
    times = np.zeros(nphotons)
    states = -np.ones(nphotons)              # -1: no interaction recorded yet
    last_hit_triangles = -np.ones(nphotons)  # -1: photon has not hit any triangle

    return Job(positions, directions, wavelengths, polarizations, times,
               states, last_hit_triangles, max_steps)

def generate_event(position=(0,0,0), nphotons=1000):
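    """Propagate one event from `position` and return a list of
    (pmt_id, earliest_hit_time) pairs for every PMT that recorded at
    least one surface-absorbed photon."""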
    jobs.put(create_job(position, nphotons))
    jobs.join()

    job = output.get()

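    # Map each photon's last-hit triangle to the solid that owns it; photons
    # that never hit the geometry (last_hit_triangles == -1) are masked back
    # to -1.  State 2 is taken here to mean the photon was absorbed at a
    # detector surface.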
    last_hit_triangles = job.last_hit_triangles
    solids = lbne.solid_id[last_hit_triangles]
    solids[last_hit_triangles == -1] = -1
    surface_absorbed = job.states == 2

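    # Summarize how many photons ended up in each propagation state.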
    for i in np.unique(job.states):
        print 'state %2i %i' % (i, len(job.states[job.states == i]))

    event_times = []
    for i in lbne.pmtids:
        photons = np.logical_and(solids == i, surface_absorbed)

        hit_times = job.times[photons]

        if len(hit_times) > 0:
            event_times.append((i, np.min(hit_times)))

    print '%i hit pmts' % len(event_times)

    return event_times

def likelihood(event_times, position=(0,0,0), nphotons=1000, neval=100):
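    """Return the negative log likelihood of the observed `event_times`
    for a photon bomb at `position`.  The hit-time distribution of each
    PMT is estimated from `neval` Monte Carlo propagations and evaluated
    at the observed first-hit time."""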
    for i in range(neval):
        jobs.put(create_job(position, nphotons))
    jobs.join()

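    # t[pmt_id] collects the earliest hit time on that PMT from each
    # propagation in which it was hit at all.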
    t = {}
    for i in range(neval):
        job = output.get()

        last_hit_triangles = job.last_hit_triangles
        solids = lbne.solid_id[last_hit_triangles]
        solids[last_hit_triangles == -1] = -1
        surface_absorbed = job.states == 2

        for j in lbne.pmtids:
            pmt_photons = solids == j
            photons = np.logical_and(pmt_photons, surface_absorbed)

            if j not in t:
                t[j] = []

            hit_times = job.times[photons]

            if len(hit_times) > 0:
                t[j].append(np.min(hit_times))

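    # Histogram the simulated first-hit times for each hit PMT (1 ns bins
    # from -0.5 to 99.5 ns) and evaluate the observed hit time against the
    # normalized histogram.  Zero-probability evaluations are replaced with
    # a small non-zero value so the log likelihood stays finite.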
    log_likelihood = ufloat((0,0))
    for i, time in event_times:
        h = Histogram(100, (-0.5e-9, 99.5e-9))
        h.fill(t[i])

        if h.nentries > 0:
            h.normalize()

        probability = h.ueval(time)

        if probability.nominal_value == 0.0:
            if h.nentries > 0:
                probability = ufloat((0.5/h.nentries, 0.5/h.nentries))
            else:
                probability = \
                    ufloat((1.0/len(h.bincenters), 1.0/len(h.bincenters)))

        log_likelihood += umath.log(probability)

    return -log_likelihood

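# Command-line driver: build the LBNE geometry, start one GPUThread per CUDA
# device, simulate a single event at the origin, and scan the negative log
# likelihood as the assumed source position is moved along one axis.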
if __name__ == '__main__':
    import sys
    import optparse
    import time

    parser = optparse.OptionParser('%prog')
    parser.add_option('-b', type='int', dest='nbits', default=8)
    parser.add_option('-j', type='int', dest='ndevices', default=1)
    parser.add_option('-n', type='int', dest='nblocks', default=64)
    options, args = parser.parse_args()

    lbne = LBNE()
    lbne.build(bits=options.nbits)

    cuda.init()

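    # One worker thread per requested device, all sharing the same job and
    # output queues.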
    gputhreads = []
    for i in range(options.ndevices):
        gputhreads.append(GPUThread(i, lbne, jobs, output, options.nblocks))
        gputhreads[-1].start()

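    # Make sure the worker threads are stopped and joined even if the scan
    # fails or is interrupted.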
    try:
        event_times = generate_event()

        for z in np.linspace(-1.0, 1.0, 100):
            t0 = time.time()
            log_likelihood = likelihood(event_times, (z,0,0))
            elapsed = time.time() - t0
            print 'z = %5.2f, negative log likelihood = %s, elapsed %.2f sec' % (z, log_likelihood, elapsed)
    finally:
        for gputhread in gputhreads:
            gputhread.stop()

        for gputhread in gputhreads:
            gputhread.join()