summaryrefslogtreecommitdiff
path: root/threadtest.py
diff options
context:
space:
mode:
Diffstat (limited to 'threadtest.py')
-rw-r--r--threadtest.py104
1 files changed, 104 insertions, 0 deletions
diff --git a/threadtest.py b/threadtest.py
new file mode 100644
index 0000000..d76224f
--- /dev/null
+++ b/threadtest.py
@@ -0,0 +1,104 @@
+from gputhread import *
+from Queue import Queue
+from sample import uniform_sphere
+import numpy as np
+from pycuda import gpuarray
+import pycuda.driver as cuda
+from uncertainties import ufloat, umath
+from histogram import Histogram
+
# Shared work queues for the GPU worker threads: photon-propagation jobs
# are put on `jobs`, and completed jobs come back on `output`.
# NOTE(review): Queue.join() is used below, so the GPU threads are assumed
# to call task_done() per job -- confirm in gputhread.GPUThread.
jobs = Queue()
output = Queue()
+
def create_job(position=(0,0,0), nphotons=5000, max_steps=10):
    """Build a propagation Job of `nphotons` photons starting at `position`.

    Directions and polarizations are drawn from uniform_sphere(),
    wavelengths uniformly on [200, 800); times start at zero.  The state
    and last-hit-triangle arrays are filled with -1 (presumably meaning
    "unset" -- confirm against the GPU kernel).
    """
    # Every photon starts at the same point.
    start_points = np.tile(position, nphotons).reshape((nphotons, 3))
    t0 = np.zeros(nphotons)
    # Random draws kept in the original order so the RNG stream is identical.
    dirs = uniform_sphere(nphotons)
    pols = uniform_sphere(nphotons)
    wavelen = np.random.uniform(200, 800, size=nphotons)
    # Two distinct arrays on purpose: the Job may mutate them independently.
    states = -np.ones(nphotons)
    last_hits = -np.ones(nphotons)
    return Job(start_points, dirs, wavelen, pols, t0,
               states, last_hits, max_steps)
+
def generate_event(detector, position=(0,0,0), nphotons=5000):
    """Simulate one event and return its list of (pmtid, hit_time) pairs.

    Submits a single job to the GPU worker queue, waits for it, and keeps
    only PMTs whose earliest time is below 1e8 (apparently the "never hit"
    sentinel -- confirm against the propagation kernel).
    """
    jobs.put(create_job(position, nphotons))
    jobs.join()

    finished = output.get()

    pmt_times = finished.earliest_times[detector.pmtids]
    event_times = []
    for pmtid, t in zip(detector.pmtids, pmt_times):
        if t < 1e8:
            event_times.append((pmtid, t))
    print('%i hit pmts' % len(event_times))
    return event_times
+
def likelihood(detector, event_times, position=(0,0,0), nphotons=5000, neval=100):
    """Return the negative log-likelihood of `event_times` for a source
    at `position`.

    Runs `neval` independent simulations, histograms the earliest hit
    time of each hit PMT across those runs, and sums log-probabilities of
    the observed hit times.  Zero-probability bins get a small fallback
    value so log() stays finite.
    """
    # Queue all evaluations up front and wait for the workers to finish.
    for _ in range(neval):
        jobs.put(create_job(position, nphotons))
    jobs.join()

    npmt = max(detector.pmtids) + 1
    t = np.zeros(shape=(neval, npmt), dtype=np.float32)
    for run in range(neval):
        t[run] = output.get().earliest_times

    log_likelihood = ufloat((0,0))
    for pmtid, hit_time in event_times:
        # Fresh 100-bin histogram per PMT over [-0.5, 99.5] ns.
        h = Histogram(100, (-0.5e-9, 99.5e-9))
        h.fill(t[:,pmtid])
        if h.nentries > 0:
            h.normalize()

        probability = h.ueval(hit_time)

        if probability.nominal_value == 0.0:
            # Avoid log(0): substitute half a count (or a flat prior when
            # the histogram is empty), with an equal uncertainty.
            if h.nentries > 0:
                fallback = 0.5/h.nentries
            else:
                fallback = 1.0/len(h.bincenters)
            probability = ufloat((fallback, fallback))

        log_likelihood += umath.log(probability)

    return -log_likelihood
+
if __name__ == '__main__':
    import optparse
    import time

    # -b: BVH bits, -j: number of CUDA devices/worker threads, -n: CUDA blocks.
    parser = optparse.OptionParser('%prog')
    parser.add_option('-b', type='int', dest='nbits', default=8)
    parser.add_option('-j', type='int', dest='ndevices', default=1)
    parser.add_option('-n', type='int', dest='nblocks', default=64)
    options, args = parser.parse_args()

    from detectors import lbne_cut, minilbne_cut, minilbne, microlbne

    detector = minilbne_cut

    detector.build(bits=options.nbits)

    cuda.init()

    # One worker thread per requested device, all sharing the module-level
    # jobs/output queues.
    gputhreads = []
    for i in range(options.ndevices):
        gputhreads.append(GPUThread(i, detector, jobs, output, options.nblocks))
        gputhreads[-1].start()

    try:
        event_times = generate_event(detector)

        # Scan the source along x and time each likelihood evaluation.
        for z in np.linspace(-1.0, 1.0, 100):
            t0 = time.time()
            log_likelihood = likelihood(detector, event_times, (z,0,0))
            elapsed = time.time() - t0
            print('z = %5.2f, likelihood = %s, elapsed %.2f sec' % (z, log_likelihood, elapsed))
    finally:
        # Fix: join the workers inside finally as well.  Previously the
        # join loop sat after the try/finally, so a propagating exception
        # stopped the threads but never waited for them to exit.
        for gputhread in gputhreads:
            gputhread.stop()

        for gputhread in gputhreads:
            gputhread.join()