"""Toy GPU photon-propagation likelihood scan.

Photons are generated isotropically from a point source, propagated through
the LBNE geometry on one or more GPUs via worker threads, and the earliest
hit time on each PMT is used to build a likelihood for the source position.
"""
from gputhread import *
from Queue import Queue
from detectors import LBNE
from sample import uniform_sphere
import numpy as np
from pycuda import gpuarray
import pycuda.driver as cuda
from uncertainties import ufloat, umath
from histogram import Histogram

jobs = Queue()
output = Queue()

def create_job(position=(0,0,0), nphotons=10000, max_steps=10):
    """Return a Job of `nphotons` photons starting at `position`, with
    isotropic directions and polarizations and wavelengths drawn uniformly
    between 200 and 800 nm."""
    positions = np.tile(position, nphotons).reshape((nphotons, 3))
    directions = uniform_sphere(nphotons)
    polarizations = uniform_sphere(nphotons)
    wavelengths = np.random.uniform(200, 800, size=nphotons)
    times = np.zeros(nphotons)
    # initialized to -1; filled in by the GPU propagation step
    states = -np.ones(nphotons)
    last_hit_triangles = -np.ones(nphotons)

    return Job(positions, directions, wavelengths, polarizations,
               times, states, last_hit_triangles, max_steps)

def generate_event(position=(0,0,0), nphotons=10000):
    """Simulate a single event at `position` and return an array of
    (pmt_id, earliest hit time) pairs for every PMT that detected at
    least one photon."""
    jobs.put(create_job(position, nphotons))
    jobs.join()

    job = output.get()

    last_hit_triangles = job.last_hit_triangles
    solids = lbne.solid_id[last_hit_triangles]
    # photons that never hit a triangle get solid id -1
    solids[last_hit_triangles == -1] = -1
    # state 2 marks photons absorbed on a detector surface
    surface_absorbed = job.states == 2

    event_times = []
    for i in lbne.pmtids:
        photons = np.logical_and(solids == i, surface_absorbed)
        hit_times = job.times[photons]

        if len(hit_times) > 0:
            event_times.append((i, np.min(hit_times)))

    print event_times

    return np.array(event_times)

def likelihood(event_times, position=(0,0,0), nphotons=10000, neval=100):
    """Return the negative log likelihood of the observed `event_times`
    for a hypothesized source at `position`, estimated from `neval`
    independent propagation runs of `nphotons` photons each."""
    for i in range(neval):
        jobs.put(create_job(position, nphotons))
    jobs.join()

    # collect, per PMT, the earliest hit time from each evaluation run
    t = {}
    for i in range(neval):
        job = output.get()

        last_hit_triangles = job.last_hit_triangles
        solids = lbne.solid_id[job.last_hit_triangles]
        solids[last_hit_triangles == -1] = -1
        surface_absorbed = job.states == 2

        for j in lbne.pmtids:
            pmt_photons = solids == j
            photons = np.logical_and(pmt_photons, surface_absorbed)

            if j not in t:
                t[j] = []

            hit_times = job.times[photons]

            if len(hit_times) > 0:
                t[j].append(np.min(hit_times))

    log_likelihood = ufloat((0,0))
    for i, time in event_times:
        # histogram the simulated first-hit times for this PMT
        h = Histogram(100, (-0.5e-9, 99.5e-9))
        h.fill(t[i])

        if h.nentries > 0:
            h.normalize()

        probability = h.ueval(time)

        if probability.nominal_value == 0.0:
            # avoid log(0): assign a conservative nonzero probability
            if h.nentries > 0:
                probability = ufloat((0.5/h.nentries, 0.5/h.nentries))
            else:
                probability = \
                    ufloat((1.0/len(h.bincenters), 1.0/len(h.bincenters)))

        log_likelihood += umath.log(probability)

    return -log_likelihood

if __name__ == '__main__':
    import sys
    import optparse
    import time

    parser = optparse.OptionParser('%prog')
    parser.add_option('-b', type='int', dest='nbits', default=8)
    parser.add_option('-j', type='int', dest='ndevices', default=1)
    parser.add_option('-n', type='int', dest='nblocks', default=64)
    options, args = parser.parse_args()

    lbne = LBNE()
    lbne.build(bits=options.nbits)

    cuda.init()

    # start one worker thread per GPU device
    gputhreads = []
    for i in range(options.ndevices):
        gputhreads.append(GPUThread(i, lbne, jobs, output, options.nblocks))
        gputhreads[-1].start()

    try:
        event_times = generate_event()

        # scan the source position along z and evaluate the likelihood
        for z in np.linspace(-1.0, 1.0, 100):
            t0 = time.time()
            log_likelihood = likelihood(event_times, (z,0,0))
            elapsed = time.time() - t0
            print 'z = %5.2f, likelihood = %s, elapsed %.2f sec' % \
                (z, log_likelihood, elapsed)
    finally:
        for gputhread in gputhreads:
            gputhread.stop()

        for gputhread in gputhreads:
            gputhread.join()