aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--src/likelihood.c18
-rw-r--r--src/sno_charge.c86
-rw-r--r--src/sno_charge.h1
-rw-r--r--src/test.c30
4 files changed, 120 insertions, 15 deletions
diff --git a/src/likelihood.c b/src/likelihood.c
index 696b52a..1822553 100644
--- a/src/likelihood.c
+++ b/src/likelihood.c
@@ -1089,24 +1089,12 @@ double nll_best(event *ev)
continue;
}
- /* Technically we should be computing:
+ /* Set the expected number of PE to whatever maximizes P(q|mu), i.e.
*
* mu[i] = max(p(q|mu))
*
- * where by max I mean we find the expected number of PE which
- * maximizes p(q|mu). However, I don't know of any easy analytical
- * solution to this. So instead we just compute the number of PE which
- * maximizes p(q|n). */
- for (j = 1; j < MAX_PE; j++) {
- logp[j] = get_log_pq(ev->pmt_hits[i].qhs,j);
-
- if (j == 1 || logp[j] > max_logp) {
- maxj = j;
- max_logp = logp[j];
- }
- }
-
- mu[i] = maxj;
+ */
+ mu[i] = get_most_likely_mean_pe(ev->pmt_hits[i].qhs);
ts[i] = ev->pmt_hits[i].t;
}
diff --git a/src/sno_charge.c b/src/sno_charge.c
index 4b51954..17e4ebc 100644
--- a/src/sno_charge.c
+++ b/src/sno_charge.c
@@ -22,12 +22,28 @@
#include <unistd.h>
#include <stdio.h>
#include "misc.h"
+#include <nlopt.h>
/* Maximum number of PE to compute charge PDFs for.
*
* For PE greater than this we compute the PDF using a Gaussian distribution. */
#define MAX_PE 100
+/* Maximum number of PE to consider when computing P(q|mu) in nlopt_log_pq().
+ * This should be at least as large as the maximum charge we expect to record
+ * in the detector which should be approximately (4095-600)/30 ~ 100 for QHS.
+ */
+#define MAX_PE_LOOKUP 1000
+
+/* Number of points to compute the table for when calculating the most likely
+ * number of mean PE given an observed charge. */
+#define N_CHARGE_MEAN_PE 1000
+
+/* Minimum and maximum charges to compute the table for when calculating the
+ * most likely number of mean PE given an observed charge. */
+static double xlo_mean_pe = -1.0;
+static double xhi_mean_pe = 100.0;
+
/* Minimum charge to compute the PDF at (in units of PE). */
double qlo = -1.0;
/* Maximum charge to compute the PDF at (in units of PE). */
@@ -90,6 +106,76 @@ static double QSMEAR_ADC = 3.61;
/* Mean of simulated threshold distribution. */
static double MEAN_THRESH = 8.5;
+static double nlopt_log_pq(unsigned int n, const double *x, double *grad, void *params)
+{
+ int i;
+ double logp[MAX_PE_LOOKUP];
+ double q;
+ double mu = *x;
+
+ q = ((double *) params)[0];
+
+ for (i = 1; i < MAX_PE_LOOKUP; i++)
+ logp[i] = get_log_pq(q,i) + get_log_phit(i) - mu + i*log(mu) - lnfact(i);
+
+ return -logsumexp(logp+1,MAX_PE_LOOKUP-1);
+}
+
+/* Returns the most likely mean number of PE to produce a given charge `q`,
+ * returns the `mu` which maximizes
+ *
+ * P(q|mu) = \sum_n P(q|n) P(n|mu)
+ *
+ * Note: You must call init_charge() before calling this function! */
+double get_most_likely_mean_pe(double q)
+{
+ size_t i;
+ static int initialized = 0;
+ static double xs[N_CHARGE_MEAN_PE];
+ static double ys[N_CHARGE_MEAN_PE];
+ double lb[1];
+ double ub[1];
+ double ss[1];
+ double fval;
+ nlopt_opt opt;
+
+ if (!initialized) {
+ /* Create the minimizer object. */
+ opt = nlopt_create(NLOPT_LN_BOBYQA, 1);
+
+ lb[0] = xlo_mean_pe;
+ ub[0] = xhi_mean_pe/qmean + 10.0;
+ ss[0] = 0.1;
+
+ nlopt_set_lower_bounds(opt, lb);
+ nlopt_set_upper_bounds(opt, ub);
+ nlopt_set_initial_step(opt, ss);
+ nlopt_set_xtol_abs1(opt, 1e-10);
+
+ for (i = 0; i < LEN(xs); i++) {
+ xs[i] = xlo_mean_pe + (xhi_mean_pe-xlo_mean_pe)*i/(LEN(xs)-1);
+ nlopt_set_min_objective(opt,nlopt_log_pq,&xs[i]);
+ ys[i] = fmax(xs[i]/qmean,0.1);
+ nlopt_optimize(opt,&ys[i],&fval);
+ /* This is a bit of a hack, but there is a discontinuity in
+ * get_log_pq() when it switches over from using the Polya
+ * distribution to a gaussian which I think is causing the
+ * optimization to occasionally stop too early. By minimizing
+ * twice, it seems to fix itself and the most likely number of PE
+ * is a smooth monotonic function of the charge. */
+ nlopt_optimize(opt,&ys[i],&fval);
+ }
+
+ nlopt_destroy(opt);
+
+ initialized = 1;
+ }
+
+ if (q > xhi_mean_pe) return q/qmean;
+
+ return interp1d(q,xs,ys,LEN(xs));
+}
+
double spe_pol2exp(double q)
{
/* Returns the probability distribution to get a charge `q` on a PMT with a
diff --git a/src/sno_charge.h b/src/sno_charge.h
index 141ad28..549ca2c 100644
--- a/src/sno_charge.h
+++ b/src/sno_charge.h
@@ -17,6 +17,7 @@
#ifndef SNO_CHARGE_H
#define SNO_CHARGE_H
+double get_most_likely_mean_pe(double q);
void init_charge(void);
double get_log_pq(double q, int n);
double get_pq(double q, int n);
diff --git a/src/test.c b/src/test.c
index c2b7466..0513598 100644
--- a/src/test.c
+++ b/src/test.c
@@ -2126,6 +2126,35 @@ err:
return 1;
}
+int test_get_most_likely_mean_pe(char *err)
+{
+ /* Tests that the get_most_likely_mean_pe() function returns values which
+ * are monotonically increasing. */
+ size_t i;
+ double q, result, last_result;
+ size_t n = 100;
+
+ init_charge();
+
+ for (i = 0; i < n; i++) {
+ q = i*1000.0/(n-1);
+
+ result = get_most_likely_mean_pe(q);
+
+ if (i > 0 && result < last_result) {
+ sprintf(err, "get_most_likely_mean_pe() returned %.5g for q = %.2f, but expected something bigger than %.5g", result, q, last_result);
+ goto err;
+ }
+
+ last_result = result;
+ }
+
+ return 0;
+
+err:
+ return 1;
+}
+
struct tests {
testFunction *test;
char *name;
@@ -2179,6 +2208,7 @@ struct tests {
{test_electron_get_angular_pdf_norm, "test_electron_get_angular_pdf_norm"},
{test_fast_acos, "test_fast_acos"},
{test_fast_sqrt, "test_fast_sqrt"},
+ {test_get_most_likely_mean_pe, "test_get_most_likely_mean_pe"},
};
int main(int argc, char **argv)