commit 8beebac19b9351cdc6108ea31eeda6531d75540b
tree   977bb6bf8bac38d2c964b5f82a8c22c70a59b159
parent fad85fa60a3b6b0e015ad070431f249f87cf77be
Author: tlatorre <tlatorre@uchicago.edu>
Date:   2018-09-13 09:53:47 -0500
speed things up by introducing a minimum ratio between probabilities
Previously, to avoid computing P(q,t|n)*P(n|mu) for large n when those terms were very unlikely, I used a precomputed maximum n based only on the expected number of PE. However, that bound didn't take P(q|n) into account. This commit updates the likelihood function to decide dynamically when to quit computing these probabilities: we stop as soon as the probability for a given n divided by the most likely probability drops below some threshold. The threshold is currently set to 10**(-10), which means we quit calculating these probabilities once a term is 10 billion times less likely than the most probable value.
Diffstat (limited to 'src')
 src/likelihood.c | 18 ++++++++++--------
 src/likelihood.h |  8 +++++++-
 2 files changed, 17 insertions(+), 9 deletions(-)
diff --git a/src/likelihood.c b/src/likelihood.c
index 2e0f135..6ddd576 100644
--- a/src/likelihood.c
+++ b/src/likelihood.c
@@ -328,9 +328,8 @@ double nll_muon(event *ev, double T0, double *pos, double *dir, double t0, doubl
     size_t i, j, nhit;
     intParams params;
     double total_charge;
-    double logp[MAX_PE], nll[MAX_PMTS], range, theta0, E0, p0, beta0, smax, log_mu;
+    double logp[MAX_PE], nll[MAX_PMTS], range, theta0, E0, p0, beta0, smax, log_mu, max_logp;
     double tmean = 0.0;
-    int jmax;
 
     double mu_direct[MAX_PMTS];
     double ts[MAX_PMTS];
@@ -411,14 +410,17 @@ double nll_muon(event *ev, double T0, double *pos, double *dir, double t0, doubl
         log_mu = log(mu[i]);
 
         if (ev->pmt_hits[i].hit) {
-            jmax = (int) ceil(fmax(mu[i],1)*STD_MAX + fmax(mu[i],1));
-
-            if (jmax > MAX_PE) jmax = MAX_PE;
-
-            for (j = 1; j < jmax; j++) {
+            for (j = 1; j < MAX_PE; j++) {
                 logp[j] = log(pq(ev->pmt_hits[i].qhs,j)) - mu[i] + j*log_mu - lnfact(j) + log_pt(ev->pmt_hits[i].t, j, mu_noise, mu_indirect, &mu_direct[i], 1, &ts[i], tmean, 1.5);
+                if (j == 1 || logp[j] > max_logp) max_logp = logp[j];
+
+                if (logp[j] - max_logp < MIN_RATIO*log(10)) {
+                    j++;
+                    break;
+                }
             }
-            nll[nhit++] = -logsumexp(logp+1, jmax-1);
+
+            nll[nhit++] = -logsumexp(logp+1, j-1);
         } else {
             logp[0] = -mu[i];
             if (fast) {
diff --git a/src/likelihood.h b/src/likelihood.h
index 8ddaf2a..0da7db9 100644
--- a/src/likelihood.h
+++ b/src/likelihood.h
@@ -10,7 +10,13 @@
  *
  * Note: This must be less than MAX_PE. */
 #define MAX_PE_NO_HIT 10
-#define STD_MAX 10
+
+/* To speed things up we quit calculating the probability of a hit when the
+ * ratio between the current probability and the maximum probability is less
+ * than 10**MIN_RATIO. So if MIN_RATIO is -10, that means we ignore
+ * probabilities which are 10 billion times less likely than the most
+ * probable value for n. */
+#define MIN_RATIO -10
 #define CHARGE_FRACTION 60000.0
 #define DARK_RATE 500.0