From 8beebac19b9351cdc6108ea31eeda6531d75540b Mon Sep 17 00:00:00 2001
From: tlatorre
Date: Thu, 13 Sep 2018 09:53:47 -0500
Subject: speed things up by introducing a minimum ratio between probabilities

Previously, to avoid computing P(q,t|n)*P(n|mu) for large n when those
terms were very unlikely, I used a precomputed maximum n value based only
on the expected number of PE. However, this didn't take into account
P(q|n). This commit updates the likelihood function to dynamically decide
when to quit computing these probabilities: it stops once the probability
for a given n, divided by the most probable value, falls below some
threshold. The threshold is currently set to 10**(-10), which means we
quit calculating these probabilities once they are 10 billion times less
likely than the most probable value.
---
 src/likelihood.h | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

(limited to 'src/likelihood.h')

diff --git a/src/likelihood.h b/src/likelihood.h
index 8ddaf2a..0da7db9 100644
--- a/src/likelihood.h
+++ b/src/likelihood.h
@@ -10,7 +10,13 @@
  *
  * Note: This must be less than MAX_PE. */
 #define MAX_PE_NO_HIT 10
-#define STD_MAX 10
+
+/* To speed things up we quit calculating the probability of a hit when the
+ * ratio between the current probability and the maximum probability is less
+ * than 10**MIN_RATIO. So if MIN_RATIO is -10, that means that we ignore
+ * probabilities which are 10 billion times less likely than the most probable
+ * value for n. */
+#define MIN_RATIO -10
 
 #define CHARGE_FRACTION 60000.0
 #define DARK_RATE 500.0
--
cgit
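
As an illustration of the early-exit strategy this commit describes, here is
a minimal, self-contained C sketch. It is not the actual likelihood.c code:
the Poisson P(n|mu), the Gaussian P(q|n), and the names charge_likelihood,
p_n_given_mu, p_q_given_n, and MAX_PE are toy stand-ins invented for this
example; only the MIN_RATIO cutoff and the ratio test against the running
maximum come from the patch.

    #include <stdio.h>
    #include <math.h>

    #define MIN_RATIO -10   /* quit once a term is 10**10 times less likely */
    #define MAX_PE    1000  /* hard upper bound on n (toy value) */

    /* Toy P(n|mu): Poisson probability of n PE given an expectation of mu. */
    static double p_n_given_mu(int n, double mu)
    {
        return exp(n*log(mu) - mu - lgamma(n + 1.0));
    }

    /* Toy P(q|n): Gaussian charge PDF with mean n and width 0.4*sqrt(n). */
    static double p_q_given_n(double q, int n)
    {
        double sigma = 0.4*sqrt((double) n);
        double x = (q - n)/sigma;
        return exp(-0.5*x*x)/(sigma*sqrt(2*M_PI));
    }

    /* Sum P(q|n)*P(n|mu) over n, but stop as soon as the current term
     * divided by the largest term seen so far drops below 10**MIN_RATIO. */
    static double charge_likelihood(double q, double mu)
    {
        double total = 0.0, max_p = 0.0;
        int n;

        for (n = 1; n <= MAX_PE; n++) {
            double p = p_q_given_n(q, n)*p_n_given_mu(n, mu);

            if (p > max_p)
                max_p = p;
            else if (p/max_p < pow(10.0, MIN_RATIO))
                break;  /* everything past here is negligible */

            total += p;
        }

        return total;
    }

    int main(void)
    {
        printf("P(q=2.7|mu=3.0) ~ %g\n", charge_likelihood(2.7, 3.0));
        return 0;
    }

Note that the cutoff is relative rather than absolute, so the truncation
point adapts to whatever scale the probabilities happen to have, and it
accounts for P(q|n) as well as P(n|mu). Because the term sequence here is
unimodal in n, the terms only shrink once the ratio starts falling, so
breaking out of the loop discards nothing of consequence.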