Get rid of a useless test/'optimization' that was carried over from UGv1. The new code is (minimally) faster with the same results.
git-svn-id: file:///humgen/gsa-scr1/gsa-engineering/svn_contents/trunk@4478 348d0f76-0448-11de-a6fe-93d51630548a
This commit is contained in:
parent
f28523e7de
commit
fd8351cd49
|
|
@ -61,8 +61,6 @@ public abstract class AlleleFrequencyCalculationModel implements Cloneable {
|
|||
protected Logger logger;
|
||||
protected PrintStream verboseWriter;
|
||||
|
||||
private int minAlleleFrequencyToTest;
|
||||
|
||||
protected AlleleFrequencyCalculationModel(int N, Logger logger, PrintStream verboseWriter) {
|
||||
this.N = N;
|
||||
this.logger = logger;
|
||||
|
|
@ -109,15 +107,6 @@ public abstract class AlleleFrequencyCalculationModel implements Cloneable {
|
|||
return generateCalls(contexts, GLs, AFofMaxLikelihood);
|
||||
}
|
||||
|
||||
// TODO: get rid of this optimization, it is wrong!
|
||||
protected int getMinAlleleFrequencyToTest() {
|
||||
return minAlleleFrequencyToTest;
|
||||
}
|
||||
|
||||
protected void setMinAlleleFrequencyToTest(int minAF) {
|
||||
minAlleleFrequencyToTest = minAF;
|
||||
}
|
||||
|
||||
protected Map<String, Genotype> generateCalls(Map<String, StratifiedAlignmentContext> contexts,
|
||||
Map<String, BiallelicGenotypeLikelihoods> GLs,
|
||||
int frequency) {
|
||||
|
|
|
|||
|
|
@ -56,9 +56,6 @@ public class GridSearchAFEstimation extends AlleleFrequencyCalculationModel {
|
|||
log10AlleleFrequencyPosteriors[0] = AFMatrix.getLikelihoodsOfFrequency() + log10AlleleFrequencyPriors[0];
|
||||
double maxLikelihoodSeen = log10AlleleFrequencyPosteriors[0];
|
||||
|
||||
// TODO: get rid of this optimization, it is wrong!
|
||||
int minAlleleFrequencyToTest = getMinAlleleFrequencyToTest();
|
||||
|
||||
int maxAlleleFrequencyToTest = AFMatrix.getSamples().size() * 2;
|
||||
|
||||
// for each minor allele frequency, calculate log10PofDgivenAFi
|
||||
|
|
@ -71,7 +68,7 @@ public class GridSearchAFEstimation extends AlleleFrequencyCalculationModel {
|
|||
|
||||
// an optimization to speed up the calculation: if we are beyond the local maximum such
|
||||
// that subsequent likelihoods won't factor into the confidence score, just quit
|
||||
if ( i >= minAlleleFrequencyToTest && maxLikelihoodSeen - log10AlleleFrequencyPosteriors[i] > LOG10_OPTIMIZATION_EPSILON )
|
||||
if ( maxLikelihoodSeen - log10AlleleFrequencyPosteriors[i] > LOG10_OPTIMIZATION_EPSILON )
|
||||
return;
|
||||
|
||||
if ( log10AlleleFrequencyPosteriors[i] > maxLikelihoodSeen )
|
||||
|
|
|
|||
|
|
@ -142,10 +142,6 @@ public class UnifiedGenotyperEngine {
|
|||
if ( GLs.size() == 0 )
|
||||
return estimateReferenceConfidence(stratifiedContexts, genotypePriors.getHeterozygosity(), false);
|
||||
|
||||
// reset the optimization value and determine the p(AF>0)
|
||||
// TODO: get rid of this optimization, it is wrong!
|
||||
afcm.get().setMinAlleleFrequencyToTest(0);
|
||||
|
||||
// 'zero' out the AFs (so that we don't have to worry if not all samples have reads at this position)
|
||||
clearAFarray(log10AlleleFrequencyPosteriors.get());
|
||||
afcm.get().getLog10PNonRef(tracker, refContext, GLs, log10AlleleFrequencyPriors, log10AlleleFrequencyPosteriors.get());
|
||||
|
|
@ -221,9 +217,6 @@ public class UnifiedGenotyperEngine {
|
|||
double lod = overallLog10PofF - overallLog10PofNull;
|
||||
//System.out.println("overallLog10PofNull=" + overallLog10PofNull + ", overallLog10PofF=" + overallLog10PofF);
|
||||
|
||||
// set the optimization value for the subsequent strand calculations
|
||||
afcm.get().setMinAlleleFrequencyToTest(bestAFguess);
|
||||
|
||||
// the forward lod
|
||||
GLs.clear();
|
||||
glcm.get().getLikelihoods(tracker, refContext, stratifiedContexts, StratifiedAlignmentContext.StratifiedContextType.FORWARD, genotypePriors, GLs);
|
||||
|
|
|
|||
Loading…
Reference in New Issue