a) Treat SNP genotype likelihoods just like indels, in the sense that they are always normalized as PLs, so one of them will always be zero. This creates minor numerical differences in QUAL and annotations due to numerical approximations in the AF computation.
b) Intermediate CombineVariants fixes; not ready yet.
This commit is contained in:
parent
80d7300de4
commit
0e74cc3c74
|
|
@ -31,6 +31,7 @@ import org.broadinstitute.sting.gatk.contexts.AlignmentContextUtils;
|
|||
import org.broadinstitute.sting.gatk.contexts.ReferenceContext;
|
||||
import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
|
||||
import org.broadinstitute.sting.utils.BaseUtils;
|
||||
import org.broadinstitute.sting.utils.MathUtils;
|
||||
import org.broadinstitute.sting.utils.baq.BAQ;
|
||||
import org.broadinstitute.sting.utils.exceptions.StingException;
|
||||
import org.broadinstitute.sting.utils.genotype.DiploidGenotype;
|
||||
|
|
@ -122,6 +123,11 @@ public class SNPGenotypeLikelihoodsCalculationModel extends GenotypeLikelihoodsC
|
|||
aList.add(refAllele);
|
||||
aList.add(altAllele);
|
||||
double[] dlike = new double[]{likelihoods[refGenotype.ordinal()],likelihoods[hetGenotype.ordinal()],likelihoods[homGenotype.ordinal()]} ;
|
||||
double maxElement = MathUtils.max(dlike[AlleleFrequencyCalculationModel.GenotypeType.AA.ordinal()],
|
||||
dlike[AlleleFrequencyCalculationModel.GenotypeType.AB.ordinal()],dlike[AlleleFrequencyCalculationModel.GenotypeType.BB.ordinal()]);
|
||||
for (int i=0; i < dlike.length; i++)
|
||||
dlike[i] -= maxElement;
|
||||
|
||||
GLs.put(sample.getKey(), new MultiallelicGenotypeLikelihoods(sample.getKey(),
|
||||
aList, dlike, getFilteredDepth(pileup)));
|
||||
}
|
||||
|
|
|
|||
|
|
@ -274,11 +274,11 @@ public class CombineVariants extends RodWalker<Integer, Integer> {
|
|||
else {
|
||||
mergedVCs = preMergedVCs;
|
||||
}
|
||||
|
||||
for ( VariantContext mergedVC : mergedVCs ) {
|
||||
// only operate at the start of events
|
||||
if ( mergedVC == null )
|
||||
continue;
|
||||
System.out.println(mergedVC.toString());
|
||||
|
||||
HashMap<String, Object> attributes = new HashMap<String, Object>(mergedVC.getAttributes());
|
||||
// re-compute chromosome counts
|
||||
|
|
|
|||
|
|
@ -594,7 +594,9 @@ public class VariantContextUtils {
|
|||
|
||||
// if we have more alternate alleles in the merged VC than in one or more of the original VCs, we need to strip out the GL/PLs (because they are no longer accurate)
|
||||
for ( VariantContext vc : VCs ) {
|
||||
if ( vc.alleles.size() != alleles.size() ) {
|
||||
if (vc.alleles.size() == 1)
|
||||
continue;
|
||||
if ( vc.alleles.size() != alleles.size()) {
|
||||
genotypes = stripPLs(genotypes);
|
||||
break;
|
||||
}
|
||||
|
|
|
|||
Loading…
Reference in New Issue