Renamed StratifiedAlignmentContext to AlignmentContextUtils, and StratifiedContextType to ReadOrientation. Also went through the system and deleted all references to second bases; that ship sailed long ago. This was the actual commit; the previous one was an IntelliJ error.

git-svn-id: file:///humgen/gsa-scr1/gsa-engineering/svn_contents/trunk@5564 348d0f76-0448-11de-a6fe-93d51630548a
This commit is contained in:
depristo 2011-04-03 15:36:17 +00:00
parent 349661b958
commit b316c9a590
21 changed files with 43 additions and 421 deletions

View File

@ -37,16 +37,16 @@ import java.util.*;
* Useful utilities for storing different AlignmentContexts
* User: ebanks
*/
public class StratifiedAlignmentContext {
public class AlignmentContextUtils {
// Definitions:
// COMPLETE = full alignment context
// FORWARD = reads on forward strand
// REVERSE = reads on reverse strand
//
public enum StratifiedContextType { COMPLETE, FORWARD, REVERSE }
public enum ReadOrientation { COMPLETE, FORWARD, REVERSE }
private StratifiedAlignmentContext() {
private AlignmentContextUtils() {
// cannot be instantiated
}
@ -58,7 +58,7 @@ public class StratifiedAlignmentContext {
* @param type
* @return
*/
public static AlignmentContext stratify(AlignmentContext context, StratifiedContextType type) {
public static AlignmentContext stratify(AlignmentContext context, ReadOrientation type) {
switch(type) {
case COMPLETE:
return context;

View File

@ -65,15 +65,6 @@ public class PileupWalker extends LocusWalker<Integer, Integer> implements TreeR
@Output
PrintStream out;
@Argument(fullName="alwaysShowSecondBase",doc="If true, prints dummy bases for the second bases in the BAM file where they are missing",required=false)
public boolean alwaysShowSecondBase = false;
//@Argument(fullName="qualsAsInts",doc="If true, prints out qualities in the pileup as comma-separated integers",required=false)
//public boolean qualsAsInts = false;
//@Argument(fullName="ignore_uncovered_bases",shortName="skip_uncov",doc="Output nothing when a base is uncovered")
//public boolean IGNORE_UNCOVERED_BASES = false;
@Argument(fullName="showIndelPileups",shortName="show_indels",doc="In addition to base pileups, generate pileups of extended indel events")
public boolean SHOW_INDEL_PILEUPS = false;
@ -87,14 +78,8 @@ public class PileupWalker extends LocusWalker<Integer, Integer> implements TreeR
String rods = getReferenceOrderedData( tracker );
if ( context.hasBasePileup() ) {
ReadBackedPileup basePileup = context.getBasePileup();
String secondBasePileup = "";
if(shouldShowSecondaryBasePileup(basePileup))
secondBasePileup = getSecondBasePileup(basePileup);
out.printf("%s%s %s%n", basePileup.getPileupString(ref.getBaseAsChar()), secondBasePileup, rods);
out.printf("%s %s%n", basePileup.getPileupString(ref.getBaseAsChar()), rods);
}
if ( context.hasExtendedEventPileup() ) {
@ -120,27 +105,6 @@ public class PileupWalker extends LocusWalker<Integer, Integer> implements TreeR
return lhs + rhs;
}
/**
* Should the secondary base be shown under all circumstances?
* @param pileup The ReadBackedPileup at the current locus.
* @return True, if a secondary base pileup should always be shown.
*/
private boolean shouldShowSecondaryBasePileup( ReadBackedPileup pileup ) {
return ( pileup.hasSecondaryBases() || alwaysShowSecondBase );
}
/**
* Gets second base information for the pileup, if requested.
* @param pileup Pileup from which to extract secondary base info.
* @return String representation of the secondary base.
*/
private String getSecondBasePileup( ReadBackedPileup pileup ) {
if( pileup.hasSecondaryBases() )
return " " + new String(pileup.getSecondaryBases());
else
return " " + Utils.dupString('N', pileup.size());
}
/**
* Get a string representation the reference-ordered data.
* @param tracker Container for the reference-ordered data.

View File

@ -29,7 +29,7 @@ import org.broad.tribble.util.variantcontext.VariantContext;
import org.broad.tribble.vcf.VCFHeaderLineType;
import org.broad.tribble.vcf.VCFInfoHeaderLine;
import org.broadinstitute.sting.gatk.contexts.ReferenceContext;
import org.broadinstitute.sting.gatk.contexts.StratifiedAlignmentContext;
import org.broadinstitute.sting.gatk.contexts.AlignmentContextUtils;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.*;
@ -42,7 +42,6 @@ import java.util.*;
import net.sf.samtools.SAMRecord;
import org.broadinstitute.sting.utils.sam.AlignmentUtils;
import org.broadinstitute.sting.utils.sam.GATKSAMRecord;
import org.broadinstitute.sting.utils.sam.ReadUtils;
public class HaplotypeScore implements InfoFieldAnnotation, StandardAnnotation {
private final static boolean DEBUG = false;
@ -60,7 +59,7 @@ public class HaplotypeScore implements InfoFieldAnnotation, StandardAnnotation {
if ( !vc.isBiallelic() || stratifiedContexts.size() == 0 ) // size 0 means that call was made by someone else and we have no data here
return null;
final AlignmentContext context = StratifiedAlignmentContext.joinContexts(stratifiedContexts.values());
final AlignmentContext context = AlignmentContextUtils.joinContexts(stratifiedContexts.values());
final int contextWingSize = Math.min(((int)ref.getWindow().size() - 1)/2, MIN_CONTEXT_WING_SIZE);
final int contextSize = contextWingSize * 2 + 1;

View File

@ -29,8 +29,8 @@ import org.broad.tribble.util.variantcontext.VariantContext;
import org.broad.tribble.vcf.*;
import org.broadinstitute.sting.commandline.Hidden;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.gatk.contexts.AlignmentContextUtils;
import org.broadinstitute.sting.gatk.contexts.ReferenceContext;
import org.broadinstitute.sting.gatk.contexts.StratifiedAlignmentContext;
import org.broadinstitute.sting.gatk.contexts.variantcontext.VariantContextUtils;
import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
import org.broadinstitute.sting.gatk.walkers.*;
@ -212,9 +212,9 @@ public class VariantAnnotator extends RodWalker<Integer, Integer> {
Map<String, AlignmentContext> stratifiedContexts;
if ( BaseUtils.simpleBaseToBaseIndex(ref.getBase()) != -1 ) {
if ( ! context.hasExtendedEventPileup() ) {
stratifiedContexts = StratifiedAlignmentContext.splitContextBySampleName(context.getBasePileup(), ASSUME_SINGLE_SAMPLE);
stratifiedContexts = AlignmentContextUtils.splitContextBySampleName(context.getBasePileup(), ASSUME_SINGLE_SAMPLE);
} else {
stratifiedContexts = StratifiedAlignmentContext.splitContextBySampleName(context.getExtendedEventPileup(), ASSUME_SINGLE_SAMPLE);
stratifiedContexts = AlignmentContextUtils.splitContextBySampleName(context.getExtendedEventPileup(), ASSUME_SINGLE_SAMPLE);
}
if ( stratifiedContexts != null ) {
annotatedVCs = new ArrayList<VariantContext>(VCs.size());

View File

@ -39,7 +39,7 @@ import org.broadinstitute.sting.commandline.Argument;
import org.broadinstitute.sting.commandline.Output;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.gatk.contexts.ReferenceContext;
import org.broadinstitute.sting.gatk.contexts.StratifiedAlignmentContext;
import org.broadinstitute.sting.gatk.contexts.AlignmentContextUtils;
import org.broadinstitute.sting.gatk.datasources.rmd.ReferenceOrderedDataSource;
import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
import org.broadinstitute.sting.gatk.refdata.features.annotator.AnnotatorInputTableCodec;
@ -249,7 +249,7 @@ public class GenomicAnnotator extends RodWalker<Integer, Integer> implements Tre
(vc.isVariant() && !vc.isBiallelic()) ) {
results.add(vc);
} else {
Map<String, AlignmentContext> stratifiedContexts = StratifiedAlignmentContext.splitContextBySampleName(context);
Map<String, AlignmentContext> stratifiedContexts = AlignmentContextUtils.splitContextBySampleName(context);
if ( stratifiedContexts != null )
results.addAll(engine.annotateContext(tracker, ref, stratifiedContexts, vc));
else

View File

@ -29,6 +29,7 @@ import net.sf.samtools.SAMRecord;
import org.apache.log4j.Logger;
import org.broad.tribble.util.variantcontext.VariantContext;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.gatk.contexts.AlignmentContextUtils;
import org.broadinstitute.sting.gatk.walkers.indels.HaplotypeIndelErrorModel;
import org.broadinstitute.sting.gatk.walkers.indels.PairHMMIndelErrorModel;
import org.broadinstitute.sting.utils.BaseUtils;
@ -39,7 +40,6 @@ import org.broadinstitute.sting.utils.genotype.Haplotype;
import org.broadinstitute.sting.utils.pileup.ExtendedEventPileupElement;
import org.broadinstitute.sting.utils.pileup.ReadBackedExtendedEventPileup;
import org.broadinstitute.sting.utils.pileup.ReadBackedPileup;
import org.broadinstitute.sting.gatk.contexts.StratifiedAlignmentContext;
import org.broadinstitute.sting.gatk.contexts.ReferenceContext;
import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
import org.broad.tribble.util.variantcontext.Allele;
@ -86,7 +86,7 @@ public class DindelGenotypeLikelihoodsCalculationModel extends GenotypeLikelihoo
private ArrayList<Allele> computeConsensusAlleles(ReferenceContext ref,
Map<String, AlignmentContext> contexts,
StratifiedAlignmentContext.StratifiedContextType contextType) {
AlignmentContextUtils.ReadOrientation contextType) {
Allele refAllele=null, altAllele=null;
GenomeLoc loc = ref.getLocus();
ArrayList<Allele> aList = new ArrayList<Allele>();
@ -100,7 +100,7 @@ public class DindelGenotypeLikelihoodsCalculationModel extends GenotypeLikelihoo
int insCount = 0, delCount = 0;
// quick check of total number of indels in pileup
for ( Map.Entry<String, AlignmentContext> sample : contexts.entrySet() ) {
AlignmentContext context = StratifiedAlignmentContext.stratify(sample.getValue(), contextType);
AlignmentContext context = AlignmentContextUtils.stratify(sample.getValue(), contextType);
final ReadBackedExtendedEventPileup indelPileup = context.getExtendedEventPileup();
insCount += indelPileup.getNumberOfInsertions();
@ -112,7 +112,7 @@ public class DindelGenotypeLikelihoodsCalculationModel extends GenotypeLikelihoo
for ( Map.Entry<String, AlignmentContext> sample : contexts.entrySet() ) {
// todo -- warning, can be duplicating expensive partition here
AlignmentContext context = StratifiedAlignmentContext.stratify(sample.getValue(), contextType);
AlignmentContext context = AlignmentContextUtils.stratify(sample.getValue(), contextType);
final ReadBackedExtendedEventPileup indelPileup = context.getExtendedEventPileup();
@ -268,7 +268,7 @@ public class DindelGenotypeLikelihoodsCalculationModel extends GenotypeLikelihoo
public Allele getLikelihoods(RefMetaDataTracker tracker,
ReferenceContext ref,
Map<String, AlignmentContext> contexts,
StratifiedAlignmentContext.StratifiedContextType contextType,
AlignmentContextUtils.ReadOrientation contextType,
GenotypePriors priors,
Map<String, BiallelicGenotypeLikelihoods> GLs,
Allele alternateAlleleToUse) {
@ -355,7 +355,7 @@ public class DindelGenotypeLikelihoodsCalculationModel extends GenotypeLikelihoo
for ( Map.Entry<String, AlignmentContext> sample : contexts.entrySet() ) {
AlignmentContext context = StratifiedAlignmentContext.stratify(sample.getValue(), contextType);
AlignmentContext context = AlignmentContextUtils.stratify(sample.getValue(), contextType);
ReadBackedPileup pileup = null;
if (context.hasExtendedEventPileup())

View File

@ -27,7 +27,7 @@ package org.broadinstitute.sting.gatk.walkers.genotyper;
import org.apache.log4j.Logger;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.gatk.contexts.StratifiedAlignmentContext;
import org.broadinstitute.sting.gatk.contexts.AlignmentContextUtils;
import org.broadinstitute.sting.gatk.contexts.ReferenceContext;
import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
import org.broad.tribble.util.variantcontext.Allele;
@ -80,7 +80,7 @@ public abstract class GenotypeLikelihoodsCalculationModel implements Cloneable {
public abstract Allele getLikelihoods(RefMetaDataTracker tracker,
ReferenceContext ref,
Map<String, AlignmentContext> contexts,
StratifiedAlignmentContext.StratifiedContextType contextType,
AlignmentContextUtils.ReadOrientation contextType,
GenotypePriors priors,
Map<String, BiallelicGenotypeLikelihoods> GLs,
Allele alternateAlleleToUse);

View File

@ -27,12 +27,12 @@ package org.broadinstitute.sting.gatk.walkers.genotyper;
import org.broad.tribble.util.variantcontext.VariantContext;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.gatk.contexts.AlignmentContextUtils;
import org.broadinstitute.sting.utils.*;
import org.broadinstitute.sting.utils.exceptions.StingException;
import org.broadinstitute.sting.utils.genotype.DiploidGenotype;
import org.broadinstitute.sting.utils.pileup.ReadBackedPileup;
import org.broadinstitute.sting.utils.pileup.PileupElement;
import org.broadinstitute.sting.gatk.contexts.StratifiedAlignmentContext;
import org.broadinstitute.sting.gatk.contexts.ReferenceContext;
import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
import org.broad.tribble.util.variantcontext.Allele;
@ -56,7 +56,7 @@ public class SNPGenotypeLikelihoodsCalculationModel extends GenotypeLikelihoodsC
public Allele getLikelihoods(RefMetaDataTracker tracker,
ReferenceContext ref,
Map<String, AlignmentContext> contexts,
StratifiedAlignmentContext.StratifiedContextType contextType,
AlignmentContextUtils.ReadOrientation contextType,
GenotypePriors priors,
Map<String, BiallelicGenotypeLikelihoods> GLs,
Allele alternateAlleleToUse) {
@ -100,7 +100,7 @@ public class SNPGenotypeLikelihoodsCalculationModel extends GenotypeLikelihoodsC
Allele altAllele = Allele.create(bestAlternateAllele, false);
for ( Map.Entry<String, AlignmentContext> sample : contexts.entrySet() ) {
ReadBackedPileup pileup = StratifiedAlignmentContext.stratify(sample.getValue(), contextType).getBasePileup();
ReadBackedPileup pileup = AlignmentContextUtils.stratify(sample.getValue(), contextType).getBasePileup();
// create the GenotypeLikelihoods object
DiploidSNPGenotypeLikelihoods GL = new DiploidSNPGenotypeLikelihoods((DiploidSNPGenotypePriors)priors, UAC.PCR_error);

View File

@ -31,12 +31,11 @@ import org.broad.tribble.util.variantcontext.VariantContext;
import org.broad.tribble.util.variantcontext.Genotype;
import org.broad.tribble.util.variantcontext.Allele;
import org.broadinstitute.sting.gatk.GenomeAnalysisEngine;
import org.broadinstitute.sting.gatk.contexts.AlignmentContextUtils;
import org.broadinstitute.sting.gatk.filters.BadMateFilter;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.gatk.contexts.ReferenceContext;
import org.broadinstitute.sting.gatk.contexts.StratifiedAlignmentContext;
import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
import org.broadinstitute.sting.gatk.refdata.utils.helpers.DbSNPHelper;
import org.broadinstitute.sting.gatk.walkers.annotator.VariantAnnotatorEngine;
import org.broadinstitute.sting.utils.exceptions.ReviewedStingException;
import org.broadinstitute.sting.utils.exceptions.UserException;
@ -170,7 +169,7 @@ public class UnifiedGenotyperEngine {
if ( stratifiedContexts == null )
return (UAC.OutputMode != OUTPUT_MODE.EMIT_ALL_SITES ? null : new VariantCallContext(generateEmptyContext(tracker, refContext, stratifiedContexts, rawContext), refContext.getBase(), false));
VariantContext vc = calculateLikelihoods(tracker, refContext, stratifiedContexts, StratifiedAlignmentContext.StratifiedContextType.COMPLETE, null);
VariantContext vc = calculateLikelihoods(tracker, refContext, stratifiedContexts, AlignmentContextUtils.ReadOrientation.COMPLETE, null);
if ( vc == null )
return null;
@ -190,10 +189,10 @@ public class UnifiedGenotyperEngine {
Map<String, AlignmentContext> stratifiedContexts = getFilteredAndStratifiedContexts(UAC, refContext, rawContext);
if ( stratifiedContexts == null )
return null;
return calculateLikelihoods(tracker, refContext, stratifiedContexts, StratifiedAlignmentContext.StratifiedContextType.COMPLETE, alternateAlleleToUse);
return calculateLikelihoods(tracker, refContext, stratifiedContexts, AlignmentContextUtils.ReadOrientation.COMPLETE, alternateAlleleToUse);
}
private VariantContext calculateLikelihoods(RefMetaDataTracker tracker, ReferenceContext refContext, Map<String, AlignmentContext> stratifiedContexts, StratifiedAlignmentContext.StratifiedContextType type, Allele alternateAlleleToUse) {
private VariantContext calculateLikelihoods(RefMetaDataTracker tracker, ReferenceContext refContext, Map<String, AlignmentContext> stratifiedContexts, AlignmentContextUtils.ReadOrientation type, Allele alternateAlleleToUse) {
// initialize the data for this thread if that hasn't been done yet
if ( glcm.get() == null ) {
@ -230,7 +229,7 @@ public class UnifiedGenotyperEngine {
pileup = rawContext.getExtendedEventPileup();
else if (rawContext.hasBasePileup())
pileup = rawContext.getBasePileup();
stratifiedContexts = StratifiedAlignmentContext.splitContextBySampleName(pileup, UAC.ASSUME_SINGLE_SAMPLE);
stratifiedContexts = AlignmentContextUtils.splitContextBySampleName(pileup, UAC.ASSUME_SINGLE_SAMPLE);
vc = annotationEngine.annotateContext(tracker, ref, stratifiedContexts, vc).iterator().next();
}
@ -369,7 +368,7 @@ public class UnifiedGenotyperEngine {
if ( DEBUG_SLOD ) System.out.println("overallLog10PofF=" + overallLog10PofF);
// the forward lod
VariantContext vcForward = calculateLikelihoods(tracker, refContext, stratifiedContexts, StratifiedAlignmentContext.StratifiedContextType.FORWARD, vc.getAlternateAllele(0));
VariantContext vcForward = calculateLikelihoods(tracker, refContext, stratifiedContexts, AlignmentContextUtils.ReadOrientation.FORWARD, vc.getAlternateAllele(0));
clearAFarray(log10AlleleFrequencyPosteriors.get());
afcm.get().getLog10PNonRef(tracker, refContext, vcForward.getGenotypes(), log10AlleleFrequencyPriors, log10AlleleFrequencyPosteriors.get());
//double[] normalizedLog10Posteriors = MathUtils.normalizeFromLog10(log10AlleleFrequencyPosteriors.get(), true);
@ -378,7 +377,7 @@ public class UnifiedGenotyperEngine {
if ( DEBUG_SLOD ) System.out.println("forwardLog10PofNull=" + forwardLog10PofNull + ", forwardLog10PofF=" + forwardLog10PofF);
// the reverse lod
VariantContext vcReverse = calculateLikelihoods(tracker, refContext, stratifiedContexts, StratifiedAlignmentContext.StratifiedContextType.REVERSE, vc.getAlternateAllele(0));
VariantContext vcReverse = calculateLikelihoods(tracker, refContext, stratifiedContexts, AlignmentContextUtils.ReadOrientation.REVERSE, vc.getAlternateAllele(0));
clearAFarray(log10AlleleFrequencyPosteriors.get());
afcm.get().getLog10PNonRef(tracker, refContext, vcReverse.getGenotypes(), log10AlleleFrequencyPriors, log10AlleleFrequencyPosteriors.get());
//normalizedLog10Posteriors = MathUtils.normalizeFromLog10(log10AlleleFrequencyPosteriors.get(), true);
@ -419,7 +418,7 @@ public class UnifiedGenotyperEngine {
pileup = rawContext.getExtendedEventPileup();
else if (rawContext.hasBasePileup())
pileup = rawContext.getBasePileup();
stratifiedContexts = StratifiedAlignmentContext.splitContextBySampleName(pileup, UAC.ASSUME_SINGLE_SAMPLE);
stratifiedContexts = AlignmentContextUtils.splitContextBySampleName(pileup, UAC.ASSUME_SINGLE_SAMPLE);
Collection<VariantContext> variantContexts = annotationEngine.annotateContext(tracker, refContext, stratifiedContexts, vcCall);
vcCall = variantContexts.iterator().next(); // we know the collection will always have exactly 1 element.
@ -469,7 +468,7 @@ public class UnifiedGenotyperEngine {
return null;
// stratify the AlignmentContext and cut by sample
stratifiedContexts = StratifiedAlignmentContext.splitContextBySampleName(pileup, UAC.ASSUME_SINGLE_SAMPLE);
stratifiedContexts = AlignmentContextUtils.splitContextBySampleName(pileup, UAC.ASSUME_SINGLE_SAMPLE);
} else if ( UAC.GLmodel == GenotypeLikelihoodsCalculationModel.Model.SNP && !rawContext.hasExtendedEventPileup() ) {
@ -478,7 +477,7 @@ public class UnifiedGenotyperEngine {
return null;
// stratify the AlignmentContext and cut by sample
stratifiedContexts = StratifiedAlignmentContext.splitContextBySampleName(rawContext.getBasePileup(), UAC.ASSUME_SINGLE_SAMPLE);
stratifiedContexts = AlignmentContextUtils.splitContextBySampleName(rawContext.getBasePileup(), UAC.ASSUME_SINGLE_SAMPLE);
// filter the reads (and test for bad pileups)
if ( !filterPileup(stratifiedContexts, badReadPileupFilter) )

View File

@ -3,7 +3,7 @@ package org.broadinstitute.sting.oneoffprojects.walkers;
import org.broad.tribble.util.variantcontext.VariantContext;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.gatk.contexts.ReferenceContext;
import org.broadinstitute.sting.gatk.contexts.StratifiedAlignmentContext;
import org.broadinstitute.sting.gatk.contexts.AlignmentContextUtils;
import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
import org.broadinstitute.sting.gatk.refdata.ReferenceOrderedDatum;
import org.broadinstitute.sting.gatk.walkers.DataSource;
@ -70,7 +70,7 @@ public class AlleleBalanceHistogramWalker extends LocusWalker<Map<String,Double>
}
private HashMap<String,Double> getAlleleBalanceBySample(VariantContext vc, ReferenceContext ref, AlignmentContext context) {
Map<String, AlignmentContext> sampleContext = StratifiedAlignmentContext.splitContextBySampleName(context);
Map<String, AlignmentContext> sampleContext = AlignmentContextUtils.splitContextBySampleName(context);
HashMap<String,Double> balances = new HashMap<String,Double>();
System.out.println("----- "+ref.getLocus()+" -----");
int returnedBalances = 0;

View File

@ -10,8 +10,8 @@ import org.broad.tribble.vcf.VCFWriter;
import org.broadinstitute.sting.commandline.Argument;
import org.broadinstitute.sting.commandline.Output;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.gatk.contexts.AlignmentContextUtils;
import org.broadinstitute.sting.gatk.contexts.ReferenceContext;
import org.broadinstitute.sting.gatk.contexts.StratifiedAlignmentContext;
import org.broadinstitute.sting.gatk.contexts.variantcontext.VariantContextUtils;
import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
import org.broadinstitute.sting.gatk.walkers.LocusWalker;
@ -468,7 +468,7 @@ public class MendelianViolationClassifier extends LocusWalker<MendelianViolation
throw new ReviewedStingException("Parental bases have length zero at "+trio.toString());
}
Map<String,AlignmentContext> splitContext = StratifiedAlignmentContext.splitContextBySampleName(context);
Map<String,AlignmentContext> splitContext = AlignmentContextUtils.splitContextBySampleName(context);
Double proportion = getAlleleProportion(parental, splitContext.get(trioStructure.child));
if ( proportion != null ) {
violation.addAttribute(MendelianInfoKey.ProportionOfParentAllele.getKey(), proportion);
@ -502,7 +502,7 @@ public class MendelianViolationClassifier extends LocusWalker<MendelianViolation
// look for tri-allelic sites mis-called as hom -- as a speedup we do this only at non-filtered, non genotype error sites
if ( ! trio.isFiltered() ) {
Map<String,AlignmentContext> splitCon = StratifiedAlignmentContext.splitContextBySampleName(context);
Map<String,AlignmentContext> splitCon = AlignmentContextUtils.splitContextBySampleName(context);
Pair<Allele,Integer> triAl = getTriAllelicQuality(tracker, ref, trio, splitCon);
if ( triAl != null ) {
violation.addAttribute(MendelianInfoKey.TriAllelicBase.getKey(),triAl.first.toString());

View File

@ -1,104 +0,0 @@
/*
* Copyright (c) 2010 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.oneoffprojects.walkers.annotator;
import org.broad.tribble.util.variantcontext.VariantContext;
import org.broad.tribble.vcf.VCFHeaderLineType;
import org.broad.tribble.vcf.VCFInfoHeaderLine;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.InfoFieldAnnotation;
import org.broadinstitute.sting.utils.BaseUtils;
import org.broadinstitute.sting.utils.collections.Pair;
import org.broadinstitute.sting.utils.pileup.PileupElement;
import org.broadinstitute.sting.utils.pileup.ReadBackedPileup;
import org.broadinstitute.sting.gatk.contexts.ReferenceContext;
import java.util.Map;
import java.util.HashMap;
import java.util.List;
import java.util.Arrays;
/**
* Created by IntelliJ IDEA.
* User: chartl
* Date: Dec 17, 2009
* Time: 2:18:43 PM
* To change this template use File | Settings | File Templates.
*/
/**
 * Info-field annotation reporting a smoothed proportion of reference-matching base calls
 * whose secondary ("second-best") base call supports the SNP's alternate allele.
 * Emits the {@code ref_2bb_snp_prop} INFO key; applies only to biallelic SNP calls.
 * NOTE(review): this class is shown here as a whole-file deletion in the diff (second-base
 * support is being removed from the codebase).
 */
public class ProportionOfRefSecondBasesSupportingSNP implements InfoFieldAnnotation {
    // INFO field key under which the proportion is reported.
    private String KEY_NAME = "ref_2bb_snp_prop";
    // Never read anywhere in this class — presumably intended to gate MAPQ0 reads; dead field.
    private boolean USE_MAPQ0_READS = false;

    public List<String> getKeyNames() { return Arrays.asList(KEY_NAME); }

    /**
     * Accumulates, over every sample's base pileup, the count of ref-matching bases with a
     * regular second base and how many of those second bases equal the alternate allele,
     * then reports the smoothed ratio.
     *
     * @return singleton map {KEY_NAME -> formatted proportion}, or null when the call is not
     *         a biallelic SNP or when no informative bases were observed in any sample.
     */
    public Map<String, Object> annotate(RefMetaDataTracker tracker, ReferenceContext ref, Map<String, AlignmentContext> context, VariantContext vc) {
        // Annotation is defined only for biallelic SNPs.
        if ( ! vc.isSNP() || ! vc.isBiallelic() )
            return null;
        // first = total ref bases with a regular second base; second = those supporting the SNP allele.
        Pair<Integer,Integer> totalAndSNPSupporting = new Pair<Integer,Integer>(0,0);
        for ( String sample : context.keySet() ) {
            ReadBackedPileup pileup = context.get(sample).getBasePileup();
            // getTotalRefAndSNPSupportCounts mutates and returns the same Pair accumulator.
            totalAndSNPSupporting = getTotalRefAndSNPSupportCounts(pileup, ref.getBaseAsChar(), vc.getAlternateAllele(0).toString().charAt(0), totalAndSNPSupporting);
        }
        // No informative bases at all: emit nothing rather than a meaningless ratio.
        if ( totalAndSNPSupporting.equals(new Pair<Integer,Integer>(0,0)) )
            return null;
        double p = getProportionOfRefSecondaryBasesSupportingSNP(totalAndSNPSupporting);
        Map<String, Object> map = new HashMap<String, Object>();
        map.put(getKeyNames().get(0), String.format("%f", p ));
        return map;
    }

    /**
     * Add-one smoothed ratio (1 + supporting) / (1 + total); the +1 avoids division by zero
     * and hard 0/1 proportions at low depth.
     */
    private double getProportionOfRefSecondaryBasesSupportingSNP(Pair<Integer,Integer> tRef_snpSupport) {
        return ( 1.0 + tRef_snpSupport.second) / (1.0 + tRef_snpSupport.first );
    }

    /**
     * Scans a pileup for bases matching the reference; among those with a regular secondary
     * base, counts how many secondary bases equal the SNP allele. Mutates and returns the
     * passed-in accumulator (first += ref-with-second-base count, second += SNP-supporting count).
     */
    private Pair<Integer,Integer> getTotalRefAndSNPSupportCounts(ReadBackedPileup p, char ref, char snp, Pair<Integer,Integer> refAndSNPCounts) {
        int nRefBases = 0;
        int nSecondBasesSupportingSNP = 0;
        for (PileupElement e : p ) {
            if ( BaseUtils.basesAreEqual( e.getBase(), (byte) ref ) ) {
                // Only count positions where a regular (A/C/G/T) second base exists.
                if ( BaseUtils.isRegularBase(e.getSecondBase()) ) {
                    nRefBases++;
                    if ( BaseUtils.basesAreEqual( e.getSecondBase(), (byte) snp ) ) {
                        nSecondBasesSupportingSNP++;
                    }
                }
            }
        }
        refAndSNPCounts.first+=nRefBases;
        refAndSNPCounts.second+=nSecondBasesSupportingSNP;
        return refAndSNPCounts;
    }

    public List<VCFInfoHeaderLine> getDescriptions() {
        return Arrays.asList(new VCFInfoHeaderLine(KEY_NAME,
                1, VCFHeaderLineType.Float,"Simple proportion of second best base calls for reference base that support the SNP base"));
    }
}

View File

@ -1,114 +0,0 @@
/*
* Copyright (c) 2010 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.oneoffprojects.walkers.annotator;
import org.broad.tribble.util.variantcontext.VariantContext;
import org.broad.tribble.vcf.VCFHeaderLineType;
import org.broad.tribble.vcf.VCFInfoHeaderLine;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.gatk.contexts.ReferenceContext;
import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.InfoFieldAnnotation;
import org.broadinstitute.sting.utils.collections.Pair;
import org.broadinstitute.sting.utils.BaseUtils;
import org.broadinstitute.sting.utils.pileup.ReadBackedPileup;
import org.broadinstitute.sting.utils.pileup.PileupElement;
import java.util.Map;
import java.util.HashMap;
import java.util.List;
import java.util.Arrays;
/**
* Created by IntelliJ IDEA.
* User: chartl
* Date: Dec 17, 2009
* Time: 2:42:05 PM
* To change this template use File | Settings | File Templates.
*/
/**
 * Info-field annotation reporting a smoothed proportion of SNP-allele base calls whose
 * secondary ("second-best") base call supports the reference allele — the mirror image of
 * ProportionOfRefSecondBasesSupportingSNP. Emits the {@code SNP_2B_SUPPORT_REF} INFO key;
 * applies only to biallelic SNP calls.
 * NOTE(review): this class is shown here as a whole-file deletion in the diff (second-base
 * support is being removed from the codebase).
 */
public class ProportionOfSNPSecondBasesSupportingRef implements InfoFieldAnnotation {
    // INFO field key under which the proportion is reported.
    public String KEY_NAME = "SNP_2B_SUPPORT_REF";
    // Returned by useZeroQualityReads(): MAPQ0 reads are excluded.
    public boolean USE_MAPQ0_READS = false;
    // Hard-coded developer scratch path; never read in this class — leftover debug artifact.
    public String debug_file = "/humgen/gsa-scr1/chartl/temporary/ProportionOfRefSecondBasesSupportingSNP.debug.txt";

    public List<String> getKeyNames() { return Arrays.asList(KEY_NAME); }

    public boolean useZeroQualityReads() { return USE_MAPQ0_READS; }

    /**
     * Accumulates, over every sample's base pileup, the count of SNP-allele bases carrying a
     * regular second base and how many of those second bases equal the reference, then
     * reports the smoothed ratio.
     *
     * @return singleton map {KEY_NAME -> formatted proportion}, or null when the call is not
     *         a biallelic SNP or when no informative bases were observed in any sample.
     */
    public Map<String, Object> annotate(RefMetaDataTracker tracker, ReferenceContext ref, Map<String, AlignmentContext> context, VariantContext vc) {
        // Annotation is defined only for biallelic SNPs.
        if ( ! vc.isSNP() || ! vc.isBiallelic() )
            return null;
        // first = total SNP bases with a second base; second = those whose second base is the ref.
        Pair<Integer,Integer> totalAndSNPSupporting = new Pair<Integer,Integer>(0,0);
        for ( String sample : context.keySet() ) {
            ReadBackedPileup pileup = context.get(sample).getBasePileup();
            // getTotalSNPandRefSupporting mutates and returns the same Pair accumulator.
            totalAndSNPSupporting = getTotalSNPandRefSupporting(pileup, ref.getBaseAsChar(), vc.getAlternateAllele(0).toString().charAt(0), totalAndSNPSupporting);
        }
        // No informative bases at all: emit nothing rather than a meaningless ratio.
        if ( totalAndSNPSupporting.equals(new Pair<Integer,Integer>(0,0)) )
            return null;
        double p = getProportionOfSNPSecondaryBasesSupportingRef(totalAndSNPSupporting);
        Map<String, Object> map = new HashMap<String, Object>();
        map.put(getKeyNames().get(0), String.format("%f", p ));
        return map;
    }

    /**
     * Add-one smoothed ratio (1 + ref-supporting) / (1 + total); the +1 avoids division by
     * zero and hard 0/1 proportions at low depth.
     */
    public double getProportionOfSNPSecondaryBasesSupportingRef(Pair<Integer,Integer> tSNP_refSupport) {
        return ( 1.0 + tSNP_refSupport.second) / (1.0 + tSNP_refSupport.first );
    }

    /**
     * Scans a pileup for bases matching the SNP allele; among those with a regular secondary
     * base, counts how many secondary bases equal the reference. Mutates and returns the
     * passed-in accumulator (first += SNP-with-second-base count, second += ref-supporting count).
     */
    public Pair<Integer,Integer> getTotalSNPandRefSupporting(ReadBackedPileup p, char ref, char snp, Pair<Integer,Integer> SNPandRefCounts) {
        int nSNPBases = 0;
        int nSNPBasesSupportingRef = 0;
        for (PileupElement e : p ) {
            if ( BaseUtils.basesAreEqual( e.getBase(), (byte) snp ) ) {
                // Only count positions where a regular (A/C/G/T) second base exists.
                if ( hasSecondBase(e) ) {
                    nSNPBases++;
                    if ( BaseUtils.basesAreEqual( e.getSecondBase(), (byte) ref ) ) {
                        nSNPBasesSupportingRef++;
                    }
                }
            }
        }
        SNPandRefCounts.first+=nSNPBases;
        SNPandRefCounts.second+=nSNPBasesSupportingRef;
        return SNPandRefCounts;
    }

    /** True when the element carries a regular (A/C/G/T) secondary base call. */
    public boolean hasSecondBase(PileupElement e) {
        return BaseUtils.isRegularBase(e.getSecondBase());
    }

    public List<VCFInfoHeaderLine> getDescriptions() {
        return Arrays.asList(new VCFInfoHeaderLine(KEY_NAME,
                1, VCFHeaderLineType.Float,"Simple proportion of second best base calls for SNP base that support the Ref base"));
    }
}

View File

@ -3,7 +3,7 @@ package org.broadinstitute.sting.oneoffprojects.walkers.association;
import net.sf.samtools.SAMRecord;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.gatk.contexts.ReferenceContext;
import org.broadinstitute.sting.gatk.contexts.StratifiedAlignmentContext;
import org.broadinstitute.sting.gatk.contexts.AlignmentContextUtils;
import org.broadinstitute.sting.gatk.datasources.sample.Sample;
import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
import org.broadinstitute.sting.utils.pileup.PileupElement;
@ -18,7 +18,7 @@ import java.util.*;
* Holds multiple map contexts for use in the regional association walker
*/
public class MapExtender {
static StratifiedAlignmentContext.StratifiedContextType TYPE = StratifiedAlignmentContext.StratifiedContextType.COMPLETE;
static AlignmentContextUtils.ReadOrientation TYPE = AlignmentContextUtils.ReadOrientation.COMPLETE;
// hold on to these -- atoms may want access to the tracker or other context types
private MapHolder previous = null;
private MapHolder current = null;
@ -39,7 +39,7 @@ public class MapExtender {
if ( current != null ) {
for ( Map.Entry<Sample,AlignmentContext> sac : current.getContext().entrySet() ) {
AlignmentContext context = StratifiedAlignmentContext.stratify(sac.getValue(), TYPE);
AlignmentContext context = AlignmentContextUtils.stratify(sac.getValue(), TYPE);
if ( context.hasBasePileup() ) {
fullPileup.put(sac.getKey(),context.getBasePileup());
} else if ( context.hasExtendedEventPileup() ) {

View File

@ -1,8 +1,8 @@
package org.broadinstitute.sting.oneoffprojects.walkers.association;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.gatk.contexts.AlignmentContextUtils;
import org.broadinstitute.sting.gatk.contexts.ReferenceContext;
import org.broadinstitute.sting.gatk.contexts.StratifiedAlignmentContext;
import org.broadinstitute.sting.gatk.datasources.sample.Sample;
import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
@ -16,7 +16,7 @@ public class MapHolder {
public MapHolder(RefMetaDataTracker t, ReferenceContext r, AlignmentContext a) {
tracker = t;
ref = r;
alignments = StratifiedAlignmentContext.splitContextBySample(a);
alignments = AlignmentContextUtils.splitContextBySample(a);
}
public Map<Sample, AlignmentContext> getContext() {

View File

@ -306,29 +306,6 @@ public class BaseUtils {
}
}
/**
 * Pulls the second-best base call for the given read offset out of the read's
 * optional tags: the compressed "SQ" tag when present, otherwise the "E2"
 * string tag.  Returns '.' when a tag exists but is unusable (offset of -1 or
 * length mismatch) and 'N' when neither tag is present.
 * todo -- what should the default char really be?
 */
public static byte getSecondBase(final SAMRecord read, int offset) {
    final Object sqAttribute = read.getAttribute("SQ");
    if (sqAttribute != null) {
        final byte[] compressedQuals = (byte[]) sqAttribute;
        if (offset != -1 && compressedQuals != null && compressedQuals.length == read.getReadLength()) {
            return BaseUtils.baseIndexToSimpleBase(QualityUtils.compressedQualityToBaseIndex(compressedQuals[offset]));
        }
        return '.';
    }
    final Object e2Attribute = read.getAttribute("E2");
    if (e2Attribute != null) {
        final String secondaries = (String) e2Attribute;
        if (offset != -1 && secondaries != null && secondaries.length() == read.getReadLength()) {
            return (byte) secondaries.charAt(offset);
        }
        return '.';
    }
    return 'N';
}
/**
* Return the complement (A <-> T or C <-> G) of a base, or the specified base if it can't be complemented (i.e. an ambiguous base).
*

View File

@ -128,26 +128,6 @@ public class QualityUtils {
return compressedQual;
}
/**
 * Compress a base index and a log probability difference (-10log10(p3/p2))
 * into a single byte so that it can be output in a SAMRecord's SQ field:
 * the low 2 bits hold the base index, the high 6 bits the truncated
 * probability difference.
 *
 * @param baseIndex the base index (0:A, 1:C, 2:G, 3:T)
 * @param probdiff the log probability difference between the secondary and tertiary bases (-10log10(p3/p2))
 * @return a byte containing the index and the log probability difference
 */
static public byte baseAndProbDiffToCompressedQuality(int baseIndex, double probdiff) {
    // Truncate the probability difference and pack it above the 2 index bits;
    // 0xFC masks to exactly the high 6 bits.  The original's dead store
    // (compressedQual = 0, immediately overwritten) and mutable accumulator
    // are gone -- because the result is cast to byte, the arithmetic is
    // unchanged modulo 256.
    final byte cprob = (byte) probdiff;
    return (byte) (baseIndex + ((cprob << 2) & 0xFC));
}
/**
* From a compressed base, extract the base index (0:A, 1:C, 2:G, 3:T)
*
@ -173,21 +153,6 @@ public class QualityUtils {
return ((double) x2)/100.0;
}
/**
 * From a compressed base, extract the log probability difference between the
 * secondary and tertiary bases.
 *
 * @param compressedQual the compressed quality score, as returned by baseAndProbDiffToCompressedQuality
 * @return the log probability difference (-10log10(p3/p2))
 */
static public double compressedQualityToProbDiff(byte compressedQual) {
    // Mask to an unsigned value before shifting: java bytes are signed, so
    // promoting without the mask would smear the sign bit across the result.
    final int unsigned = ((int) compressedQual) & 0xff;
    final int probDiff = unsigned >>> 2;   // drop the 2 base-index bits
    return (double) probDiff;
}
/**
* Return the complement of a compressed quality
*

View File

@ -725,33 +725,6 @@ public abstract class AbstractReadBackedPileup<RBP extends AbstractReadBackedPil
return loc;
}
/**
 * Returns true if any non-deletion element in this pileup carries a regular
 * (A/C/G/T) second-best base call.
 * <p>
 * Somewhat expensive: worst case visits every element once; for per-sample
 * tracked pileups it builds a sub-pileup per sample and recurses.
 * @return true when at least one secondary base annotation is present
 */
@Override
public boolean hasSecondaryBases() {
    if(pileupElementTracker instanceof PerSamplePileupElementTracker) {
        PerSamplePileupElementTracker<PE> tracker = (PerSamplePileupElementTracker<PE>)pileupElementTracker;
        boolean hasSecondaryBases = false;
        // NOTE(review): no early exit -- every sample is scanned even after a
        // hit; harmless, but this loop could short-circuit on the first true
        for(Sample sample: tracker.getSamples()) {
            hasSecondaryBases |= createNewPileup(loc,tracker.getElements(sample)).hasSecondaryBases();
        }
        return hasSecondaryBases;
    }
    else {
        for ( PileupElement pile : this ) {
            // skip deletion sites
            if ( ! pile.isDeletion() && BaseUtils.isRegularBase((char)pile.getSecondBase()) )
                return true;
        }
    }
    return false;
}
/**
* Get counts of A, C, G, T in order, which returns a int[4] vector with counts according
* to BaseUtils.simpleBaseToBaseIndex for each base.
@ -835,18 +808,6 @@ public abstract class AbstractReadBackedPileup<RBP extends AbstractReadBackedPil
return v;
}
/**
 * Collects the second-best base call of every element in this pileup.
 * Note this call costs O(n) and allocates a fresh array each time.
 * @return array of secondary bases, one per pileup element, in iteration order
 */
@Override
public byte[] getSecondaryBases() {
    final byte[] secondBases = new byte[size()];
    int idx = 0;
    for ( PileupElement element : pileupElementTracker ) {
        secondBases[idx++] = element.getSecondBase();
    }
    return secondBases;
}
/**
* Returns an array of the quals in this pileup. Note this call costs O(n) and allocates fresh array each time
* @return

View File

@ -103,11 +103,6 @@ public class ExtendedEventPileupElement extends PileupElement {
return getBaseIndex(offset >= 0 ? offset : offset+eventLength);
}
@Override
public byte getSecondBase() {
    // non-negative offsets are used directly; negative ones are shifted by the
    // event length before delegating to the offset-based accessor
    final int effectiveOffset = (offset < 0) ? offset + eventLength : offset;
    return getSecondBase(effectiveOffset);
}
@Override
public byte getQual() {
return getQual(offset >= 0 ? offset : offset+eventLength);

View File

@ -43,10 +43,6 @@ public class PileupElement {
return getBaseIndex(offset);
}
/** @return the second-best base call at this element's current offset */
public byte getSecondBase() {
    return getSecondBase(this.offset);
}
public byte getQual() {
return getQual(offset);
}
@ -65,10 +61,6 @@ public class PileupElement {
return isDeletion() ? DELETION_BASE : BaseUtils.simpleBaseToBaseIndex((char)read.getReadBases()[offset]);
}
/**
 * Second-best base at the given read offset; deletion elements have no base
 * call, so DELETION_BASE is returned for them.
 */
protected byte getSecondBase(final int offset) {
    if ( isDeletion() )
        return DELETION_BASE;
    return BaseUtils.getSecondBase(read, offset);
}
protected byte getQual(final int offset) {
return isDeletion() ? DELETION_QUAL : read.getBaseQualities()[offset];
}

View File

@ -183,12 +183,6 @@ public interface ReadBackedPileup extends Iterable<PileupElement>, HasGenomeLoca
*/
public int[] getBaseCounts();
/**
* Somewhat expensive routine that returns true if any base in the pileup has secondary bases annotated
* @return
*/
public boolean hasSecondaryBases();
public String getPileupString(Character ref);
/**
@ -209,12 +203,6 @@ public interface ReadBackedPileup extends Iterable<PileupElement>, HasGenomeLoca
*/
public byte[] getBases();
/**
* Returns an array of the secondary bases in this pileup. Note this call costs O(n) and allocates fresh array each time
* @return
*/
public byte[] getSecondaryBases();
/**
* Returns an array of the quals in this pileup. Note this call costs O(n) and allocates fresh array each time
* @return