diff --git a/ivy.xml b/ivy.xml index b7ca65406..1802c1627 100644 --- a/ivy.xml +++ b/ivy.xml @@ -61,6 +61,7 @@ + @@ -82,7 +83,7 @@ - + diff --git a/protected/java/src/org/broadinstitute/sting/gatk/downsampling/AlleleBiasedDownsamplingUtils.java b/protected/java/src/org/broadinstitute/sting/gatk/downsampling/AlleleBiasedDownsamplingUtils.java index a7bb58d0c..02821ab50 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/downsampling/AlleleBiasedDownsamplingUtils.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/downsampling/AlleleBiasedDownsamplingUtils.java @@ -47,7 +47,6 @@ package org.broadinstitute.sting.gatk.downsampling; import net.sf.samtools.SAMReadGroupRecord; -import net.sf.samtools.SAMRecord; import org.broadinstitute.sting.utils.*; import org.broadinstitute.sting.utils.pileup.*; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; @@ -78,30 +77,15 @@ public class AlleleBiasedDownsamplingUtils { for ( int i = 0; i < 4; i++ ) alleleStratifiedElements[i] = new ArrayList(); - // keep all of the reduced reads - final ArrayList reducedReadPileups = new ArrayList(); - // start by stratifying the reads by the alleles they represent at this position - for( final PileupElement pe : pileup ) { + for ( final PileupElement pe : pileup ) { // we do not want to remove a reduced read - if ( pe.getRead().isReducedRead() ) - reducedReadPileups.add(pe); - - final int baseIndex = BaseUtils.simpleBaseToBaseIndex(pe.getBase()); - if ( baseIndex != -1 ) - alleleStratifiedElements[baseIndex].add(pe); - } - - // Unfortunately, we need to maintain the original pileup ordering of reads or FragmentUtils will complain later. 
- int numReadsToRemove = (int)(pileup.getNumberOfElements() * downsamplingFraction); // floor - final TreeSet elementsToKeep = new TreeSet(new Comparator() { - @Override - public int compare(PileupElement element1, PileupElement element2) { - final int difference = element1.getRead().getAlignmentStart() - element2.getRead().getAlignmentStart(); - return difference != 0 ? difference : element1.getRead().getReadName().compareTo(element2.getRead().getReadName()); + if ( !pe.getRead().isReducedRead() ) { + final int baseIndex = BaseUtils.simpleBaseToBaseIndex(pe.getBase()); + if ( baseIndex != -1 ) + alleleStratifiedElements[baseIndex].add(pe); } - }); - elementsToKeep.addAll(reducedReadPileups); + } // make a listing of allele counts final int[] alleleCounts = new int[4]; @@ -109,22 +93,30 @@ public class AlleleBiasedDownsamplingUtils { alleleCounts[i] = alleleStratifiedElements[i].size(); // do smart down-sampling + int numReadsToRemove = (int)(pileup.getNumberOfElements() * downsamplingFraction); // floor final int[] targetAlleleCounts = runSmartDownsampling(alleleCounts, numReadsToRemove); + final HashSet readsToRemove = new HashSet(numReadsToRemove); for ( int i = 0; i < 4; i++ ) { final ArrayList alleleList = alleleStratifiedElements[i]; - // if we don't need to remove any reads, keep them all - if ( alleleList.size() <= targetAlleleCounts[i] ) - elementsToKeep.addAll(alleleList); - else - elementsToKeep.addAll(downsampleElements(alleleList, alleleList.size() - targetAlleleCounts[i], log)); + // if we don't need to remove any reads, then don't + if ( alleleList.size() > targetAlleleCounts[i] ) + readsToRemove.addAll(downsampleElements(alleleList, alleleList.size() - targetAlleleCounts[i], log)); } // clean up pointers so memory can be garbage collected if needed for ( int i = 0; i < 4; i++ ) alleleStratifiedElements[i].clear(); - return new ReadBackedPileupImpl(pileup.getLocation(), new ArrayList(elementsToKeep)); + // we need to keep the reads sorted because the 
FragmentUtils code will expect them in coordinate order and will fail otherwise + final List readsToKeep = new ArrayList(pileup.getNumberOfElements() - numReadsToRemove); + for ( final PileupElement pe : pileup ) { + if ( !readsToRemove.contains(pe) ) { + readsToKeep.add(pe); + } + } + + return new ReadBackedPileupImpl(pileup.getLocation(), new ArrayList(readsToKeep)); } private static int scoreAlleleCounts(final int[] alleleCounts) { @@ -188,37 +180,43 @@ public class AlleleBiasedDownsamplingUtils { } /** - * Performs allele biased down-sampling on a pileup and computes the list of elements to keep + * Performs allele biased down-sampling on a pileup and computes the list of elements to remove * * @param elements original list of records * @param numElementsToRemove the number of records to remove * @param log logging output - * @return the list of pileup elements TO KEEP + * @return the list of pileup elements TO REMOVE */ - private static List downsampleElements(final ArrayList elements, final int numElementsToRemove, final PrintStream log) { - if ( numElementsToRemove == 0 ) - return elements; + private static List downsampleElements(final List elements, final int numElementsToRemove, final PrintStream log) { + ArrayList elementsToRemove = new ArrayList(numElementsToRemove); + // are there no elements to remove? + if ( numElementsToRemove == 0 ) + return elementsToRemove; + + // should we remove all of the elements? 
final int pileupSize = elements.size(); if ( numElementsToRemove == pileupSize ) { logAllElements(elements, log); - return new ArrayList(0); + elementsToRemove.addAll(elements); + return elementsToRemove; } + // create a bitset describing which elements to remove final BitSet itemsToRemove = new BitSet(pileupSize); for ( Integer selectedIndex : MathUtils.sampleIndicesWithoutReplacement(pileupSize, numElementsToRemove) ) { itemsToRemove.set(selectedIndex); } - ArrayList elementsToKeep = new ArrayList(pileupSize - numElementsToRemove); for ( int i = 0; i < pileupSize; i++ ) { - if ( itemsToRemove.get(i) ) - logRead(elements.get(i).getRead(), log); - else - elementsToKeep.add(elements.get(i)); + if ( itemsToRemove.get(i) ) { + final T element = elements.get(i); + logElement(element, log); + elementsToRemove.add(element); + } } - return elementsToKeep; + return elementsToRemove; } /** @@ -252,65 +250,30 @@ public class AlleleBiasedDownsamplingUtils { final List alleleBin = alleleReadMap.get(alleles.get(i)); if ( alleleBin.size() > targetAlleleCounts[i] ) { - readsToRemove.addAll(downsampleReads(alleleBin, alleleBin.size() - targetAlleleCounts[i], log)); + readsToRemove.addAll(downsampleElements(alleleBin, alleleBin.size() - targetAlleleCounts[i], log)); } } return readsToRemove; } - /** - * Performs allele biased down-sampling on a pileup and computes the list of elements to remove - * - * @param reads original list of records - * @param numElementsToRemove the number of records to remove - * @param log logging output - * @return the list of pileup elements TO REMOVE - */ - private static List downsampleReads(final List reads, final int numElementsToRemove, final PrintStream log) { - final ArrayList readsToRemove = new ArrayList(numElementsToRemove); - - if ( numElementsToRemove == 0 ) - return readsToRemove; - - final int pileupSize = reads.size(); - if ( numElementsToRemove == pileupSize ) { - logAllReads(reads, log); - return reads; - } - - final BitSet 
itemsToRemove = new BitSet(pileupSize); - for ( Integer selectedIndex : MathUtils.sampleIndicesWithoutReplacement(pileupSize, numElementsToRemove) ) { - itemsToRemove.set(selectedIndex); - } - - for ( int i = 0; i < pileupSize; i++ ) { - if ( itemsToRemove.get(i) ) { - final GATKSAMRecord read = reads.get(i); - readsToRemove.add(read); - logRead(read, log); + private static void logAllElements(final List elements, final PrintStream log) { + if ( log != null ) { + for ( final T obj : elements ) { + logElement(obj, log); } } - - return readsToRemove; } - private static void logAllElements(final List elements, final PrintStream log) { + private static void logElement(final T obj, final PrintStream log) { if ( log != null ) { - for ( final PileupElement p : elements ) - logRead(p.getRead(), log); - } - } - private static void logAllReads(final List reads, final PrintStream log) { - if ( log != null ) { - for ( final GATKSAMRecord read : reads ) - logRead(read, log); - } - } + final GATKSAMRecord read; + if ( obj instanceof PileupElement ) + read = ((PileupElement)obj).getRead(); + else + read = (GATKSAMRecord)obj; - private static void logRead(final SAMRecord read, final PrintStream log) { - if ( log != null ) { final SAMReadGroupRecord readGroup = read.getReadGroup(); log.println(String.format("%s\t%s\t%s\t%s", read.getReadName(), readGroup.getSample(), readGroup.getLibrary(), readGroup.getPlatformUnit())); } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/DepthOfCoverage.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/DepthOfCoverage.java index aeec36c18..4adb2ca71 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/DepthOfCoverage.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/DepthOfCoverage.java @@ -99,7 +99,7 @@ public class DepthOfCoverage extends InfoFieldAnnotation implements StandardAnno for (PerReadAlleleLikelihoodMap maps : 
perReadAlleleLikelihoodMap.values() ) { for (Map.Entry> el : maps.getLikelihoodReadMap().entrySet()) { final GATKSAMRecord read = el.getKey(); - depth += (read.isReducedRead() ? read.getReducedCount(ReadUtils.getReadCoordinateForReferenceCoordinate(read, vc.getStart(), ReadUtils.ClippingTail.RIGHT_TAIL)) : 1); + depth += (read.isReducedRead() ? read.getReducedCount(ReadUtils.getReadCoordinateForReferenceCoordinateUpToEndOfRead(read, vc.getStart(), ReadUtils.ClippingTail.RIGHT_TAIL)) : 1); } } } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/DepthPerAlleleBySample.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/DepthPerAlleleBySample.java index a194fe323..5acea12f6 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/DepthPerAlleleBySample.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/DepthPerAlleleBySample.java @@ -144,7 +144,7 @@ public class DepthPerAlleleBySample extends GenotypeAnnotation implements Standa continue; // read is non-informative if (!vc.getAlleles().contains(a)) continue; // sanity check - shouldn't be needed - alleleCounts.put(a, alleleCounts.get(a) + (read.isReducedRead() ? read.getReducedCount(ReadUtils.getReadCoordinateForReferenceCoordinate(read, vc.getStart(), ReadUtils.ClippingTail.RIGHT_TAIL)) : 1)); + alleleCounts.put(a, alleleCounts.get(a) + (read.isReducedRead() ? 
read.getReducedCount(ReadUtils.getReadCoordinateForReferenceCoordinateUpToEndOfRead(read, vc.getStart(), ReadUtils.ClippingTail.RIGHT_TAIL)) : 1)); } final int[] counts = new int[alleleCounts.size()]; counts[0] = alleleCounts.get(vc.getReference()); diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/FisherStrand.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/FisherStrand.java index 167e5df63..ff3d7940f 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/FisherStrand.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/FisherStrand.java @@ -116,8 +116,8 @@ public class FisherStrand extends InfoFieldAnnotation implements StandardAnnotat else if (table1 == null) return annotationForOneTable(pValueForContingencyTable(table2)); else { // take the one with the best (i.e., least significant pvalue) - double pvalue1 = Math.max(pValueForContingencyTable(table1), MIN_PVALUE); - double pvalue2 = Math.max(pValueForContingencyTable(table2), MIN_PVALUE); + double pvalue1 = pValueForContingencyTable(table1); + double pvalue2 = pValueForContingencyTable(table2); return annotationForOneTable(Math.max(pvalue1, pvalue2)); } } @@ -129,7 +129,7 @@ public class FisherStrand extends InfoFieldAnnotation implements StandardAnnotat * @return a hash map from FS -> phred-scaled pValue */ private Map annotationForOneTable(final double pValue) { - final Object value = String.format("%.3f", QualityUtils.phredScaleErrorRate(pValue)); + final Object value = String.format("%.3f", QualityUtils.phredScaleErrorRate(Math.max(pValue, MIN_PVALUE))); // prevent INFINITYs return Collections.singletonMap(FS, value); // Map map = new HashMap(); // map.put(FS, String.format("%.3f", QualityUtils.phredScaleErrorRate(pValue))); @@ -265,24 +265,16 @@ public class FisherStrand extends InfoFieldAnnotation implements StandardAnnotat for (PerReadAlleleLikelihoodMap maps : 
stratifiedPerReadAlleleLikelihoodMap.values() ) { for (Map.Entry> el : maps.getLikelihoodReadMap().entrySet()) { - final boolean matchesRef = PerReadAlleleLikelihoodMap.getMostLikelyAllele(el.getValue()).equals(ref,true); - final boolean matchesAlt = PerReadAlleleLikelihoodMap.getMostLikelyAllele(el.getValue()).equals(alt,true); - - if ( !matchesRef && !matchesAlt ) - continue; - - boolean isFW = el.getKey().getReadNegativeStrandFlag(); - - int row = matchesRef ? 0 : 1; - int column = isFW ? 0 : 1; - + final Allele mostLikelyAllele = PerReadAlleleLikelihoodMap.getMostLikelyAllele(el.getValue()); final GATKSAMRecord read = el.getKey(); - table[row][column] += (read.isReducedRead() ? read.getReducedCount(ReadUtils.getReadCoordinateForReferenceCoordinate(read, vc.getStart(), ReadUtils.ClippingTail.RIGHT_TAIL)) : 1); + final int representativeCount = read.isReducedRead() ? read.getReducedCount(ReadUtils.getReadCoordinateForReferenceCoordinateUpToEndOfRead(read, vc.getStart(), ReadUtils.ClippingTail.RIGHT_TAIL)) : 1; + updateTable(table, mostLikelyAllele, read, ref, alt, representativeCount); } } return table; } + /** Allocate and fill a 2x2 strand contingency table. In the end, it'll look something like this: * fw rc @@ -299,31 +291,36 @@ public class FisherStrand extends InfoFieldAnnotation implements StandardAnnotat for ( Map.Entry sample : stratifiedContexts.entrySet() ) { for (PileupElement p : sample.getValue().getBasePileup()) { - // ignore reduced reads because they are always on the forward strand! - // TODO -- when het compression is enabled in RR, we somehow need to allow those reads through into the Fisher test - if ( p.getRead().isReducedRead() ) - continue; - if ( ! 
RankSumTest.isUsableBase(p, false) ) // ignore deletions continue; if ( p.getQual() < minQScoreToConsider || p.getMappingQual() < minQScoreToConsider ) continue; - final Allele base = Allele.create(p.getBase(), false); - final boolean isFW = !p.getRead().getReadNegativeStrandFlag(); - - final boolean matchesRef = ref.equals(base, true); - final boolean matchesAlt = alt.equals(base, true); - if ( matchesRef || matchesAlt ) { - int row = matchesRef ? 0 : 1; - int column = isFW ? 0 : 1; - - table[row][column] += p.getRepresentativeCount(); - } + updateTable(table, Allele.create(p.getBase(), false), p.getRead(), ref, alt, p.getRepresentativeCount()); } } return table; } + + private static void updateTable(final int[][] table, final Allele allele, final GATKSAMRecord read, final Allele ref, final Allele alt, final int representativeCount) { + // ignore reduced reads because they are always on the forward strand! + // TODO -- when het compression is enabled in RR, we somehow need to allow those reads through into the Fisher test + if ( read.isReducedRead() ) + return; + + final boolean matchesRef = allele.equals(ref, true); + final boolean matchesAlt = allele.equals(alt, true); + + if ( matchesRef || matchesAlt ) { + + final boolean isFW = !read.getReadNegativeStrandFlag(); + + int row = matchesRef ? 0 : 1; + int column = isFW ? 
0 : 1; + + table[row][column] += representativeCount; + } + } } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/GCContent.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/GCContent.java index 3bb3d7d5a..2b3290595 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/GCContent.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/GCContent.java @@ -95,9 +95,9 @@ public class GCContent extends InfoFieldAnnotation implements ExperimentalAnnota for ( byte base : ref.getBases() ) { int baseIndex = BaseUtils.simpleBaseToBaseIndex(base); - if ( baseIndex == BaseUtils.gIndex || baseIndex == BaseUtils.cIndex ) + if ( baseIndex == BaseUtils.Base.G.ordinal() || baseIndex == BaseUtils.Base.C.ordinal() ) gc++; - else if ( baseIndex == BaseUtils.aIndex || baseIndex == BaseUtils.tIndex ) + else if ( baseIndex == BaseUtils.Base.A.ordinal() || baseIndex == BaseUtils.Base.T.ordinal() ) at++; else ; // ignore diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/HaplotypeScore.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/HaplotypeScore.java index fe4075117..3acba48ae 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/HaplotypeScore.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/HaplotypeScore.java @@ -56,7 +56,6 @@ import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.InfoFieldAnnot import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.StandardAnnotation; import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; import org.broadinstitute.variant.utils.BaseUtils; -import org.broadinstitute.sting.utils.Haplotype; import org.broadinstitute.sting.utils.MathUtils; import org.broadinstitute.sting.utils.QualityUtils; import org.broadinstitute.variant.vcf.VCFHeaderLineType; @@ -236,8 +235,8 @@ public class HaplotypeScore extends 
InfoFieldAnnotation implements StandardAnnot final byte[] haplotypeBases = new byte[contextSize]; Arrays.fill(haplotypeBases, (byte) REGEXP_WILDCARD); - final double[] baseQualities = new double[contextSize]; - Arrays.fill(baseQualities, 0.0); + final byte[] baseQualities = new byte[contextSize]; + Arrays.fill(baseQualities, (byte)0); byte[] readBases = read.getReadBases(); readBases = AlignmentUtils.readToAlignmentByteArray(read.getCigar(), readBases); // Adjust the read bases based on the Cigar string @@ -267,7 +266,7 @@ public class HaplotypeScore extends InfoFieldAnnotation implements StandardAnnot readQuals[baseOffset] = (byte) 0; } // quals less than 5 are used as codes and don't have actual probabilistic meaning behind them haplotypeBases[i] = readBases[baseOffset]; - baseQualities[i] = (double) readQuals[baseOffset]; + baseQualities[i] = readQuals[baseOffset]; } return new Haplotype(haplotypeBases, baseQualities); @@ -286,10 +285,10 @@ public class HaplotypeScore extends InfoFieldAnnotation implements StandardAnnot final int length = a.length; final byte[] consensusChars = new byte[length]; - final double[] consensusQuals = new double[length]; + final int[] consensusQuals = new int[length]; - final double[] qualsA = haplotypeA.getQuals(); - final double[] qualsB = haplotypeB.getQuals(); + final int[] qualsA = haplotypeA.getQuals(); + final int[] qualsB = haplotypeB.getQuals(); for (int i = 0; i < length; i++) { chA = a[i]; @@ -300,7 +299,7 @@ public class HaplotypeScore extends InfoFieldAnnotation implements StandardAnnot if ((chA == wc) && (chB == wc)) { consensusChars[i] = wc; - consensusQuals[i] = 0.0; + consensusQuals[i] = 0; } else if ((chA == wc)) { consensusChars[i] = chB; consensusQuals[i] = qualsB[i]; @@ -433,7 +432,6 @@ public class HaplotypeScore extends InfoFieldAnnotation implements StandardAnnot } - public List getKeyNames() { return Arrays.asList("HaplotypeScore"); } @@ -441,4 +439,46 @@ public class HaplotypeScore extends InfoFieldAnnotation 
implements StandardAnnot public List getDescriptions() { return Arrays.asList(new VCFInfoHeaderLine("HaplotypeScore", 1, VCFHeaderLineType.Float, "Consistency of the site with at most two segregating haplotypes")); } + + private static class Haplotype { + private final byte[] bases; + private final int[] quals; + private int qualitySum = -1; + + public Haplotype( final byte[] bases, final int[] quals ) { + this.bases = bases; + this.quals = quals; + } + + public Haplotype( final byte[] bases, final int qual ) { + this.bases = bases; + quals = new int[bases.length]; + Arrays.fill(quals, qual); + } + + public Haplotype( final byte[] bases, final byte[] quals ) { + this.bases = bases; + this.quals = new int[quals.length]; + for ( int i = 0 ; i < quals.length; i++ ) + this.quals[i] = (int)quals[i]; + } + + public double getQualitySum() { + if ( qualitySum == -1 ) { + qualitySum = 0; + for ( final int qual : quals ) { + qualitySum += qual; + } + } + return qualitySum; + } + + public int[] getQuals() { + return quals.clone(); + } + + public byte[] getBases() { + return bases.clone(); + } + } } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/RankSumTest.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/RankSumTest.java index 959a26fba..ec107512a 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/RankSumTest.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/RankSumTest.java @@ -169,8 +169,7 @@ public abstract class RankSumTest extends InfoFieldAnnotation implements ActiveR * @return true if this base is part of a meaningful read for comparison, false otherwise */ public static boolean isUsableBase(final PileupElement p, final boolean allowDeletions) { - return !(p.isInsertionAtBeginningOfRead() || - (! allowDeletions && p.isDeletion()) || + return !((! 
allowDeletions && p.isDeletion()) || p.getMappingQual() == 0 || p.getMappingQual() == QualityUtils.MAPPING_QUALITY_UNAVAILABLE || ((int) p.getQual()) < QualityUtils.MIN_USABLE_Q_SCORE); // need the unBAQed quality score here diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/VariantAnnotatorEngine.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/VariantAnnotatorEngine.java index 99dadea54..f03a25c04 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/VariantAnnotatorEngine.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/VariantAnnotatorEngine.java @@ -52,6 +52,7 @@ import org.broadinstitute.sting.gatk.contexts.AlignmentContext; import org.broadinstitute.sting.gatk.contexts.ReferenceContext; import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.*; +import org.broadinstitute.sting.utils.GenomeLoc; import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; import org.broadinstitute.variant.vcf.*; import org.broadinstitute.sting.utils.exceptions.UserException; @@ -214,10 +215,10 @@ public class VariantAnnotatorEngine { Map infoAnnotations = new LinkedHashMap(vc.getAttributes()); // annotate db occurrences - vc = annotateDBs(tracker, ref, vc, infoAnnotations); + vc = annotateDBs(tracker, ref.getLocus(), vc, infoAnnotations); // annotate expressions where available - annotateExpressions(tracker, ref, infoAnnotations); + annotateExpressions(tracker, ref.getLocus(), infoAnnotations); // go through all the requested info annotationTypes for ( InfoFieldAnnotation annotationType : requestedInfoAnnotations ) { @@ -254,10 +255,22 @@ public class VariantAnnotatorEngine { return builder.genotypes(annotateGenotypes(null, null, null, vc, perReadAlleleLikelihoodMap)).make(); } - private VariantContext annotateDBs(RefMetaDataTracker tracker, ReferenceContext ref, VariantContext vc, 
Map infoAnnotations) { + public VariantContext annotateDBs(final RefMetaDataTracker tracker, final GenomeLoc loc, VariantContext vc) { + final Map newInfoAnnotations = new HashMap(0); + vc = annotateDBs(tracker, loc, vc, newInfoAnnotations); + + if ( !newInfoAnnotations.isEmpty() ) { + final VariantContextBuilder builder = new VariantContextBuilder(vc).attributes(newInfoAnnotations); + vc = builder.make(); + } + + return vc; + } + + private VariantContext annotateDBs(final RefMetaDataTracker tracker, final GenomeLoc loc, VariantContext vc, final Map infoAnnotations) { for ( Map.Entry, String> dbSet : dbAnnotations.entrySet() ) { if ( dbSet.getValue().equals(VCFConstants.DBSNP_KEY) ) { - final String rsID = VCFUtils.rsIDOfFirstRealVariant(tracker.getValues(dbSet.getKey(), ref.getLocus()), vc.getType()); + final String rsID = VCFUtils.rsIDOfFirstRealVariant(tracker.getValues(dbSet.getKey(), loc), vc.getType()); // add the ID if appropriate if ( rsID != null ) { @@ -273,7 +286,7 @@ public class VariantAnnotatorEngine { } } else { boolean overlapsComp = false; - for ( VariantContext comp : tracker.getValues(dbSet.getKey(), ref.getLocus()) ) { + for ( VariantContext comp : tracker.getValues(dbSet.getKey(), loc) ) { if ( !comp.isFiltered() && ( !requireStrictAlleleMatch || comp.getAlleles().equals(vc.getAlleles()) ) ) { overlapsComp = true; break; @@ -287,9 +300,9 @@ public class VariantAnnotatorEngine { return vc; } - private void annotateExpressions(RefMetaDataTracker tracker, ReferenceContext ref, Map infoAnnotations) { + private void annotateExpressions(final RefMetaDataTracker tracker, final GenomeLoc loc, final Map infoAnnotations) { for ( VAExpression expression : requestedExpressions ) { - Collection VCs = tracker.getValues(expression.binding, ref.getLocus()); + Collection VCs = tracker.getValues(expression.binding, loc); if ( VCs.size() == 0 ) continue; diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/coverage/DepthOfCoverage.java 
b/protected/java/src/org/broadinstitute/sting/gatk/walkers/coverage/DepthOfCoverage.java index 1e4c55e0d..b10daab58 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/coverage/DepthOfCoverage.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/coverage/DepthOfCoverage.java @@ -938,7 +938,7 @@ public class DepthOfCoverage extends LocusWalker contexts, AlignmentContextUtils.ReadOrientation contextType) { final Map consensusIndelStrings = countConsensusAlleles(ref, contexts, contextType); -// logger.info("Alleles at " + ref.getLocus()); -// for ( Map.Entry elt : consensusIndelStrings.entrySet() ) { -// logger.info(" " + elt.getValue() + " => " + elt.getKey()); -// } return consensusCountsToAlleles(ref, consensusIndelStrings); } @@ -138,14 +134,9 @@ public class ConsensusAlleleCounter { final int nReadsOverall = indelPileup.getNumberOfElements(); if ( nIndelReads == 0 || (nIndelReads / (1.0 * nReadsOverall)) < minFractionInOneSample ) { -// if ( nIndelReads > 0 ) -// logger.info("Skipping sample " + sample.getKey() + " with nIndelReads " + nIndelReads + " nReads " + nReadsOverall); continue; -// } else { -// logger.info("### Keeping sample " + sample.getKey() + " with nIndelReads " + nIndelReads + " nReads " + nReadsOverall); } - for (PileupElement p : indelPileup) { final GATKSAMRecord read = ReadClipper.hardClipAdaptorSequence(p.getRead()); if (read == null) @@ -154,17 +145,10 @@ public class ConsensusAlleleCounter { continue; } -/* if (DEBUG && p.isIndel()) { - System.out.format("Read: %s, cigar: %s, aln start: %d, aln end: %d, p.len:%d, Type:%s, EventBases:%s\n", - read.getReadName(),read.getCigar().toString(),read.getAlignmentStart(),read.getAlignmentEnd(), - p.getEventLength(),p.getType().toString(), p.getEventBases()); - } - */ - String indelString = p.getEventBases(); - if ( p.isBeforeInsertion() ) { - // edge case: ignore a deletion immediately preceding an insertion as p.getEventBases() returns null [EB] - if ( indelString 
== null ) + final String insertionBases = p.getBasesOfImmediatelyFollowingInsertion(); + // edge case: ignore a deletion immediately preceding an insertion as p.getBasesOfImmediatelyFollowingInsertion() returns null [EB] + if ( insertionBases == null ) continue; boolean foundKey = false; @@ -182,20 +166,20 @@ public class ConsensusAlleleCounter { String s = cList.get(k).getFirst(); int cnt = cList.get(k).getSecond(); // case 1: current insertion is prefix of indel in hash map - if (s.startsWith(indelString)) { + if (s.startsWith(insertionBases)) { cList.set(k,new Pair(s,cnt+1)); foundKey = true; } - else if (indelString.startsWith(s)) { + else if (insertionBases.startsWith(s)) { // case 2: indel stored in hash table is prefix of current insertion // In this case, new bases are new key. foundKey = true; - cList.set(k,new Pair(indelString,cnt+1)); + cList.set(k,new Pair(insertionBases,cnt+1)); } } if (!foundKey) // none of the above: event bases not supported by previous table, so add new key - cList.add(new Pair(indelString,1)); + cList.add(new Pair(insertionBases,1)); } else if (read.getAlignmentStart() == loc.getStart()+1) { @@ -203,28 +187,28 @@ public class ConsensusAlleleCounter { for (int k=0; k < cList.size(); k++) { String s = cList.get(k).getFirst(); int cnt = cList.get(k).getSecond(); - if (s.endsWith(indelString)) { + if (s.endsWith(insertionBases)) { // case 1: current insertion (indelString) is suffix of indel in hash map (s) cList.set(k,new Pair(s,cnt+1)); foundKey = true; } - else if (indelString.endsWith(s)) { + else if (insertionBases.endsWith(s)) { // case 2: indel stored in hash table is prefix of current insertion // In this case, new bases are new key. 
foundKey = true; - cList.set(k,new Pair(indelString,cnt+1)); + cList.set(k,new Pair(insertionBases,cnt+1)); } } if (!foundKey) // none of the above: event bases not supported by previous table, so add new key - cList.add(new Pair(indelString,1)); + cList.add(new Pair(insertionBases,1)); } else { // normal case: insertion somewhere in the middle of a read: add count to arrayList - int cnt = consensusIndelStrings.containsKey(indelString)? consensusIndelStrings.get(indelString):0; - cList.add(new Pair(indelString,cnt+1)); + int cnt = consensusIndelStrings.containsKey(insertionBases)? consensusIndelStrings.get(insertionBases):0; + cList.add(new Pair(insertionBases,cnt+1)); } // copy back arrayList into hashMap @@ -234,11 +218,10 @@ public class ConsensusAlleleCounter { } } - else if ( p.isBeforeDeletedBase() ) { - indelString = String.format("D%d",p.getEventLength()); - int cnt = consensusIndelStrings.containsKey(indelString)? consensusIndelStrings.get(indelString):0; - consensusIndelStrings.put(indelString,cnt+1); - + else if ( p.isBeforeDeletionStart() ) { + final String deletionString = String.format("D%d",p.getLengthOfImmediatelyFollowingIndel()); + int cnt = consensusIndelStrings.containsKey(deletionString)? 
consensusIndelStrings.get(deletionString):0; + consensusIndelStrings.put(deletionString,cnt+1); } } } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/DiploidSNPGenotypeLikelihoods.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/DiploidSNPGenotypeLikelihoods.java index fc7573f21..8c8de2bad 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/DiploidSNPGenotypeLikelihoods.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/DiploidSNPGenotypeLikelihoods.java @@ -425,7 +425,7 @@ public class DiploidSNPGenotypeLikelihoods implements Cloneable { byte qual = p.getQual(); if ( qual > SAMUtils.MAX_PHRED_SCORE ) - throw new UserException.MalformedBAM(p.getRead(), String.format("the maximum allowed quality score is %d, but a quality of %d was observed in read %s. Perhaps your BAM incorrectly encodes the quality scores in Sanger format; see http://en.wikipedia.org/wiki/FASTQ_format for more details", SAMUtils.MAX_PHRED_SCORE, qual, p.getRead().getReadName())); + throw new UserException.MisencodedBAM(p.getRead(), "we encountered an extremely high quality score (" + (int)qual + ")"); if ( capBaseQualsAtMappingQual ) qual = (byte)Math.min((int)qual, p.getMappingQual()); if ( (int)qual < minBaseQual ) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/ErrorModel.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/ErrorModel.java index 12af7839a..1b004d889 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/ErrorModel.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/ErrorModel.java @@ -214,7 +214,7 @@ public class ErrorModel { if (DEBUG) System.out.format("PE: base:%s isNextToDel:%b isNextToIns:%b eventBases:%s eventLength:%d Allele:%s RefAllele:%s\n", pileupElement.getBase(), pileupElement.isBeforeDeletionStart(), - 
pileupElement.isBeforeInsertion(),pileupElement.getEventBases(),pileupElement.getEventLength(), allele.toString(), refAllele.toString()); + pileupElement.isBeforeInsertion(),pileupElement.getBasesOfImmediatelyFollowingInsertion(),pileupElement.getLengthOfImmediatelyFollowingIndel(), allele.toString(), refAllele.toString()); //pileupElement. // if test allele is ref, any base mismatch, or any insertion/deletion at start of pileup count as mismatch @@ -238,11 +238,11 @@ public class ErrorModel { // for non-ref alleles, byte[] alleleBases = allele.getBases(); int eventLength = alleleBases.length - refAllele.getBases().length; - if (eventLength < 0 && pileupElement.isBeforeDeletionStart() && pileupElement.getEventLength() == -eventLength) + if (eventLength < 0 && pileupElement.isBeforeDeletionStart() && pileupElement.getLengthOfImmediatelyFollowingIndel() == -eventLength) return true; if (eventLength > 0 && pileupElement.isBeforeInsertion() && - Arrays.equals(pileupElement.getEventBases().getBytes(),Arrays.copyOfRange(alleleBases,1,alleleBases.length))) // allele contains ref byte, but pileupElement's event bases doesn't + Arrays.equals(pileupElement.getBasesOfImmediatelyFollowingInsertion().getBytes(),Arrays.copyOfRange(alleleBases,1,alleleBases.length))) // allele contains ref byte, but pileupElement's event bases doesn't return true; return false; diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/GeneralPloidyIndelGenotypeLikelihoods.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/GeneralPloidyIndelGenotypeLikelihoods.java index 7bbe470f8..c957bb9db 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/GeneralPloidyIndelGenotypeLikelihoods.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/GeneralPloidyIndelGenotypeLikelihoods.java @@ -210,7 +210,7 @@ public class GeneralPloidyIndelGenotypeLikelihoods extends GeneralPloidyGenotype // count number of elements 
in pileup for (PileupElement elt : pileup) { if (VERBOSE) - System.out.format("base:%s isNextToDel:%b isNextToIns:%b eventBases:%s eventLength:%d\n",elt.getBase(), elt.isBeforeDeletionStart(),elt.isBeforeInsertion(),elt.getEventBases(),elt.getEventLength()); + System.out.format("base:%s isNextToDel:%b isNextToIns:%b eventBases:%s eventLength:%d\n",elt.getBase(), elt.isBeforeDeletionStart(),elt.isBeforeInsertion(),elt.getBasesOfImmediatelyFollowingInsertion(),elt.getLengthOfImmediatelyFollowingIndel()); int idx =0; for (Allele allele : alleles) { int cnt = numSeenBases.get(idx); diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/GeneralPloidySNPGenotypeLikelihoods.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/GeneralPloidySNPGenotypeLikelihoods.java index edae18a16..aa117eb3b 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/GeneralPloidySNPGenotypeLikelihoods.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/GeneralPloidySNPGenotypeLikelihoods.java @@ -323,22 +323,12 @@ public class GeneralPloidySNPGenotypeLikelihoods extends GeneralPloidyGenotypeLi public ReadBackedPileup createBAQedPileup( final ReadBackedPileup pileup ) { final List BAQedElements = new ArrayList(); for( final PileupElement PE : pileup ) { - final PileupElement newPE = new BAQedPileupElement( PE ); + final PileupElement newPE = new SNPGenotypeLikelihoodsCalculationModel.BAQedPileupElement( PE ); BAQedElements.add( newPE ); } return new ReadBackedPileupImpl( pileup.getLocation(), BAQedElements ); } - public class BAQedPileupElement extends PileupElement { - public BAQedPileupElement( final PileupElement PE ) { - super(PE.getRead(), PE.getOffset(), PE.isDeletion(), PE.isBeforeDeletedBase(), PE.isAfterDeletedBase(), PE.isBeforeInsertion(), PE.isAfterInsertion(), PE.isNextToSoftClip()); - } - - @Override - public byte getQual( final int offset ) { return BAQ.calcBAQFromTag(getRead(), 
offset, true); } - } - - /** * Helper function that returns the phred-scaled base quality score we should use for calculating * likelihoods for a pileup element. May return 0 to indicate that the observation is bad, and may diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/IndelGenotypeLikelihoodsCalculationModel.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/IndelGenotypeLikelihoodsCalculationModel.java index 86000f236..84c109c9d 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/IndelGenotypeLikelihoodsCalculationModel.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/IndelGenotypeLikelihoodsCalculationModel.java @@ -252,7 +252,7 @@ public class IndelGenotypeLikelihoodsCalculationModel extends GenotypeLikelihood protected int getFilteredDepth(ReadBackedPileup pileup) { int count = 0; for (PileupElement p : pileup) { - if (p.isDeletion() || p.isInsertionAtBeginningOfRead() || BaseUtils.isRegularBase(p.getBase())) + if (p.isDeletion() || BaseUtils.isRegularBase(p.getBase())) count += p.getRepresentativeCount(); } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/SNPGenotypeLikelihoodsCalculationModel.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/SNPGenotypeLikelihoodsCalculationModel.java index c1b790559..7dc3e8ee3 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/SNPGenotypeLikelihoodsCalculationModel.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/SNPGenotypeLikelihoodsCalculationModel.java @@ -237,11 +237,16 @@ public class SNPGenotypeLikelihoodsCalculationModel extends GenotypeLikelihoodsC public static class BAQedPileupElement extends PileupElement { public BAQedPileupElement( final PileupElement PE ) { - super(PE.getRead(), PE.getOffset(), PE.isDeletion(), PE.isBeforeDeletedBase(), PE.isAfterDeletedBase(), PE.isBeforeInsertion(), 
PE.isAfterInsertion(), PE.isNextToSoftClip()); + super(PE); } @Override - public byte getQual( final int offset ) { return BAQ.calcBAQFromTag(getRead(), offset, true); } + public byte getQual() { + if ( isDeletion() ) + return super.getQual(); + else + return BAQ.calcBAQFromTag(getRead(), offset, true); + } } private static class SampleGenotypeData { diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java index 96f327631..26f2560b7 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java @@ -47,6 +47,7 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller; import com.google.java.contract.Ensures; +import net.sf.samtools.*; import org.broadinstitute.sting.commandline.*; import org.broadinstitute.sting.gatk.CommandLineGATK; import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; @@ -55,7 +56,9 @@ import org.broadinstitute.sting.gatk.arguments.StandardCallerArgumentCollection; import org.broadinstitute.sting.gatk.contexts.AlignmentContext; import org.broadinstitute.sting.gatk.contexts.AlignmentContextUtils; import org.broadinstitute.sting.gatk.contexts.ReferenceContext; +import org.broadinstitute.sting.gatk.downsampling.DownsampleType; import org.broadinstitute.sting.gatk.filters.BadMateFilter; +import org.broadinstitute.sting.gatk.io.StingSAMFileWriter; import org.broadinstitute.sting.gatk.iterators.ReadTransformer; import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; import org.broadinstitute.sting.gatk.walkers.*; @@ -66,6 +69,7 @@ import org.broadinstitute.sting.gatk.walkers.genotyper.UnifiedArgumentCollection import org.broadinstitute.sting.gatk.walkers.genotyper.UnifiedGenotyperEngine; import 
org.broadinstitute.sting.gatk.walkers.genotyper.VariantCallContext; import org.broadinstitute.sting.utils.*; +import org.broadinstitute.sting.utils.activeregion.ActiveRegion; import org.broadinstitute.sting.utils.activeregion.ActiveRegionReadState; import org.broadinstitute.sting.utils.activeregion.ActivityProfileResult; import org.broadinstitute.sting.utils.clipping.ReadClipper; @@ -129,6 +133,7 @@ import java.util.*; @PartitionBy(PartitionType.LOCUS) @BAQMode(ApplicationTime = ReadTransformer.ApplicationTime.FORBIDDEN) @ActiveRegionExtension(extension=65, maxRegion=300) +//@Downsample(by= DownsampleType.BY_SAMPLE, toCoverage=5) public class HaplotypeCaller extends ActiveRegionWalker implements AnnotatorCompatible { /** @@ -140,6 +145,17 @@ public class HaplotypeCaller extends ActiveRegionWalker implem @Output(fullName="graphOutput", shortName="graph", doc="File to which debug assembly graph information should be written", required = false) protected PrintStream graphWriter = null; + /** + * The assembled haplotypes will be written as BAM to this file if requested. Really for debugging purposes only. Note that the output here + * does not include uninformative reads so that not every input read is emitted to the bam. + */ + @Hidden + @Output(fullName="bamOutput", shortName="bam", doc="File to which assembled haplotypes should be written", required = false) + protected StingSAMFileWriter bamWriter = null; + private SAMFileHeader bamHeader = null; + private long uniqueNameCounter = 1; + private final static String readGroupId = "ArtificialHaplotype"; + /** * The PairHMM implementation to use for genotype likelihood calculations. The various implementations balance a tradeoff of accuracy and runtime. 
*/ @@ -168,6 +184,16 @@ public class HaplotypeCaller extends ActiveRegionWalker implem @Argument(fullName="downsampleRegion", shortName="dr", doc="coverage, per-sample, to downsample each active region to", required = false) protected int DOWNSAMPLE_PER_SAMPLE_PER_REGION = 1000; + /** + * If this flag is provided, the haplotype caller will include unmapped reads in the assembly and calling + * when these reads occur in the region being analyzed. Typically, for paired end analyses, one pair of the + * read can map, but if its pair is too divergent then it may be unmapped and placed next to its mate, taking + * the mates contig and alignment start. If this flag is provided the haplotype caller will see such reads, + * and may make use of them in assembly and calling, where possible. + */ + @Argument(fullName="includeUmappedReads", shortName="unmapped", doc="If provided, unmapped reads with chromosomal coordinates (i.e., those placed to their maps) will be included in the assembly and calling", required = false) + protected boolean includeUnmappedReads = false; + @Argument(fullName="useAllelesTrigger", shortName="allelesTrigger", doc = "If specified, use additional trigger on variants found in an external alleles file", required=false) protected boolean USE_ALLELES_TRIGGER = false; @@ -175,6 +201,10 @@ public class HaplotypeCaller extends ActiveRegionWalker implem @Argument(fullName="useFilteredReadsForAnnotations", shortName="useFilteredReadsForAnnotations", doc = "If specified, use the contamination-filtered read maps for the purposes of annotating variants", required=false) protected boolean USE_FILTERED_READ_MAP_FOR_ANNOTATIONS = false; + @Hidden + @Argument(fullName="justDetermineActiveRegions", shortName="justDetermineActiveRegions", doc = "If specified, the HC won't actually do any assembly or calling, it'll just run the upfront active region determination code. 
Useful for benchmarking and scalability testing", required=false) + protected boolean justDetermineActiveRegions = false; + /** * rsIDs from this file are used to populate the ID column of the output. Also, the DB INFO flag will be set when appropriate. * dbSNP is not used in any way for the calculations themselves. @@ -236,6 +266,8 @@ public class HaplotypeCaller extends ActiveRegionWalker implem // the genotyping engine private GenotypingEngine genotypingEngine = null; + private VariantAnnotatorEngine annotationEngine = null; + // fasta reference reader to supplement the edges of the reference sequence private CachingIndexedFastaSequenceFile referenceReader; @@ -280,7 +312,7 @@ public class HaplotypeCaller extends ActiveRegionWalker implem UG_engine_simple_genotyper = new UnifiedGenotyperEngine(getToolkit(), simpleUAC, logger, null, null, samples, GATKVariantContextUtils.DEFAULT_PLOIDY); // initialize the output VCF header - final VariantAnnotatorEngine annotationEngine = new VariantAnnotatorEngine(Arrays.asList(annotationClassesToUse), annotationsToUse, annotationsToExclude, this, getToolkit()); + annotationEngine = new VariantAnnotatorEngine(Arrays.asList(annotationClassesToUse), annotationsToUse, annotationsToExclude, this, getToolkit()); Set headerInfo = new HashSet(); @@ -314,6 +346,9 @@ public class HaplotypeCaller extends ActiveRegionWalker implem assemblyEngine = new SimpleDeBruijnAssembler( DEBUG, graphWriter, minKmer ); likelihoodCalculationEngine = new LikelihoodCalculationEngine( (byte)gcpHMM, DEBUG, pairHMM ); genotypingEngine = new GenotypingEngine( DEBUG, annotationEngine, USE_FILTERED_READ_MAP_FOR_ANNOTATIONS ); + + if ( bamWriter != null ) + setupBamWriter(); } //--------------------------------------------------------------------------------------------------------------- @@ -329,11 +364,20 @@ public class HaplotypeCaller extends ActiveRegionWalker implem // enable non primary and extended reads in the active region @Override public EnumSet 
desiredReadStates() { - return EnumSet.of( - ActiveRegionReadState.PRIMARY, - ActiveRegionReadState.NONPRIMARY, - ActiveRegionReadState.EXTENDED - ); + if ( includeUnmappedReads ) { + throw new UserException.BadArgumentValue("includeUmappedReads", "is not yet functional"); +// return EnumSet.of( +// ActiveRegionReadState.PRIMARY, +// ActiveRegionReadState.NONPRIMARY, +// ActiveRegionReadState.EXTENDED, +// ActiveRegionReadState.UNMAPPED +// ); + } else + return EnumSet.of( + ActiveRegionReadState.PRIMARY, + ActiveRegionReadState.NONPRIMARY, + ActiveRegionReadState.EXTENDED + ); } @Override @@ -371,7 +415,7 @@ public class HaplotypeCaller extends ActiveRegionWalker implem final byte qual = p.getQual(); if( p.isDeletion() || qual > (byte) 18) { int AA = 0; final int AB = 1; int BB = 2; - if( p.getBase() != ref.getBase() || p.isDeletion() || p.isBeforeDeletedBase() || p.isAfterDeletedBase() || p.isBeforeInsertion() || p.isAfterInsertion() || p.isNextToSoftClip() ) { + if( p.getBase() != ref.getBase() || p.isDeletion() || p.isBeforeDeletionStart() || p.isAfterDeletionEnd() || p.isBeforeInsertion() || p.isAfterInsertion() || p.isNextToSoftClip() ) { AA = 2; BB = 0; if( p.isNextToSoftClip() ) { @@ -402,7 +446,10 @@ public class HaplotypeCaller extends ActiveRegionWalker implem //--------------------------------------------------------------------------------------------------------------- @Override - public Integer map( final org.broadinstitute.sting.utils.activeregion.ActiveRegion activeRegion, final RefMetaDataTracker metaDataTracker ) { + public Integer map( final ActiveRegion activeRegion, final RefMetaDataTracker metaDataTracker ) { + if ( justDetermineActiveRegions ) + // we're benchmarking ART and/or the active region determination code in the HC, just leave without doing any work + return 1; final ArrayList activeAllelesToGenotype = new ArrayList(); @@ -420,8 +467,7 @@ public class HaplotypeCaller extends ActiveRegionWalker implem if( 
UG_engine.getUAC().GenotypingMode == GenotypeLikelihoodsCalculationModel.GENOTYPING_MODE.GENOTYPE_GIVEN_ALLELES && activeAllelesToGenotype.isEmpty() ) { return 0; } // No alleles found in this region so nothing to do! finalizeActiveRegion( activeRegion ); // merge overlapping fragments, clip adapter and low qual tails - final Haplotype referenceHaplotype = new Haplotype(activeRegion.getActiveRegionReference(referenceReader)); // Create the reference haplotype which is the bases from the reference that make up the active region - referenceHaplotype.setIsReference(true); + final Haplotype referenceHaplotype = new Haplotype(activeRegion.getActiveRegionReference(referenceReader), true); // Create the reference haplotype which is the bases from the reference that make up the active region final byte[] fullReferenceWithPadding = activeRegion.getFullReference(referenceReader, REFERENCE_PADDING); //int PRUNE_FACTOR = Math.max(MIN_PRUNE_FACTOR, determinePruneFactorFromCoverage( activeRegion )); final ArrayList haplotypes = assemblyEngine.runLocalAssembly( activeRegion, referenceHaplotype, fullReferenceWithPadding, getPaddedLoc(activeRegion), MIN_PRUNE_FACTOR, activeAllelesToGenotype ); @@ -452,9 +498,31 @@ public class HaplotypeCaller extends ActiveRegionWalker implem activeRegion.getLocation(), getToolkit().getGenomeLocParser(), activeAllelesToGenotype ) ) { + annotationEngine.annotateDBs(metaDataTracker, getToolkit().getGenomeLocParser().createGenomeLoc(call), call); vcfWriter.add( call ); } + if ( bamWriter != null ) { + // write the haplotypes to the bam + final GenomeLoc paddedRefLoc = getPaddedLoc(activeRegion); + for ( Haplotype haplotype : haplotypes ) + writeHaplotype(haplotype, paddedRefLoc, bestHaplotypes.contains(haplotype)); + + // we need to remap the Alleles back to the Haplotypes; inefficient but unfortunately this is a requirement currently + final Map alleleToHaplotypeMap = new HashMap(haplotypes.size()); + for ( final Haplotype haplotype : haplotypes ) + 
alleleToHaplotypeMap.put(Allele.create(haplotype.getBases()), haplotype); + + // next, output the interesting reads for each sample aligned against the appropriate haplotype + for ( final PerReadAlleleLikelihoodMap readAlleleLikelihoodMap : stratifiedReadMap.values() ) { + for ( Map.Entry> entry : readAlleleLikelihoodMap.getLikelihoodReadMap().entrySet() ) { + final Allele bestAllele = PerReadAlleleLikelihoodMap.getMostLikelyAllele(entry.getValue()); + if ( bestAllele != Allele.NO_CALL ) + writeReadAgainstHaplotype(entry.getKey(), alleleToHaplotypeMap.get(bestAllele), paddedRefLoc.getStart()); + } + } + } + if( DEBUG ) { System.out.println("----------------------------------------------------------------------------------"); } return 1; // One active region was processed during this map call @@ -548,6 +616,95 @@ public class HaplotypeCaller extends ActiveRegionWalker implem return returnMap; } + private void setupBamWriter() { + // prepare the bam header + bamHeader = new SAMFileHeader(); + bamHeader.setSequenceDictionary(getToolkit().getSAMFileHeader().getSequenceDictionary()); + bamHeader.setSortOrder(SAMFileHeader.SortOrder.coordinate); + + // include the original read groups plus a new artificial one for the haplotypes + final List readGroups = new ArrayList(getToolkit().getSAMFileHeader().getReadGroups()); + final SAMReadGroupRecord rg = new SAMReadGroupRecord(readGroupId); + rg.setSample("HC"); + rg.setSequencingCenter("BI"); + readGroups.add(rg); + bamHeader.setReadGroups(readGroups); + + bamWriter.setPresorted(false); + bamWriter.writeHeader(bamHeader); + } + + private void writeHaplotype(final Haplotype haplotype, final GenomeLoc paddedRefLoc, final boolean isAmongBestHaplotypes) { + final GATKSAMRecord record = new GATKSAMRecord(bamHeader); + record.setReadBases(haplotype.getBases()); + record.setAlignmentStart(paddedRefLoc.getStart() + haplotype.getAlignmentStartHapwrtRef()); + record.setBaseQualities(Utils.dupBytes((byte) '!', 
haplotype.getBases().length)); + record.setCigar(haplotype.getCigar()); + record.setMappingQuality(isAmongBestHaplotypes ? 60 : 0); + record.setReadName("HC" + uniqueNameCounter++); + record.setReadUnmappedFlag(false); + record.setReferenceIndex(paddedRefLoc.getContigIndex()); + record.setAttribute(SAMTag.RG.toString(), readGroupId); + record.setFlags(16); + bamWriter.addAlignment(record); + } + + private void writeReadAgainstHaplotype(final GATKSAMRecord read, final Haplotype haplotype, final int referenceStart) { + + final SWPairwiseAlignment swPairwiseAlignment = new SWPairwiseAlignment(haplotype.getBases(), read.getReadBases(), 5.0, -10.0, -22.0, -1.2); + final int readStartOnHaplotype = swPairwiseAlignment.getAlignmentStart2wrt1(); + final int readStartOnReference = referenceStart + haplotype.getAlignmentStartHapwrtRef() + readStartOnHaplotype; + read.setAlignmentStart(readStartOnReference); + + final Cigar cigar = generateReadCigarFromHaplotype(read, readStartOnHaplotype, haplotype.getCigar()); + read.setCigar(cigar); + + bamWriter.addAlignment(read); + } + + private Cigar generateReadCigarFromHaplotype(final GATKSAMRecord read, final int readStartOnHaplotype, final Cigar haplotypeCigar) { + + int currentReadPos = 0; + int currentHapPos = 0; + final List readCigarElements = new ArrayList(); + + for ( final CigarElement cigarElement : haplotypeCigar.getCigarElements() ) { + + if ( cigarElement.getOperator() == CigarOperator.D ) { + if ( currentReadPos > 0 ) + readCigarElements.add(cigarElement); + } else if ( cigarElement.getOperator() == CigarOperator.M || cigarElement.getOperator() == CigarOperator.I ) { + + final int elementLength = cigarElement.getLength(); + final int nextReadPos = currentReadPos + elementLength; + final int nextHapPos = currentHapPos + elementLength; + + // do we want this element? + if ( currentReadPos > 0 ) { + // do we want the entire element? 
+ if ( nextReadPos < read.getReadLength() ) { + readCigarElements.add(cigarElement); + currentReadPos = nextReadPos; + } + // otherwise, we can finish up and return the cigar + else { + readCigarElements.add(new CigarElement(read.getReadLength() - currentReadPos, cigarElement.getOperator())); + return new Cigar(readCigarElements); + } + } + // do we want part of the element to start? + else if ( currentReadPos == 0 && nextHapPos > readStartOnHaplotype ) { + currentReadPos = Math.min(nextHapPos - readStartOnHaplotype, read.getReadLength()); + readCigarElements.add(new CigarElement(currentReadPos, cigarElement.getOperator())); + } + + currentHapPos = nextHapPos; + } + } + + return new Cigar(readCigarElements); + } + /* private int determinePruneFactorFromCoverage( final ActiveRegion activeRegion ) { final ArrayList readLengthDistribution = new ArrayList(); diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LikelihoodCalculationEngine.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LikelihoodCalculationEngine.java index 8b844817d..57e071189 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LikelihoodCalculationEngine.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LikelihoodCalculationEngine.java @@ -124,9 +124,14 @@ public class LikelihoodCalculationEngine { } private PerReadAlleleLikelihoodMap computeReadLikelihoods( final ArrayList haplotypes, final ArrayList reads) { + // first, a little set up to get copies of the Haplotypes that are Alleles (more efficient than creating them each time) + final int numHaplotypes = haplotypes.size(); + final Map alleleVersions = new HashMap(numHaplotypes); + for ( final Haplotype haplotype : haplotypes ) { + alleleVersions.put(haplotype, Allele.create(haplotype.getBases())); + } final PerReadAlleleLikelihoodMap perReadAlleleLikelihoodMap = new PerReadAlleleLikelihoodMap(); - final int 
numHaplotypes = haplotypes.size(); for( final GATKSAMRecord read : reads ) { final byte[] overallGCP = new byte[read.getReadLength()]; Arrays.fill( overallGCP, constantGCP ); // Is there a way to derive empirical estimates for this from the data? @@ -138,20 +143,17 @@ public class LikelihoodCalculationEngine { readQuals[kkk] = ( readQuals[kkk] > (byte) read.getMappingQuality() ? (byte) read.getMappingQuality() : readQuals[kkk] ); // cap base quality by mapping quality //readQuals[kkk] = ( readQuals[kkk] > readInsQuals[kkk] ? readInsQuals[kkk] : readQuals[kkk] ); // cap base quality by base insertion quality, needs to be evaluated //readQuals[kkk] = ( readQuals[kkk] > readDelQuals[kkk] ? readDelQuals[kkk] : readQuals[kkk] ); // cap base quality by base deletion quality, needs to be evaluated + // TODO -- why is Q18 hard-coded here??? readQuals[kkk] = ( readQuals[kkk] < (byte) 18 ? QualityUtils.MIN_USABLE_Q_SCORE : readQuals[kkk] ); } for( int jjj = 0; jjj < numHaplotypes; jjj++ ) { final Haplotype haplotype = haplotypes.get(jjj); - // TODO -- need to test against a reference/position with non-standard bases - //if ( !Allele.acceptableAlleleBases(haplotype.getBases(), false) ) - // continue; - final int haplotypeStart = ( previousHaplotypeSeen == null ? 
0 : computeFirstDifferingPosition(haplotype.getBases(), previousHaplotypeSeen.getBases()) ); previousHaplotypeSeen = haplotype; - perReadAlleleLikelihoodMap.add(read, Allele.create(haplotype.getBases()), + perReadAlleleLikelihoodMap.add(read, alleleVersions.get(haplotype), pairHMM.computeReadLikelihoodGivenHaplotypeLog10(haplotype.getBases(), read.getReadBases(), readQuals, readInsQuals, readDelQuals, overallGCP, haplotypeStart, jjj == 0)); } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/SimpleDeBruijnAssembler.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/SimpleDeBruijnAssembler.java index e1a94eee7..e16994fa4 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/SimpleDeBruijnAssembler.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/SimpleDeBruijnAssembler.java @@ -338,7 +338,7 @@ public class SimpleDeBruijnAssembler extends LocalAssemblyEngine { for( final DefaultDirectedGraph graph : graphs ) { for ( final KBestPaths.Path path : KBestPaths.getKBestPaths(graph, NUM_BEST_PATHS_PER_KMER_GRAPH) ) { - final Haplotype h = new Haplotype( path.getBases( graph ), path.getScore() ); + final Haplotype h = new Haplotype( path.getBases( graph ) ); if( addHaplotype( h, fullReferenceWithPadding, returnHaplotypes, activeRegionStart, activeRegionStop, false ) ) { // for GGA mode, add the desired allele into the haplotype if it isn't already present diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/validation/ValidationAmplicons.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/validation/ValidationAmplicons.java index 9ba74b98a..8547c0993 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/validation/ValidationAmplicons.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/validation/ValidationAmplicons.java @@ -376,7 +376,7 @@ public class ValidationAmplicons extends 
RodWalker { if ( lowerCaseSNPs ) { sequence.append(Character.toLowerCase((char) ref.getBase())); } else { - sequence.append((char) BaseUtils.N); + sequence.append((char) BaseUtils.Base.N.base); } rawSequence.append(Character.toUpperCase((char) ref.getBase())); diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/ConcordanceMetrics.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/ConcordanceMetrics.java index bb76006bf..8a87c9957 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/ConcordanceMetrics.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/ConcordanceMetrics.java @@ -272,22 +272,7 @@ public class ConcordanceMetrics { @Requires({"evalVC != null","truthVC != null"}) private SiteConcordanceType getMatchType(VariantContext evalVC, VariantContext truthVC) { - if ( evalVC.isMonomorphicInSamples() ) - return SiteConcordanceType.TRUTH_ONLY; - if ( truthVC.isMonomorphicInSamples() ) - return SiteConcordanceType.EVAL_ONLY; - - boolean evalSusbsetTruth = VariantContextUtils.allelesAreSubset(evalVC,truthVC); - boolean truthSubsetEval = VariantContextUtils.allelesAreSubset(truthVC,evalVC); - - if ( evalSusbsetTruth && truthSubsetEval ) - return SiteConcordanceType.ALLELES_MATCH; - else if ( evalSusbsetTruth ) - return SiteConcordanceType.EVAL_SUBSET_TRUTH; - else if ( truthSubsetEval ) - return SiteConcordanceType.EVAL_SUPERSET_TRUTH; - - return SiteConcordanceType.ALLELES_DO_NOT_MATCH; + return SiteConcordanceType.getConcordanceType(evalVC,truthVC); } public int[] getSiteConcordance() { @@ -305,6 +290,27 @@ public class ConcordanceMetrics { EVAL_SUBSET_TRUTH, ALLELES_DO_NOT_MATCH, EVAL_ONLY, - TRUTH_ONLY + TRUTH_ONLY; + + public static SiteConcordanceType getConcordanceType(VariantContext eval, VariantContext truth) { + if ( eval.isMonomorphicInSamples() ) + return TRUTH_ONLY; + if ( truth.isMonomorphicInSamples() ) + return EVAL_ONLY; + + boolean 
evalSubsetTruth = VariantContextUtils.allelesAreSubset(eval,truth); + boolean truthSubsetEval = VariantContextUtils.allelesAreSubset(truth,eval); + + if ( evalSubsetTruth && truthSubsetEval ) + return ALLELES_MATCH; + + if ( evalSubsetTruth ) + return EVAL_SUBSET_TRUTH; + + if ( truthSubsetEval ) + return EVAL_SUPERSET_TRUTH; + + return ALLELES_DO_NOT_MATCH; + } } } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/GenotypeConcordance.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/GenotypeConcordance.java index 64eb4deef..e8965dfc8 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/GenotypeConcordance.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/GenotypeConcordance.java @@ -64,7 +64,7 @@ import java.util.*; /** * A simple walker for performing genotype concordance calculations between two callsets */ -public class GenotypeConcordance extends RodWalker,ConcordanceMetrics> { +public class GenotypeConcordance extends RodWalker>,ConcordanceMetrics> { @Input(fullName="eval",shortName="eval",doc="The variants and genotypes to evaluate",required=true) RodBinding evalBinding; @@ -75,21 +75,41 @@ public class GenotypeConcordance extends RodWalker genotypeFilterExpressionsEval = new ArrayList(); + + @Argument(shortName="gfc", fullName="genotypeFilterExpressionComp", doc="One or more criteria to use to set COMP genotypes to no-call. 
"+ + "These genotype-level filters are only applied to the COMP rod.", required=false) + public ArrayList genotypeFilterExpressionsComp = new ArrayList(); + + @Argument(shortName="moltenize",fullName="moltenize",doc="Molten rather than tabular output") + public boolean moltenize = false; + @Output PrintStream out; - List evalSamples; - List compSamples; + private List evalSamples; + private List compSamples; + private List evalJexls = null; + private List compJexls = null; - // todo -- integration test coverage - // todo -- deal with occurrences like: - // Eval: 20 4000 A C - // Eval: 20 4000 A AC - // Comp: 20 4000 A C - // currently this results in a warning and skipping - // todo -- extend to multiple eval, multiple comp - // todo -- table with "proportion of overlapping sites" (not just eval/comp margins) + // todo -- table with "proportion of overlapping sites" (not just eval/comp margins) [e.g. drop no-calls] + // (this will break all the integration tests of course, due to new formatting) + public void initialize() { + evalJexls = initializeJexl(genotypeFilterExpressionsEval); + compJexls = initializeJexl(genotypeFilterExpressionsComp); + } + + private List initializeJexl(ArrayList genotypeFilterExpressions) { + ArrayList dummyNames = new ArrayList(genotypeFilterExpressions.size()); + int expCount = 1; + for ( String exp : genotypeFilterExpressions ) { + dummyNames.add(String.format("gfe%d",expCount++)); + } + return VariantContextUtils.initializeMatchExps(dummyNames, genotypeFilterExpressions); + } public ConcordanceMetrics reduceInit() { Map headerMap = GATKVCFUtils.getVCFHeadersFromRods(getToolkit(), Arrays.asList(evalBinding,compBinding)); @@ -101,120 +121,320 @@ public class GenotypeConcordance extends RodWalker map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) { - Pair evalCompPair = null; + public List> map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) { + List> evalCompPair = new 
ArrayList>(3); if ( tracker != null && ( tracker.getValues(evalBinding,ref.getLocus()).size() > 0 || - tracker.getValues(compBinding,ref.getLocus()).size() > 0 ) ) { + tracker.getValues(compBinding,ref.getLocus()).size() > 0 ) ) { List eval = tracker.getValues(evalBinding,ref.getLocus()); List comp = tracker.getValues(compBinding,ref.getLocus()); if ( eval.size() > 1 || comp.size() > 1 ) { - logger.warn("Eval or Comp Rod at position "+ref.getLocus().toString()+" has multiple records. Site will be skipped."); - return evalCompPair; + if ( noDuplicateTypes(eval) && noDuplicateTypes(comp) ) { + logger.info("Eval or Comp Rod at position " + ref.getLocus().toString() + " has multiple records. Resolving."); + evalCompPair = resolveMultipleRecords(eval,comp); + } else { + logger.warn("Eval or Comp Rod at position "+ref.getLocus().toString()+" has multiple records of the same type. This locus will be skipped."); + } + } else { + // if a rod is missing, explicitly create a variant context with 'missing' genotypes. Slow, but correct. + // note that if there is no eval rod there must be a comp rod, and also the reverse + VariantContext evalContext = eval.size() == 1 ? eval.get(0) : createEmptyContext(comp.get(0),evalSamples); + VariantContext compContext = comp.size() == 1 ? comp.get(0) : createEmptyContext(eval.get(0),compSamples); + evalContext = filterGenotypes(evalContext,ignoreFilters,evalJexls); + compContext = filterGenotypes(compContext,ignoreFilters,compJexls); + evalCompPair.add(new Pair(evalContext,compContext)); } - // if a rod is missing, explicitly create a variant context with 'missing' genotypes. Slow, but correct. - // note that if there is no eval rod there must be a comp rod, and also the reverse - VariantContext evalContext = eval.size() == 1 ? eval.get(0) : createEmptyContext(ref,comp.get(0),evalSamples); - VariantContext compContext = comp.size() == 1 ? 
comp.get(0) : createEmptyContext(ref,eval.get(0),compSamples); - evalContext = filterGenotypes(evalContext,ignoreFilters); - compContext = filterGenotypes(compContext,ignoreFilters); - evalCompPair = new Pair(evalContext,compContext); } return evalCompPair; } - public ConcordanceMetrics reduce(Pair evalComp, ConcordanceMetrics metrics) { - if ( evalComp != null ) + private boolean noDuplicateTypes(List vcList) { + HashSet types = new HashSet(vcList.size()); + for ( VariantContext vc : vcList ) { + VariantContext.Type type = vc.getType(); + if ( types.contains(type) ) + return false; + types.add(type); + } + + return true; + } + + /** + * The point of this method is to match up pairs of evals and comps by their type (or alternate alleles for mixed). + * Basically multiple records could exist for a site such as: + * Eval: 20 4000 A C + * Eval: 20 4000 A AC + * Comp: 20 4000 A C + * So for each eval, loop through the comps. If the eval alleles (non-emptily) intersect the comp alleles, pair them up and remove + * that comp record. Continue until we're out of evals or comps. This is n^2, but should rarely actually happen. + * + * The remaining unpaired records get paired with empty contexts. 
So in the example above we'd get a list of: + * 1 - (20,4000,A/C | 20,4000,A/C) + * 2 - (20,4000,A/AC | Empty ) + * @param evalList - list of eval variant contexts + * @param compList - list of comp variant contexts + * @return resolved pairs of the input lists + */ + private List> resolveMultipleRecords(List evalList, List compList) { + List> resolvedPairs = new ArrayList>(evalList.size()+compList.size()); // oversized but w/e + List pairedEval = new ArrayList(evalList.size()); + for ( VariantContext eval : evalList ) { + VariantContext.Type evalType = eval.getType(); + Set evalAlleles = new HashSet(eval.getAlternateAlleles()); + VariantContext pairedComp = null; + for ( VariantContext comp : compList ) { + if ( evalType.equals(comp.getType()) ) { + pairedComp = comp; + break; + } else if ( eval.isMixed() || comp.isMixed() ) { + for ( Allele compAllele : comp.getAlternateAlleles() ) { + if ( evalAlleles.contains(compAllele) ) { + pairedComp = comp; + break; + } + } + } + } + if ( pairedComp != null ) { + compList.remove(pairedComp); + resolvedPairs.add(new Pair(filterGenotypes(eval,ignoreFilters,evalJexls),filterGenotypes(pairedComp,ignoreFilters,compJexls))); + pairedEval.add(eval); + if ( compList.size() < 1 ) + break; + } + } + evalList.removeAll(pairedEval); + for ( VariantContext unpairedEval : evalList ) { + resolvedPairs.add(new Pair(filterGenotypes(unpairedEval,ignoreFilters,evalJexls),createEmptyContext(unpairedEval,compSamples))); + } + + for ( VariantContext unpairedComp : compList ) { + resolvedPairs.add(new Pair(createEmptyContext(unpairedComp,evalSamples),filterGenotypes(unpairedComp,ignoreFilters,compJexls))); + } + + return resolvedPairs; + } + + public ConcordanceMetrics reduce(List> evalCompList, ConcordanceMetrics metrics) { + for ( Pair evalComp : evalCompList) metrics.update(evalComp.getFirst(),evalComp.getSecond()); return metrics; } + private static double repairNaN(double d) { + if ( Double.isNaN(d) ) { + return 0.0; + } + return d; + } + 
public void onTraversalDone(ConcordanceMetrics metrics) { + // todo -- this is over 200 lines of code just to format the output and could use some serious cleanup GATKReport report = new GATKReport(); GATKReportTable concordanceCounts = new GATKReportTable("GenotypeConcordance_Counts","Per-sample concordance tables: comparison counts",2+GenotypeType.values().length*GenotypeType.values().length); GATKReportTable concordanceEvalProportions = new GATKReportTable("GenotypeConcordance_EvalProportions", "Per-sample concordance tables: proportions of genotypes called in eval",2+GenotypeType.values().length*GenotypeType.values().length); GATKReportTable concordanceCompProportions = new GATKReportTable("GenotypeConcordance_CompProportions", "Per-sample concordance tables: proportions of genotypes called in comp",2+GenotypeType.values().length*GenotypeType.values().length); GATKReportTable concordanceSummary = new GATKReportTable("GenotypeConcordance_Summary","Per-sample summary statistics: NRS and NRD",2); GATKReportTable siteConcordance = new GATKReportTable("SiteConcordance_Summary","Site-level summary statistics",ConcordanceMetrics.SiteConcordanceType.values().length); - concordanceCompProportions.addColumn("Sample","%s"); - concordanceCounts.addColumn("Sample","%s"); - concordanceEvalProportions.addColumn("Sample","%s"); - concordanceSummary.addColumn("Sample","%s"); - for ( GenotypeType evalType : GenotypeType.values() ) { - for ( GenotypeType compType : GenotypeType.values() ) { - String colKey = String.format("%s_%s", evalType.toString(), compType.toString()); - concordanceCounts.addColumn(colKey,"%d"); - if ( evalType == GenotypeType.HET || evalType == GenotypeType.HOM_REF || evalType == GenotypeType.HOM_VAR) - concordanceEvalProportions.addColumn(colKey,"%.3f"); - if ( compType == GenotypeType.HET || compType == GenotypeType.HOM_VAR || compType == GenotypeType.HOM_REF ) - concordanceCompProportions.addColumn(colKey,"%.3f"); - } - } - 
concordanceEvalProportions.addColumn("Mismatching_Alleles","%.3f"); - concordanceCompProportions.addColumn("Mismatching_Alleles","%.3f"); - concordanceCounts.addColumn("Mismatching_Alleles","%d"); - concordanceSummary.addColumn("Non-Reference Sensitivity","%.3f"); - concordanceSummary.addColumn("Non-Reference Discrepancy","%.3f"); - for (ConcordanceMetrics.SiteConcordanceType type : ConcordanceMetrics.SiteConcordanceType.values() ) { - siteConcordance.addColumn(type.toString(),"%d"); - } + if ( moltenize ) { + concordanceCompProportions.addColumn("Sample","%s"); + concordanceCounts.addColumn("Sample","%s"); + concordanceEvalProportions.addColumn("Sample","%s"); + concordanceSummary.addColumn("Sample","%s"); - for ( Map.Entry entry : metrics.getPerSampleGenotypeConcordance().entrySet() ) { - ConcordanceMetrics.GenotypeConcordanceTable table = entry.getValue(); - concordanceEvalProportions.set(entry.getKey(),"Sample",entry.getKey()); - concordanceCompProportions.set(entry.getKey(),"Sample",entry.getKey()); - concordanceCounts.set(entry.getKey(),"Sample",entry.getKey()); + concordanceCompProportions.addColumn("Eval_Genotype","%s"); + concordanceCounts.addColumn("Eval_Genotype","%s"); + concordanceEvalProportions.addColumn("Eval_Genotype","%s"); + concordanceSummary.addColumn("Non-Reference_Discrepancy","%.3f"); + + concordanceCompProportions.addColumn("Comp_Genotype","%s"); + concordanceCounts.addColumn("Comp_Genotype","%s"); + concordanceEvalProportions.addColumn("Comp_Genotype","%s"); + concordanceSummary.addColumn("Non-Reference_Sensitivity","%.3f"); + + concordanceCompProportions.addColumn("Proportion","%.3f"); + concordanceCounts.addColumn("Count","%d"); + concordanceEvalProportions.addColumn("Proportion","%.3f"); + + for ( Map.Entry entry : metrics.getPerSampleGenotypeConcordance().entrySet() ) { + ConcordanceMetrics.GenotypeConcordanceTable table = entry.getValue(); + for ( GenotypeType evalType : GenotypeType.values() ) { + for ( GenotypeType compType : 
GenotypeType.values() ) { + String rowKey = String.format("%s_%s_%s",entry.getKey(),evalType.toString(),compType.toString()); + concordanceCounts.set(rowKey,"Sample",entry.getKey()); + concordanceCounts.set(rowKey,"Eval_Genotype",evalType.toString()); + concordanceCounts.set(rowKey,"Comp_Genotype",evalType.toString()); + int count = table.get(evalType, compType); + concordanceCounts.set(rowKey,"Count",count); + if ( evalType == GenotypeType.HET || evalType == GenotypeType.HOM_REF || evalType == GenotypeType.HOM_VAR) { + concordanceEvalProportions.set(rowKey,"Sample",entry.getKey()); + concordanceEvalProportions.set(rowKey,"Eval_Genotype",evalType.toString()); + concordanceEvalProportions.set(rowKey,"Comp_Genotype",evalType.toString()); + concordanceEvalProportions.set(rowKey,"Proportion",repairNaN(( (double) count)/table.getnEvalGenotypes(evalType))); + } + if ( compType == GenotypeType.HET || compType == GenotypeType.HOM_VAR || compType == GenotypeType.HOM_REF ) { + concordanceCompProportions.set(rowKey,"Sample",entry.getKey()); + concordanceCompProportions.set(rowKey,"Eval_Genotype",evalType.toString()); + concordanceCompProportions.set(rowKey,"Comp_Genotype",evalType.toString()); + concordanceCompProportions.set(rowKey,"Proportion",repairNaN(( (double) count)/table.getnCompGenotypes(compType))); + } + } + } + String mismatchKey = String.format("%s_%s",entry.getKey(),"Mismatching"); + concordanceCounts.set(mismatchKey,"Sample",entry.getKey()); + concordanceCounts.set(mismatchKey,"Eval_Genotype","Mismatching_Alleles"); + concordanceCounts.set(mismatchKey,"Comp_Genotype","Mismatching_Alleles"); + concordanceEvalProportions.set(mismatchKey,"Sample",entry.getKey()); + concordanceEvalProportions.set(mismatchKey,"Eval_Genotype","Mismatching_Alleles"); + concordanceEvalProportions.set(mismatchKey,"Comp_Genotype","Mismatching_Alleles"); + concordanceCompProportions.set(mismatchKey,"Sample",entry.getKey()); + 
concordanceCompProportions.set(mismatchKey,"Eval_Genotype","Mismatching_Alleles"); + concordanceCompProportions.set(mismatchKey,"Comp_Genotype","Mismatching_Alleles"); + concordanceEvalProportions.set(mismatchKey,"Proportion", repairNaN(( (double) table.getnMismatchingAlt() )/table.getnCalledEvalGenotypes())); + concordanceCompProportions.set(mismatchKey,"Proportion", repairNaN(( (double) table.getnMismatchingAlt() )/table.getnCalledCompGenotypes())); + concordanceCounts.set(mismatchKey,"Count",table.getnMismatchingAlt()); + } + + String sampleKey = "ALL"; + ConcordanceMetrics.GenotypeConcordanceTable table = metrics.getOverallGenotypeConcordance(); + for ( GenotypeType evalType : GenotypeType.values() ) { + for ( GenotypeType compType : GenotypeType.values() ) { + String rowKey = String.format("%s_%s_%s",sampleKey,evalType.toString(),compType.toString()); + concordanceCounts.set(rowKey,"Sample",sampleKey); + concordanceCounts.set(rowKey,"Eval_Genotype",evalType.toString()); + concordanceCounts.set(rowKey,"Comp_Genotype",evalType.toString()); + int count = table.get(evalType, compType); + concordanceCounts.set(rowKey,"Count",count); + if ( evalType == GenotypeType.HET || evalType == GenotypeType.HOM_REF || evalType == GenotypeType.HOM_VAR) { + concordanceEvalProportions.set(rowKey,"Sample",sampleKey); + concordanceEvalProportions.set(rowKey,"Eval_Genotype",evalType.toString()); + concordanceEvalProportions.set(rowKey,"Comp_Genotype",evalType.toString()); + concordanceEvalProportions.set(rowKey,"Proportion",repairNaN(( (double) count)/table.getnEvalGenotypes(evalType))); + } + if ( compType == GenotypeType.HET || compType == GenotypeType.HOM_VAR || compType == GenotypeType.HOM_REF ) { + concordanceCompProportions.set(rowKey,"Sample",sampleKey); + concordanceCompProportions.set(rowKey,"Eval_Genotype",evalType.toString()); + concordanceCompProportions.set(rowKey,"Comp_Genotype",evalType.toString()); + concordanceCompProportions.set(rowKey,"Proportion",repairNaN(( 
(double) count)/table.getnCompGenotypes(compType))); + } + } + } + String rowKey = String.format("%s_%s",sampleKey,"Mismatching"); + concordanceCounts.set(rowKey,"Sample",sampleKey); + concordanceCounts.set(rowKey,"Eval_Genotype","Mismatching_Alleles"); + concordanceCounts.set(rowKey,"Comp_Genotype","Mismatching_Alleles"); + concordanceEvalProportions.set(rowKey,"Sample",sampleKey); + concordanceEvalProportions.set(rowKey,"Eval_Genotype","Mismatching_Alleles"); + concordanceEvalProportions.set(rowKey,"Comp_Genotype","Mismatching_Alleles"); + concordanceCompProportions.set(rowKey,"Sample",sampleKey); + concordanceCompProportions.set(rowKey,"Eval_Genotype","Mismatching_Alleles"); + concordanceCompProportions.set(rowKey,"Comp_Genotype","Mismatching_Alleles"); + concordanceEvalProportions.set(rowKey,"Proportion", repairNaN(( (double) table.getnMismatchingAlt() )/table.getnCalledEvalGenotypes())); + concordanceCompProportions.set(rowKey,"Proportion", repairNaN(( (double) table.getnMismatchingAlt() )/table.getnCalledCompGenotypes())); + concordanceCounts.set(rowKey,"Count",table.getnMismatchingAlt()); + + for ( Map.Entry nrsEntry : metrics.getPerSampleNRS().entrySet() ) { + concordanceSummary.set(nrsEntry.getKey(),"Sample",nrsEntry.getKey()); + concordanceSummary.set(nrsEntry.getKey(),"Non-Reference_Sensitivity",nrsEntry.getValue()); + } + for ( Map.Entry nrdEntry : metrics.getPerSampleNRD().entrySet() ) { + concordanceSummary.set(nrdEntry.getKey(),"Non-Reference_Discrepancy",nrdEntry.getValue()); + } + concordanceSummary.set("ALL_NRS_NRD","Sample","ALL"); + concordanceSummary.set("ALL_NRS_NRD","Non-Reference_Sensitivity",metrics.getOverallNRS()); + concordanceSummary.set("ALL_NRS_NRD","Non-Reference_Discrepancy",metrics.getOverallNRD()); + + + for (ConcordanceMetrics.SiteConcordanceType type : ConcordanceMetrics.SiteConcordanceType.values() ) { + siteConcordance.addColumn(type.toString(),"%d"); + } + + for (ConcordanceMetrics.SiteConcordanceType type : 
ConcordanceMetrics.SiteConcordanceType.values() ) { + siteConcordance.set("Comparison",type.toString(),metrics.getOverallSiteConcordance().get(type)); + } + + } else { + concordanceCompProportions.addColumn("Sample","%s"); + concordanceCounts.addColumn("Sample","%s"); + concordanceEvalProportions.addColumn("Sample","%s"); + concordanceSummary.addColumn("Sample","%s"); + for ( GenotypeType evalType : GenotypeType.values() ) { + for ( GenotypeType compType : GenotypeType.values() ) { + String colKey = String.format("%s_%s", evalType.toString(), compType.toString()); + concordanceCounts.addColumn(colKey,"%d"); + if ( evalType == GenotypeType.HET || evalType == GenotypeType.HOM_REF || evalType == GenotypeType.HOM_VAR) + concordanceEvalProportions.addColumn(colKey,"%.3f"); + if ( compType == GenotypeType.HET || compType == GenotypeType.HOM_VAR || compType == GenotypeType.HOM_REF ) + concordanceCompProportions.addColumn(colKey,"%.3f"); + } + } + concordanceEvalProportions.addColumn("Mismatching_Alleles","%.3f"); + concordanceCompProportions.addColumn("Mismatching_Alleles","%.3f"); + concordanceCounts.addColumn("Mismatching_Alleles","%d"); + concordanceSummary.addColumn("Non-Reference Sensitivity","%.3f"); + concordanceSummary.addColumn("Non-Reference Discrepancy","%.3f"); + for (ConcordanceMetrics.SiteConcordanceType type : ConcordanceMetrics.SiteConcordanceType.values() ) { + siteConcordance.addColumn(type.toString(),"%d"); + } + + for ( Map.Entry entry : metrics.getPerSampleGenotypeConcordance().entrySet() ) { + ConcordanceMetrics.GenotypeConcordanceTable table = entry.getValue(); + concordanceEvalProportions.set(entry.getKey(),"Sample",entry.getKey()); + concordanceCompProportions.set(entry.getKey(),"Sample",entry.getKey()); + concordanceCounts.set(entry.getKey(),"Sample",entry.getKey()); + for ( GenotypeType evalType : GenotypeType.values() ) { + for ( GenotypeType compType : GenotypeType.values() ) { + String colKey = 
String.format("%s_%s",evalType.toString(),compType.toString()); + int count = table.get(evalType, compType); + concordanceCounts.set(entry.getKey(),colKey,count); + if ( evalType == GenotypeType.HET || evalType == GenotypeType.HOM_REF || evalType == GenotypeType.HOM_VAR) + concordanceEvalProportions.set(entry.getKey(),colKey,repairNaN(( (double) count)/table.getnEvalGenotypes(evalType))); + if ( compType == GenotypeType.HET || compType == GenotypeType.HOM_VAR || compType == GenotypeType.HOM_REF ) + concordanceCompProportions.set(entry.getKey(),colKey,repairNaN(( (double) count)/table.getnCompGenotypes(compType))); + } + } + concordanceEvalProportions.set(entry.getKey(),"Mismatching_Alleles", repairNaN(( (double) table.getnMismatchingAlt() )/table.getnCalledEvalGenotypes())); + concordanceCompProportions.set(entry.getKey(),"Mismatching_Alleles", repairNaN(( (double) table.getnMismatchingAlt() )/table.getnCalledCompGenotypes())); + concordanceCounts.set(entry.getKey(),"Mismatching_Alleles",table.getnMismatchingAlt()); + } + + String rowKey = "ALL"; + concordanceCompProportions.set(rowKey,"Sample",rowKey); + concordanceEvalProportions.set(rowKey,"Sample",rowKey); + concordanceCounts.set(rowKey,"Sample",rowKey); + ConcordanceMetrics.GenotypeConcordanceTable table = metrics.getOverallGenotypeConcordance(); for ( GenotypeType evalType : GenotypeType.values() ) { for ( GenotypeType compType : GenotypeType.values() ) { String colKey = String.format("%s_%s",evalType.toString(),compType.toString()); - int count = table.get(evalType, compType); - concordanceCounts.set(entry.getKey(),colKey,count); + int count = table.get(evalType,compType); + concordanceCounts.set(rowKey,colKey,count); if ( evalType == GenotypeType.HET || evalType == GenotypeType.HOM_REF || evalType == GenotypeType.HOM_VAR) - concordanceEvalProportions.set(entry.getKey(),colKey,( (double) count)/table.getnEvalGenotypes(evalType)); + concordanceEvalProportions.set(rowKey,colKey,repairNaN(( (double) 
count)/table.getnEvalGenotypes(evalType))); if ( compType == GenotypeType.HET || compType == GenotypeType.HOM_VAR || compType == GenotypeType.HOM_REF ) - concordanceCompProportions.set(entry.getKey(),colKey,( (double) count)/table.getnCompGenotypes(compType)); + concordanceCompProportions.set(rowKey,colKey,repairNaN(( (double) count)/table.getnCompGenotypes(compType))); } } - concordanceEvalProportions.set(entry.getKey(),"Mismatching_Alleles", ( (double) table.getnMismatchingAlt() )/table.getnCalledEvalGenotypes()); - concordanceCompProportions.set(entry.getKey(),"Mismatching_Alleles", ( (double) table.getnMismatchingAlt() )/table.getnCalledCompGenotypes()); - concordanceCounts.set(entry.getKey(),"Mismatching_Alleles",table.getnMismatchingAlt()); - } + concordanceEvalProportions.set(rowKey,"Mismatching_Alleles", repairNaN(( (double) table.getnMismatchingAlt() )/table.getnCalledEvalGenotypes())); + concordanceCompProportions.set(rowKey,"Mismatching_Alleles", repairNaN(( (double) table.getnMismatchingAlt() )/table.getnCalledCompGenotypes())); + concordanceCounts.set(rowKey,"Mismatching_Alleles",table.getnMismatchingAlt()); - String rowKey = "ALL"; - concordanceCompProportions.set(rowKey,"Sample",rowKey); - concordanceEvalProportions.set(rowKey,"Sample",rowKey); - concordanceCounts.set(rowKey,"Sample",rowKey); - ConcordanceMetrics.GenotypeConcordanceTable table = metrics.getOverallGenotypeConcordance(); - for ( GenotypeType evalType : GenotypeType.values() ) { - for ( GenotypeType compType : GenotypeType.values() ) { - String colKey = String.format("%s_%s",evalType.toString(),compType.toString()); - int count = table.get(evalType,compType); - concordanceCounts.set(rowKey,colKey,count); - if ( evalType == GenotypeType.HET || evalType == GenotypeType.HOM_REF || evalType == GenotypeType.HOM_VAR) - concordanceEvalProportions.set(rowKey,colKey,( (double) count)/table.getnEvalGenotypes(evalType)); - if ( compType == GenotypeType.HET || compType == GenotypeType.HOM_VAR || 
compType == GenotypeType.HOM_REF ) - concordanceCompProportions.set(rowKey,colKey,( (double) count)/table.getnCompGenotypes(compType)); + for ( Map.Entry nrsEntry : metrics.getPerSampleNRS().entrySet() ) { + concordanceSummary.set(nrsEntry.getKey(),"Sample",nrsEntry.getKey()); + concordanceSummary.set(nrsEntry.getKey(),"Non-Reference Sensitivity",nrsEntry.getValue()); } - } - concordanceEvalProportions.set(rowKey,"Mismatching_Alleles", ( (double) table.getnMismatchingAlt() )/table.getnCalledEvalGenotypes()); - concordanceCompProportions.set(rowKey,"Mismatching_Alleles", ( (double) table.getnMismatchingAlt() )/table.getnCalledCompGenotypes()); - concordanceCounts.set(rowKey,"Mismatching_Alleles",table.getnMismatchingAlt()); + for ( Map.Entry nrdEntry : metrics.getPerSampleNRD().entrySet() ) { + concordanceSummary.set(nrdEntry.getKey(),"Non-Reference Discrepancy",nrdEntry.getValue()); + } + concordanceSummary.set("ALL","Sample","ALL"); + concordanceSummary.set("ALL","Non-Reference Sensitivity",metrics.getOverallNRS()); + concordanceSummary.set("ALL","Non-Reference Discrepancy",metrics.getOverallNRD()); - for ( Map.Entry nrsEntry : metrics.getPerSampleNRS().entrySet() ) { - concordanceSummary.set(nrsEntry.getKey(),"Sample",nrsEntry.getKey()); - concordanceSummary.set(nrsEntry.getKey(),"Non-Reference Sensitivity",nrsEntry.getValue()); - } - for ( Map.Entry nrdEntry : metrics.getPerSampleNRD().entrySet() ) { - concordanceSummary.set(nrdEntry.getKey(),"Non-Reference Discrepancy",nrdEntry.getValue()); - } - concordanceSummary.set("ALL","Sample","ALL"); - concordanceSummary.set("ALL","Non-Reference Sensitivity",metrics.getOverallNRS()); - concordanceSummary.set("ALL","Non-Reference Discrepancy",metrics.getOverallNRD()); - - for (ConcordanceMetrics.SiteConcordanceType type : ConcordanceMetrics.SiteConcordanceType.values() ) { - siteConcordance.set("Comparison",type.toString(),metrics.getOverallSiteConcordance().get(type)); + for (ConcordanceMetrics.SiteConcordanceType type 
: ConcordanceMetrics.SiteConcordanceType.values() ) { + siteConcordance.set("Comparison",type.toString(),metrics.getOverallSiteConcordance().get(type)); + } } report.addTable(concordanceCompProportions); @@ -226,7 +446,7 @@ public class GenotypeConcordance extends RodWalker samples) { + public VariantContext createEmptyContext(VariantContext other, List samples) { VariantContextBuilder builder = new VariantContextBuilder(); // set the alleles to be the same builder.alleles(other.getAlleles()); @@ -239,13 +459,29 @@ public class GenotypeConcordance extends RodWalker exps) { if ( ! context.isFiltered() || ignoreSiteFilter ) { - // todo -- add genotype-level jexl filtering here - return context; + List filteredGenotypes = new ArrayList(context.getNSamples()); + for ( Genotype g : context.getGenotypes() ) { + Map matchMap = VariantContextUtils.match(context, g, exps); + boolean filtered = false; + for ( Boolean b : matchMap.values() ) { + if ( b ) { + filtered = true; + break; + } + } + if ( filtered ) { + filteredGenotypes.add(GenotypeBuilder.create(g.getSampleName(),Arrays.asList(Allele.NO_CALL,Allele.NO_CALL),g.getExtendedAttributes())); + } else { + filteredGenotypes.add(g); + } + } + VariantContextBuilder builder = new VariantContextBuilder(context); + builder.genotypes(filteredGenotypes); + return builder.make(); } + VariantContextBuilder builder = new VariantContextBuilder(); builder.alleles(Arrays.asList(context.getReference())); builder.loc(context.getChr(),context.getStart(),context.getEnd()); @@ -256,4 +492,4 @@ public class GenotypeConcordance extends RodWalker implements TreeRe headerLines = new LinkedHashSet(getSelectedHeaders(headerLines)); // Optionally add in the intervals. 
- if (includeIntervals && getToolkit().getArguments().intervals != null) { - for (IntervalBinding intervalBinding : getToolkit().getArguments().intervals) { - String source = intervalBinding.getSource(); - if (source == null) - continue; - File file = new File(source); - if (file.exists()) { - headerLines.add(new VCFHeaderLine(VCFHeader.INTERVALS_KEY, FilenameUtils.getBaseName(file.getName()))); - } else { - headerLines.add(new VCFHeaderLine(VCFHeader.INTERVALS_KEY, source)); + if (includeIntervals) { + IntervalArgumentCollection intervalArguments = getToolkit().getArguments().intervalArguments; + if (intervalArguments.intervals != null) { + for (IntervalBinding intervalBinding : intervalArguments.intervals) { + String source = intervalBinding.getSource(); + if (source == null) + continue; + File file = new File(source); + if (file.exists()) { + headerLines.add(new VCFHeaderLine(VCFHeader.INTERVALS_KEY, FilenameUtils.getBaseName(file.getName()))); + } else { + headerLines.add(new VCFHeaderLine(VCFHeader.INTERVALS_KEY, source)); + } } } + + if (intervalArguments.excludeIntervals != null) { + for (IntervalBinding intervalBinding : intervalArguments.excludeIntervals) { + String source = intervalBinding.getSource(); + if (source == null) + continue; + File file = new File(source); + if (file.exists()) { + headerLines.add(new VCFHeaderLine(VCFHeader.EXCLUDE_INTERVALS_KEY, FilenameUtils.getBaseName(file.getName()))); + } else { + headerLines.add(new VCFHeaderLine(VCFHeader.EXCLUDE_INTERVALS_KEY, source)); + } + } + } + + if (intervalArguments.intervalMerging != IntervalMergingRule.ALL) { + headerLines.add(new VCFHeaderLine(VCFHeader.INTERVAL_MERGING_KEY, String.valueOf(intervalArguments.intervalMerging))); + } + + if (intervalArguments.intervalSetRule != IntervalSetRule.UNION) { + headerLines.add(new VCFHeaderLine(VCFHeader.INTERVAL_SET_RULE_KEY, String.valueOf(intervalArguments.intervalSetRule))); + } + + if (intervalArguments.intervalPadding != 0) { + 
headerLines.add(new VCFHeaderLine(VCFHeader.INTERVAL_PADDING_KEY, String.valueOf(intervalArguments.intervalPadding))); + } } TreeSet vcfSamples = new TreeSet(SampleUtils.getSampleList(vcfRods, VariantContextUtils.GenotypeMergeType.REQUIRE_UNIQUE)); diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/ArtificialReadPileupTestProvider.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/ArtificialReadPileupTestProvider.java index 80ef7293f..047d69c5f 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/ArtificialReadPileupTestProvider.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/ArtificialReadPileupTestProvider.java @@ -51,6 +51,7 @@ import net.sf.samtools.SAMReadGroupRecord; import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; import org.broadinstitute.sting.gatk.contexts.AlignmentContext; import org.broadinstitute.sting.gatk.contexts.ReferenceContext; +import org.broadinstitute.sting.utils.locusiterator.LocusIteratorByState; import org.broadinstitute.variant.utils.BaseUtils; import org.broadinstitute.sting.utils.GenomeLoc; import org.broadinstitute.sting.utils.GenomeLocParser; @@ -214,8 +215,7 @@ public class ArtificialReadPileupTestProvider { read.setReadNegativeStrandFlag(false); read.setReadGroup(sampleRG(sample)); - - pileupElements.add(new PileupElement(read,readOffset,false,isBeforeDeletion, false, isBeforeInsertion,false,false,altBases,Math.abs(eventLength))); + pileupElements.add(LocusIteratorByState.createPileupForReadAndOffset(read, readOffset)); } return pileupElements; diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIntegrationTest.java index 527e5c5e1..45a42d018 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIntegrationTest.java +++ 
b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIntegrationTest.java @@ -108,7 +108,7 @@ public class UnifiedGenotyperIntegrationTest extends WalkerTest { public void testMultipleSNPAlleles() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( "-T UnifiedGenotyper -R " + b37KGReference + " --no_cmdline_in_header -glm BOTH --dbsnp " + b37dbSNP129 + " -I " + privateTestDir + "multiallelic.snps.bam -o %s -L " + privateTestDir + "multiallelic.snps.intervals", 1, - Arrays.asList("b41b95aaa2c453c9b75b3b29a9c2718e")); + Arrays.asList("35479a79e1ce7c15493bd77e58cadcaa")); executeTest("test Multiple SNP alleles", spec); } @@ -124,7 +124,7 @@ public class UnifiedGenotyperIntegrationTest extends WalkerTest { public void testReverseTrim() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( "-T UnifiedGenotyper -R " + b37KGReference + " --no_cmdline_in_header -glm INDEL -I " + validationDataLocation + "CEUTrio.HiSeq.b37.chr20.10_11mb.bam -o %s -L 20:10289124 -L 20:10090289", 1, - Arrays.asList("44e9f6cf11b4efecb454cd3de8de9877")); + Arrays.asList("1e61de694b51d7c0f26da5179ee6bb0c")); executeTest("test reverse trim", spec); } @@ -238,12 +238,12 @@ public class UnifiedGenotyperIntegrationTest extends WalkerTest { @Test public void testOutputParameterAllConfident() { - testOutputParameters("--output_mode EMIT_ALL_CONFIDENT_SITES", "9dbc9389db39cf9697e93e0bf529314f"); + testOutputParameters("--output_mode EMIT_ALL_CONFIDENT_SITES", "5649f72de04e1391e0f2bb86843d3d72"); } @Test public void testOutputParameterAllSites() { - testOutputParameters("--output_mode EMIT_ALL_SITES", "8b26088a035e579c4afd3b46737291e4"); + testOutputParameters("--output_mode EMIT_ALL_SITES", "cb151bb9e90680b12714d481091ed209"); } private void testOutputParameters(final String args, final String md5) { @@ -363,7 +363,7 @@ public class UnifiedGenotyperIntegrationTest extends WalkerTest { " -o %s" + " -L 1:10,000,000-10,500,000", 1, - 
Arrays.asList("39c7a813fd6ee82d3604f2a868b35b2a")); + Arrays.asList("8231ae37b52b927db9fc1e5c221b0ba0")); executeTest(String.format("test indel calling, multiple technologies"), spec); } @@ -391,13 +391,13 @@ public class UnifiedGenotyperIntegrationTest extends WalkerTest { public void testMultiSampleIndels1() { WalkerTest.WalkerTestSpec spec1 = new WalkerTest.WalkerTestSpec( baseCommandIndels + " -I " + validationDataLocation + "low_coverage_CEU.chr1.10k-11k.bam -o %s -L 1:10450700-10551000", 1, - Arrays.asList("5667a699a3a13474f2d1cd2d6b01cd5b")); + Arrays.asList("a47810de2f6ef8087f4644064a0814bc")); List result = executeTest("test MultiSample Pilot1 CEU indels", spec1).getFirst(); WalkerTest.WalkerTestSpec spec2 = new WalkerTest.WalkerTestSpec( baseCommandIndels + " --genotyping_mode GENOTYPE_GIVEN_ALLELES -alleles " + result.get(0).getAbsolutePath() + " -I " + validationDataLocation + "low_coverage_CEU.chr1.10k-11k.bam -o %s -L 1:10450700-10551000", 1, - Arrays.asList("b6c1d5cd28ff584c5f5037afef4e883a")); + Arrays.asList("53b8d2b0fa63c5d1019855e8e0db28f0")); executeTest("test MultiSample Pilot1 CEU indels using GENOTYPE_GIVEN_ALLELES", spec2); } @@ -497,18 +497,18 @@ public class UnifiedGenotyperIntegrationTest extends WalkerTest { public void testReducedBam() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( "-T UnifiedGenotyper -R " + b37KGReference + " --no_cmdline_in_header -I " + privateTestDir + "bamExample.ReducedRead.ADAnnotation.bam -o %s -L 1:67,225,396-67,288,518", 1, - Arrays.asList("092e42a712afb660ec79ff11c55933e2")); + Arrays.asList("8b9a9fc2e7150acbe2dac91b4620f304")); executeTest("test calling on a ReducedRead BAM", spec); } @Test public void testReducedBamSNPs() { - testReducedCalling("SNP", "c0de74ab8f4f14eb3a2c5d55c200ac5f"); + testReducedCalling("SNP", "b5991dddbfb59366614ff8819062649f"); } @Test public void testReducedBamINDELs() { - testReducedCalling("INDEL", "1c9aaf65ffaa12bb766855265a1c3f8e"); + 
testReducedCalling("INDEL", "a85c110fcac9574a54c7daccb1e2d5ae"); } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java index 142fa39bf..939b9873c 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java @@ -50,6 +50,7 @@ import org.broadinstitute.sting.WalkerTest; import org.testng.annotations.Test; import java.util.Arrays; +import java.util.Collections; public class HaplotypeCallerIntegrationTest extends WalkerTest { final static String REF = b37KGReference; @@ -67,7 +68,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { @Test public void testHaplotypeCallerMultiSample() { - HCTest(CEUTRIO_BAM, "", "35c8425b44429ac7468c2cd26f8f5a42"); + HCTest(CEUTRIO_BAM, "", "b8f7b741445ce6b6ea491c794ce75c17"); } @Test @@ -75,11 +76,33 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { HCTest(NA12878_BAM, "", "a2c63f6e6e51a01019bdbd23125bdb15"); } - // TODO -- add more tests for GGA mode, especially with input alleles that are complex variants and/or not trimmed + @Test(enabled = false) + public void testHaplotypeCallerSingleSampleWithDbsnp() { + HCTest(NA12878_BAM, "-D " + b37dbSNP132, ""); + } + @Test public void testHaplotypeCallerMultiSampleGGA() { HCTest(CEUTRIO_BAM, "--max_alternate_alleles 3 -gt_mode GENOTYPE_GIVEN_ALLELES -out_mode EMIT_ALL_SITES -alleles " + validationDataLocation + "combined.phase1.chr20.raw.indels.sites.vcf", - "d918d25b22a551cae5d70ea30d7feed1"); + "c679ae7f04bdfda896b5c046d35e043c"); + } + + private void HCTestComplexGGA(String bam, String args, String md5) { + final String base = String.format("-T HaplotypeCaller -R %s -I %s", REF, bam) + " 
--no_cmdline_in_header -o %s -minPruning 3 -gt_mode GENOTYPE_GIVEN_ALLELES -out_mode EMIT_ALL_SITES -alleles " + validationDataLocation + "combined.phase1.chr20.raw.indels.sites.vcf"; + final WalkerTestSpec spec = new WalkerTestSpec(base + " " + args, Arrays.asList(md5)); + executeTest("testHaplotypeCallerComplexGGA: args=" + args, spec); + } + + @Test + public void testHaplotypeCallerMultiSampleGGAComplex() { + HCTestComplexGGA(NA12878_CHR20_BAM, "-L 20:119673-119823 -L 20:121408-121538", + "8730a9ebaeecae913dca2fb5a0d4e946"); + } + + @Test + public void testHaplotypeCallerMultiSampleGGAMultiAllelic() { + HCTestComplexGGA(NA12878_CHR20_BAM, "-L 20:133041-133161 -L 20:300207-300337", + "d590c8d6d5e58d685401b65a23846893"); } private void HCTestComplexVariants(String bam, String args, String md5) { @@ -115,10 +138,15 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { HCTestIndelQualityScores(NA12878_RECALIBRATED_BAM, "", "29f1125df5ab27cc937a144ae08ac735"); } + // That problem bam came from a user on the forum and it spotted a problem where the ReadClipper + // was modifying the GATKSamRecord and that was screwing up the traversal engine from map call to + // map call. So the test is there for consistency but not for correctness. I'm not sure we can trust + // any of the calls in that region because it is so messy. 
The only thing I would maybe be worried about is + // that the three calls that are missing happen to all be the left most calls in the region @Test public void HCTestProblematicReadsModifiedInActiveRegions() { final String base = String.format("-T HaplotypeCaller -R %s -I %s", REF, privateTestDir + "haplotype-problem-4.bam") + " --no_cmdline_in_header -o %s -minPruning 3 -L 4:49139026-49139965"; - final WalkerTestSpec spec = new WalkerTestSpec(base, Arrays.asList("2e8e6313228b0387008437feae7f5469")); + final WalkerTestSpec spec = new WalkerTestSpec(base, Arrays.asList("31db0a2d9eb07f86e0a89f0d97169072")); executeTest("HCTestProblematicReadsModifiedInActiveRegions: ", spec); } @@ -129,6 +157,14 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { executeTest("HCTestStructuralIndels: ", spec); } + @Test + public void HCTestDoesNotFailOnBadRefBase() { + // don't care about the output - just want to make sure it doesn't fail + final String base = String.format("-T HaplotypeCaller -R %s -I %s", REF, privateTestDir + "NA12878.readsOverBadBase.chr3.bam") + " --no_cmdline_in_header -o /dev/null -L 3:60830000-60840000 --minPruning 3 -stand_call_conf 2 -stand_emit_conf 2"; + final WalkerTestSpec spec = new WalkerTestSpec(base, Collections.emptyList()); + executeTest("HCTestDoesNotFailOnBadRefBase: ", spec); + } + // -------------------------------------------------------------------------------------------------------------- // // testing reduced reads @@ -142,4 +178,12 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { Arrays.asList("8a400b0c46f41447fcc35a907e34f384")); executeTest("HC calling on a ReducedRead BAM", spec); } + + @Test + public void testReducedBamWithReadsNotFullySpanningDeletion() { + WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( + "-T HaplotypeCaller -R " + b37KGReference + " --no_cmdline_in_header -I " + privateTestDir + "reduced.readNotFullySpanningDeletion.bam -o %s -L 1:167871297", 1, + 
Arrays.asList("4e8121dd9dc90478f237bd6ae4d19920")); + executeTest("test calling on a ReducedRead BAM where the reads do not fully span a deletion", spec); + } } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/variantutils/ConcordanceMetricsUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/variantutils/ConcordanceMetricsUnitTest.java index 28f128dd3..6db44efd5 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/variantutils/ConcordanceMetricsUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/variantutils/ConcordanceMetricsUnitTest.java @@ -111,8 +111,8 @@ public class ConcordanceMetricsUnitTest extends BaseTest { private Pair getData1() { - Allele reference_A = Allele.create(BaseUtils.A,true); - Allele alt_C = Allele.create(BaseUtils.C); + Allele reference_A = Allele.create(BaseUtils.Base.A.base,true); + Allele alt_C = Allele.create(BaseUtils.Base.C.base); Genotype sam_1_1_eval = GenotypeBuilder.create("test1_sample1", Arrays.asList(reference_A,reference_A)); Genotype sam_1_2_eval = GenotypeBuilder.create("test1_sample2", Arrays.asList(reference_A,alt_C)); @@ -160,9 +160,9 @@ public class ConcordanceMetricsUnitTest extends BaseTest { private Pair getData2() { - Allele reference_A = Allele.create(BaseUtils.A,true); - Allele alt_C = Allele.create(BaseUtils.C); - Allele alt_T = Allele.create(BaseUtils.T); + Allele reference_A = Allele.create(BaseUtils.Base.A.base,true); + Allele alt_C = Allele.create(BaseUtils.Base.C.base); + Allele alt_T = Allele.create(BaseUtils.Base.T.base); Genotype sam_1_1_eval = GenotypeBuilder.create("test1_sample1", Arrays.asList(reference_A,reference_A)); Genotype sam_1_2_eval = GenotypeBuilder.create("test1_sample2", Arrays.asList(reference_A,alt_T)); @@ -213,10 +213,10 @@ public class ConcordanceMetricsUnitTest extends BaseTest { private Pair getData3() { - Allele reference_ACT = Allele.create(new byte[]{BaseUtils.A,BaseUtils.C,BaseUtils.T},true); - 
Allele alt_AC = Allele.create(new byte[]{BaseUtils.A,BaseUtils.C}); - Allele alt_A = Allele.create(BaseUtils.A); - Allele alt_ATT = Allele.create(new byte[]{BaseUtils.A,BaseUtils.T,BaseUtils.T}); + Allele reference_ACT = Allele.create(new byte[]{BaseUtils.Base.A.base,BaseUtils.Base.C.base,BaseUtils.Base.T.base},true); + Allele alt_AC = Allele.create(new byte[]{BaseUtils.Base.A.base,BaseUtils.Base.C.base}); + Allele alt_A = Allele.create(BaseUtils.Base.A.base); + Allele alt_ATT = Allele.create(new byte[]{BaseUtils.Base.A.base,BaseUtils.Base.T.base,BaseUtils.Base.T.base}); Genotype sam_1_1_eval = GenotypeBuilder.create("test1_sample1", Arrays.asList(reference_ACT,alt_ATT)); Genotype sam_1_2_eval = GenotypeBuilder.create("test1_sample2", Arrays.asList(alt_A,alt_A)); @@ -267,9 +267,9 @@ public class ConcordanceMetricsUnitTest extends BaseTest { private Pair getData4() { - Allele reference_A = Allele.create(BaseUtils.A,true); - Allele alt_C = Allele.create(BaseUtils.C); - Allele alt_T = Allele.create(BaseUtils.T); + Allele reference_A = Allele.create(BaseUtils.Base.A.base,true); + Allele alt_C = Allele.create(BaseUtils.Base.C.base); + Allele alt_T = Allele.create(BaseUtils.Base.T.base); Genotype sam_1_1_eval = GenotypeBuilder.create("test1_sample1", Arrays.asList(reference_A,reference_A)); Genotype sam_1_2_eval = GenotypeBuilder.create("test1_sample2", Arrays.asList(Allele.NO_CALL,Allele.NO_CALL)); @@ -316,9 +316,9 @@ public class ConcordanceMetricsUnitTest extends BaseTest { private Pair getData5() { - Allele reference_A = Allele.create(BaseUtils.A,true); - Allele alt_C = Allele.create(BaseUtils.C); - Allele alt_T = Allele.create(BaseUtils.T); + Allele reference_A = Allele.create(BaseUtils.Base.A.base,true); + Allele alt_C = Allele.create(BaseUtils.Base.C.base); + Allele alt_T = Allele.create(BaseUtils.Base.T.base); Genotype sam_1_1_eval = GenotypeBuilder.create("test1_sample1", Arrays.asList(reference_A,reference_A)); Genotype sam_1_2_eval = 
GenotypeBuilder.create("test1_sample2", new ArrayList(0)); @@ -368,8 +368,8 @@ public class ConcordanceMetricsUnitTest extends BaseTest { private List> getData6() { - Allele reference_A = Allele.create(BaseUtils.A,true); - Allele alt_C = Allele.create(BaseUtils.C); + Allele reference_A = Allele.create(BaseUtils.Base.A.base,true); + Allele alt_C = Allele.create(BaseUtils.Base.C.base); // site 1 - @@ -396,8 +396,8 @@ public class ConcordanceMetricsUnitTest extends BaseTest { Pair testDataSite1 = new Pair(eval_1_builder.make(),truth_1_builder.make()); - reference_A = Allele.create(BaseUtils.A,true); - Allele alt_T = Allele.create(BaseUtils.T); + reference_A = Allele.create(BaseUtils.Base.A.base,true); + Allele alt_T = Allele.create(BaseUtils.Base.T.base); // site 2 - // sample 1: no-call/hom-ref @@ -421,7 +421,7 @@ public class ConcordanceMetricsUnitTest extends BaseTest { Pair testDataSite2 = new Pair(eval_1_builder.make(),truth_1_builder.make()); - Allele alt_G = Allele.create(BaseUtils.G); + Allele alt_G = Allele.create(BaseUtils.Base.G.base); // site 3 - // sample 1: alleles do not match @@ -605,10 +605,10 @@ public class ConcordanceMetricsUnitTest extends BaseTest { public List> getData7() { - Allele ref1 = Allele.create(BaseUtils.T,true); - Allele alt1 = Allele.create(BaseUtils.C); - Allele alt2 = Allele.create(BaseUtils.G); - Allele alt3 = Allele.create(BaseUtils.A); + Allele ref1 = Allele.create(BaseUtils.Base.T.base,true); + Allele alt1 = Allele.create(BaseUtils.Base.C.base); + Allele alt2 = Allele.create(BaseUtils.Base.G.base); + Allele alt3 = Allele.create(BaseUtils.Base.A.base); GenomeLoc loc1 = genomeLocParser.createGenomeLoc("chr1",1,1); VariantContextBuilder site1Eval = new VariantContextBuilder(); diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/variantutils/GenotypeConcordanceIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/variantutils/GenotypeConcordanceIntegrationTest.java new file mode 100644 
index 000000000..e69d1ee60 --- /dev/null +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/variantutils/GenotypeConcordanceIntegrationTest.java @@ -0,0 +1,118 @@ +/* + * Copyright (c) 2010. + * + * Permission is hereby granted, free of charge, to any person + * obtaining a copy of this software and associated documentation + * files (the "Software"), to deal in the Software without + * restriction, including without limitation the rights to use, + * copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following + * conditions: + * + * The above copyright notice and this permission notice shall be + * included in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES + * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT + * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, + * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR + * THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ */ + +package org.broadinstitute.sting.gatk.walkers.variantutils; + +import org.broadinstitute.sting.WalkerTest; +import org.broadinstitute.sting.utils.exceptions.UserException; +import org.testng.annotations.Test; + +import java.util.Arrays; + +public class GenotypeConcordanceIntegrationTest extends WalkerTest { + + protected static final String emptyMd5 = "d41d8cd98f00b204e9800998ecf8427e"; + + public static String baseTestString(String eval, String comp) { + return "-T GenotypeConcordance -R " + b37KGReference + " --eval " + validationDataLocation + eval + " --comp " + validationDataLocation + comp + " -o %s"; + } + + @Test + public void testIndelConcordance() { + WalkerTestSpec spec = new WalkerTestSpec( + baseTestString("NA12878.Jan2013.haplotypeCaller.subset.indels.vcf", "NA12878.Jan2013.bestPractices.subset.indels.vcf"), + 0, + Arrays.asList("0f29a0c6dc44066228c8cb204fd53ec0") + ); + + executeTest("test indel concordance", spec); + } + + @Test + public void testNonoverlapingSamples() { + WalkerTestSpec spec = new WalkerTestSpec( + baseTestString("GenotypeConcordanceNonOverlapTest_Eval.vcf", "GenotypeConcordanceNonOverlapTest_Comp.vcf"), + 0, + Arrays.asList("fc725022d47b4b5f8a6ef87f0f1ffe89") + ); + + executeTest("test non-overlapping samples", spec); + } + + @Test + public void testNonoverlappingSamplesMoltenized() { + WalkerTestSpec spec = new WalkerTestSpec( + baseTestString("GenotypeConcordanceNonOverlapTest_Eval.vcf", "GenotypeConcordanceNonOverlapTest_Comp.vcf") + " -moltenize", + 0, + Arrays.asList("370141088362d0ab7054be5249c49c11") + ); + + executeTest("Test moltenized output",spec); + } + + @Test + public void testMultipleRecordsPerSite() { + WalkerTestSpec spec = new WalkerTestSpec( + baseTestString("GenotypeConcordance.multipleRecordsTest1.eval.vcf","GenotypeConcordance.multipleRecordsTest1.comp.vcf"), + 0, + Arrays.asList("352d59c4ac0cee5eb8ddbc9404b19ce9") + ); + + executeTest("test multiple records per site",spec); + } + + @Test + public 
void testGQFilteringEval() { + WalkerTestSpec spec = new WalkerTestSpec( + baseTestString("genotypeConcordanceFilterTest.vcf","genotypeConcordanceFilterTest.vcf") + " -gfe 'GQ<30'", + 0, + Arrays.asList("b7b495ccfa6d50a6be3e095d3f6d3c52") + ); + + executeTest("Test filtering on the EVAL rod",spec); + } + + @Test + public void testFloatFilteringComp() { + WalkerTestSpec spec = new WalkerTestSpec( + baseTestString("genotypeConcordanceFilterTest.vcf","genotypeConcordanceFilterTest.vcf") + " -gfc 'LX<0.50'", + 0, + Arrays.asList("6406b16cde7960b8943edf594303afd6") + ); + + executeTest("Test filtering on the COMP rod", spec); + } + + @Test + public void testCombinedFilters() { + WalkerTestSpec spec = new WalkerTestSpec( + baseTestString("genotypeConcordanceFilterTest.vcf","genotypeConcordanceFilterTest.vcf") + " -gfc 'LX<0.52' -gfe 'DP<5' -gfe 'GQ<37'", + 0, + Arrays.asList("26ffd06215b6177acce0ea9f35d73d31") + ); + + executeTest("Test filtering on both rods",spec); + } +} diff --git a/public/java/src/org/broadinstitute/sting/commandline/IntervalArgumentCollection.java b/public/java/src/org/broadinstitute/sting/commandline/IntervalArgumentCollection.java new file mode 100644 index 000000000..3f76ae652 --- /dev/null +++ b/public/java/src/org/broadinstitute/sting/commandline/IntervalArgumentCollection.java @@ -0,0 +1,70 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. 
+* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. +*/ + +package org.broadinstitute.sting.commandline; + +import org.broad.tribble.Feature; +import org.broadinstitute.sting.utils.interval.IntervalMergingRule; +import org.broadinstitute.sting.utils.interval.IntervalSetRule; + +import java.util.List; + +public class IntervalArgumentCollection { + /** + * Using this option one can instruct the GATK engine to traverse over only part of the genome. This argument can be specified multiple times. + * One may use samtools-style intervals either explicitly (e.g. -L chr1 or -L chr1:100-200) or listed in a file (e.g. -L myFile.intervals). + * Additionally, one may specify a rod file to traverse over the positions for which there is a record in the file (e.g. -L file.vcf). + * To specify the completely unmapped reads in the BAM file (i.e. those without a reference contig) use -L unmapped. + */ + @Input(fullName = "intervals", shortName = "L", doc = "One or more genomic intervals over which to operate. Can be explicitly specified on the command line or in a file (including a rod file)", required = false) + public List> intervals = null; + + /** + * Using this option one can instruct the GATK engine NOT to traverse over certain parts of the genome. This argument can be specified multiple times. + * One may use samtools-style intervals either explicitly (e.g. -XL chr1 or -XL chr1:100-200) or listed in a file (e.g. -XL myFile.intervals). 
+ * Additionally, one may specify a rod file to skip over the positions for which there is a record in the file (e.g. -XL file.vcf). + */ + @Input(fullName = "excludeIntervals", shortName = "XL", doc = "One or more genomic intervals to exclude from processing. Can be explicitly specified on the command line or in a file (including a rod file)", required = false) + public List> excludeIntervals = null; + + /** + * How should the intervals specified by multiple -L or -XL arguments be combined? Using this argument one can, for example, traverse over all of the positions + * for which there is a record in a VCF but just in chromosome 20 (-L chr20 -L file.vcf -isr INTERSECTION). + */ + @Argument(fullName = "interval_set_rule", shortName = "isr", doc = "Indicates the set merging approach the interval parser should use to combine the various -L or -XL inputs", required = false) + public IntervalSetRule intervalSetRule = IntervalSetRule.UNION; + + /** + * Should abutting (but not overlapping) intervals be treated as separate intervals? + */ + @Argument(fullName = "interval_merging", shortName = "im", doc = "Indicates the interval merging rule we should use for abutting intervals", required = false) + public IntervalMergingRule intervalMerging = IntervalMergingRule.ALL; + + /** + * For example, '-L chr1:100' with a padding value of 20 would turn into '-L chr1:80-120'. 
+ */ + @Argument(fullName = "interval_padding", shortName = "ip", doc = "Indicates how many basepairs of padding to include around each of the intervals specified with the -L/--intervals argument", required = false) + public int intervalPadding = 0; +} diff --git a/public/java/src/org/broadinstitute/sting/gatk/GenomeAnalysisEngine.java b/public/java/src/org/broadinstitute/sting/gatk/GenomeAnalysisEngine.java index ba5577730..9b801be7d 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/GenomeAnalysisEngine.java +++ b/public/java/src/org/broadinstitute/sting/gatk/GenomeAnalysisEngine.java @@ -55,7 +55,6 @@ import org.broadinstitute.sting.gatk.samples.SampleDBBuilder; import org.broadinstitute.sting.gatk.walkers.*; import org.broadinstitute.sting.utils.*; import org.broadinstitute.sting.utils.classloader.PluginManager; -import org.broadinstitute.sting.utils.collections.Pair; import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; import org.broadinstitute.sting.utils.exceptions.UserException; import org.broadinstitute.sting.utils.interval.IntervalUtils; @@ -361,7 +360,6 @@ public class GenomeAnalysisEngine { * Returns a list of active, initialized read transformers * * @param walker the walker we need to apply read transformers too - * @return a non-null list of read transformers */ public void initializeReadTransformers(final Walker walker) { final List activeTransformers = new ArrayList(); @@ -672,41 +670,7 @@ public class GenomeAnalysisEngine { * Setup the intervals to be processed */ protected void initializeIntervals() { - // return if no interval arguments at all - if ( argCollection.intervals == null && argCollection.excludeIntervals == null ) - return; - - // Note that the use of '-L all' is no longer supported. 
- - // if include argument isn't given, create new set of all possible intervals - - final Pair includeExcludePair = IntervalUtils.parseIntervalBindingsPair( - this.referenceDataSource, - argCollection.intervals, - argCollection.intervalSetRule, argCollection.intervalMerging, argCollection.intervalPadding, - argCollection.excludeIntervals); - - final GenomeLocSortedSet includeSortedSet = includeExcludePair.getFirst(); - final GenomeLocSortedSet excludeSortedSet = includeExcludePair.getSecond(); - - // if no exclude arguments, can return parseIntervalArguments directly - if ( excludeSortedSet == null ) - intervals = includeSortedSet; - - // otherwise there are exclude arguments => must merge include and exclude GenomeLocSortedSets - else { - intervals = includeSortedSet.subtractRegions(excludeSortedSet); - - // logging messages only printed when exclude (-XL) arguments are given - final long toPruneSize = includeSortedSet.coveredSize(); - final long toExcludeSize = excludeSortedSet.coveredSize(); - final long intervalSize = intervals.coveredSize(); - logger.info(String.format("Initial include intervals span %d loci; exclude intervals span %d loci", toPruneSize, toExcludeSize)); - logger.info(String.format("Excluding %d loci from original intervals (%.2f%% reduction)", - toPruneSize - intervalSize, (toPruneSize - intervalSize) / (0.01 * toPruneSize))); - } - - logger.info(String.format("Processing %d bp from intervals", intervals.coveredSize())); + intervals = IntervalUtils.parseIntervalArguments(this.referenceDataSource, argCollection.intervalArguments); } /** @@ -842,6 +806,8 @@ public class GenomeAnalysisEngine { if (argCollection.keepProgramRecords) removeProgramRecords = false; + final boolean keepReadsInLIBS = walker instanceof ActiveRegionWalker; + return new SAMDataSource( samReaderIDs, threadAllocation, @@ -856,7 +822,8 @@ public class GenomeAnalysisEngine { readTransformers, includeReadsWithDeletionAtLoci(), argCollection.defaultBaseQualities, - 
removeProgramRecords); + removeProgramRecords, + keepReadsInLIBS); } /** diff --git a/public/java/src/org/broadinstitute/sting/gatk/ReadProperties.java b/public/java/src/org/broadinstitute/sting/gatk/ReadProperties.java index 409b08e5d..1ca0a8a46 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/ReadProperties.java +++ b/public/java/src/org/broadinstitute/sting/gatk/ReadProperties.java @@ -61,6 +61,7 @@ public class ReadProperties { private final ValidationExclusion exclusionList; private final Collection supplementalFilters; private final List readTransformers; + private final boolean keepUniqueReadListInLIBS; private final boolean includeReadsWithDeletionAtLoci; private final boolean useOriginalBaseQualities; private final byte defaultBaseQualities; @@ -74,6 +75,10 @@ public class ReadProperties { return includeReadsWithDeletionAtLoci; } + public boolean keepUniqueReadListInLIBS() { + return keepUniqueReadListInLIBS; + } + /** * Gets a list of the files acting as sources of reads. * @return A list of files storing reads data. @@ -161,6 +166,8 @@ public class ReadProperties { * will explicitly list reads with deletion over the current reference base; otherwise, only observed * bases will be seen in the pileups, and the deletions will be skipped silently. * @param defaultBaseQualities if the reads have incomplete quality scores, set them all to defaultBaseQuality. 
+ * @param keepUniqueReadListInLIBS If true, we will tell LocusIteratorByState to track the unique reads it sees + * This is really useful for ActiveRegionTraversals */ public ReadProperties( Collection samFiles, SAMFileHeader header, @@ -172,7 +179,8 @@ public class ReadProperties { Collection supplementalFilters, List readTransformers, boolean includeReadsWithDeletionAtLoci, - byte defaultBaseQualities) { + byte defaultBaseQualities, + final boolean keepUniqueReadListInLIBS) { this.readers = samFiles; this.header = header; this.sortOrder = sortOrder; @@ -184,5 +192,6 @@ public class ReadProperties { this.includeReadsWithDeletionAtLoci = includeReadsWithDeletionAtLoci; this.useOriginalBaseQualities = useOriginalBaseQualities; this.defaultBaseQualities = defaultBaseQualities; + this.keepUniqueReadListInLIBS = keepUniqueReadListInLIBS; } } diff --git a/public/java/src/org/broadinstitute/sting/gatk/arguments/GATKArgumentCollection.java b/public/java/src/org/broadinstitute/sting/gatk/arguments/GATKArgumentCollection.java index ab09064dd..62ca38ad2 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/arguments/GATKArgumentCollection.java +++ b/public/java/src/org/broadinstitute/sting/gatk/arguments/GATKArgumentCollection.java @@ -26,11 +26,7 @@ package org.broadinstitute.sting.gatk.arguments; import net.sf.samtools.SAMFileReader; -import org.broad.tribble.Feature; -import org.broadinstitute.sting.commandline.Argument; -import org.broadinstitute.sting.commandline.Hidden; -import org.broadinstitute.sting.commandline.Input; -import org.broadinstitute.sting.commandline.IntervalBinding; +import org.broadinstitute.sting.commandline.*; import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; import org.broadinstitute.sting.gatk.downsampling.DownsampleType; import org.broadinstitute.sting.gatk.downsampling.DownsamplingMethod; @@ -38,8 +34,6 @@ import org.broadinstitute.sting.gatk.phonehome.GATKRunReport; import 
org.broadinstitute.sting.gatk.samples.PedigreeValidationType; import org.broadinstitute.sting.utils.QualityUtils; import org.broadinstitute.sting.utils.baq.BAQ; -import org.broadinstitute.sting.utils.interval.IntervalMergingRule; -import org.broadinstitute.sting.utils.interval.IntervalSetRule; import java.io.File; import java.util.ArrayList; @@ -100,41 +94,8 @@ public class GATKArgumentCollection { @Argument(fullName = "read_filter", shortName = "rf", doc = "Specify filtration criteria to apply to each read individually", required = false) public List readFilters = new ArrayList(); - /** - * Using this option one can instruct the GATK engine to traverse over only part of the genome. This argument can be specified multiple times. - * One may use samtools-style intervals either explicitly (e.g. -L chr1 or -L chr1:100-200) or listed in a file (e.g. -L myFile.intervals). - * Additionally, one may specify a rod file to traverse over the positions for which there is a record in the file (e.g. -L file.vcf). - * To specify the completely unmapped reads in the BAM file (i.e. those without a reference contig) use -L unmapped. - */ - @Input(fullName = "intervals", shortName = "L", doc = "One or more genomic intervals over which to operate. Can be explicitly specified on the command line or in a file (including a rod file)", required = false) - public List> intervals = null; - - /** - * Using this option one can instruct the GATK engine NOT to traverse over certain parts of the genome. This argument can be specified multiple times. - * One may use samtools-style intervals either explicitly (e.g. -XL chr1 or -XL chr1:100-200) or listed in a file (e.g. -XL myFile.intervals). - * Additionally, one may specify a rod file to skip over the positions for which there is a record in the file (e.g. -XL file.vcf). - */ - @Input(fullName = "excludeIntervals", shortName = "XL", doc = "One or more genomic intervals to exclude from processing. 
Can be explicitly specified on the command line or in a file (including a rod file)", required = false) - public List> excludeIntervals = null; - - /** - * How should the intervals specified by multiple -L or -XL arguments be combined? Using this argument one can, for example, traverse over all of the positions - * for which there is a record in a VCF but just in chromosome 20 (-L chr20 -L file.vcf -isr INTERSECTION). - */ - @Argument(fullName = "interval_set_rule", shortName = "isr", doc = "Indicates the set merging approach the interval parser should use to combine the various -L or -XL inputs", required = false) - public IntervalSetRule intervalSetRule = IntervalSetRule.UNION; - - /** - * Should abutting (but not overlapping) intervals be treated as separate intervals? - */ - @Argument(fullName = "interval_merging", shortName = "im", doc = "Indicates the interval merging rule we should use for abutting intervals", required = false) - public IntervalMergingRule intervalMerging = IntervalMergingRule.ALL; - - /** - * For example, '-L chr1:100' with a padding value of 20 would turn into '-L chr1:80-120'. 
- */ - @Argument(fullName = "interval_padding", shortName = "ip", doc = "Indicates how many basepairs of padding to include around each of the intervals specified with the -L/--intervals argument", required = false) - public int intervalPadding = 0; + @ArgumentCollection + public IntervalArgumentCollection intervalArguments = new IntervalArgumentCollection(); @Input(fullName = "reference_sequence", shortName = "R", doc = "Reference sequence file", required = false) public File referenceFile = null; diff --git a/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/LocusShardDataProvider.java b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/LocusShardDataProvider.java index 41fe5a175..45c9af995 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/LocusShardDataProvider.java +++ b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/LocusShardDataProvider.java @@ -29,7 +29,7 @@ import net.sf.picard.reference.IndexedFastaSequenceFile; import org.broadinstitute.sting.gatk.ReadProperties; import org.broadinstitute.sting.gatk.datasources.reads.Shard; import org.broadinstitute.sting.gatk.datasources.rmd.ReferenceOrderedDataSource; -import org.broadinstitute.sting.gatk.iterators.LocusIterator; +import org.broadinstitute.sting.utils.locusiterator.LocusIterator; import org.broadinstitute.sting.utils.GenomeLoc; import org.broadinstitute.sting.utils.GenomeLocParser; diff --git a/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/LocusView.java b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/LocusView.java index b020a43ba..f77819426 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/LocusView.java +++ b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/LocusView.java @@ -28,9 +28,10 @@ package org.broadinstitute.sting.gatk.datasources.providers; import org.broadinstitute.sting.gatk.downsampling.DownsampleType; 
import org.broadinstitute.sting.gatk.ReadProperties; import org.broadinstitute.sting.gatk.contexts.AlignmentContext; -import org.broadinstitute.sting.gatk.iterators.LocusIterator; +import org.broadinstitute.sting.utils.locusiterator.LocusIterator; import org.broadinstitute.sting.utils.GenomeLoc; import org.broadinstitute.sting.utils.GenomeLocParser; +import org.broadinstitute.sting.utils.locusiterator.LocusIteratorByState; import java.util.Arrays; import java.util.Collection; @@ -212,4 +213,10 @@ public abstract class LocusView extends LocusIterator implements View { private boolean isContainedInShard(GenomeLoc location) { return locus.containsP(location); } + + // TODO -- remove me + @Override + public LocusIteratorByState getLIBS() { + return loci.getLIBS(); + } } diff --git a/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/ManagingReferenceOrderedView.java b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/ManagingReferenceOrderedView.java index 7d3cac33d..09b72f5eb 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/ManagingReferenceOrderedView.java +++ b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/ManagingReferenceOrderedView.java @@ -77,13 +77,17 @@ public class ManagingReferenceOrderedView implements ReferenceOrderedView { * @return A tracker containing information about this locus. */ public RefMetaDataTracker getReferenceOrderedDataAtLocus( GenomeLoc loc, ReferenceContext referenceContext ) { - List bindings = states.isEmpty() ? 
Collections.emptyList() : new ArrayList(states.size()); + if ( states.isEmpty() ) + return RefMetaDataTracker.EMPTY_TRACKER; + else { + List bindings = new ArrayList(states.size()); - for ( ReferenceOrderedDataState state: states ) - // todo -- warning, I removed the reference to the name from states - bindings.add( state.iterator.seekForward(loc) ); + for ( ReferenceOrderedDataState state: states ) + // todo -- warning, I removed the reference to the name from states + bindings.add( state.iterator.seekForward(loc) ); - return new RefMetaDataTracker(bindings); + return new RefMetaDataTracker(bindings); + } } /** diff --git a/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/SAMDataSource.java b/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/SAMDataSource.java index cb47ffe4c..c9a3b0df0 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/SAMDataSource.java +++ b/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/SAMDataSource.java @@ -158,6 +158,9 @@ public class SAMDataSource { /** * Create a new SAM data source given the supplied read metadata. + * + * For testing purposes + * * @param samFiles list of reads files. */ public SAMDataSource(Collection samFiles, ThreadAllocation threadAllocation, Integer numFileHandles, GenomeLocParser genomeLocParser) { @@ -177,6 +180,8 @@ public class SAMDataSource { /** * See complete constructor. Does not enable BAQ by default. + * + * For testing purposes */ public SAMDataSource( Collection samFiles, @@ -203,6 +208,7 @@ public class SAMDataSource { Collections.emptyList(), includeReadsWithDeletionAtLoci, (byte) -1, + false, false); } @@ -219,6 +225,7 @@ public class SAMDataSource { * will explicitly list reads with deletion over the current reference base; otherwise, only observed * bases will be seen in the pileups, and the deletions will be skipped silently. 
* @param defaultBaseQualities if the reads have incomplete quality scores, set them all to defaultBaseQuality. + * @param keepReadsInLIBS should we keep a unique list of reads in LIBS? */ public SAMDataSource( Collection samFiles, @@ -234,7 +241,8 @@ public class SAMDataSource { List readTransformers, boolean includeReadsWithDeletionAtLoci, byte defaultBaseQualities, - boolean removeProgramRecords) { + boolean removeProgramRecords, + final boolean keepReadsInLIBS) { this.readMetrics = new ReadMetrics(); this.genomeLocParser = genomeLocParser; @@ -306,7 +314,8 @@ public class SAMDataSource { supplementalFilters, readTransformers, includeReadsWithDeletionAtLoci, - defaultBaseQualities); + defaultBaseQualities, + keepReadsInLIBS); // cache the read group id (original) -> read group id (merged) // and read group id (merged) -> read group id (original) mappings. diff --git a/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/Shard.java b/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/Shard.java index 2c03363ba..5b4c2afda 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/Shard.java +++ b/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/Shard.java @@ -95,7 +95,10 @@ public abstract class Shard implements HasGenomeLocation { */ private final Map fileSpans; - + /** + * Lazy-calculated span of all of the genome locs in this shard + */ + private GenomeLoc spanningLocation = null; /** * Statistics about which reads in this shards were used and which were filtered away. 
@@ -148,27 +151,34 @@ public abstract class Shard implements HasGenomeLocation { /** * Returns the span of the genomeLocs comprising this shard - * @param - * @return + * @return a GenomeLoc that starts as the first position in getGenomeLocs() and stops at the stop of the last + * position in getGenomeLocs() */ public GenomeLoc getLocation() { - if ( getGenomeLocs() == null ) - return GenomeLoc.WHOLE_GENOME; + if ( spanningLocation == null ) { + if ( getGenomeLocs() == null ) + spanningLocation = GenomeLoc.WHOLE_GENOME; + else if ( getGenomeLocs().size() == 1 ) { + spanningLocation = getGenomeLocs().get(0); + } else { + int start = Integer.MAX_VALUE; + int stop = Integer.MIN_VALUE; + String contig = null; - int start = Integer.MAX_VALUE; - int stop = Integer.MIN_VALUE; - String contig = null; + for ( GenomeLoc loc : getGenomeLocs() ) { + if ( GenomeLoc.isUnmapped(loc) ) + // special case the unmapped region marker, just abort out + return loc; + contig = loc.getContig(); + if ( loc.getStart() < start ) start = loc.getStart(); + if ( loc.getStop() > stop ) stop = loc.getStop(); + } - for ( GenomeLoc loc : getGenomeLocs() ) { - if ( GenomeLoc.isUnmapped(loc) ) - // special case the unmapped region marker, just abort out - return loc; - contig = loc.getContig(); - if ( loc.getStart() < start ) start = loc.getStart(); - if ( loc.getStop() > stop ) stop = loc.getStop(); + spanningLocation = parser.createGenomeLoc(contig, start, stop); + } } - return parser.createGenomeLoc(contig, start, stop); + return spanningLocation; } diff --git a/public/java/src/org/broadinstitute/sting/gatk/downsampling/PassThroughDownsampler.java b/public/java/src/org/broadinstitute/sting/gatk/downsampling/PassThroughDownsampler.java index 600834012..b06d5f5b4 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/downsampling/PassThroughDownsampler.java +++ b/public/java/src/org/broadinstitute/sting/gatk/downsampling/PassThroughDownsampler.java @@ -27,8 +27,8 @@ package 
org.broadinstitute.sting.gatk.downsampling; import net.sf.samtools.SAMRecord; -import java.util.ArrayList; import java.util.Collection; +import java.util.LinkedList; import java.util.List; /** @@ -41,7 +41,7 @@ import java.util.List; */ public class PassThroughDownsampler implements ReadsDownsampler { - private ArrayList selectedReads; + private LinkedList selectedReads; public PassThroughDownsampler() { clear(); @@ -59,9 +59,13 @@ public class PassThroughDownsampler implements ReadsDownsam } public boolean hasFinalizedItems() { - return selectedReads.size() > 0; + return ! selectedReads.isEmpty(); } + /** + * Note that this list is a linked list and so doesn't support fast random access + * @return + */ public List consumeFinalizedItems() { // pass by reference rather than make a copy, for speed List downsampledItems = selectedReads; @@ -74,7 +78,7 @@ public class PassThroughDownsampler implements ReadsDownsam } public T peekFinalized() { - return selectedReads.isEmpty() ? null : selectedReads.get(0); + return selectedReads.isEmpty() ? 
null : selectedReads.getFirst(); } public T peekPending() { @@ -90,7 +94,7 @@ public class PassThroughDownsampler implements ReadsDownsam } public void clear() { - selectedReads = new ArrayList(); + selectedReads = new LinkedList(); } public void reset() { diff --git a/public/java/src/org/broadinstitute/sting/gatk/downsampling/ReservoirDownsampler.java b/public/java/src/org/broadinstitute/sting/gatk/downsampling/ReservoirDownsampler.java index 0d7a0dd14..4331fd723 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/downsampling/ReservoirDownsampler.java +++ b/public/java/src/org/broadinstitute/sting/gatk/downsampling/ReservoirDownsampler.java @@ -29,9 +29,7 @@ import net.sf.samtools.SAMRecord; import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; +import java.util.*; /** * Reservoir Downsampler: Selects n reads out of a stream whose size is not known in advance, with @@ -42,10 +40,25 @@ import java.util.List; * @author David Roazen */ public class ReservoirDownsampler implements ReadsDownsampler { + private final int targetSampleSize; - private ArrayList reservoir; + /** + * if true, this downsampler will be optimized for the case + * where most of the time we won't fill up anything like the + * targetSampleSize elements. If this is false, we will allocate + * internal buffers to targetSampleSize initially, which minimizes + * the cost of allocation if we often use targetSampleSize or more + * elements. 
+ */ + private final boolean expectFewOverflows; - private int targetSampleSize; + /** + * At times this can be a linked list or an array list, depending on how we're accessing the + * data and whether or not we're expecting few overflows + */ + private List reservoir; + + private boolean isLinkedList; private int totalReadsSeen; @@ -56,17 +69,35 @@ public class ReservoirDownsampler implements ReadsDownsampl * * @param targetSampleSize Size of the reservoir used by this downsampler. Number of items retained * after downsampling will be min(totalReads, targetSampleSize) + * @param expectFewOverflows if true, this downsampler will be optimized for the case + * where most of the time we won't fill up anything like the + * targetSampleSize elements. If this is false, we will allocate + * internal buffers to targetSampleSize initially, which minimizes + * the cost of allocation if we often use targetSampleSize or more + * elements. */ - public ReservoirDownsampler ( int targetSampleSize ) { + public ReservoirDownsampler ( final int targetSampleSize, final boolean expectFewOverflows) { if ( targetSampleSize <= 0 ) { throw new ReviewedStingException("Cannot do reservoir downsampling with a sample size <= 0"); } this.targetSampleSize = targetSampleSize; + this.expectFewOverflows = expectFewOverflows; clear(); reset(); } + /** + * Construct a ReservoirDownsampler + * + * @param targetSampleSize Size of the reservoir used by this downsampler. 
Number of items retained + * after downsampling will be min(totalReads, targetSampleSize) + */ + public ReservoirDownsampler ( int targetSampleSize ) { + this(targetSampleSize, false); + } + + public void submit ( T newRead ) { totalReadsSeen++; @@ -74,7 +105,12 @@ public class ReservoirDownsampler implements ReadsDownsampl reservoir.add(newRead); } else { - int randomSlot = GenomeAnalysisEngine.getRandomGenerator().nextInt(totalReadsSeen); + if ( isLinkedList ) { + reservoir = new ArrayList(reservoir); + isLinkedList = false; + } + + final int randomSlot = GenomeAnalysisEngine.getRandomGenerator().nextInt(totalReadsSeen); if ( randomSlot < targetSampleSize ) { reservoir.set(randomSlot, newRead); } @@ -93,10 +129,15 @@ public class ReservoirDownsampler implements ReadsDownsampl } public List consumeFinalizedItems() { - // pass by reference rather than make a copy, for speed - List downsampledItems = reservoir; - clear(); - return downsampledItems; + if ( reservoir.isEmpty() ) { + // if there's nothing here, don't both allocating a new list completely + return Collections.emptyList(); + } else { + // pass by reference rather than make a copy, for speed + List downsampledItems = reservoir; + clear(); + return downsampledItems; + } } public boolean hasPendingItems() { @@ -119,9 +160,18 @@ public class ReservoirDownsampler implements ReadsDownsampl // NO-OP } + /** + * Clear the data structures used to hold information + */ public void clear() { - reservoir = new ArrayList(targetSampleSize); - totalReadsSeen = 0; // an internal stat used by the downsampling process, so not cleared by reset() below + // if we aren't expecting many overflows, allocate a linked list not an arraylist + reservoir = expectFewOverflows ? 
new LinkedList() : new ArrayList(targetSampleSize); + + // it's a linked list if we allocate one + isLinkedList = expectFewOverflows; + + // an internal stat used by the downsampling process, so not cleared by reset() below + totalReadsSeen = 0; } public void reset() { diff --git a/public/java/src/org/broadinstitute/sting/gatk/executive/LinearMicroScheduler.java b/public/java/src/org/broadinstitute/sting/gatk/executive/LinearMicroScheduler.java index 36d087735..4c0358d40 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/executive/LinearMicroScheduler.java +++ b/public/java/src/org/broadinstitute/sting/gatk/executive/LinearMicroScheduler.java @@ -114,7 +114,7 @@ public class LinearMicroScheduler extends MicroScheduler { } // Special function call to empty out the work queue. Ugly for now but will be cleaned up when we eventually push this functionality more into the engine - if( traversalEngine instanceof TraverseActiveRegions ) { + if( traversalEngine instanceof TraverseActiveRegions) { final Object result = ((TraverseActiveRegions) traversalEngine).endTraversal(walker, accumulator.getReduceInit()); accumulator.accumulate(null, result); // Assumes only used with StandardAccumulator } diff --git a/public/java/src/org/broadinstitute/sting/gatk/executive/MicroScheduler.java b/public/java/src/org/broadinstitute/sting/gatk/executive/MicroScheduler.java index a01af80ac..371cce778 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/executive/MicroScheduler.java +++ b/public/java/src/org/broadinstitute/sting/gatk/executive/MicroScheduler.java @@ -213,7 +213,7 @@ public abstract class MicroScheduler implements MicroSchedulerMBean { // Now that we have a progress meter, go through and initialize the traversal engines for ( final TraversalEngine traversalEngine : allCreatedTraversalEngines ) - traversalEngine.initialize(engine, progressMeter); + traversalEngine.initialize(engine, walker, progressMeter); // JMX does not allow multiple instances with the same 
ObjectName to be registered with the same platform MXBean. // To get around this limitation and since we have no job identifier at this point, register a simple counter that diff --git a/public/java/src/org/broadinstitute/sting/gatk/executive/WindowMaker.java b/public/java/src/org/broadinstitute/sting/gatk/executive/WindowMaker.java index cbcc4abae..f587442d7 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/executive/WindowMaker.java +++ b/public/java/src/org/broadinstitute/sting/gatk/executive/WindowMaker.java @@ -29,13 +29,14 @@ import net.sf.picard.util.PeekableIterator; import org.broadinstitute.sting.gatk.ReadProperties; import org.broadinstitute.sting.gatk.contexts.AlignmentContext; import org.broadinstitute.sting.gatk.datasources.reads.Shard; -import org.broadinstitute.sting.gatk.iterators.LegacyLocusIteratorByState; -import org.broadinstitute.sting.gatk.iterators.LocusIterator; -import org.broadinstitute.sting.gatk.iterators.LocusIteratorByState; +import org.broadinstitute.sting.gatk.iterators.GATKSAMIterator; import org.broadinstitute.sting.gatk.iterators.StingSAMIterator; import org.broadinstitute.sting.utils.GenomeLoc; import org.broadinstitute.sting.utils.GenomeLocParser; import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; +import org.broadinstitute.sting.utils.locusiterator.LocusIterator; +import org.broadinstitute.sting.utils.locusiterator.LocusIteratorByState; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import java.util.Collection; import java.util.Iterator; @@ -71,7 +72,7 @@ public class WindowMaker implements Iterable, I /** * Hold the read iterator so that it can be closed later. */ - private final StingSAMIterator readIterator; + private final GATKSAMIterator readIterator; /** * The data source for reads. Will probably come directly from the BAM file. 
@@ -104,22 +105,23 @@ public class WindowMaker implements Iterable, I * @param sampleNames The complete set of sample names in the reads in shard */ + private final LocusIteratorByState libs; + public WindowMaker(Shard shard, GenomeLocParser genomeLocParser, StingSAMIterator iterator, List intervals, Collection sampleNames) { this.sourceInfo = shard.getReadProperties(); - this.readIterator = iterator; + this.readIterator = new GATKSAMIterator(iterator); // Use the legacy version of LocusIteratorByState if legacy downsampling was requested: - this.sourceIterator = sourceInfo.getDownsamplingMethod().useLegacyDownsampler ? - new PeekableIterator(new LegacyLocusIteratorByState(iterator,sourceInfo,genomeLocParser,sampleNames)) - : - new PeekableIterator(new LocusIteratorByState(iterator,sourceInfo,genomeLocParser,sampleNames)); - + if ( sourceInfo.getDownsamplingMethod().useLegacyDownsampler ) + throw new IllegalArgumentException("legacy downsampler no longer supported in the window maker"); + this.libs = new LocusIteratorByState(readIterator,sourceInfo,genomeLocParser,sampleNames); + this.sourceIterator = new PeekableIterator(libs); this.intervalIterator = intervals.size()>0 ? 
new PeekableIterator(intervals.iterator()) : null; } public WindowMaker(Shard shard, GenomeLocParser genomeLocParser, StingSAMIterator iterator, List intervals ) { - this(shard, genomeLocParser, iterator, intervals, LegacyLocusIteratorByState.sampleListForSAMWithoutReadGroups()); + this(shard, genomeLocParser, iterator, intervals, LocusIteratorByState.sampleListForSAMWithoutReadGroups()); } public Iterator iterator() { @@ -209,5 +211,10 @@ public class WindowMaker implements Iterable, I throw new ReviewedStingException("BUG: filtering locus does not contain, is not before, and is not past the given alignment context"); } } + + @Override + public LocusIteratorByState getLIBS() { + return libs; + } } } diff --git a/public/java/src/org/broadinstitute/sting/gatk/iterators/GATKSAMIterator.java b/public/java/src/org/broadinstitute/sting/gatk/iterators/GATKSAMIterator.java new file mode 100644 index 000000000..30a520e09 --- /dev/null +++ b/public/java/src/org/broadinstitute/sting/gatk/iterators/GATKSAMIterator.java @@ -0,0 +1,57 @@ +/* + * Copyright (c) 2012 The Broad Institute + * + * Permission is hereby granted, free of charge, to any person + * obtaining a copy of this software and associated documentation + * files (the "Software"), to deal in the Software without + * restriction, including without limitation the rights to use, + * copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following + * conditions: + * + * The above copyright notice and this permission notice shall be + * included in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES + * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + * NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT + * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, + * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR + * THE USE OR OTHER DEALINGS IN THE SOFTWARE. + */ + +package org.broadinstitute.sting.gatk.iterators; + +import net.sf.samtools.SAMRecord; +import net.sf.samtools.util.CloseableIterator; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; + +import java.util.Iterator; + +/** + * Temporary hack to convert SAMRecords to GATKSAMRecords + * + * User: depristo + * Date: 1/11/13 + * Time: 1:19 PM + */ +public class GATKSAMIterator implements CloseableIterator, Iterable { + final CloseableIterator it; + + public GATKSAMIterator(final CloseableIterator it) { + this.it = it; + } + + public GATKSAMIterator(final StingSAMIterator it) { + this.it = it; + } + + @Override public boolean hasNext() { return it.hasNext(); } + @Override public GATKSAMRecord next() { return (GATKSAMRecord)it.next(); } + @Override public void remove() { it.remove(); } + @Override public void close() { it.close(); } + @Override public Iterator iterator() { return this; } +} diff --git a/public/java/src/org/broadinstitute/sting/gatk/iterators/LegacyLocusIteratorByState.java b/public/java/src/org/broadinstitute/sting/gatk/iterators/LegacyLocusIteratorByState.java deleted file mode 100644 index e4d2fcefc..000000000 --- a/public/java/src/org/broadinstitute/sting/gatk/iterators/LegacyLocusIteratorByState.java +++ /dev/null @@ -1,962 +0,0 @@ -/* -* Copyright (c) 2012 The Broad Institute -* -* Permission is hereby granted, free of charge, to any person -* obtaining a copy of this software and associated documentation -* files (the "Software"), to deal in the Software without -* restriction, including without limitation the rights to use, -* copy, modify, merge, publish, distribute, sublicense, and/or sell -* copies of the Software, and to permit persons to whom the -* 
Software is furnished to do so, subject to the following -* conditions: -* -* The above copyright notice and this permission notice shall be -* included in all copies or substantial portions of the Software. -* -* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR -* THE USE OR OTHER DEALINGS IN THE SOFTWARE. -*/ - -package org.broadinstitute.sting.gatk.iterators; - -import net.sf.picard.util.PeekableIterator; -import net.sf.samtools.Cigar; -import net.sf.samtools.CigarElement; -import net.sf.samtools.CigarOperator; -import net.sf.samtools.SAMRecord; -import org.apache.log4j.Logger; -import org.broadinstitute.sting.gatk.downsampling.DownsampleType; -import org.broadinstitute.sting.gatk.downsampling.DownsamplingMethod; -import org.broadinstitute.sting.gatk.ReadProperties; -import org.broadinstitute.sting.gatk.contexts.AlignmentContext; -import org.broadinstitute.sting.utils.GenomeLoc; -import org.broadinstitute.sting.utils.GenomeLocParser; -import org.broadinstitute.sting.utils.MathUtils; -import org.broadinstitute.sting.utils.LegacyReservoirDownsampler; -import org.broadinstitute.sting.utils.exceptions.UserException; -import org.broadinstitute.sting.utils.pileup.PileupElement; -import org.broadinstitute.sting.utils.pileup.ReadBackedPileupImpl; -import org.broadinstitute.sting.utils.sam.GATKSAMRecord; -import org.broadinstitute.sting.utils.sam.ReadUtils; - -import java.util.*; - -/** - * Iterator that traverses a SAM File, accumulating information on a per-locus basis - */ -public class LegacyLocusIteratorByState extends LocusIterator { - /** - * our log, which we want to capture anything 
from this class - */ - private static Logger logger = Logger.getLogger(LegacyLocusIteratorByState.class); - - // ----------------------------------------------------------------------------------------------------------------- - // - // member fields - // - // ----------------------------------------------------------------------------------------------------------------- - - /** - * Used to create new GenomeLocs. - */ - private final GenomeLocParser genomeLocParser; - private final ArrayList samples; - private final ReadStateManager readStates; - - static private class SAMRecordState { - SAMRecord read; - int readOffset = -1; // how far are we offset from the start of the read bases? - int genomeOffset = -1; // how far are we offset from the alignment start on the genome? - - Cigar cigar = null; - int cigarOffset = -1; - CigarElement curElement = null; - int nCigarElements = 0; - - int cigarElementCounter = -1; // how far are we into a single cigarElement - - // The logical model for generating extended events is as follows: the "record state" implements the traversal - // along the reference; thus stepForwardOnGenome() returns on every and only on actual reference bases. This - // can be a (mis)match or a deletion (in the latter case, we still return on every individual reference base the - // deletion spans). In the extended events mode, the record state also remembers if there was an insertion, or - // if the deletion just started *right before* the current reference base the record state is pointing to upon the return from - // stepForwardOnGenome(). The next call to stepForwardOnGenome() will clear that memory (as we remember only extended - // events immediately preceding the current reference base). 
- - public SAMRecordState(SAMRecord read) { - this.read = read; - cigar = read.getCigar(); - nCigarElements = cigar.numCigarElements(); - - //System.out.printf("Creating a SAMRecordState: %s%n", this); - } - - public SAMRecord getRead() { - return read; - } - - /** - * What is our current offset in the read's bases that aligns us with the reference genome? - * - * @return - */ - public int getReadOffset() { - return readOffset; - } - - /** - * What is the current offset w.r.t. the alignment state that aligns us to the readOffset? - * - * @return - */ - public int getGenomeOffset() { - return genomeOffset; - } - - public int getGenomePosition() { - return read.getAlignmentStart() + getGenomeOffset(); - } - - public GenomeLoc getLocation(GenomeLocParser genomeLocParser) { - return genomeLocParser.createGenomeLoc(read.getReferenceName(), getGenomePosition()); - } - - public CigarOperator getCurrentCigarOperator() { - return curElement.getOperator(); - } - - public String toString() { - return String.format("%s ro=%d go=%d co=%d cec=%d %s", read.getReadName(), readOffset, genomeOffset, cigarOffset, cigarElementCounter, curElement); - } - - public CigarElement peekForwardOnGenome() { - return ( cigarElementCounter + 1 > curElement.getLength() && cigarOffset + 1 < nCigarElements ? cigar.getCigarElement(cigarOffset + 1) : curElement ); - } - - public CigarElement peekBackwardOnGenome() { - return ( cigarElementCounter - 1 == 0 && cigarOffset - 1 > 0 ? 
cigar.getCigarElement(cigarOffset - 1) : curElement ); - } - - - public CigarOperator stepForwardOnGenome() { - // we enter this method with readOffset = index of the last processed base on the read - // (-1 if we did not process a single base yet); this can be last matching base, or last base of an insertion - - - if (curElement == null || ++cigarElementCounter > curElement.getLength()) { - cigarOffset++; - if (cigarOffset < nCigarElements) { - curElement = cigar.getCigarElement(cigarOffset); - cigarElementCounter = 0; - // next line: guards against cigar elements of length 0; when new cigar element is retrieved, - // we reenter in order to re-check cigarElementCounter against curElement's length - return stepForwardOnGenome(); - } else { - if (curElement != null && curElement.getOperator() == CigarOperator.D) - throw new UserException.MalformedBAM(read, "read ends with deletion. Cigar: " + read.getCigarString() + ". Although the SAM spec technically permits such reads, this is often indicative of malformed files. If you are sure you want to use this file, re-run your analysis with the extra option: -rf BadCigar"); - - // Reads that contain indels model the genomeOffset as the following base in the reference. Because - // we fall into this else block only when indels end the read, increment genomeOffset such that the - // current offset of this read is the next ref base after the end of the indel. This position will - // model a point on the reference somewhere after the end of the read. - genomeOffset++; // extended events need that. Logically, it's legal to advance the genomic offset here: - // we do step forward on the ref, and by returning null we also indicate that we are past the read end. - - return null; - } - } - - boolean done = false; - switch (curElement.getOperator()) { - case H: // ignore hard clips - case P: // ignore pads - cigarElementCounter = curElement.getLength(); - break; - case I: // insertion w.r.t. 
the reference - case S: // soft clip - cigarElementCounter = curElement.getLength(); - readOffset += curElement.getLength(); - break; - case D: // deletion w.r.t. the reference - if (readOffset < 0) // we don't want reads starting with deletion, this is a malformed cigar string - throw new UserException.MalformedBAM(read, "read starts with deletion. Cigar: " + read.getCigarString() + ". Although the SAM spec technically permits such reads, this is often indicative of malformed files. If you are sure you want to use this file, re-run your analysis with the extra option: -rf BadCigar"); - // should be the same as N case - genomeOffset++; - done = true; - break; - case N: // reference skip (looks and gets processed just like a "deletion", just different logical meaning) - genomeOffset++; - done = true; - break; - case M: - case EQ: - case X: - readOffset++; - genomeOffset++; - done = true; - break; - default: - throw new IllegalStateException("Case statement didn't deal with cigar op: " + curElement.getOperator()); - } - - return done ? 
curElement.getOperator() : stepForwardOnGenome(); - } - } - - //final boolean DEBUG = false; - //final boolean DEBUG2 = false && DEBUG; - private ReadProperties readInfo; - private AlignmentContext nextAlignmentContext; - - // ----------------------------------------------------------------------------------------------------------------- - // - // constructors and other basic operations - // - // ----------------------------------------------------------------------------------------------------------------- - - public LegacyLocusIteratorByState(final Iterator samIterator, ReadProperties readInformation, GenomeLocParser genomeLocParser, Collection samples) { - this.readInfo = readInformation; - this.genomeLocParser = genomeLocParser; - this.samples = new ArrayList(samples); - this.readStates = new ReadStateManager(samIterator, readInformation.getDownsamplingMethod()); - - // currently the GATK expects this LocusIteratorByState to accept empty sample lists, when - // there's no read data. So we need to throw this error only when samIterator.hasNext() is true - if (this.samples.isEmpty() && samIterator.hasNext()) { - throw new IllegalArgumentException("samples list must not be empty"); - } - } - - /** - * For testing only. Assumes that the incoming SAMRecords have no read groups, so creates a dummy sample list - * for the system. - */ - public final static Collection sampleListForSAMWithoutReadGroups() { - List samples = new ArrayList(); - samples.add(null); - return samples; - } - - public Iterator iterator() { - return this; - } - - public void close() { - //this.it.close(); - } - - public boolean hasNext() { - lazyLoadNextAlignmentContext(); - return (nextAlignmentContext != null); - //if ( DEBUG ) System.out.printf("hasNext() = %b%n", r); - } - - private GenomeLoc getLocation() { - return readStates.isEmpty() ? 
null : readStates.getFirst().getLocation(genomeLocParser); - } - - // ----------------------------------------------------------------------------------------------------------------- - // - // next() routine and associated collection operations - // - // ----------------------------------------------------------------------------------------------------------------- - public AlignmentContext next() { - lazyLoadNextAlignmentContext(); - if (!hasNext()) - throw new NoSuchElementException("LocusIteratorByState: out of elements."); - AlignmentContext currentAlignmentContext = nextAlignmentContext; - nextAlignmentContext = null; - return currentAlignmentContext; - } - - /** - * Creates the next alignment context from the given state. Note that this is implemented as a lazy load method. - * nextAlignmentContext MUST BE null in order for this method to advance to the next entry. - */ - private void lazyLoadNextAlignmentContext() { - while (nextAlignmentContext == null && readStates.hasNext()) { - readStates.collectPendingReads(); - - final GenomeLoc location = getLocation(); - final Map fullPileup = new HashMap(); - boolean hasBeenSampled = false; - for (final String sample : samples) { - final Iterator iterator = readStates.iterator(sample); - final List pile = new ArrayList(readStates.size(sample)); - hasBeenSampled |= location.getStart() <= readStates.getDownsamplingExtent(sample); - - int size = 0; // number of elements in this sample's pileup - int nDeletions = 0; // number of deletions in this sample's pileup - int nMQ0Reads = 0; // number of MQ0 reads in this sample's pileup (warning: current implementation includes N bases that are MQ0) - - while (iterator.hasNext()) { - final SAMRecordState state = iterator.next(); // state object with the read/offset information - final GATKSAMRecord read = (GATKSAMRecord) state.getRead(); // the actual read - final CigarOperator op = state.getCurrentCigarOperator(); // current cigar operator - final CigarElement nextElement = 
state.peekForwardOnGenome(); // next cigar element - final CigarElement lastElement = state.peekBackwardOnGenome(); // last cigar element - final boolean isSingleElementCigar = nextElement == lastElement; - final CigarOperator nextOp = nextElement.getOperator(); // next cigar operator - final CigarOperator lastOp = lastElement.getOperator(); // last cigar operator - int readOffset = state.getReadOffset(); // the base offset on this read - - final boolean isBeforeDeletion = nextOp == CigarOperator.DELETION; - final boolean isAfterDeletion = lastOp == CigarOperator.DELETION; - final boolean isBeforeInsertion = nextOp == CigarOperator.INSERTION; - final boolean isAfterInsertion = lastOp == CigarOperator.INSERTION && !isSingleElementCigar; - final boolean isNextToSoftClip = nextOp == CigarOperator.S || (state.getGenomeOffset() == 0 && read.getSoftStart() != read.getAlignmentStart()); - - int nextElementLength = nextElement.getLength(); - - if (op == CigarOperator.N) // N's are never added to any pileup - continue; - - if (op == CigarOperator.D) { - // TODO -- LIBS is totally busted for deletions so that reads with Ds right before Is in their CIGAR are broken; must fix - if (readInfo.includeReadsWithDeletionAtLoci()) { // only add deletions to the pileup if we are authorized to do so - pile.add(new PileupElement(read, readOffset, true, isBeforeDeletion, isAfterDeletion, isBeforeInsertion, isAfterInsertion, isNextToSoftClip, null, nextOp == CigarOperator.D ? nextElementLength : -1)); - size++; - nDeletions++; - if (read.getMappingQuality() == 0) - nMQ0Reads++; - } - } - else { - if (!filterBaseInRead(read, location.getStart())) { - String insertedBaseString = null; - if (nextOp == CigarOperator.I) { - final int insertionOffset = isSingleElementCigar ? 0 : 1; - // TODO -- someone please implement a better fix for the single element insertion CIGAR! - if (isSingleElementCigar) - readOffset -= (nextElement.getLength() - 1); // LIBS has passed over the insertion bases! 
- insertedBaseString = new String(Arrays.copyOfRange(read.getReadBases(), readOffset + insertionOffset, readOffset + insertionOffset + nextElement.getLength())); - } - - pile.add(new PileupElement(read, readOffset, false, isBeforeDeletion, isAfterDeletion, isBeforeInsertion, isAfterInsertion, isNextToSoftClip, insertedBaseString, nextElementLength)); - size++; - if (read.getMappingQuality() == 0) - nMQ0Reads++; - } - } - } - - if (pile.size() != 0) // if this pileup added at least one base, add it to the full pileup - fullPileup.put(sample, new ReadBackedPileupImpl(location, pile, size, nDeletions, nMQ0Reads)); - } - - updateReadStates(); // critical - must be called after we get the current state offsets and location - if (!fullPileup.isEmpty()) // if we got reads with non-D/N over the current position, we are done - nextAlignmentContext = new AlignmentContext(location, new ReadBackedPileupImpl(location, fullPileup), hasBeenSampled); - } - } - - // fast testing of position - private boolean readIsPastCurrentPosition(SAMRecord read) { - if (readStates.isEmpty()) - return false; - else { - SAMRecordState state = readStates.getFirst(); - SAMRecord ourRead = state.getRead(); - return read.getReferenceIndex() > ourRead.getReferenceIndex() || read.getAlignmentStart() > state.getGenomePosition(); - } - } - - /** - * Generic place to put per-base filters appropriate to LocusIteratorByState - * - * @param rec - * @param pos - * @return - */ - private static boolean filterBaseInRead(GATKSAMRecord rec, long pos) { - return ReadUtils.isBaseInsideAdaptor(rec, pos); - } - - private void updateReadStates() { - for (final String sample : samples) { - Iterator it = readStates.iterator(sample); - while (it.hasNext()) { - SAMRecordState state = it.next(); - CigarOperator op = state.stepForwardOnGenome(); - if (op == null) { - // we discard the read only when we are past its end AND indel at the end of the read (if any) was - // already processed. 
Keeping the read state that returned null upon stepForwardOnGenome() is safe - // as the next call to stepForwardOnGenome() will return null again AND will clear hadIndel() flag. - it.remove(); // we've stepped off the end of the object - } - } - } - } - - public void remove() { - throw new UnsupportedOperationException("Can not remove records from a SAM file via an iterator!"); - } - - private class ReadStateManager { - private final PeekableIterator iterator; - private final DownsamplingMethod downsamplingMethod; - private final SamplePartitioner samplePartitioner; - private final Map readStatesBySample = new HashMap(); - private final int targetCoverage; - private int totalReadStates = 0; - - public ReadStateManager(Iterator source, DownsamplingMethod downsamplingMethod) { - this.iterator = new PeekableIterator(source); - this.downsamplingMethod = downsamplingMethod.type != null ? downsamplingMethod : DownsamplingMethod.NONE; - switch (this.downsamplingMethod.type) { - case BY_SAMPLE: - if (downsamplingMethod.toCoverage == null) - throw new UserException.BadArgumentValue("dcov", "Downsampling coverage (-dcov) must be specified when downsampling by sample"); - this.targetCoverage = downsamplingMethod.toCoverage; - break; - default: - this.targetCoverage = Integer.MAX_VALUE; - } - - Map readSelectors = new HashMap(); - for (final String sample : samples) { - readStatesBySample.put(sample, new PerSampleReadStateManager()); - readSelectors.put(sample, downsamplingMethod.type == DownsampleType.BY_SAMPLE ? new NRandomReadSelector(null, targetCoverage) : new AllReadsSelector()); - } - - samplePartitioner = new SamplePartitioner(readSelectors); - } - - /** - * Returns a iterator over all the reads associated with the given sample. Note that remove() is implemented - * for this iterator; if present, total read states will be decremented. - * - * @param sample The sample. - * @return Iterator over the reads associated with that sample. 
- */ - public Iterator iterator(final String sample) { - return new Iterator() { - private Iterator wrappedIterator = readStatesBySample.get(sample).iterator(); - - public boolean hasNext() { - return wrappedIterator.hasNext(); - } - - public SAMRecordState next() { - return wrappedIterator.next(); - } - - public void remove() { - wrappedIterator.remove(); - totalReadStates--; - } - }; - } - - public boolean isEmpty() { - return totalReadStates == 0; - } - - /** - * Retrieves the total number of reads in the manager across all samples. - * - * @return Total number of reads over all samples. - */ - public int size() { - return totalReadStates; - } - - /** - * Retrieves the total number of reads in the manager in the given sample. - * - * @param sample The sample. - * @return Total number of reads in the given sample. - */ - public int size(final String sample) { - return readStatesBySample.get(sample).size(); - } - - /** - * The extent of downsampling; basically, the furthest base out which has 'fallen - * victim' to the downsampler. - * - * @param sample Sample, downsampled independently. - * @return Integer stop of the furthest undownsampled region. 
- */ - public int getDownsamplingExtent(final String sample) { - return readStatesBySample.get(sample).getDownsamplingExtent(); - } - - public SAMRecordState getFirst() { - for (final String sample : samples) { - PerSampleReadStateManager reads = readStatesBySample.get(sample); - if (!reads.isEmpty()) - return reads.peek(); - } - return null; - } - - public boolean hasNext() { - return totalReadStates > 0 || iterator.hasNext(); - } - - public void collectPendingReads() { - if (!iterator.hasNext()) - return; - - if (readStates.size() == 0) { - int firstContigIndex = iterator.peek().getReferenceIndex(); - int firstAlignmentStart = iterator.peek().getAlignmentStart(); - while (iterator.hasNext() && iterator.peek().getReferenceIndex() == firstContigIndex && iterator.peek().getAlignmentStart() == firstAlignmentStart) { - samplePartitioner.submitRead(iterator.next()); - } - } else { - // Fast fail in the case that the read is past the current position. - if (readIsPastCurrentPosition(iterator.peek())) - return; - - while (iterator.hasNext() && !readIsPastCurrentPosition(iterator.peek())) { - samplePartitioner.submitRead(iterator.next()); - } - } - samplePartitioner.complete(); - - for (final String sample : samples) { - ReadSelector aggregator = samplePartitioner.getSelectedReads(sample); - - Collection newReads = new ArrayList(aggregator.getSelectedReads()); - - PerSampleReadStateManager statesBySample = readStatesBySample.get(sample); - int numReads = statesBySample.size(); - int downsamplingExtent = aggregator.getDownsamplingExtent(); - - if (numReads + newReads.size() <= targetCoverage || downsamplingMethod.type == DownsampleType.NONE) { - long readLimit = aggregator.getNumReadsSeen(); - addReadsToSample(statesBySample, newReads, readLimit); - statesBySample.specifyNewDownsamplingExtent(downsamplingExtent); - } else { - int[] counts = statesBySample.getCountsPerAlignmentStart(); - int[] updatedCounts = new int[counts.length]; - System.arraycopy(counts, 0, 
updatedCounts, 0, counts.length); - - boolean readPruned = true; - while (numReads + newReads.size() > targetCoverage && readPruned) { - readPruned = false; - for (int alignmentStart = updatedCounts.length - 1; numReads + newReads.size() > targetCoverage && alignmentStart >= 0; alignmentStart--) { - if (updatedCounts[alignmentStart] > 1) { - updatedCounts[alignmentStart]--; - numReads--; - readPruned = true; - } - } - } - - if (numReads == targetCoverage) { - updatedCounts[0]--; - numReads--; - } - - BitSet toPurge = new BitSet(readStates.size()); - int readOffset = 0; - - for (int i = 0; i < updatedCounts.length; i++) { - int n = counts[i]; - int k = updatedCounts[i]; - - for (Integer purgedElement : MathUtils.sampleIndicesWithoutReplacement(n, n - k)) - toPurge.set(readOffset + purgedElement); - - readOffset += counts[i]; - } - downsamplingExtent = Math.max(downsamplingExtent, statesBySample.purge(toPurge)); - - addReadsToSample(statesBySample, newReads, targetCoverage - numReads); - statesBySample.specifyNewDownsamplingExtent(downsamplingExtent); - } - } - samplePartitioner.reset(); - } - - /** - * Add reads with the given sample name to the given hanger entry. - * - * @param readStates The list of read states to add this collection of reads. - * @param reads Reads to add. Selected reads will be pulled from this source. - * @param maxReads Maximum number of reads to add. 
- */ - private void addReadsToSample(final PerSampleReadStateManager readStates, final Collection reads, final long maxReads) { - if (reads.isEmpty()) - return; - - Collection newReadStates = new LinkedList(); - int readCount = 0; - for (SAMRecord read : reads) { - if (readCount < maxReads) { - SAMRecordState state = new SAMRecordState(read); - state.stepForwardOnGenome(); - newReadStates.add(state); - readCount++; - } - } - readStates.addStatesAtNextAlignmentStart(newReadStates); - } - - private class PerSampleReadStateManager implements Iterable { - private final Queue readStates = new LinkedList(); - private final Deque readStateCounter = new LinkedList(); - private int downsamplingExtent = 0; - - public void addStatesAtNextAlignmentStart(Collection states) { - readStates.addAll(states); - readStateCounter.add(new Counter(states.size())); - totalReadStates += states.size(); - } - - public boolean isEmpty() { - return readStates.isEmpty(); - } - - public SAMRecordState peek() { - return readStates.peek(); - } - - public int size() { - return readStates.size(); - } - - public void specifyNewDownsamplingExtent(int downsamplingExtent) { - this.downsamplingExtent = Math.max(this.downsamplingExtent, downsamplingExtent); - } - - public int getDownsamplingExtent() { - return downsamplingExtent; - } - - public int[] getCountsPerAlignmentStart() { - int[] counts = new int[readStateCounter.size()]; - int index = 0; - for (Counter counter : readStateCounter) - counts[index++] = counter.getCount(); - return counts; - } - - public Iterator iterator() { - return new Iterator() { - private Iterator wrappedIterator = readStates.iterator(); - - public boolean hasNext() { - return wrappedIterator.hasNext(); - } - - public SAMRecordState next() { - return wrappedIterator.next(); - } - - public void remove() { - wrappedIterator.remove(); - Counter counter = readStateCounter.peek(); - counter.decrement(); - if (counter.getCount() == 0) - readStateCounter.remove(); - } - }; - } - - 
/** - * Purge the given elements from the bitset. If an element in the bitset is true, purge - * the corresponding read state. - * - * @param elements bits from the set to purge. - * @return the extent of the final downsampled read. - */ - public int purge(final BitSet elements) { - int downsamplingExtent = 0; - - if (elements.isEmpty() || readStates.isEmpty()) return downsamplingExtent; - - Iterator readStateIterator = readStates.iterator(); - - Iterator counterIterator = readStateCounter.iterator(); - Counter currentCounter = counterIterator.next(); - - int readIndex = 0; - long alignmentStartCounter = currentCounter.getCount(); - - int toPurge = elements.nextSetBit(0); - int removedCount = 0; - - while (readStateIterator.hasNext() && toPurge >= 0) { - SAMRecordState state = readStateIterator.next(); - downsamplingExtent = Math.max(downsamplingExtent, state.getRead().getAlignmentEnd()); - - if (readIndex == toPurge) { - readStateIterator.remove(); - currentCounter.decrement(); - if (currentCounter.getCount() == 0) - counterIterator.remove(); - removedCount++; - toPurge = elements.nextSetBit(toPurge + 1); - } - - readIndex++; - alignmentStartCounter--; - if (alignmentStartCounter == 0 && counterIterator.hasNext()) { - currentCounter = counterIterator.next(); - alignmentStartCounter = currentCounter.getCount(); - } - } - - totalReadStates -= removedCount; - - return downsamplingExtent; - } - } - } - - /** - * Note: assuming that, whenever we downsample, we downsample to an integer capacity. - */ - static private class Counter { - private int count; - - public Counter(int count) { - this.count = count; - } - - public int getCount() { - return count; - } - - public void decrement() { - count--; - } - } -} - -/** - * Selects reads passed to it based on a criteria decided through inheritance. - * TODO: This is a temporary abstraction until we can get rid of this downsampling implementation and the mrl option. Get rid of this. 
- */ -interface ReadSelector { - /** - * All previous selectors in the chain have allowed this read. Submit it to this selector for consideration. - * - * @param read the read to evaluate. - */ - public void submitRead(SAMRecord read); - - /** - * A previous selector has deemed this read unfit. Notify this selector so that this selector's counts are valid. - * - * @param read the read previously rejected. - */ - public void notifyReadRejected(SAMRecord read); - - /** - * Signal the selector that read additions are complete. - */ - public void complete(); - - /** - * Retrieve the number of reads seen by this selector so far. - * - * @return number of reads seen. - */ - public long getNumReadsSeen(); - - /** - * Return the number of reads accepted by this selector so far. - * - * @return number of reads selected. - */ - public long getNumReadsSelected(); - - /** - * Gets the locus at which the last of the downsampled reads selected by this selector ends. The value returned will be the - * last aligned position from this selection to which a downsampled read aligns -- in other words, if a read is thrown out at - * position 3 whose cigar string is 76M, the value of this parameter will be 78. - * - * @return If any read has been downsampled, this will return the last aligned base of the longest alignment. Else, 0. - */ - public int getDownsamplingExtent(); - - /** - * Get the reads selected by this selector. - * - * @return collection of reads selected by this selector. - */ - public Collection getSelectedReads(); - - /** - * Reset this collection to its pre-gathered state. - */ - public void reset(); -} - -/** - * Select every read passed in. 
- */ -class AllReadsSelector implements ReadSelector { - private Collection reads = new LinkedList(); - private long readsSeen = 0; - private int downsamplingExtent = 0; - - public void submitRead(SAMRecord read) { - reads.add(read); - readsSeen++; - } - - public void notifyReadRejected(SAMRecord read) { - readsSeen++; - downsamplingExtent = Math.max(downsamplingExtent, read.getAlignmentEnd()); - } - - public void complete() { - // NO-OP. - } - - public long getNumReadsSeen() { - return readsSeen; - } - - public long getNumReadsSelected() { - return readsSeen; - } - - public int getDownsamplingExtent() { - return downsamplingExtent; - } - - public Collection getSelectedReads() { - return reads; - } - - public void reset() { - reads.clear(); - readsSeen = 0; - downsamplingExtent = 0; - } -} - - -/** - * Select N reads randomly from the input stream. - */ -class NRandomReadSelector implements ReadSelector { - private final LegacyReservoirDownsampler reservoir; - private final ReadSelector chainedSelector; - private long readsSeen = 0; - private int downsamplingExtent = 0; - - public NRandomReadSelector(ReadSelector chainedSelector, long readLimit) { - this.reservoir = new LegacyReservoirDownsampler((int) readLimit); - this.chainedSelector = chainedSelector; - } - - public void submitRead(SAMRecord read) { - SAMRecord displaced = reservoir.add(read); - if (displaced != null && chainedSelector != null) { - chainedSelector.notifyReadRejected(read); - downsamplingExtent = Math.max(downsamplingExtent, read.getAlignmentEnd()); - } - readsSeen++; - } - - public void notifyReadRejected(SAMRecord read) { - readsSeen++; - } - - public void complete() { - for (SAMRecord read : reservoir.getDownsampledContents()) - chainedSelector.submitRead(read); - if (chainedSelector != null) - chainedSelector.complete(); - } - - - public long getNumReadsSeen() { - return readsSeen; - } - - public long getNumReadsSelected() { - return reservoir.size(); - } - - public int 
getDownsamplingExtent() { - return downsamplingExtent; - } - - public Collection getSelectedReads() { - return reservoir.getDownsampledContents(); - } - - public void reset() { - reservoir.clear(); - downsamplingExtent = 0; - if (chainedSelector != null) - chainedSelector.reset(); - } -} - -/** - * Note: stores reads by sample ID string, not by sample object - */ -class SamplePartitioner implements ReadSelector { - private final Map readsBySample; - private long readsSeen = 0; - - public SamplePartitioner(Map readSelectors) { - readsBySample = readSelectors; - } - - public void submitRead(SAMRecord read) { - String sampleName = read.getReadGroup() != null ? read.getReadGroup().getSample() : null; - if (readsBySample.containsKey(sampleName)) - readsBySample.get(sampleName).submitRead(read); - readsSeen++; - } - - public void notifyReadRejected(SAMRecord read) { - String sampleName = read.getReadGroup() != null ? read.getReadGroup().getSample() : null; - if (readsBySample.containsKey(sampleName)) - readsBySample.get(sampleName).notifyReadRejected(read); - readsSeen++; - } - - public void complete() { - // NO-OP. 
- } - - public long getNumReadsSeen() { - return readsSeen; - } - - public long getNumReadsSelected() { - return readsSeen; - } - - public int getDownsamplingExtent() { - int downsamplingExtent = 0; - for (ReadSelector storage : readsBySample.values()) - downsamplingExtent = Math.max(downsamplingExtent, storage.getDownsamplingExtent()); - return downsamplingExtent; - } - - public Collection getSelectedReads() { - throw new UnsupportedOperationException("Cannot directly get selected reads from a read partitioner."); - } - - public ReadSelector getSelectedReads(String sampleName) { - if (!readsBySample.containsKey(sampleName)) - throw new NoSuchElementException("Sample name not found"); - return readsBySample.get(sampleName); - } - - public void reset() { - for (ReadSelector storage : readsBySample.values()) - storage.reset(); - readsSeen = 0; - } - -} diff --git a/public/java/src/org/broadinstitute/sting/gatk/iterators/LocusIterator.java b/public/java/src/org/broadinstitute/sting/gatk/iterators/LocusIterator.java deleted file mode 100644 index 0f258f5e9..000000000 --- a/public/java/src/org/broadinstitute/sting/gatk/iterators/LocusIterator.java +++ /dev/null @@ -1,56 +0,0 @@ -/* -* Copyright (c) 2012 The Broad Institute -* -* Permission is hereby granted, free of charge, to any person -* obtaining a copy of this software and associated documentation -* files (the "Software"), to deal in the Software without -* restriction, including without limitation the rights to use, -* copy, modify, merge, publish, distribute, sublicense, and/or sell -* copies of the Software, and to permit persons to whom the -* Software is furnished to do so, subject to the following -* conditions: -* -* The above copyright notice and this permission notice shall be -* included in all copies or substantial portions of the Software. 
-* -* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR -* THE USE OR OTHER DEALINGS IN THE SOFTWARE. -*/ - -package org.broadinstitute.sting.gatk.iterators; - -import net.sf.samtools.util.CloseableIterator; -import org.broadinstitute.sting.gatk.contexts.AlignmentContext; - -import java.util.Iterator; - -/** - * Iterator that traverses a SAM File, accumulating information on a per-locus basis - */ -public abstract class LocusIterator implements Iterable, CloseableIterator { - // ----------------------------------------------------------------------------------------------------------------- - // - // constructors and other basic operations - // - // ----------------------------------------------------------------------------------------------------------------- - public Iterator iterator() { - return this; - } - - public void close() { - //this.it.close(); - } - - public abstract boolean hasNext(); - public abstract AlignmentContext next(); - - public void remove() { - throw new UnsupportedOperationException("Can not remove records from a SAM file via an iterator!"); - } -} diff --git a/public/java/src/org/broadinstitute/sting/gatk/iterators/LocusIteratorByState.java b/public/java/src/org/broadinstitute/sting/gatk/iterators/LocusIteratorByState.java deleted file mode 100644 index ba383eb0e..000000000 --- a/public/java/src/org/broadinstitute/sting/gatk/iterators/LocusIteratorByState.java +++ /dev/null @@ -1,661 +0,0 @@ -/* -* Copyright (c) 2012 The Broad Institute -* -* Permission is hereby granted, free of charge, to any person -* obtaining a copy of this software and associated 
documentation -* files (the "Software"), to deal in the Software without -* restriction, including without limitation the rights to use, -* copy, modify, merge, publish, distribute, sublicense, and/or sell -* copies of the Software, and to permit persons to whom the -* Software is furnished to do so, subject to the following -* conditions: -* -* The above copyright notice and this permission notice shall be -* included in all copies or substantial portions of the Software. -* -* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR -* THE USE OR OTHER DEALINGS IN THE SOFTWARE. -*/ - -package org.broadinstitute.sting.gatk.iterators; - -import net.sf.picard.util.PeekableIterator; -import net.sf.samtools.Cigar; -import net.sf.samtools.CigarElement; -import net.sf.samtools.CigarOperator; -import net.sf.samtools.SAMRecord; -import org.apache.log4j.Logger; -import org.broadinstitute.sting.gatk.ReadProperties; -import org.broadinstitute.sting.gatk.contexts.AlignmentContext; -import org.broadinstitute.sting.gatk.downsampling.*; -import org.broadinstitute.sting.utils.GenomeLoc; -import org.broadinstitute.sting.utils.GenomeLocParser; -import org.broadinstitute.sting.utils.exceptions.UserException; -import org.broadinstitute.sting.utils.pileup.PileupElement; -import org.broadinstitute.sting.utils.pileup.ReadBackedPileupImpl; -import org.broadinstitute.sting.utils.sam.GATKSAMRecord; -import org.broadinstitute.sting.utils.sam.ReadUtils; - -import java.util.*; - -/** - * Iterator that traverses a SAM File, accumulating information on a per-locus basis - */ -public class LocusIteratorByState extends 
LocusIterator { - /** - * our log, which we want to capture anything from this class - */ - private static Logger logger = Logger.getLogger(LegacyLocusIteratorByState.class); - - // ----------------------------------------------------------------------------------------------------------------- - // - // member fields - // - // ----------------------------------------------------------------------------------------------------------------- - - /** - * Used to create new GenomeLocs. - */ - private final GenomeLocParser genomeLocParser; - private final ArrayList samples; - private final ReadStateManager readStates; - - protected static class SAMRecordState { - SAMRecord read; - int readOffset = -1; // how far are we offset from the start of the read bases? - int genomeOffset = -1; // how far are we offset from the alignment start on the genome? - - Cigar cigar = null; - int cigarOffset = -1; - CigarElement curElement = null; - int nCigarElements = 0; - - int cigarElementCounter = -1; // how far are we into a single cigarElement - - // The logical model for generating extended events is as follows: the "record state" implements the traversal - // along the reference; thus stepForwardOnGenome() returns on every and only on actual reference bases. This - // can be a (mis)match or a deletion (in the latter case, we still return on every individual reference base the - // deletion spans). In the extended events mode, the record state also remembers if there was an insertion, or - // if the deletion just started *right before* the current reference base the record state is pointing to upon the return from - // stepForwardOnGenome(). The next call to stepForwardOnGenome() will clear that memory (as we remember only extended - // events immediately preceding the current reference base). 
- - public SAMRecordState(SAMRecord read) { - this.read = read; - cigar = read.getCigar(); - nCigarElements = cigar.numCigarElements(); - - //System.out.printf("Creating a SAMRecordState: %s%n", this); - } - - public SAMRecord getRead() { - return read; - } - - /** - * What is our current offset in the read's bases that aligns us with the reference genome? - * - * @return - */ - public int getReadOffset() { - return readOffset; - } - - /** - * What is the current offset w.r.t. the alignment state that aligns us to the readOffset? - * - * @return - */ - public int getGenomeOffset() { - return genomeOffset; - } - - public int getGenomePosition() { - return read.getAlignmentStart() + getGenomeOffset(); - } - - public GenomeLoc getLocation(GenomeLocParser genomeLocParser) { - return genomeLocParser.createGenomeLoc(read.getReferenceName(), getGenomePosition()); - } - - public CigarOperator getCurrentCigarOperator() { - return curElement.getOperator(); - } - - public String toString() { - return String.format("%s ro=%d go=%d co=%d cec=%d %s", read.getReadName(), readOffset, genomeOffset, cigarOffset, cigarElementCounter, curElement); - } - - public CigarElement peekForwardOnGenome() { - return ( cigarElementCounter + 1 > curElement.getLength() && cigarOffset + 1 < nCigarElements ? cigar.getCigarElement(cigarOffset + 1) : curElement ); - } - - public CigarElement peekBackwardOnGenome() { - return ( cigarElementCounter - 1 == 0 && cigarOffset - 1 > 0 ? 
cigar.getCigarElement(cigarOffset - 1) : curElement ); - } - - - public CigarOperator stepForwardOnGenome() { - // we enter this method with readOffset = index of the last processed base on the read - // (-1 if we did not process a single base yet); this can be last matching base, or last base of an insertion - - - if (curElement == null || ++cigarElementCounter > curElement.getLength()) { - cigarOffset++; - if (cigarOffset < nCigarElements) { - curElement = cigar.getCigarElement(cigarOffset); - cigarElementCounter = 0; - // next line: guards against cigar elements of length 0; when new cigar element is retrieved, - // we reenter in order to re-check cigarElementCounter against curElement's length - return stepForwardOnGenome(); - } else { - if (curElement != null && curElement.getOperator() == CigarOperator.D) - throw new UserException.MalformedBAM(read, "read ends with deletion. Cigar: " + read.getCigarString() + ". Although the SAM spec technically permits such reads, this is often indicative of malformed files. If you are sure you want to use this file, re-run your analysis with the extra option: -rf BadCigar"); - - // Reads that contain indels model the genomeOffset as the following base in the reference. Because - // we fall into this else block only when indels end the read, increment genomeOffset such that the - // current offset of this read is the next ref base after the end of the indel. This position will - // model a point on the reference somewhere after the end of the read. - genomeOffset++; // extended events need that. Logically, it's legal to advance the genomic offset here: - // we do step forward on the ref, and by returning null we also indicate that we are past the read end. - - return null; - } - } - - boolean done = false; - switch (curElement.getOperator()) { - case H: // ignore hard clips - case P: // ignore pads - cigarElementCounter = curElement.getLength(); - break; - case I: // insertion w.r.t. 
the reference - case S: // soft clip - cigarElementCounter = curElement.getLength(); - readOffset += curElement.getLength(); - break; - case D: // deletion w.r.t. the reference - if (readOffset < 0) // we don't want reads starting with deletion, this is a malformed cigar string - throw new UserException.MalformedBAM(read, "read starts with deletion. Cigar: " + read.getCigarString() + ". Although the SAM spec technically permits such reads, this is often indicative of malformed files. If you are sure you want to use this file, re-run your analysis with the extra option: -rf BadCigar"); - // should be the same as N case - genomeOffset++; - done = true; - break; - case N: // reference skip (looks and gets processed just like a "deletion", just different logical meaning) - genomeOffset++; - done = true; - break; - case M: - case EQ: - case X: - readOffset++; - genomeOffset++; - done = true; - break; - default: - throw new IllegalStateException("Case statement didn't deal with cigar op: " + curElement.getOperator()); - } - - return done ? 
curElement.getOperator() : stepForwardOnGenome(); - } - } - - //final boolean DEBUG = false; - //final boolean DEBUG2 = false && DEBUG; - private ReadProperties readInfo; - private AlignmentContext nextAlignmentContext; - private boolean performDownsampling; - - // ----------------------------------------------------------------------------------------------------------------- - // - // constructors and other basic operations - // - // ----------------------------------------------------------------------------------------------------------------- - - public LocusIteratorByState(final Iterator samIterator, ReadProperties readInformation, GenomeLocParser genomeLocParser, Collection samples) { - this.readInfo = readInformation; - this.genomeLocParser = genomeLocParser; - this.samples = new ArrayList(samples); - - // LIBS will invoke the Reservoir and Leveling downsamplers on the read stream if we're - // downsampling to coverage by sample. SAMDataSource will have refrained from applying - // any downsamplers to the read stream in this case, in the expectation that LIBS will - // manage the downsampling. The reason for this is twofold: performance (don't have to - // split/re-assemble the read stream in SAMDataSource), and to enable partial downsampling - // of reads (eg., using half of a read, and throwing the rest away). - this.performDownsampling = readInfo.getDownsamplingMethod() != null && - readInfo.getDownsamplingMethod().type == DownsampleType.BY_SAMPLE && - readInfo.getDownsamplingMethod().toCoverage != null; - - this.readStates = new ReadStateManager(samIterator); - - // currently the GATK expects this LocusIteratorByState to accept empty sample lists, when - // there's no read data. So we need to throw this error only when samIterator.hasNext() is true - if (this.samples.isEmpty() && samIterator.hasNext()) { - throw new IllegalArgumentException("samples list must not be empty"); - } - } - - /** - * For testing only. 
Assumes that the incoming SAMRecords have no read groups, so creates a dummy sample list - * for the system. - */ - public final static Collection sampleListForSAMWithoutReadGroups() { - List samples = new ArrayList(); - samples.add(null); - return samples; - } - - public Iterator iterator() { - return this; - } - - public void close() { - //this.it.close(); - } - - public boolean hasNext() { - lazyLoadNextAlignmentContext(); - return (nextAlignmentContext != null); - //if ( DEBUG ) System.out.printf("hasNext() = %b%n", r); - } - - private GenomeLoc getLocation() { - return readStates.isEmpty() ? null : readStates.getFirst().getLocation(genomeLocParser); - } - - // ----------------------------------------------------------------------------------------------------------------- - // - // next() routine and associated collection operations - // - // ----------------------------------------------------------------------------------------------------------------- - public AlignmentContext next() { - lazyLoadNextAlignmentContext(); - if (!hasNext()) - throw new NoSuchElementException("LocusIteratorByState: out of elements."); - AlignmentContext currentAlignmentContext = nextAlignmentContext; - nextAlignmentContext = null; - return currentAlignmentContext; - } - - /** - * Creates the next alignment context from the given state. Note that this is implemented as a lazy load method. - * nextAlignmentContext MUST BE null in order for this method to advance to the next entry. - */ - private void lazyLoadNextAlignmentContext() { - while (nextAlignmentContext == null && readStates.hasNext()) { - readStates.collectPendingReads(); - - final GenomeLoc location = getLocation(); - final Map fullPileup = new HashMap(); - - // TODO: How can you determine here whether the current pileup has been downsampled? 
- boolean hasBeenSampled = false; - - for (final String sample : samples) { - final Iterator iterator = readStates.iterator(sample); - final List pile = new ArrayList(readStates.size(sample)); - - int size = 0; // number of elements in this sample's pileup - int nDeletions = 0; // number of deletions in this sample's pileup - int nMQ0Reads = 0; // number of MQ0 reads in this sample's pileup (warning: current implementation includes N bases that are MQ0) - - while (iterator.hasNext()) { - final SAMRecordState state = iterator.next(); // state object with the read/offset information - final GATKSAMRecord read = (GATKSAMRecord) state.getRead(); // the actual read - final CigarOperator op = state.getCurrentCigarOperator(); // current cigar operator - final CigarElement nextElement = state.peekForwardOnGenome(); // next cigar element - final CigarElement lastElement = state.peekBackwardOnGenome(); // last cigar element - final boolean isSingleElementCigar = nextElement == lastElement; - final CigarOperator nextOp = nextElement.getOperator(); // next cigar operator - final CigarOperator lastOp = lastElement.getOperator(); // last cigar operator - int readOffset = state.getReadOffset(); // the base offset on this read - - final boolean isBeforeDeletion = nextOp == CigarOperator.DELETION; - final boolean isAfterDeletion = lastOp == CigarOperator.DELETION; - final boolean isBeforeInsertion = nextOp == CigarOperator.INSERTION; - final boolean isAfterInsertion = lastOp == CigarOperator.INSERTION && !isSingleElementCigar; - final boolean isNextToSoftClip = nextOp == CigarOperator.S || (state.getGenomeOffset() == 0 && read.getSoftStart() != read.getAlignmentStart()); - - int nextElementLength = nextElement.getLength(); - - if (op == CigarOperator.N) // N's are never added to any pileup - continue; - - if (op == CigarOperator.D) { - // TODO -- LIBS is totally busted for deletions so that reads with Ds right before Is in their CIGAR are broken; must fix - if 
(readInfo.includeReadsWithDeletionAtLoci()) { // only add deletions to the pileup if we are authorized to do so - pile.add(new PileupElement(read, readOffset, true, isBeforeDeletion, isAfterDeletion, isBeforeInsertion, isAfterInsertion, isNextToSoftClip, null, nextOp == CigarOperator.D ? nextElementLength : -1)); - size++; - nDeletions++; - if (read.getMappingQuality() == 0) - nMQ0Reads++; - } - } - else { - if (!filterBaseInRead(read, location.getStart())) { - String insertedBaseString = null; - if (nextOp == CigarOperator.I) { - final int insertionOffset = isSingleElementCigar ? 0 : 1; - // TODO -- someone please implement a better fix for the single element insertion CIGAR! - if (isSingleElementCigar) - readOffset -= (nextElement.getLength() - 1); // LIBS has passed over the insertion bases! - insertedBaseString = new String(Arrays.copyOfRange(read.getReadBases(), readOffset + insertionOffset, readOffset + insertionOffset + nextElement.getLength())); - } - - pile.add(new PileupElement(read, readOffset, false, isBeforeDeletion, isAfterDeletion, isBeforeInsertion, isAfterInsertion, isNextToSoftClip, insertedBaseString, nextElementLength)); - size++; - if (read.getMappingQuality() == 0) - nMQ0Reads++; - } - } - } - - if (pile.size() != 0) // if this pileup added at least one base, add it to the full pileup - fullPileup.put(sample, new ReadBackedPileupImpl(location, pile, size, nDeletions, nMQ0Reads)); - } - - updateReadStates(); // critical - must be called after we get the current state offsets and location - if (!fullPileup.isEmpty()) // if we got reads with non-D/N over the current position, we are done - nextAlignmentContext = new AlignmentContext(location, new ReadBackedPileupImpl(location, fullPileup), hasBeenSampled); - } - } - - // fast testing of position - private boolean readIsPastCurrentPosition(SAMRecord read) { - if (readStates.isEmpty()) - return false; - else { - SAMRecordState state = readStates.getFirst(); - SAMRecord ourRead = state.getRead(); - 
return read.getReferenceIndex() > ourRead.getReferenceIndex() || read.getAlignmentStart() > state.getGenomePosition(); - } - } - - /** - * Generic place to put per-base filters appropriate to LocusIteratorByState - * - * @param rec - * @param pos - * @return - */ - private static boolean filterBaseInRead(GATKSAMRecord rec, long pos) { - return ReadUtils.isBaseInsideAdaptor(rec, pos); - } - - private void updateReadStates() { - for (final String sample : samples) { - Iterator it = readStates.iterator(sample); - while (it.hasNext()) { - SAMRecordState state = it.next(); - CigarOperator op = state.stepForwardOnGenome(); - if (op == null) { - // we discard the read only when we are past its end AND indel at the end of the read (if any) was - // already processed. Keeping the read state that returned null upon stepForwardOnGenome() is safe - // as the next call to stepForwardOnGenome() will return null again AND will clear hadIndel() flag. - it.remove(); // we've stepped off the end of the object - } - } - } - } - - public void remove() { - throw new UnsupportedOperationException("Can not remove records from a SAM file via an iterator!"); - } - - protected class ReadStateManager { - private final PeekableIterator iterator; - private final SamplePartitioner samplePartitioner; - private final Map readStatesBySample = new HashMap(); - private int totalReadStates = 0; - - public ReadStateManager(Iterator source) { - this.iterator = new PeekableIterator(source); - - for (final String sample : samples) { - readStatesBySample.put(sample, new PerSampleReadStateManager()); - } - - samplePartitioner = new SamplePartitioner(performDownsampling); - } - - /** - * Returns a iterator over all the reads associated with the given sample. Note that remove() is implemented - * for this iterator; if present, total read states will be decremented. - * - * @param sample The sample. - * @return Iterator over the reads associated with that sample. 
- */ - public Iterator iterator(final String sample) { - return new Iterator() { - private Iterator wrappedIterator = readStatesBySample.get(sample).iterator(); - - public boolean hasNext() { - return wrappedIterator.hasNext(); - } - - public SAMRecordState next() { - return wrappedIterator.next(); - } - - public void remove() { - wrappedIterator.remove(); - } - }; - } - - public boolean isEmpty() { - return totalReadStates == 0; - } - - /** - * Retrieves the total number of reads in the manager across all samples. - * - * @return Total number of reads over all samples. - */ - public int size() { - return totalReadStates; - } - - /** - * Retrieves the total number of reads in the manager in the given sample. - * - * @param sample The sample. - * @return Total number of reads in the given sample. - */ - public int size(final String sample) { - return readStatesBySample.get(sample).size(); - } - - public SAMRecordState getFirst() { - for (final String sample : samples) { - PerSampleReadStateManager reads = readStatesBySample.get(sample); - if (!reads.isEmpty()) - return reads.peek(); - } - return null; - } - - public boolean hasNext() { - return totalReadStates > 0 || iterator.hasNext(); - } - - public void collectPendingReads() { - if (!iterator.hasNext()) - return; - - if (readStates.size() == 0) { - int firstContigIndex = iterator.peek().getReferenceIndex(); - int firstAlignmentStart = iterator.peek().getAlignmentStart(); - while (iterator.hasNext() && iterator.peek().getReferenceIndex() == firstContigIndex && iterator.peek().getAlignmentStart() == firstAlignmentStart) { - samplePartitioner.submitRead(iterator.next()); - } - } else { - // Fast fail in the case that the read is past the current position. 
- if (readIsPastCurrentPosition(iterator.peek())) - return; - - while (iterator.hasNext() && !readIsPastCurrentPosition(iterator.peek())) { - samplePartitioner.submitRead(iterator.next()); - } - } - - samplePartitioner.doneSubmittingReads(); - - for (final String sample : samples) { - Collection newReads = samplePartitioner.getReadsForSample(sample); - PerSampleReadStateManager statesBySample = readStatesBySample.get(sample); - addReadsToSample(statesBySample, newReads); - } - - samplePartitioner.reset(); - } - - /** - * Add reads with the given sample name to the given hanger entry. - * - * @param readStates The list of read states to add this collection of reads. - * @param reads Reads to add. Selected reads will be pulled from this source. - */ - private void addReadsToSample(final PerSampleReadStateManager readStates, final Collection reads) { - if (reads.isEmpty()) - return; - - Collection newReadStates = new LinkedList(); - - for (SAMRecord read : reads) { - SAMRecordState state = new SAMRecordState(read); - state.stepForwardOnGenome(); - newReadStates.add(state); - } - - readStates.addStatesAtNextAlignmentStart(newReadStates); - } - - protected class PerSampleReadStateManager implements Iterable { - private List> readStatesByAlignmentStart = new LinkedList>(); - private int thisSampleReadStates = 0; - private Downsampler> levelingDownsampler = - performDownsampling ? 
- new LevelingDownsampler, SAMRecordState>(readInfo.getDownsamplingMethod().toCoverage) : - null; - - public void addStatesAtNextAlignmentStart(Collection states) { - if ( states.isEmpty() ) { - return; - } - - readStatesByAlignmentStart.add(new LinkedList(states)); - thisSampleReadStates += states.size(); - totalReadStates += states.size(); - - if ( levelingDownsampler != null ) { - levelingDownsampler.submit(readStatesByAlignmentStart); - levelingDownsampler.signalEndOfInput(); - - thisSampleReadStates -= levelingDownsampler.getNumberOfDiscardedItems(); - totalReadStates -= levelingDownsampler.getNumberOfDiscardedItems(); - - // use returned List directly rather than make a copy, for efficiency's sake - readStatesByAlignmentStart = levelingDownsampler.consumeFinalizedItems(); - levelingDownsampler.reset(); - } - } - - public boolean isEmpty() { - return readStatesByAlignmentStart.isEmpty(); - } - - public SAMRecordState peek() { - return isEmpty() ? null : readStatesByAlignmentStart.get(0).peek(); - } - - public int size() { - return thisSampleReadStates; - } - - public Iterator iterator() { - return new Iterator() { - private Iterator> alignmentStartIterator = readStatesByAlignmentStart.iterator(); - private LinkedList currentPositionReadStates = null; - private Iterator currentPositionReadStatesIterator = null; - - public boolean hasNext() { - return alignmentStartIterator.hasNext() || - (currentPositionReadStatesIterator != null && currentPositionReadStatesIterator.hasNext()); - } - - public SAMRecordState next() { - if ( currentPositionReadStatesIterator == null || ! 
currentPositionReadStatesIterator.hasNext() ) { - currentPositionReadStates = alignmentStartIterator.next(); - currentPositionReadStatesIterator = currentPositionReadStates.iterator(); - } - - return currentPositionReadStatesIterator.next(); - } - - public void remove() { - currentPositionReadStatesIterator.remove(); - thisSampleReadStates--; - totalReadStates--; - - if ( currentPositionReadStates.isEmpty() ) { - alignmentStartIterator.remove(); - } - } - }; - } - } - } - - /** - * Divides reads by sample and (if requested) does a preliminary downsampling pass with a ReservoirDownsampler. - * - * Note: stores reads by sample ID string, not by sample object - */ - private class SamplePartitioner { - private Map> readsBySample; - - public SamplePartitioner( boolean downsampleReads ) { - readsBySample = new HashMap>(); - - for ( String sample : samples ) { - readsBySample.put(sample, - downsampleReads ? new ReservoirDownsampler(readInfo.getDownsamplingMethod().toCoverage) : - new PassThroughDownsampler()); - } - } - - public void submitRead(SAMRecord read) { - String sampleName = read.getReadGroup() != null ? read.getReadGroup().getSample() : null; - if (readsBySample.containsKey(sampleName)) - readsBySample.get(sampleName).submit(read); - } - - public void doneSubmittingReads() { - for ( Map.Entry> perSampleReads : readsBySample.entrySet() ) { - perSampleReads.getValue().signalEndOfInput(); - } - } - - public Collection getReadsForSample(String sampleName) { - if ( ! 
readsBySample.containsKey(sampleName) ) - throw new NoSuchElementException("Sample name not found"); - - return readsBySample.get(sampleName).consumeFinalizedItems(); - } - - public void reset() { - for ( Map.Entry> perSampleReads : readsBySample.entrySet() ) { - perSampleReads.getValue().clear(); - perSampleReads.getValue().reset(); - } - } - } -} \ No newline at end of file diff --git a/public/java/src/org/broadinstitute/sting/gatk/refdata/RefMetaDataTracker.java b/public/java/src/org/broadinstitute/sting/gatk/refdata/RefMetaDataTracker.java index 9cb38b840..5a1b015fe 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/refdata/RefMetaDataTracker.java +++ b/public/java/src/org/broadinstitute/sting/gatk/refdata/RefMetaDataTracker.java @@ -61,6 +61,7 @@ public class RefMetaDataTracker { final Map bindings; final protected static Logger logger = Logger.getLogger(RefMetaDataTracker.class); + public final static RefMetaDataTracker EMPTY_TRACKER = new RefMetaDataTracker(); // ------------------------------------------------------------------------------------------ // diff --git a/public/java/src/org/broadinstitute/sting/gatk/refdata/VariantContextAdaptors.java b/public/java/src/org/broadinstitute/sting/gatk/refdata/VariantContextAdaptors.java index c7edebd81..a77341a5d 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/refdata/VariantContextAdaptors.java +++ b/public/java/src/org/broadinstitute/sting/gatk/refdata/VariantContextAdaptors.java @@ -194,17 +194,18 @@ public class VariantContextAdaptors { return null; // we weren't given enough reference context to create the VariantContext final byte refBaseForIndel = ref.getBases()[index]; + final boolean refBaseIsDash = dbsnp.getNCBIRefBase().equals("-"); boolean addPaddingBase; if ( isSNP(dbsnp) || isMNP(dbsnp) ) addPaddingBase = false; else if ( isIndel(dbsnp) || dbsnp.getVariantType().contains("mixed") ) - addPaddingBase = VariantContextUtils.requiresPaddingBase(stripNullDashes(getAlleleList(dbsnp))); + 
addPaddingBase = refBaseIsDash || VariantContextUtils.requiresPaddingBase(stripNullDashes(getAlleleList(dbsnp))); else return null; // can't handle anything else Allele refAllele; - if ( dbsnp.getNCBIRefBase().equals("-") ) + if ( refBaseIsDash ) refAllele = Allele.create(refBaseForIndel, true); else if ( ! Allele.acceptableAlleleBases(dbsnp.getNCBIRefBase()) ) return null; diff --git a/public/java/src/org/broadinstitute/sting/gatk/traversals/TraversalEngine.java b/public/java/src/org/broadinstitute/sting/gatk/traversals/TraversalEngine.java index 3dc3e1501..0811e5e70 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/traversals/TraversalEngine.java +++ b/public/java/src/org/broadinstitute/sting/gatk/traversals/TraversalEngine.java @@ -74,7 +74,7 @@ public abstract class TraversalEngine,Provide * @param engine GenomeAnalysisEngine for this traversal * @param progressMeter An optional (null == optional) meter to track our progress */ - public void initialize(final GenomeAnalysisEngine engine, final ProgressMeter progressMeter) { + public void initialize(final GenomeAnalysisEngine engine, final Walker walker, final ProgressMeter progressMeter) { if ( engine == null ) throw new ReviewedStingException("BUG: GenomeAnalysisEngine cannot be null!"); @@ -87,8 +87,8 @@ public abstract class TraversalEngine,Provide * * @param engine */ - protected void initialize(final GenomeAnalysisEngine engine) { - initialize(engine, null); + protected void initialize(final GenomeAnalysisEngine engine, final Walker walker) { + initialize(engine, walker, null); } /** diff --git a/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseActiveRegions.java b/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseActiveRegions.java index 34fa704c1..a7e4d7649 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseActiveRegions.java +++ b/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseActiveRegions.java @@ -25,11 +25,14 @@ 
package org.broadinstitute.sting.gatk.traversals; +import com.google.java.contract.Requires; import org.apache.log4j.Logger; +import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; import org.broadinstitute.sting.gatk.WalkerManager; import org.broadinstitute.sting.gatk.contexts.AlignmentContext; import org.broadinstitute.sting.gatk.contexts.ReferenceContext; import org.broadinstitute.sting.gatk.datasources.providers.*; +import org.broadinstitute.sting.gatk.datasources.reads.Shard; import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; import org.broadinstitute.sting.gatk.walkers.ActiveRegionExtension; import org.broadinstitute.sting.gatk.walkers.ActiveRegionWalker; @@ -39,31 +42,182 @@ import org.broadinstitute.sting.utils.GenomeLoc; import org.broadinstitute.sting.utils.activeregion.ActiveRegion; import org.broadinstitute.sting.utils.activeregion.ActivityProfile; import org.broadinstitute.sting.utils.activeregion.ActivityProfileResult; -import org.broadinstitute.sting.utils.pileup.PileupElement; +import org.broadinstitute.sting.utils.progressmeter.ProgressMeter; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import java.util.*; /** - * Created by IntelliJ IDEA. - * User: rpoplin - * Date: 12/9/11 + * Implement active region traversal + * + * User: depristo + * Date: 1/9/13 + * Time: 4:45 PM + * + * Live region: + * + * The ART tracks a thing called the live region. The live region is a position on a specific contig + * of the alignment start of the last read we processed during this traversal. Because the + * read stream is sorted, future reads must occurs in the the live region. Therefore the the dead region + * (everything to the left of the live boundary) cannot have any more read data. The live / dead + * regions are used to decide when we can safely call map on active regions, as only active regions + * contained completely within the dead region (including extensions) have a complete set of read data + * in the collected read list. 
All of the data related to the live region is captured by the local + * variable spanOfLastReadSeen + * */ - -public class TraverseActiveRegions extends TraversalEngine,LocusShardDataProvider> { - /** - * our log, which we want to capture anything from this class - */ +public class TraverseActiveRegions extends TraversalEngine,LocusShardDataProvider> { protected final static Logger logger = Logger.getLogger(TraversalEngine.class); + protected final static boolean DEBUG = false; + + // set by the tranversal + private int activeRegionExtension = -1; + private int maxRegionSize = -1; private final LinkedList workQueue = new LinkedList(); - private final LinkedHashSet myReads = new LinkedHashSet(); + + private LinkedList myReads = new LinkedList(); + private GenomeLoc spanOfLastReadSeen = null; + + protected int getActiveRegionExtension() { + return activeRegionExtension; + } + + protected int getMaxRegionSize() { + return maxRegionSize; + } @Override public String getTraversalUnits() { return "active regions"; } + @Override + public String toString() { + return "TraverseActiveRegions"; + } + + @Override + public void initialize(GenomeAnalysisEngine engine, Walker walker, ProgressMeter progressMeter) { + super.initialize(engine, walker, progressMeter); + activeRegionExtension = walker.getClass().getAnnotation(ActiveRegionExtension.class).extension(); + maxRegionSize = walker.getClass().getAnnotation(ActiveRegionExtension.class).maxRegion(); + + final ActiveRegionWalker arWalker = (ActiveRegionWalker)walker; + if ( arWalker.wantsExtendedReads() && ! arWalker.wantsNonPrimaryReads() ) { + throw new IllegalArgumentException("Active region walker " + arWalker + " requested extended events but not " + + "non-primary reads, an inconsistent state. Please modify the walker"); + } + } + + /** + * Is the loc outside of the intervals being requested for processing by the GATK? 
+ * @param loc + * @return + */ + protected boolean outsideEngineIntervals(final GenomeLoc loc) { + return engine.getIntervals() != null && ! engine.getIntervals().overlaps(loc); + } + + /** + * Take the individual isActive calls and integrate them into contiguous active regions and + * add these blocks of work to the work queue + * band-pass filter the list of isActive probabilities and turn into active regions + * + * @param profile + * @param activeRegions + * @return + */ + protected ActivityProfile incorporateActiveRegions(final ActivityProfile profile, + final List activeRegions) { + if ( profile.isEmpty() ) + throw new IllegalStateException("trying to incorporate an empty active profile " + profile); + + final ActivityProfile bandPassFiltered = profile.bandPassFilter(); + activeRegions.addAll(bandPassFiltered.createActiveRegions( getActiveRegionExtension(), getMaxRegionSize() )); + return new ActivityProfile( engine.getGenomeLocParser(), profile.hasPresetRegions() ); + } + + protected final ActivityProfileResult walkerActiveProb(final ActiveRegionWalker walker, + final RefMetaDataTracker tracker, final ReferenceContext refContext, + final AlignmentContext locus, final GenomeLoc location) { + if ( walker.hasPresetActiveRegions() ) { + return new ActivityProfileResult(location, walker.presetActiveRegions.overlaps(location) ? 
1.0 : 0.0); + } else { + return walker.isActive( tracker, refContext, locus ); + } + } + + protected ReferenceOrderedView getReferenceOrderedView(final ActiveRegionWalker walker, + final LocusShardDataProvider dataProvider, + final LocusView locusView) { + if ( WalkerManager.getWalkerDataSource(walker) != DataSource.REFERENCE_ORDERED_DATA ) + return new ManagingReferenceOrderedView( dataProvider ); + else + return (RodLocusView)locusView; + } + + /** + * Write out each active region to the walker activeRegionOutStream + * + * @param walker + */ + protected void writeActiveRegionsToStream( final ActiveRegionWalker walker ) { + // Just want to output the active regions to a file, not actually process them + for( final ActiveRegion activeRegion : workQueue ) { + if( activeRegion.isActive ) { + walker.activeRegionOutStream.println( activeRegion.getLocation() ); + } + } + } + + /** + * Did read appear in the last shard? + * + * When we transition across shard boundaries we see duplicate reads because + * each shard contains the reads that *overlap* the shard. So if we just finished + * shard 1-1000 and are now in 1001-2000 we'll see duplicate reads from 1001 + * that overlapped 1-1000. This function tests read to determine if we would have + * seen it before by asking if read.getAlignmentStart() is less than the + * stop position of the last seen read at the start of the traversal. The reason + * we need to use the location of the last read at the start of the traversal + * is that we update the lastRead during the traversal, and we only want to filter + * out reads whose start is before the last read of the previous shard, not the + * current shard. 
+ * + * @param locOfLastReadAtTraversalStart the location of the last read seen at the start of the traversal + * @param read the read we want to test if it's already been seen in the last shard + * @return true if read would have appeared in the last shard, false otherwise + */ + protected boolean appearedInLastShard(final GenomeLoc locOfLastReadAtTraversalStart, final GATKSAMRecord read) { + if ( locOfLastReadAtTraversalStart == null ) + // we're in the first shard, so obviously the answer is no + return false; + else { + // otherwise check to see if the alignment occurred in the previous shard + return read.getAlignmentStart() <= locOfLastReadAtTraversalStart.getStart() + // we're on the same contig + && read.getReferenceIndex() == locOfLastReadAtTraversalStart.getContigIndex(); + } + + } + + // ------------------------------------------------------------------------------------- + // + // Actual traverse function + // + // ------------------------------------------------------------------------------------- + + /** + * Is the current shard on a new contig w.r.t. the previous shard? 
+ * @param currentShard the current shard we are processing + * @return true if the last shard was on a different contig than the current shard + */ + private boolean onNewContig(final Shard currentShard) { + return spanOfLastSeenRead() != null + && spanOfLastSeenRead().getContigIndex() != currentShard.getLocation().getContigIndex(); + } + @Override public T traverse( final ActiveRegionWalker walker, final LocusShardDataProvider dataProvider, @@ -73,16 +227,19 @@ public class TraverseActiveRegions extends TraversalEngine activeRegions = new LinkedList(); ActivityProfile profile = new ActivityProfile(engine.getGenomeLocParser(), walker.hasPresetActiveRegions() ); ReferenceOrderedView referenceOrderedDataView = getReferenceOrderedView(walker, dataProvider, locusView); // We keep processing while the next reference location is within the interval + final GenomeLoc locOfLastReadAtTraversalStart = spanOfLastSeenRead(); + + // if we've moved onto a new contig, process all of the active regions + if ( onNewContig(dataProvider.getShard()) ) + sum = processActiveRegions(walker, sum, true); + GenomeLoc prevLoc = null; while( locusView.hasNext() ) { final AlignmentContext locus = locusView.next(); @@ -91,17 +248,15 @@ public class TraverseActiveRegions extends TraversalEngine reads = locusView.getLIBS().transferReadsFromAllPreviousPileups(); + for( final GATKSAMRecord read : reads ) { + if ( appearedInLastShard(locOfLastReadAtTraversalStart, read) ) { + if ( DEBUG ) logger.warn("Skipping duplicated " + read.getReadName()); + } else { + if ( DEBUG ) logger.warn("Adding read " + read.getReadName() + " at " + engine.getGenomeLocParser().createGenomeLoc(read) + " from provider " + dataProvider); + rememberLastReadLocation(read); myReads.add(read); } - - // If this is the last pileup for this shard calculate the minimum alignment start so that we know - // which active regions in the work queue are now safe to process - minStart = Math.min(minStart, read.getAlignmentStart()); } // 
skip this location -- it's not part of our engine intervals @@ -110,7 +265,7 @@ public class TraverseActiveRegions extends TraversalEngine extends TraversalEngine extends TraversalEngine extends TraversalEngine walker, T sum) { + return processActiveRegions((ActiveRegionWalker)walker, sum, true); } + // ------------------------------------------------------------------------------------- + // + // Functions to manage and interact with the live / dead zone + // + // ------------------------------------------------------------------------------------- + /** - * Take the individual isActive calls and integrate them into contiguous active regions and - * add these blocks of work to the work queue - * band-pass filter the list of isActive probabilities and turn into active regions + * Update the live region to reflect that the last read we've seen in the traversal is read * - * @param profile - * @param activeRegions - * @param activeRegionExtension - * @param maxRegionSize - * @return + * Requires that sequential calls always be provided reads in coordinate sorted order + * + * @param read the last read we've seen during the traversal */ - private ActivityProfile incorporateActiveRegions(final ActivityProfile profile, - final List activeRegions, - final int activeRegionExtension, - final int maxRegionSize) { - if ( profile.isEmpty() ) - throw new IllegalStateException("trying to incorporate an empty active profile " + profile); - - final ActivityProfile bandPassFiltered = profile.bandPassFilter(); - activeRegions.addAll(bandPassFiltered.createActiveRegions( activeRegionExtension, maxRegionSize )); - return new ActivityProfile( engine.getGenomeLocParser(), profile.hasPresetRegions() ); - } - - - // -------------------------------------------------------------------------------- - // - // simple utility functions - // - // -------------------------------------------------------------------------------- - - private final ActivityProfileResult walkerActiveProb(final 
ActiveRegionWalker walker, - final RefMetaDataTracker tracker, final ReferenceContext refContext, - final AlignmentContext locus, final GenomeLoc location) { - if ( walker.hasPresetActiveRegions() ) { - return new ActivityProfileResult(location, walker.presetActiveRegions.overlaps(location) ? 1.0 : 0.0); - } else { - return walker.isActive( tracker, refContext, locus ); + protected void rememberLastReadLocation(final GATKSAMRecord read) { + final GenomeLoc currentLocation = engine.getGenomeLocParser().createGenomeLoc(read); + if ( spanOfLastReadSeen == null ) + spanOfLastReadSeen = currentLocation; + else { + if ( currentLocation.isBefore(spanOfLastReadSeen) ) + throw new IllegalStateException("Updating last read seen in the traversal with read " + read + " with span " + currentLocation + " but this occurs before the previously seen read " + spanOfLastReadSeen); + spanOfLastReadSeen = currentLocation; } } - private ReferenceOrderedView getReferenceOrderedView( final ActiveRegionWalker walker, - final LocusShardDataProvider dataProvider, - final LocusView locusView) { - if ( WalkerManager.getWalkerDataSource(walker) != DataSource.REFERENCE_ORDERED_DATA ) - return new ManagingReferenceOrderedView( dataProvider ); - else - return (RodLocusView)locusView; + /** + * Get a GenomeLoc indicating the start (heading to the right) of the live ART region. + * @return the left-most position of the live region on the genome + */ + protected GenomeLoc spanOfLastSeenRead() { + return spanOfLastReadSeen; } - // -------------------------------------------------------------------------------- - // - // code to handle processing active regions - // - // -------------------------------------------------------------------------------- + /** + * Is the active region completely within the traversal's dead zone? 
+ * + * @param region the region we want to test + * @return true if the extended location of region is completely within the current dead zone, false otherwise + */ + protected boolean regionCompletelyWithinDeadZone(final ActiveRegion region) { + return region.getExtendedLoc().getStop() < spanOfLastSeenRead().getStart() + || ! region.getExtendedLoc().onSameContig(spanOfLastSeenRead()); + } - private T processActiveRegions( final ActiveRegionWalker walker, T sum, final int minStart, final String currentContig ) { + /** + * Is the read dead? That is, can it no longer be in any future active region, and therefore can be discarded? + * + * read: start |--------> stop ------ stop + extension + * region: start |-----------------| end + * + * Since the regions are coming in order, read could potentially be contained in a future interval if + * stop + activeRegionExtension >= end. If, on the other hand, stop + extension is < the end + * of this region, then we can discard it, since any future region could only include reads + * up to end + 1 - extension. + * + * Note that this function doesn't care about the dead zone. We're assuming that by + * actually calling this function with an active region that region is already in the dead zone, + * so checking that the read is in the dead zone doesn't make sense. 
+ * + * @param read the read we're testing + * @param activeRegion the current active region + * @return true if the read is dead, false other + */ + @Requires({"read != null", "activeRegion != null"}) + private boolean readCannotOccurInAnyMoreActiveRegions(final GATKSAMRecord read, final ActiveRegion activeRegion) { + return read.getAlignmentEnd() + getActiveRegionExtension() < activeRegion.getLocation().getStop(); + } + + // ------------------------------------------------------------------------------------- + // + // Functions to process active regions that are ready for map / reduce calls + // + // ------------------------------------------------------------------------------------- + + private T processActiveRegions(final ActiveRegionWalker walker, T sum, final boolean forceRegionsToBeActive) { if( walker.activeRegionOutStream != null ) { writeActiveRegionsToStream(walker); return sum; } else { - return callWalkerMapOnActiveRegions(walker, sum, minStart, currentContig); + return callWalkerMapOnActiveRegions(walker, sum, forceRegionsToBeActive); } } - /** - * Write out each active region to the walker activeRegionOutStream - * - * @param walker - */ - private void writeActiveRegionsToStream( final ActiveRegionWalker walker ) { - // Just want to output the active regions to a file, not actually process them - for( final ActiveRegion activeRegion : workQueue ) { - if( activeRegion.isActive ) { - walker.activeRegionOutStream.println( activeRegion.getLocation() ); - } - } - } - - private T callWalkerMapOnActiveRegions( final ActiveRegionWalker walker, T sum, final int minStart, final String currentContig ) { + private T callWalkerMapOnActiveRegions(final ActiveRegionWalker walker, T sum, final boolean forceRegionsToBeActive) { // Since we've traversed sufficiently past this point (or this contig!) 
in the workQueue we can unload those regions and process them // TODO can implement parallel traversal here while( workQueue.peek() != null ) { - final GenomeLoc extendedLoc = workQueue.peek().getExtendedLoc(); - if ( extendedLoc.getStop() < minStart || (currentContig != null && !workQueue.peek().getExtendedLoc().getContig().equals(currentContig))) { - final ActiveRegion activeRegion = workQueue.remove(); - sum = processActiveRegion( activeRegion, sum, walker ); + final ActiveRegion activeRegion = workQueue.peek(); + if ( forceRegionsToBeActive || regionCompletelyWithinDeadZone(activeRegion) ) { + if ( DEBUG ) logger.warn("Processing active region " + activeRegion + " dead zone " + spanOfLastSeenRead()); + sum = processActiveRegion( workQueue.remove(), sum, walker ); } else { break; } @@ -261,61 +419,33 @@ public class TraverseActiveRegions extends TraversalEngine walker ) { - final ArrayList placedReads = new ArrayList(); - for( final GATKSAMRecord read : myReads ) { + protected T processActiveRegion(final ActiveRegion activeRegion, final T sum, final ActiveRegionWalker walker) { + final Iterator liveReads = myReads.iterator(); + while ( liveReads.hasNext() ) { + boolean killed = false; + final GATKSAMRecord read = liveReads.next(); final GenomeLoc readLoc = this.engine.getGenomeLocParser().createGenomeLoc( read ); - if( activeRegion.getLocation().overlapsP( readLoc ) ) { - // The region which the highest amount of overlap is chosen as the primary region for the read (tie breaking is done as right most region) - long maxOverlap = activeRegion.getLocation().sizeOfOverlap( readLoc ); - ActiveRegion bestRegion = activeRegion; - for( final ActiveRegion otherRegionToTest : workQueue ) { - if( otherRegionToTest.getLocation().sizeOfOverlap(readLoc) >= maxOverlap ) { - maxOverlap = otherRegionToTest.getLocation().sizeOfOverlap( readLoc ); - bestRegion = otherRegionToTest; - } - } - bestRegion.add( read ); - // The read is also added to all other regions in which it 
overlaps but marked as non-primary - if( walker.wantsNonPrimaryReads() ) { - if( !bestRegion.equals(activeRegion) ) { - activeRegion.add( read ); - } - for( final ActiveRegion otherRegionToTest : workQueue ) { - if( !bestRegion.equals(otherRegionToTest) ) { - // check for non-primary vs. extended - if ( otherRegionToTest.getLocation().overlapsP( readLoc ) ) { - otherRegionToTest.add( read ); - } else if ( walker.wantsExtendedReads() && otherRegionToTest.getExtendedLoc().overlapsP( readLoc ) ) { - otherRegionToTest.add( read ); - } - } - } - } - placedReads.add( read ); - // check for non-primary vs. extended - } else if( activeRegion.getLocation().overlapsP( readLoc ) ) { - if ( walker.wantsNonPrimaryReads() ) { - activeRegion.add( read ); + if( activeRegion.getLocation().overlapsP( readLoc ) ) { + activeRegion.add(read); + + if ( ! walker.wantsNonPrimaryReads() ) { + if ( DEBUG ) logger.warn("Removing read " + read.getReadName() + " at " + readLoc + " with dead zone start " + spanOfLastSeenRead()); + liveReads.remove(); + killed = true; } } else if( walker.wantsExtendedReads() && activeRegion.getExtendedLoc().overlapsP( readLoc )) { activeRegion.add( read ); } + + if ( ! killed && readCannotOccurInAnyMoreActiveRegions(read, activeRegion) ) { + if ( DEBUG ) logger.warn("Removing read " + read.getReadName() + " at " + readLoc + " with dead zone start " + spanOfLastSeenRead()); + liveReads.remove(); + } } - myReads.removeAll( placedReads ); // remove all the reads which have been placed into their active region - // WARNING: This hashset relies on reads being exactly equal when they are placed in the list as when they are removed. So the ActiveRegionWalker can't modify the reads in any way. logger.debug(">> Map call with " + activeRegion.getReads().size() + " " + (activeRegion.isActive ? 
"active" : "inactive") + " reads @ " + activeRegion.getLocation() + " with full extent: " + activeRegion.getReferenceLoc()); - final M x = walker.map( activeRegion, null ); + final M x = walker.map(activeRegion, null); return walker.reduce( x, sum ); } - - /** - * Special function called in LinearMicroScheduler to empty out the work queue. - * Ugly for now but will be cleaned up when we push this functionality more into the engine - */ - public T endTraversal( final Walker walker, T sum) { - return processActiveRegions((ActiveRegionWalker)walker, sum, Integer.MAX_VALUE, null); - } } diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/coverage/CallableLoci.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/coverage/CallableLoci.java index e8fa86346..566aac6b5 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/coverage/CallableLoci.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/coverage/CallableLoci.java @@ -1,47 +1,26 @@ /* -* By downloading the PROGRAM you agree to the following terms of use: -* -* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY -* -* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). -* -* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and -* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. -* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: -* -* 1. 
DEFINITIONS -* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. -* -* 2. LICENSE -* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. -* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. -* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. -* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. 
LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. -* -* 3. OWNERSHIP OF INTELLECTUAL PROPERTY -* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. -* Copyright 2012 Broad Institute, Inc. -* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. -* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. -* -* 4. INDEMNIFICATION -* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. -* -* 5. NO REPRESENTATIONS OR WARRANTIES -* THE PROGRAM IS DELIVERED AS IS. 
BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. -* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. -* -* 6. ASSIGNMENT -* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. -* -* 7. MISCELLANEOUS -* 7.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. -* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. 
Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. -* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. -* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. -* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. -* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. -* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. 
+* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ package org.broadinstitute.sting.gatk.walkers.coverage; diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/coverage/CompareCallableLoci.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/coverage/CompareCallableLoci.java index 898f890c6..6f1c9d020 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/coverage/CompareCallableLoci.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/coverage/CompareCallableLoci.java @@ -1,47 +1,26 @@ /* -* By downloading the PROGRAM you agree to the following terms of use: -* -* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY -* -* This Agreement is made between the Broad Institute, Inc. 
with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). -* -* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and -* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. -* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: -* -* 1. DEFINITIONS -* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. -* -* 2. LICENSE -* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. -* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. -* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. 
LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. -* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. -* -* 3. OWNERSHIP OF INTELLECTUAL PROPERTY -* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. -* Copyright 2012 Broad Institute, Inc. -* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. -* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. -* -* 4. 
INDEMNIFICATION -* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. -* -* 5. NO REPRESENTATIONS OR WARRANTIES -* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. -* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. -* -* 6. ASSIGNMENT -* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. -* -* 7. MISCELLANEOUS -* 7.1 Export Control. 
LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. -* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. -* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. -* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. -* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. 
This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. -* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. -* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ package org.broadinstitute.sting.gatk.walkers.coverage; diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/coverage/CoverageUtils.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/coverage/CoverageUtils.java index 573291d06..fe2eee2a2 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/coverage/CoverageUtils.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/coverage/CoverageUtils.java @@ -217,9 +217,9 @@ public class CoverageUtils { private static void updateCounts(int[] counts, PileupElement e) { if ( e.isDeletion() ) { - counts[BaseUtils.DELETION_INDEX] += e.getRepresentativeCount(); - } else if ( BaseUtils.basesAreEqual((byte) 'N', e.getBase()) ) { - counts[BaseUtils.NO_CALL_INDEX] += e.getRepresentativeCount(); + counts[BaseUtils.Base.D.ordinal()] += e.getRepresentativeCount(); + } else if ( BaseUtils.basesAreEqual(BaseUtils.Base.N.base, e.getBase()) ) { + counts[BaseUtils.Base.N.ordinal()] += e.getRepresentativeCount(); } else { try { counts[BaseUtils.simpleBaseToBaseIndex(e.getBase())] += e.getRepresentativeCount(); diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/coverage/GCContentByInterval.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/coverage/GCContentByInterval.java index f416806a8..668d3fd5f 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/coverage/GCContentByInterval.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/coverage/GCContentByInterval.java @@ -1,47 +1,26 @@ /* -* By downloading the PROGRAM you agree to the following terms of use: -* -* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY -* -* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). 
-* -* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and -* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. -* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: -* -* 1. DEFINITIONS -* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. -* -* 2. LICENSE -* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. -* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. -* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. -* 2.3 License Limitations. 
Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. -* -* 3. OWNERSHIP OF INTELLECTUAL PROPERTY -* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. -* Copyright 2012 Broad Institute, Inc. -* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. -* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. -* -* 4. 
INDEMNIFICATION -* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. -* -* 5. NO REPRESENTATIONS OR WARRANTIES -* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. -* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. -* -* 6. ASSIGNMENT -* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. -* -* 7. MISCELLANEOUS -* 7.1 Export Control. 
LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. -* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. -* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. -* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. -* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. 
This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. -* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. -* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ package org.broadinstitute.sting.gatk.walkers.coverage; @@ -107,7 +86,7 @@ public class GCContentByInterval extends LocusWalker { if (tracker == null) return null; int baseIndex = ref.getBaseIndex(); - return (baseIndex == BaseUtils.gIndex || baseIndex == BaseUtils.cIndex) ? 1L : 0L; + return (baseIndex == BaseUtils.Base.G.ordinal() || baseIndex == BaseUtils.Base.C.ordinal()) ? 1L : 0L; } public Long reduce(Long toAdd, Long runningCount) { diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/CoveredByNSamplesSites.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/CoveredByNSamplesSites.java index 09f94c9bf..0ad6e9d3b 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/CoveredByNSamplesSites.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/CoveredByNSamplesSites.java @@ -1,3 +1,28 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. +*/ + package org.broadinstitute.sting.gatk.walkers.diagnostics; diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/ErrorRatePerCycle.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/ErrorRatePerCycle.java index 8f30a2c40..8a7f2bcc3 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/ErrorRatePerCycle.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/ErrorRatePerCycle.java @@ -1,47 +1,26 @@ /* -* By downloading the PROGRAM you agree to the following terms of use: -* -* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY -* -* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). -* -* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and -* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. -* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: -* -* 1. DEFINITIONS -* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. -* -* 2. 
LICENSE -* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. -* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. -* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. -* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. -* -* 3. OWNERSHIP OF INTELLECTUAL PROPERTY -* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. 
The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. -* Copyright 2012 Broad Institute, Inc. -* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. -* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. -* -* 4. INDEMNIFICATION -* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. -* -* 5. NO REPRESENTATIONS OR WARRANTIES -* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. 
-* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. -* -* 6. ASSIGNMENT -* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. -* -* 7. MISCELLANEOUS -* 7.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. -* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. -* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. -* 7.4 Notice. 
Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. -* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. -* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. -* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. 
+* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ package org.broadinstitute.sting.gatk.walkers.diagnostics; diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/ReadGroupProperties.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/ReadGroupProperties.java index 77f1a4578..368e0bb5c 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/ReadGroupProperties.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/ReadGroupProperties.java @@ -1,47 +1,26 @@ /* -* By downloading the PROGRAM you agree to the following terms of use: -* -* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY -* -* This Agreement is made between the Broad Institute, Inc. 
with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). -* -* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and -* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. -* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: -* -* 1. DEFINITIONS -* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. -* -* 2. LICENSE -* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. -* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. -* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. 
LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. -* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. -* -* 3. OWNERSHIP OF INTELLECTUAL PROPERTY -* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. -* Copyright 2012 Broad Institute, Inc. -* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. -* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. -* -* 4. 
INDEMNIFICATION -* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. -* -* 5. NO REPRESENTATIONS OR WARRANTIES -* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. -* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. -* -* 6. ASSIGNMENT -* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. -* -* 7. MISCELLANEOUS -* 7.1 Export Control. 
LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. -* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. -* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. -* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. -* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. 
This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. -* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. -* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ package org.broadinstitute.sting.gatk.walkers.diagnostics; diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/ReadLengthDistribution.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/ReadLengthDistribution.java index 9000dcf8b..4965521ce 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/ReadLengthDistribution.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/ReadLengthDistribution.java @@ -1,47 +1,26 @@ /* -* By downloading the PROGRAM you agree to the following terms of use: -* -* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY -* -* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). -* -* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and -* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. -* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: -* -* 1. DEFINITIONS -* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. -* -* 2. LICENSE -* 2.1 Grant. 
Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. -* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. -* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. -* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. -* -* 3. OWNERSHIP OF INTELLECTUAL PROPERTY -* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. 
LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. -* Copyright 2012 Broad Institute, Inc. -* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. -* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. -* -* 4. INDEMNIFICATION -* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. -* -* 5. NO REPRESENTATIONS OR WARRANTIES -* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. 
-* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. -* -* 6. ASSIGNMENT -* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. -* -* 7. MISCELLANEOUS -* 7.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. -* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. -* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. -* 7.4 Notice. 
Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. -* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. -* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. -* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. 
+* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ package org.broadinstitute.sting.gatk.walkers.diagnostics; diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/fasta/FastaStats.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/fasta/FastaStats.java index a152f79a4..ad7d85031 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/fasta/FastaStats.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/fasta/FastaStats.java @@ -1,47 +1,26 @@ /* -* By downloading the PROGRAM you agree to the following terms of use: -* -* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY -* -* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). 
-* -* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and -* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. -* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: -* -* 1. DEFINITIONS -* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. -* -* 2. LICENSE -* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. -* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. -* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. -* 2.3 License Limitations. 
Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. -* -* 3. OWNERSHIP OF INTELLECTUAL PROPERTY -* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. -* Copyright 2012 Broad Institute, Inc. -* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. -* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. -* -* 4. 
INDEMNIFICATION -* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. -* -* 5. NO REPRESENTATIONS OR WARRANTIES -* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. -* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. -* -* 6. ASSIGNMENT -* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. -* -* 7. MISCELLANEOUS -* 7.1 Export Control. 
LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. -* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. -* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. -* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. -* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. 
This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. -* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. -* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ package org.broadinstitute.sting.gatk.walkers.fasta; diff --git a/public/java/src/org/broadinstitute/sting/utils/Haplotype.java b/public/java/src/org/broadinstitute/sting/utils/Haplotype.java index efe9460cb..baab1f5fa 100644 --- a/public/java/src/org/broadinstitute/sting/utils/Haplotype.java +++ b/public/java/src/org/broadinstitute/sting/utils/Haplotype.java @@ -37,12 +37,10 @@ import org.broadinstitute.variant.variantcontext.VariantContext; import java.io.Serializable; import java.util.*; -public class Haplotype { - protected final byte[] bases; - protected final double[] quals; +public class Haplotype extends Allele { + private GenomeLoc genomeLocation = null; private HashMap eventMap = null; - private boolean isRef = false; private Cigar cigar; private int alignmentStartHapwrtRef; public int leftBreakPoint = 0; @@ -50,44 +48,37 @@ public class Haplotype { private Event artificialEvent = null; /** - * Create a simple consensus sequence with provided bases and a uniform quality over all bases of qual + * Main constructor * * @param bases bases - * @param qual qual + * @param isRef is reference allele? 
*/ - public Haplotype( final byte[] bases, final int qual ) { - this.bases = bases.clone(); - quals = new double[bases.length]; - Arrays.fill(quals, (double)qual); - } - - public Haplotype( final byte[] bases, final double[] quals ) { - this.bases = bases.clone(); - this.quals = quals.clone(); + public Haplotype( final byte[] bases, final boolean isRef ) { + super(bases.clone(), isRef); } public Haplotype( final byte[] bases ) { - this(bases, 0); + this(bases, false); } protected Haplotype( final byte[] bases, final Event artificialEvent ) { - this(bases, 0); + this(bases, false); this.artificialEvent = artificialEvent; } public Haplotype( final byte[] bases, final GenomeLoc loc ) { - this(bases); + this(bases, false); this.genomeLocation = loc; } @Override public boolean equals( Object h ) { - return h instanceof Haplotype && Arrays.equals(bases, ((Haplotype) h).bases); + return h instanceof Haplotype && Arrays.equals(getBases(), ((Haplotype) h).getBases()); } - + @Override public int hashCode() { - return Arrays.hashCode(bases); + return Arrays.hashCode(getBases()); } public HashMap getEventMap() { @@ -98,32 +89,13 @@ public class Haplotype { this.eventMap = eventMap; } - public boolean isReference() { - return isRef; - } - - public void setIsReference( boolean isRef ) { - this.isRef = isRef; - } - - public double getQualitySum() { - double s = 0; - for (int k=0; k < bases.length; k++) { - s += quals[k]; - } - return s; - } - @Override public String toString() { - return new String(bases); + return getDisplayString(); } - public double[] getQuals() { - return quals.clone(); - } public byte[] getBases() { - return bases.clone(); + return super.getBases().clone(); } public long getStartPosition() { @@ -178,19 +150,23 @@ public class Haplotype { public Haplotype insertAllele( final Allele refAllele, final Allele altAllele, final int refInsertLocation, final int genomicInsertLocation ) { // refInsertLocation is in ref haplotype offset coordinates NOT genomic 
coordinates final int haplotypeInsertLocation = ReadUtils.getReadCoordinateForReferenceCoordinate(alignmentStartHapwrtRef, cigar, refInsertLocation, ReadUtils.ClippingTail.RIGHT_TAIL, true); - if( haplotypeInsertLocation == -1 || haplotypeInsertLocation + refAllele.length() >= bases.length ) { // desired change falls inside deletion so don't bother creating a new haplotype + if( haplotypeInsertLocation == -1 || haplotypeInsertLocation + refAllele.length() >= getBases().length ) { // desired change falls inside deletion so don't bother creating a new haplotype return null; } byte[] newHaplotypeBases = new byte[]{}; - newHaplotypeBases = ArrayUtils.addAll(newHaplotypeBases, ArrayUtils.subarray(bases, 0, haplotypeInsertLocation)); // bases before the variant + newHaplotypeBases = ArrayUtils.addAll(newHaplotypeBases, ArrayUtils.subarray(getBases(), 0, haplotypeInsertLocation)); // bases before the variant newHaplotypeBases = ArrayUtils.addAll(newHaplotypeBases, altAllele.getBases()); // the alt allele of the variant - newHaplotypeBases = ArrayUtils.addAll(newHaplotypeBases, ArrayUtils.subarray(bases, haplotypeInsertLocation + refAllele.length(), bases.length)); // bases after the variant + newHaplotypeBases = ArrayUtils.addAll(newHaplotypeBases, ArrayUtils.subarray(getBases(), haplotypeInsertLocation + refAllele.length(), getBases().length)); // bases after the variant return new Haplotype(newHaplotypeBases, new Event(refAllele, altAllele, genomicInsertLocation)); } public static class HaplotypeBaseComparator implements Comparator, Serializable { @Override public int compare( final Haplotype hap1, final Haplotype hap2 ) { + return compareHaplotypeBases(hap1, hap2); + } + + public static int compareHaplotypeBases(final Haplotype hap1, final Haplotype hap2) { final byte[] arr1 = hap1.getBases(); final byte[] arr2 = hap2.getBases(); // compares byte arrays using lexical ordering diff --git a/public/java/src/org/broadinstitute/sting/utils/clipping/ReadClipper.java 
b/public/java/src/org/broadinstitute/sting/utils/clipping/ReadClipper.java index 524c29d64..87526545d 100644 --- a/public/java/src/org/broadinstitute/sting/utils/clipping/ReadClipper.java +++ b/public/java/src/org/broadinstitute/sting/utils/clipping/ReadClipper.java @@ -381,9 +381,9 @@ public class ReadClipper { * @return a new read without adaptor sequence */ private GATKSAMRecord hardClipAdaptorSequence () { - final Integer adaptorBoundary = ReadUtils.getAdaptorBoundary(read); + final int adaptorBoundary = ReadUtils.getAdaptorBoundary(read); - if (adaptorBoundary == null || !ReadUtils.isInsideRead(read, adaptorBoundary)) + if (adaptorBoundary == ReadUtils.CANNOT_COMPUTE_ADAPTOR_BOUNDARY || !ReadUtils.isInsideRead(read, adaptorBoundary)) return read; return read.getReadNegativeStrandFlag() ? hardClipByReferenceCoordinatesLeftTail(adaptorBoundary) : hardClipByReferenceCoordinatesRightTail(adaptorBoundary); diff --git a/public/java/src/org/broadinstitute/sting/utils/fasta/CachingIndexedFastaSequenceFile.java b/public/java/src/org/broadinstitute/sting/utils/fasta/CachingIndexedFastaSequenceFile.java index 3d43d5d4d..a749625cd 100644 --- a/public/java/src/org/broadinstitute/sting/utils/fasta/CachingIndexedFastaSequenceFile.java +++ b/public/java/src/org/broadinstitute/sting/utils/fasta/CachingIndexedFastaSequenceFile.java @@ -33,6 +33,7 @@ import net.sf.samtools.SAMSequenceRecord; import net.sf.samtools.util.StringUtil; import org.apache.log4j.Priority; import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; +import org.broadinstitute.variant.utils.BaseUtils; import java.io.File; import java.io.FileNotFoundException; @@ -41,9 +42,10 @@ import java.util.Arrays; /** * A caching version of the IndexedFastaSequenceFile that avoids going to disk as often as the raw indexer. * - * Thread-safe! Uses a thread-local cache + * Thread-safe! Uses a thread-local cache. 
* - * Automatically upper-cases the bases coming in, unless they the flag preserveCase is explicitly set + * Automatically upper-cases the bases coming in, unless the flag preserveCase is explicitly set. + * Automatically converts IUPAC bases to Ns, unless the flag preserveIUPAC is explicitly set. */ public class CachingIndexedFastaSequenceFile extends IndexedFastaSequenceFile { protected static final org.apache.log4j.Logger logger = org.apache.log4j.Logger.getLogger(CachingIndexedFastaSequenceFile.class); @@ -64,10 +66,15 @@ public class CachingIndexedFastaSequenceFile extends IndexedFastaSequenceFile { private final long cacheMissBackup; /** - * If true, we will preserve the case of the original base in the genome, not + * If true, we will preserve the case of the original base in the genome */ private final boolean preserveCase; + /** + * If true, we will preserve the IUPAC bases in the genome + */ + private final boolean preserveIUPAC; + // information about checking efficiency long cacheHits = 0; long cacheMisses = 0; @@ -97,13 +104,15 @@ public class CachingIndexedFastaSequenceFile extends IndexedFastaSequenceFile { * @param index the index of the fasta file, used for efficient random access * @param cacheSize the size in bp of the cache we will use for this reader * @param preserveCase If true, we will keep the case of the underlying bases in the FASTA, otherwise everything is converted to upper case + * @param preserveIUPAC If true, we will keep the IUPAC bases in the FASTA, otherwise they are converted to Ns */ - public CachingIndexedFastaSequenceFile(final File fasta, final FastaSequenceIndex index, final long cacheSize, final boolean preserveCase) { + public CachingIndexedFastaSequenceFile(final File fasta, final FastaSequenceIndex index, final long cacheSize, final boolean preserveCase, final boolean preserveIUPAC) { super(fasta, index); if ( cacheSize < 0 ) throw new IllegalArgumentException("cacheSize must be > 0"); this.cacheSize = cacheSize; 
this.cacheMissBackup = Math.max(cacheSize / 1000, 1); this.preserveCase = preserveCase; + this.preserveIUPAC = preserveIUPAC; } /** @@ -116,25 +125,15 @@ public class CachingIndexedFastaSequenceFile extends IndexedFastaSequenceFile { * @param cacheSize the size of the cache to use in this CachingIndexedFastaReader, must be >= 0 * @param preserveCase If true, we will keep the case of the underlying bases in the FASTA, otherwise everything is converted to upper case */ - public CachingIndexedFastaSequenceFile(final File fasta, final long cacheSize, final boolean preserveCase ) throws FileNotFoundException { + public CachingIndexedFastaSequenceFile(final File fasta, final long cacheSize, final boolean preserveCase, final boolean preserveIUPAC) throws FileNotFoundException { super(fasta); if ( cacheSize < 0 ) throw new IllegalArgumentException("cacheSize must be > 0"); this.cacheSize = cacheSize; this.cacheMissBackup = Math.max(cacheSize / 1000, 1); this.preserveCase = preserveCase; + this.preserveIUPAC = preserveIUPAC; } -// /** -// * Open the given indexed fasta sequence file. Throw an exception if the file cannot be opened. -// * -// * @param fasta The file to open. -// * @param index Pre-built FastaSequenceIndex, for the case in which one does not exist on disk. -// * @throws java.io.FileNotFoundException If the fasta or any of its supporting files cannot be found. 
-// */ -// public CachingIndexedFastaSequenceFile(final File fasta, final FastaSequenceIndex index) { -// this(fasta, index, DEFAULT_CACHE_SIZE); -// } - /** * Same as general constructor but allows one to override the default cacheSize * @@ -145,7 +144,7 @@ public class CachingIndexedFastaSequenceFile extends IndexedFastaSequenceFile { * @param cacheSize the size in bp of the cache we will use for this reader */ public CachingIndexedFastaSequenceFile(final File fasta, final FastaSequenceIndex index, final long cacheSize) { - this(fasta, index, cacheSize, false); + this(fasta, index, cacheSize, false, false); } /** @@ -169,7 +168,7 @@ public class CachingIndexedFastaSequenceFile extends IndexedFastaSequenceFile { * @param preserveCase If true, we will keep the case of the underlying bases in the FASTA, otherwise everything is converted to upper case */ public CachingIndexedFastaSequenceFile(final File fasta, final boolean preserveCase) throws FileNotFoundException { - this(fasta, DEFAULT_CACHE_SIZE, preserveCase); + this(fasta, DEFAULT_CACHE_SIZE, preserveCase, false); } /** @@ -182,7 +181,7 @@ public class CachingIndexedFastaSequenceFile extends IndexedFastaSequenceFile { * @param cacheSize the size of the cache to use in this CachingIndexedFastaReader, must be >= 0 */ public CachingIndexedFastaSequenceFile(final File fasta, final long cacheSize ) throws FileNotFoundException { - this(fasta, cacheSize, false); + this(fasta, cacheSize, false, false); } /** @@ -240,6 +239,15 @@ public class CachingIndexedFastaSequenceFile extends IndexedFastaSequenceFile { return ! isPreservingCase(); } + /** + * Is this CachingIndexedFastaReader keeping the IUPAC bases in the fasta, or is it turning them into Ns? 
+ * + * @return true if the IUPAC bases coming from this reader are not modified + */ + public boolean isPreservingIUPAC() { + return preserveIUPAC; + } + /** * Gets the subsequence of the contig in the range [start,stop] * @@ -253,7 +261,7 @@ public class CachingIndexedFastaSequenceFile extends IndexedFastaSequenceFile { * all of the bases in the ReferenceSequence returned by this method will be upper cased. */ @Override - public ReferenceSequence getSubsequenceAt( final String contig, final long start, final long stop ) { + public ReferenceSequence getSubsequenceAt( final String contig, long start, final long stop ) { final ReferenceSequence result; final Cache myCache = cache.get(); @@ -261,8 +269,9 @@ public class CachingIndexedFastaSequenceFile extends IndexedFastaSequenceFile { cacheMisses++; result = super.getSubsequenceAt(contig, start, stop); if ( ! preserveCase ) StringUtil.toUpperCase(result.getBases()); + if ( ! preserveIUPAC ) BaseUtils.convertIUPACtoN(result.getBases(), true, start < 1); } else { - // todo -- potential optimization is to check if contig.name == contig, as this in generally will be true + // todo -- potential optimization is to check if contig.name == contig, as this in general will be true SAMSequenceRecord contigInfo = super.getSequenceDictionary().getSequence(contig); if (stop > contigInfo.getSequenceLength()) @@ -276,6 +285,7 @@ public class CachingIndexedFastaSequenceFile extends IndexedFastaSequenceFile { // convert all of the bases in the sequence to upper case if we aren't preserving cases if ( ! preserveCase ) StringUtil.toUpperCase(myCache.seq.getBases()); + if ( ! 
preserveIUPAC ) BaseUtils.convertIUPACtoN(myCache.seq.getBases(), true, myCache.start == 0); } else { cacheHits++; } diff --git a/public/java/src/org/broadinstitute/sting/utils/interval/IntervalUtils.java b/public/java/src/org/broadinstitute/sting/utils/interval/IntervalUtils.java index c647a7b80..7374dda14 100644 --- a/public/java/src/org/broadinstitute/sting/utils/interval/IntervalUtils.java +++ b/public/java/src/org/broadinstitute/sting/utils/interval/IntervalUtils.java @@ -32,6 +32,7 @@ import net.sf.picard.util.IntervalList; import net.sf.samtools.SAMFileHeader; import org.apache.log4j.Logger; import org.broad.tribble.Feature; +import org.broadinstitute.sting.commandline.IntervalArgumentCollection; import org.broadinstitute.sting.commandline.IntervalBinding; import org.broadinstitute.sting.gatk.datasources.reference.ReferenceDataSource; import org.broadinstitute.sting.utils.GenomeLoc; @@ -534,6 +535,47 @@ public class IntervalUtils { } } + public static GenomeLocSortedSet parseIntervalArguments(final ReferenceDataSource referenceDataSource, IntervalArgumentCollection argCollection) { + GenomeLocSortedSet intervals = null; + + // return if no interval arguments at all + if ( argCollection.intervals == null && argCollection.excludeIntervals == null ) + return intervals; + + // Note that the use of '-L all' is no longer supported. 
+ + // if include argument isn't given, create new set of all possible intervals + + final Pair includeExcludePair = IntervalUtils.parseIntervalBindingsPair( + referenceDataSource, + argCollection.intervals, + argCollection.intervalSetRule, argCollection.intervalMerging, argCollection.intervalPadding, + argCollection.excludeIntervals); + + final GenomeLocSortedSet includeSortedSet = includeExcludePair.getFirst(); + final GenomeLocSortedSet excludeSortedSet = includeExcludePair.getSecond(); + + // if no exclude arguments, can return parseIntervalArguments directly + if ( excludeSortedSet == null ) + intervals = includeSortedSet; + + // otherwise there are exclude arguments => must merge include and exclude GenomeLocSortedSets + else { + intervals = includeSortedSet.subtractRegions(excludeSortedSet); + + // logging messages only printed when exclude (-XL) arguments are given + final long toPruneSize = includeSortedSet.coveredSize(); + final long toExcludeSize = excludeSortedSet.coveredSize(); + final long intervalSize = intervals.coveredSize(); + logger.info(String.format("Initial include intervals span %d loci; exclude intervals span %d loci", toPruneSize, toExcludeSize)); + logger.info(String.format("Excluding %d loci from original intervals (%.2f%% reduction)", + toPruneSize - intervalSize, (toPruneSize - intervalSize) / (0.01 * toPruneSize))); + } + + logger.info(String.format("Processing %d bp from intervals", intervals.coveredSize())); + return intervals; + } + public static Pair parseIntervalBindingsPair( final ReferenceDataSource referenceDataSource, final List> intervals, diff --git a/public/java/src/org/broadinstitute/sting/utils/locusiterator/AlignmentStateMachine.java b/public/java/src/org/broadinstitute/sting/utils/locusiterator/AlignmentStateMachine.java new file mode 100644 index 000000000..50bc9e25b --- /dev/null +++ b/public/java/src/org/broadinstitute/sting/utils/locusiterator/AlignmentStateMachine.java @@ -0,0 +1,370 @@ +/* + * Copyright (c) 2012 
The Broad Institute + * + * Permission is hereby granted, free of charge, to any person + * obtaining a copy of this software and associated documentation + * files (the "Software"), to deal in the Software without + * restriction, including without limitation the rights to use, + * copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following + * conditions: + * + * The above copyright notice and this permission notice shall be + * included in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES + * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT + * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, + * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR + * THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ */ + +package org.broadinstitute.sting.utils.locusiterator; + +import com.google.java.contract.Ensures; +import com.google.java.contract.Invariant; +import com.google.java.contract.Requires; +import net.sf.samtools.Cigar; +import net.sf.samtools.CigarElement; +import net.sf.samtools.CigarOperator; +import org.broadinstitute.sting.utils.GenomeLoc; +import org.broadinstitute.sting.utils.GenomeLocParser; +import org.broadinstitute.sting.utils.exceptions.UserException; +import org.broadinstitute.sting.utils.pileup.PileupElement; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; + +/** + * Steps a single read along its alignment to the genome + * + * The logical model for generating extended events is as follows: the "record state" + * implements the traversal along the reference; thus stepForwardOnGenome() returns + * on every and only on actual reference bases. This can be a (mis)match or a deletion + * (in the latter case, we still return on every individual reference base the deletion spans). + * + * User: depristo + * Date: 1/5/13 + * Time: 1:08 PM + */ +@Invariant({ + "nCigarElements >= 0", + "cigar != null", + "read != null", + "currentCigarElementOffset >= -1", + "currentCigarElementOffset <= nCigarElements" +}) +public class AlignmentStateMachine { + /** + * Our read + */ + private final GATKSAMRecord read; + private final Cigar cigar; + private final int nCigarElements; + private int currentCigarElementOffset = -1; + + /** + * how far are we offset from the start of the read bases? + */ + private int readOffset; + + /** + * how far are we offset from the alignment start on the genome? + */ + private int genomeOffset; + + /** + * Our cigar element + */ + private CigarElement currentElement; + + /** + * how far are we into our cigarElement? 
+ */ + private int offsetIntoCurrentCigarElement; + + @Requires({"read != null", "read.getAlignmentStart() != -1", "read.getCigar() != null"}) + public AlignmentStateMachine(final GATKSAMRecord read) { + this.read = read; + this.cigar = read.getCigar(); + this.nCigarElements = cigar.numCigarElements(); + initializeAsLeftEdge(); + } + + /** + * Initialize the state variables to put this machine one bp before the + * start of the alignment, so that a call to stepForwardOnGenome() will advance + * us to the first proper location + */ + @Ensures("isLeftEdge()") + private void initializeAsLeftEdge() { + readOffset = offsetIntoCurrentCigarElement = genomeOffset = -1; + currentElement = null; + } + + /** + * Get the read we are aligning to the genome + * @return a non-null GATKSAMRecord + */ + @Ensures("result != null") + public GATKSAMRecord getRead() { + return read; + } + + /** + * Get the reference index of the underlying read + * + * @return the reference index of the read + */ + @Ensures("result == getRead().getReferenceIndex()") + public int getReferenceIndex() { + return getRead().getReferenceIndex(); + } + + /** + * Is this the left edge state? I.e., one that is before or after the current read? + * @return true if this state is an edge state, false otherwise + */ + public boolean isLeftEdge() { + return readOffset == -1; + } + + /** + * Are we on the right edge? I.e., is the current state off the right of the alignment? + * @return true if off the right edge, false if otherwise + */ + public boolean isRightEdge() { + return readOffset == read.getReadLength(); + } + + /** + * What is our current offset in the read's bases that aligns us with the reference genome? + * + * @return the current read offset position. If an edge will be == -1 + */ + @Ensures("result >= -1") + public int getReadOffset() { + return readOffset; + } + + /** + * What is the current offset w.r.t. the alignment state that aligns us to the readOffset? 
+ * + * @return the current offset from the alignment start on the genome. If this state is + * at the left edge the result will be -1; + */ + @Ensures("result >= -1") + public int getGenomeOffset() { + return genomeOffset; + } + + /** + * Get the position (1-based as standard) of the current alignment on the genome w.r.t. the read's alignment start + * @return the position on the genome of the current state in absolute coordinates + */ + @Ensures("result > 0") + public int getGenomePosition() { + return read.getAlignmentStart() + getGenomeOffset(); + } + + /** + * Gets #getGenomePosition but as a 1 bp GenomeLoc + * @param genomeLocParser the parser to use to create the genome loc + * @return a non-null genome location with start position of getGenomePosition + */ + @Requires("genomeLocParser != null") + @Ensures("result != null") + public GenomeLoc getLocation(final GenomeLocParser genomeLocParser) { + // TODO -- may return wonky results if on an edge (could be 0 or could be beyond genome location) + return genomeLocParser.createGenomeLoc(read.getReferenceName(), getGenomePosition()); + } + + /** + * Get the cigar element we're currently aligning with. + * + * For example, if the cigar string is 2M2D2M and we're in the second step of the + * first 2M, then this function returns the element 2M. After calling stepForwardOnGenome + * this function would return 2D. + * + * @return the cigar element, or null if we're the left edge + */ + @Ensures("result != null || isLeftEdge() || isRightEdge()") + public CigarElement getCurrentCigarElement() { + return currentElement; + } + + /** + * Get the offset of the current cigar element among all cigar elements in the read + * + * Suppose our read's cigar is 1M2D3M, and we're at the first 1M. This would + * return 0. Stepping forward puts us in the 2D, so our offset is 1. Another + * step forward would result in a 1 again (we're in the second position of the 2D). 
+ * Finally, one more step forward brings us to 2 (for the 3M element) + * + * @return the offset of the current cigar element in the reads's cigar. Will return -1 for + * when the state is on the left edge, and be == the number of cigar elements in the + * read when we're past the last position on the genome + */ + @Ensures({"result >= -1", "result <= nCigarElements"}) + public int getCurrentCigarElementOffset() { + return currentCigarElementOffset; + } + + /** + * Get the offset of the current state into the current cigar element + * + * That is, suppose we have a read with cigar 2M3D4M, and we're right at + * the second M position. offsetIntoCurrentCigarElement would be 1, as + * it's two elements into the 2M cigar. Now stepping forward we'd be + * in cigar element 3D, and our offsetIntoCurrentCigarElement would be 0. + * + * @return the offset (from 0) of the current state in the current cigar element. + * Will be 0 on the right edge, and -1 on the left. + */ + @Ensures({"result >= 0 || (result == -1 && isLeftEdge())", "!isRightEdge() || result == 0"}) + public int getOffsetIntoCurrentCigarElement() { + return offsetIntoCurrentCigarElement; + } + + /** + * Convenience accessor of the CigarOperator of the current cigar element + * + * Robust to the case where we're on the edge, and currentElement is null, in which + * case this function returns null as well + * + * @return null if this is an edge state + */ + @Ensures("result != null || isLeftEdge() || isRightEdge()") + public CigarOperator getCigarOperator() { + return currentElement == null ? 
null : currentElement.getOperator(); + } + + @Override + public String toString() { + return String.format("%s ro=%d go=%d cec=%d %s", read.getReadName(), readOffset, genomeOffset, offsetIntoCurrentCigarElement, currentElement); + } + + // ----------------------------------------------------------------------------------------------- + // + // Code for setting up prev / next states + // + // ----------------------------------------------------------------------------------------------- + + /** + * Step the state machine forward one unit + * + * Takes the current state of this machine, and advances the state until the next on-genome + * cigar element (M, X, =, D) is encountered, at which point this function returns with the + * cigar operator of the current element. + * + * Assumes that the AlignmentStateMachine is in the left edge state at the start, so that + * stepForwardOnGenome() can be called to move the machine to the first alignment position. That + * is, the normal use of this code is: + * + * AlignmentStateMachine machine = new AlignmentStateMachine(read) + * machine.stepForwardOnGenome() + * // now the machine is at the first position on the genome + * + * When stepForwardOnGenome() advances off the right edge of the read, the state machine is + * left in a state such that isRightEdge() returns true and returns null, indicating the + * the machine cannot advance further. The machine may explode, though this is not contracted, + * if stepForwardOnGenome() is called after a previous call returned null. 
+ * + * @return the operator of the cigar element that machine stopped at, null if we advanced off the end of the read + */ + @Ensures("result != null || isRightEdge()") + public CigarOperator stepForwardOnGenome() { + // loop until we either find a cigar element step that moves us one base on the genome, or we run + // out of cigar elements + while ( true ) { + // we enter this method with readOffset = index of the last processed base on the read + // (-1 if we did not process a single base yet); this can be last matching base, + // or last base of an insertion + if (currentElement == null || (offsetIntoCurrentCigarElement + 1) >= currentElement.getLength()) { + currentCigarElementOffset++; + if (currentCigarElementOffset < nCigarElements) { + currentElement = cigar.getCigarElement(currentCigarElementOffset); + offsetIntoCurrentCigarElement = -1; + // next line: guards against cigar elements of length 0; when new cigar element is retrieved, + // we reenter in order to re-check offsetIntoCurrentCigarElement against currentElement's length + continue; + } else { + if (currentElement != null && currentElement.getOperator() == CigarOperator.D) + throw new UserException.MalformedBAM(read, "read ends with deletion. Cigar: " + read.getCigarString() + ". Although the SAM spec technically permits such reads, this is often indicative of malformed files. If you are sure you want to use this file, re-run your analysis with the extra option: -rf BadCigar"); + + // we're done, so set the offset of the cigar to 0 for cleanliness, as well as the current element + offsetIntoCurrentCigarElement = 0; + readOffset = read.getReadLength(); + currentElement = null; + + // Reads that contain indels model the genomeOffset as the following base in the reference. Because + // we fall into this else block only when indels end the read, increment genomeOffset such that the + // current offset of this read is the next ref base after the end of the indel. 
This position will + // model a point on the reference somewhere after the end of the read. + genomeOffset++; // extended events need that. Logically, it's legal to advance the genomic offset here: + + // we do step forward on the ref, and by returning null we also indicate that we are past the read end. + return null; + } + } + + offsetIntoCurrentCigarElement++; + boolean done = false; + switch (currentElement.getOperator()) { + case H: // ignore hard clips + case P: // ignore pads + offsetIntoCurrentCigarElement = currentElement.getLength(); + break; + case I: // insertion w.r.t. the reference + case S: // soft clip + offsetIntoCurrentCigarElement = currentElement.getLength(); + readOffset += currentElement.getLength(); + break; + case D: // deletion w.r.t. the reference + if (readOffset < 0) // we don't want reads starting with deletion, this is a malformed cigar string + throw new UserException.MalformedBAM(read, "read starts with deletion. Cigar: " + read.getCigarString() + ". Although the SAM spec technically permits such reads, this is often indicative of malformed files. 
If you are sure you want to use this file, re-run your analysis with the extra option: -rf BadCigar"); + // should be the same as N case + genomeOffset++; + done = true; + break; + case N: // reference skip (looks and gets processed just like a "deletion", just different logical meaning) + genomeOffset++; + done = true; + break; + case M: + case EQ: + case X: + readOffset++; + genomeOffset++; + done = true; + break; + default: + throw new IllegalStateException("Case statement didn't deal with cigar op: " + currentElement.getOperator()); + } + + if ( done ) + return currentElement.getOperator(); + } + } + + /** + * Create a new PileupElement based on the current state of this element + * + * Must not be a left or right edge + * + * @return a pileup element + */ + @Ensures("result != null") + public final PileupElement makePileupElement() { + if ( isLeftEdge() || isRightEdge() ) + throw new IllegalStateException("Cannot make a pileup element from an edge alignment state"); + return new PileupElement(read, + getReadOffset(), + getCurrentCigarElement(), + getCurrentCigarElementOffset(), + getOffsetIntoCurrentCigarElement()); + } +} + diff --git a/public/java/src/org/broadinstitute/sting/utils/locusiterator/LIBSDownsamplingInfo.java b/public/java/src/org/broadinstitute/sting/utils/locusiterator/LIBSDownsamplingInfo.java new file mode 100644 index 000000000..fc282163e --- /dev/null +++ b/public/java/src/org/broadinstitute/sting/utils/locusiterator/LIBSDownsamplingInfo.java @@ -0,0 +1,53 @@ +/* + * Copyright (c) 2012 The Broad Institute + * + * Permission is hereby granted, free of charge, to any person + * obtaining a copy of this software and associated documentation + * files (the "Software"), to deal in the Software without + * restriction, including without limitation the rights to use, + * copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the 
following + * conditions: + * + * The above copyright notice and this permission notice shall be + * included in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES + * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT + * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, + * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR + * THE USE OR OTHER DEALINGS IN THE SOFTWARE. + */ + +package org.broadinstitute.sting.utils.locusiterator; + +/** + * Simple wrapper about the information LIBS needs about downsampling + * + * User: depristo + * Date: 1/5/13 + * Time: 1:26 PM + */ +class LIBSDownsamplingInfo { + public final static LIBSDownsamplingInfo NO_DOWNSAMPLING = new LIBSDownsamplingInfo(false, -1); + + final private boolean performDownsampling; + final private int toCoverage; + + public LIBSDownsamplingInfo(boolean performDownsampling, int toCoverage) { + this.performDownsampling = performDownsampling; + this.toCoverage = toCoverage; + } + + public boolean isPerformDownsampling() { + return performDownsampling; + } + + public int getToCoverage() { + return toCoverage; + } +} diff --git a/public/java/src/org/broadinstitute/sting/utils/locusiterator/LIBSPerformance.java b/public/java/src/org/broadinstitute/sting/utils/locusiterator/LIBSPerformance.java new file mode 100644 index 000000000..2d074f420 --- /dev/null +++ b/public/java/src/org/broadinstitute/sting/utils/locusiterator/LIBSPerformance.java @@ -0,0 +1,198 @@ +/* + * Copyright (c) 2012 The Broad Institute + * + * Permission is hereby granted, free of charge, to any person + * obtaining a copy of this software and associated documentation + * files (the "Software"), to deal in the Software without + * restriction, including 
without limitation the rights to use, + * copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following + * conditions: + * + * The above copyright notice and this permission notice shall be + * included in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES + * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT + * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, + * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR + * THE USE OR OTHER DEALINGS IN THE SOFTWARE. + */ + +package org.broadinstitute.sting.utils.locusiterator; + +import net.sf.picard.reference.IndexedFastaSequenceFile; +import net.sf.samtools.SAMFileHeader; +import net.sf.samtools.SAMFileReader; +import net.sf.samtools.SAMReadGroupRecord; +import net.sf.samtools.SAMRecordIterator; +import org.apache.log4j.Logger; +import org.broadinstitute.sting.commandline.Argument; +import org.broadinstitute.sting.commandline.CommandLineProgram; +import org.broadinstitute.sting.commandline.Input; +import org.broadinstitute.sting.gatk.ReadProperties; +import org.broadinstitute.sting.gatk.contexts.AlignmentContext; +import org.broadinstitute.sting.gatk.iterators.GATKSAMIterator; +import org.broadinstitute.sting.utils.*; +import org.broadinstitute.sting.utils.fasta.CachingIndexedFastaSequenceFile; +import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; +import org.broadinstitute.sting.utils.sam.GATKSamRecordFactory; + +import java.io.File; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.util.*; + +/** + * Caliper 
microbenchmark of fragment pileup + */ +public class LIBSPerformance extends CommandLineProgram { + private static Logger logger = Logger.getLogger(LIBSPerformance.class); + + @Input(fullName = "input_file", shortName = "I", doc = "SAM or BAM file(s)", required = true) + public File samFile = null; + + @Input(fullName = "reference_sequence", shortName = "R", doc = "Reference sequence file", required = true) + public File referenceFile = null; + + @Argument(fullName = "L", shortName = "L", doc = "Query location", required = false) + public String location = null; + + @Argument(fullName = "dt", shortName = "dt", doc = "Enable downsampling", required = false) + public boolean downsample = false; + + @Override + public int execute() throws IOException { + final IndexedFastaSequenceFile reference = new CachingIndexedFastaSequenceFile(referenceFile); + final GenomeLocParser genomeLocParser = new GenomeLocParser(reference); + + final SAMFileReader reader = new SAMFileReader(samFile); + reader.setSAMRecordFactory(new GATKSamRecordFactory()); + + SAMRecordIterator rawIterator; + if ( location == null ) + rawIterator = reader.iterator(); + else { + final GenomeLoc loc = genomeLocParser.parseGenomeLoc(location); + rawIterator = reader.query(loc.getContig(), loc.getStart(), loc.getStop(), false); + } + + final GATKSAMIterator iterator = new GATKSAMIterator(rawIterator); + + final Set samples = new HashSet(); + for ( final SAMReadGroupRecord rg : reader.getFileHeader().getReadGroups() ) + samples.add(rg.getSample()); + + final LIBSDownsamplingInfo ds = new LIBSDownsamplingInfo(downsample, 250); + + final LocusIteratorByState libs = + new LocusIteratorByState( + iterator, + ds, + true, + genomeLocParser, + samples, + false); + + final SimpleTimer timer = new SimpleTimer().start(); + int bp = 0; + double lastElapsed = 0; + while ( libs.hasNext() ) { + AlignmentContext context = libs.next(); + bp++; + if ( timer.getElapsedTime() - lastElapsed > 10 ) { + logger.info(bp + " 
iterations at " + context.getLocation()); + lastElapsed = timer.getElapsedTime(); + } + } + logger.info(String.format("runtime in seconds: %.2f", timer.getElapsedTime())); + + return 0; + } + +// private void syntheticTests() { +// final int readLength = 101; +// final int nReads = 10000; +// final int locus = 1; +// +// SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000); +// final GenomeLocParser genomeLocParser = new GenomeLocParser(header.getSequenceDictionary()); +// +// int nIterations = 0; +// for ( final String cigar : Arrays.asList("101M", "50M10I40M", "50M10D40M") ) { +// GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "read", 0, locus, readLength); +// read.setReadBases(Utils.dupBytes((byte) 'A', readLength)); +// final byte[] quals = new byte[readLength]; +// for ( int i = 0; i < readLength; i++ ) +// quals[i] = (byte)(i % QualityUtils.MAX_QUAL_SCORE); +// read.setBaseQualities(quals); +// read.setCigarString(cigar); +// +// for ( int j = 0; j < nReads; j++ ) { +// for ( int i = 0; i < rep; i++ ) { +// switch ( op ) { +// case NEW_STATE: +// { +// final AlignmentStateMachine alignmentStateMachine = new AlignmentStateMachine(read); +// while ( alignmentStateMachine.stepForwardOnGenome() != null ) { +// nIterations++; +// } +// } +// break; +//// case OLD_STATE: +//// { +//// final SAMRecordAlignmentState alignmentStateMachine = new SAMRecordAlignmentState(read); +//// while ( alignmentStateMachine.stepForwardOnGenome() != null ) { +//// alignmentStateMachine.getRead(); +//// nIterations++; +//// } +//// } +//// break; +// case NEW_LIBS: +// { +// final List reads = Collections.nCopies(30, read); +// final org.broadinstitute.sting.utils.locusiterator.LocusIteratorByState libs = +// new org.broadinstitute.sting.utils.locusiterator.LocusIteratorByState( +// new LocusIteratorByStateBaseTest.FakeCloseableIterator(reads.iterator()), +// LocusIteratorByStateBaseTest.createTestReadProperties(), +// 
genomeLocParser, +// LocusIteratorByState.sampleListForSAMWithoutReadGroups()); +// +// while ( libs.hasNext() ) { +// AlignmentContext context = libs.next(); +// } +// } +// } +// } +// } +// } +// +// System.out.printf("iterations %d%n", nIterations); +// } + + /** + * Required main method implementation. + * @param argv Command-line argument text. + * @throws Exception on error. + */ + public static void main(String[] argv) throws Exception { + int returnCode = 0; + try { + LIBSPerformance instance = new LIBSPerformance(); + start(instance, argv); + returnCode = 0; + } catch(Exception ex) { + returnCode = 1; + ex.printStackTrace(); + throw ex; + } finally { + System.exit(returnCode); + } + } + +} diff --git a/public/java/src/org/broadinstitute/sting/utils/locusiterator/LocusIterator.java b/public/java/src/org/broadinstitute/sting/utils/locusiterator/LocusIterator.java new file mode 100644 index 000000000..f830dcb30 --- /dev/null +++ b/public/java/src/org/broadinstitute/sting/utils/locusiterator/LocusIterator.java @@ -0,0 +1,36 @@ +package org.broadinstitute.sting.utils.locusiterator; + +import net.sf.samtools.util.CloseableIterator; +import org.broadinstitute.sting.gatk.contexts.AlignmentContext; + +import java.util.Iterator; + +/** + * Iterator that traverses a SAM File, accumulating information on a per-locus basis + */ +public abstract class LocusIterator implements Iterable, CloseableIterator { + // ----------------------------------------------------------------------------------------------------------------- + // + // constructors and other basic operations + // + // ----------------------------------------------------------------------------------------------------------------- + public Iterator iterator() { + return this; + } + + public void close() { + //this.it.close(); + } + + public abstract boolean hasNext(); + public abstract AlignmentContext next(); + + // TODO -- remove me when ART testing is done + public LocusIteratorByState getLIBS() { + 
return null; + } + + public void remove() { + throw new UnsupportedOperationException("Can not remove records from a SAM file via an iterator!"); + } +} diff --git a/public/java/src/org/broadinstitute/sting/utils/locusiterator/LocusIteratorByState.java b/public/java/src/org/broadinstitute/sting/utils/locusiterator/LocusIteratorByState.java new file mode 100644 index 000000000..e7b75f1f2 --- /dev/null +++ b/public/java/src/org/broadinstitute/sting/utils/locusiterator/LocusIteratorByState.java @@ -0,0 +1,393 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+*/ + +package org.broadinstitute.sting.utils.locusiterator; + +import com.google.java.contract.Ensures; +import com.google.java.contract.Requires; +import net.sf.samtools.CigarOperator; +import org.apache.log4j.Logger; +import org.broadinstitute.sting.gatk.ReadProperties; +import org.broadinstitute.sting.gatk.contexts.AlignmentContext; +import org.broadinstitute.sting.gatk.downsampling.DownsampleType; +import org.broadinstitute.sting.utils.GenomeLoc; +import org.broadinstitute.sting.utils.GenomeLocParser; +import org.broadinstitute.sting.utils.pileup.*; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; +import org.broadinstitute.sting.utils.sam.ReadUtils; + +import java.util.*; + +/** + * Iterator that traverses a SAM File, accumulating information on a per-locus basis + * + * Produces AlignmentContext objects, that contain ReadBackedPileups of PileupElements. This + * class has its core job of converting an iterator of ordered SAMRecords into those + * RBPs. + * + * There are a few constraints on required and ensured by LIBS: + * + * -- Requires the Iterator to returns reads in coordinate sorted order, consistent with the ordering + * defined by the SAM file format. That that for performance reasons this constraint isn't actually enforced. + * The behavior of LIBS is undefined in the case where the reads are badly ordered. + * -- The reads in the ReadBackedPileup are themselves in the order of appearance of the reads from the iterator. + * That is, the pileup is ordered in a way consistent with the SAM coordinate ordering + * -- Only aligned reads with at least one on-genomic cigar operator are passed on in the pileups. That is, + * unmapped reads or reads that are all insertions (10I) or soft clipped (10S) are not passed on. + * -- LIBS can perform per-sample downsampling of a variety of kinds. 
+ * -- Because of downsampling there's no guarantee that: + * -- A read that could be aligned to a position will actually occur in the pileup (downsampled away) + * -- A read that appears in a previous pileup that could align to a future position will actually occur + * in that pileup. That is, a read might show up at position i but be downsampled away in the pileup at j + * -- LIBS can optionally capture all of the reads that come off the iterator, before any leveling downsampling + * occurs, if requested. This allows users of LIBS to see both a ReadBackedPileup view of the data as well as + * a stream of unique, sorted reads + */ +public final class LocusIteratorByState extends LocusIterator { + /** + * our log, which we want to capture anything from this class + */ + private final static Logger logger = Logger.getLogger(LocusIteratorByState.class); + + // ----------------------------------------------------------------------------------------------------------------- + // + // member fields + // + // ----------------------------------------------------------------------------------------------------------------- + + /** + * Used to create new GenomeLocs as needed + */ + private final GenomeLocParser genomeLocParser; + + /** + * A complete list of all samples that may come out of the reads. Must be + * comprehensive. + */ + private final ArrayList samples; + + /** + * The system that maps incoming reads from the iterator to their pileup states + */ + private final ReadStateManager readStates; + + /** + * Should we include reads in the pileup which are aligned with a deletion operator to the reference? + */ + private final boolean includeReadsWithDeletionAtLoci; + + /** + * The next alignment context. A non-null value means that a + * context is waiting from hasNext() for sending off to the next next() call. 
A null + * value means that either hasNext() has not been called at all or that + * the underlying iterator is exhausted + */ + private AlignmentContext nextAlignmentContext; + + // ----------------------------------------------------------------------------------------------------------------- + // + // constructors and other basic operations + // + // ----------------------------------------------------------------------------------------------------------------- + + /** + * Create a new LocusIteratorByState + * + * @param samIterator the iterator of reads to process into pileups. Reads must be ordered + * according to standard coordinate-sorted BAM conventions + * @param readInformation meta-information about how to process the reads (i.e., should we do downsampling?) + * @param genomeLocParser used to create genome locs + * @param samples a complete list of samples present in the read groups for the reads coming from samIterator. + * This is generally just the set of read group sample fields in the SAMFileHeader. This + * list of samples may contain a null element, and all reads without read groups will + * be mapped to this null sample + */ + public LocusIteratorByState(final Iterator samIterator, + final ReadProperties readInformation, + final GenomeLocParser genomeLocParser, + final Collection samples) { + this(samIterator, + toDownsamplingInfo(readInformation), + readInformation.includeReadsWithDeletionAtLoci(), + genomeLocParser, + samples, + readInformation.keepUniqueReadListInLIBS()); + } + + /** + * Create a new LocusIteratorByState + * + * @param samIterator the iterator of reads to process into pileups. Reads must be ordered + * according to standard coordinate-sorted BAM conventions + * @param downsamplingInfo meta-information about how to downsampling the reads + * @param genomeLocParser used to create genome locs + * @param samples a complete list of samples present in the read groups for the reads coming from samIterator. 
+ * This is generally just the set of read group sample fields in the SAMFileHeader. This + * list of samples may contain a null element, and all reads without read groups will + * be mapped to this null sample + * @param maintainUniqueReadsList if true, we will keep the unique reads from off the samIterator and make them + * available via the transferReadsFromAllPreviousPileups interface + */ protected LocusIteratorByState(final Iterator samIterator, + final LIBSDownsamplingInfo downsamplingInfo, + final boolean includeReadsWithDeletionAtLoci, + final GenomeLocParser genomeLocParser, + final Collection samples, + final boolean maintainUniqueReadsList) { + if ( samIterator == null ) throw new IllegalArgumentException("samIterator cannot be null"); + if ( downsamplingInfo == null ) throw new IllegalArgumentException("downsamplingInfo cannot be null"); + if ( genomeLocParser == null ) throw new IllegalArgumentException("genomeLocParser cannot be null"); + if ( samples == null ) throw new IllegalArgumentException("Samples cannot be null"); + + // currently the GATK expects this LocusIteratorByState to accept empty sample lists, when + // there's no read data. So we need to throw this error only when samIterator.hasNext() is true + if (samples.isEmpty() && samIterator.hasNext()) { + throw new IllegalArgumentException("samples list must not be empty"); + } + + this.genomeLocParser = genomeLocParser; + this.includeReadsWithDeletionAtLoci = includeReadsWithDeletionAtLoci; + this.samples = new ArrayList(samples); + this.readStates = new ReadStateManager(samIterator, this.samples, downsamplingInfo, maintainUniqueReadsList); + } + + @Override + public Iterator iterator() { + return this; + } + + /** + * Get the current location (i.e., the bp of the center of the pileup) of the pileup, or null if not anywhere yet + * + * Assumes that read states is updated to reflect the current pileup position, but not advanced to the + * next location. 
+ * + * @return the location of the current pileup, or null if we're after all reads + */ + private GenomeLoc getLocation() { + return readStates.isEmpty() ? null : readStates.getFirst().getLocation(genomeLocParser); + } + + // ----------------------------------------------------------------------------------------------------------------- + // + // next() routine and associated collection operations + // + // ----------------------------------------------------------------------------------------------------------------- + + /** + * Is there another pileup available? + * @return + */ + @Override + public boolean hasNext() { + lazyLoadNextAlignmentContext(); + return nextAlignmentContext != null; + } + + /** + * Get the next AlignmentContext available from the reads. + * + * @return a non-null AlignmentContext of the pileup after to the next genomic position covered by + * at least one read. + */ + @Override + public AlignmentContext next() { + lazyLoadNextAlignmentContext(); + if (!hasNext()) + throw new NoSuchElementException("LocusIteratorByState: out of elements."); + AlignmentContext currentAlignmentContext = nextAlignmentContext; + nextAlignmentContext = null; + return currentAlignmentContext; + } + + /** + * Creates the next alignment context from the given state. Note that this is implemented as a + * lazy load method. nextAlignmentContext MUST BE null in order for this method to advance to the + * next entry. 
+ */ + private void lazyLoadNextAlignmentContext() { + while (nextAlignmentContext == null && readStates.hasNext()) { + readStates.collectPendingReads(); + + final GenomeLoc location = getLocation(); + final Map fullPileup = new HashMap(); + + for (final Map.Entry sampleStatePair : readStates ) { + final String sample = sampleStatePair.getKey(); + final PerSampleReadStateManager readState = sampleStatePair.getValue(); + final Iterator iterator = readState.iterator(); + final List pile = new ArrayList(readState.size()); + + while (iterator.hasNext()) { + // state object with the read/offset information + final AlignmentStateMachine state = iterator.next(); + final GATKSAMRecord read = state.getRead(); + final CigarOperator op = state.getCigarOperator(); + + if (op == CigarOperator.N) // N's are never added to any pileup + continue; + + if (!dontIncludeReadInPileup(read, location.getStart())) { + if ( ! includeReadsWithDeletionAtLoci && op == CigarOperator.D ) { + continue; + } + + pile.add(state.makePileupElement()); + } + } + + if (! 
pile.isEmpty() ) // if this pileup added at least one base, add it to the full pileup + fullPileup.put(sample, new ReadBackedPileupImpl(location, pile)); + } + + readStates.updateReadStates(); // critical - must be called after we get the current state offsets and location + if (!fullPileup.isEmpty()) // if we got reads with non-D/N over the current position, we are done + nextAlignmentContext = new AlignmentContext(location, new ReadBackedPileupImpl(location, fullPileup), false); + } + } + + // ----------------------------------------------------------------------------------------------------------------- + // + // getting the list of reads + // + // ----------------------------------------------------------------------------------------------------------------- + + /** + * Transfer current list of all unique reads that have ever been used in any pileup, clearing old list + * + * This list is guaranteed to only contain unique reads, even across calls to the this function. It is + * literally the unique set of reads ever seen. + * + * The list occurs in the same order as they are encountered in the underlying iterator. + * + * Takes the maintained list of submitted reads, and transfers it to the caller of this + * function. The old list of set to a new, cleanly allocated list so the caller officially + * owns the list returned by this call. This is the only way to clear the tracking + * of submitted reads, if enabled. + * + * The purpose of this function is allow users of LIBS to keep track of all of the reads pulled off the + * underlying GATKSAMRecord iterator and that appeared at any point in the list of SAMRecordAlignmentState for + * any reads. This function is intended to allow users to efficiently reconstruct the unique set of reads + * used across all pileups. This is necessary for LIBS to handle because attempting to do + * so from the pileups coming out of LIBS is extremely expensive. 
+ * + * This functionality is only available if LIBS was created with the argument to track the reads + * + * @throws UnsupportedOperationException if called when keepingSubmittedReads is false + * + * @return the current list + */ + @Ensures("result != null") + public List transferReadsFromAllPreviousPileups() { + return readStates.transferSubmittedReads(); + } + + /** + * Get the underlying list of tracked reads. For testing only + * @return a non-null list + */ + @Ensures("result != null") + protected List getReadsFromAllPreviousPileups() { + return readStates.getSubmittedReads(); + } + + // ----------------------------------------------------------------------------------------------------------------- + // + // utility functions + // + // ----------------------------------------------------------------------------------------------------------------- + + /** + * Should this read be excluded from the pileup? + * + * Generic place to put per-base filters appropriate to LocusIteratorByState + * + * @param rec the read to potentially exclude + * @param pos the genomic position of the current alignment + * @return true if the read should be excluded from the pileup, false otherwise + */ + @Requires({"rec != null", "pos > 0"}) + private boolean dontIncludeReadInPileup(GATKSAMRecord rec, long pos) { + return ReadUtils.isBaseInsideAdaptor(rec, pos); + } + + /** + * Create a LIBSDownsamplingInfo object from the requested info in ReadProperties + * + * LIBS will invoke the Reservoir and Leveling downsamplers on the read stream if we're + * downsampling to coverage by sample. SAMDataSource will have refrained from applying + * any downsamplers to the read stream in this case, in the expectation that LIBS will + * manage the downsampling. The reason for this is twofold: performance (don't have to + * split/re-assemble the read stream in SAMDataSource), and to enable partial downsampling + * of reads (eg., using half of a read, and throwing the rest away). 
+ * + * @param readInfo GATK engine information about what should be done to the reads + * @return a LIBS specific info holder about downsampling only + */ + @Requires("readInfo != null") + @Ensures("result != null") + private static LIBSDownsamplingInfo toDownsamplingInfo(final ReadProperties readInfo) { + final boolean performDownsampling = readInfo.getDownsamplingMethod() != null && + readInfo.getDownsamplingMethod().type == DownsampleType.BY_SAMPLE && + readInfo.getDownsamplingMethod().toCoverage != null; + final int coverage = performDownsampling ? readInfo.getDownsamplingMethod().toCoverage : 0; + + return new LIBSDownsamplingInfo(performDownsampling, coverage); + } + + /** + * Create a pileup element for read at offset + * + * offset must correspond to a valid read offset given the read's cigar, or an IllegalStateException will be throw + * + * @param read a read + * @param offset the offset into the bases we'd like to use in the pileup + * @return a valid PileupElement with read and at offset + */ + @Ensures("result != null") + public static PileupElement createPileupForReadAndOffset(final GATKSAMRecord read, final int offset) { + if ( read == null ) throw new IllegalArgumentException("read cannot be null"); + if ( offset < 0 || offset >= read.getReadLength() ) throw new IllegalArgumentException("Invalid offset " + offset + " outside of bounds 0 and " + read.getReadLength()); + + final AlignmentStateMachine stateMachine = new AlignmentStateMachine(read); + + while ( stateMachine.stepForwardOnGenome() != null ) { + if ( stateMachine.getReadOffset() == offset ) + return stateMachine.makePileupElement(); + } + + throw new IllegalStateException("Tried to create a pileup for read " + read + " with offset " + offset + + " but we never saw such an offset in the alignment state machine"); + } + + /** + * For testing only. Assumes that the incoming SAMRecords have no read groups, so creates a dummy sample list + * for the system. 
+ */ + public static List sampleListForSAMWithoutReadGroups() { + List samples = new ArrayList(); + samples.add(null); + return samples; + } +} \ No newline at end of file diff --git a/public/java/src/org/broadinstitute/sting/utils/locusiterator/PerSampleReadStateManager.java b/public/java/src/org/broadinstitute/sting/utils/locusiterator/PerSampleReadStateManager.java new file mode 100644 index 000000000..3f3bc706f --- /dev/null +++ b/public/java/src/org/broadinstitute/sting/utils/locusiterator/PerSampleReadStateManager.java @@ -0,0 +1,261 @@ +/* + * Copyright (c) 2012 The Broad Institute + * + * Permission is hereby granted, free of charge, to any person + * obtaining a copy of this software and associated documentation + * files (the "Software"), to deal in the Software without + * restriction, including without limitation the rights to use, + * copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following + * conditions: + * + * The above copyright notice and this permission notice shall be + * included in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES + * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT + * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, + * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR + * THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ */ + +package org.broadinstitute.sting.utils.locusiterator; + +import com.google.java.contract.Ensures; +import com.google.java.contract.Invariant; +import com.google.java.contract.Requires; +import net.sf.samtools.CigarOperator; +import org.apache.log4j.Logger; +import org.broadinstitute.sting.gatk.downsampling.Downsampler; +import org.broadinstitute.sting.gatk.downsampling.LevelingDownsampler; + +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; + +/** + * ReadStateManager for a single sample + * + * User: depristo + * Date: 1/13/13 + * Time: 12:28 PM + */ +@Invariant({ + "readStartsAreWellOrdered()", + "! isDownsampling() || downsamplingTarget > 0", + "nSites >= 0", + "nSitesNeedingDownsampling >= 0", + "nSitesNeedingDownsampling <= nSites" +}) +final class PerSampleReadStateManager implements Iterable { + private final static Logger logger = Logger.getLogger(ReadStateManager.class); + private final static boolean CAPTURE_DOWNSAMPLING_STATS = false; + + /** + * A list (potentially empty) of alignment state machines. + * + * The state machines must be ordered by the alignment start of their underlying reads, with the + * lowest alignment starts on the left, and the largest on the right + */ + private LinkedList readStatesByAlignmentStart = new LinkedList(); + + private final Downsampler> levelingDownsampler; + private final int downsamplingTarget; + + /** + * The number of sites where downsampling has been invoked + */ + private int nSitesNeedingDownsampling = 0; + + /** + * The number of sites we've visited + */ + private int nSites = 0; + + /** + * Create a new PerSampleReadStateManager with downsampling parameters as requested by LIBSDownsamplingInfo + * @param LIBSDownsamplingInfo the downsampling params we want to use + */ + public PerSampleReadStateManager(final LIBSDownsamplingInfo LIBSDownsamplingInfo) { + this.downsamplingTarget = LIBSDownsamplingInfo.isPerformDownsampling() ? 
LIBSDownsamplingInfo.getToCoverage() : -1; + this.levelingDownsampler = LIBSDownsamplingInfo.isPerformDownsampling() + ? new LevelingDownsampler, AlignmentStateMachine>(LIBSDownsamplingInfo.getToCoverage()) + : null; + } + + /** + * Group the underlying readStatesByAlignmentStart into a list of list of alignment state machines, + * where each list contains machines with a unique genome site. The outer list is ordered + * by alignment start. + * + * For example, if the flat list has alignment starts [10, 10, 11, 12, 12, 13] then + * the resulting grouping will be [[10, 10], [11], [12, 12], [13]]. + * + * @return a non-null list of lists + */ + @Ensures("result != null") + private List> groupByAlignmentStart() { + final LinkedList> grouped = new LinkedList>(); + + AlignmentStateMachine last = null; + for ( final AlignmentStateMachine stateMachine : readStatesByAlignmentStart ) { + if ( last == null || stateMachine.getGenomeOffset() != last.getGenomeOffset() ) { + // we've advanced to a place where the state machine has a different state, + // so start a new list + grouped.add(new LinkedList()); + last = stateMachine; + } + grouped.getLast().add(stateMachine); + } + + return grouped; + } + + /** + * Flattens the grouped list of list of alignment state machines into a single list in order + * @return a non-null list contains the state machines + */ + @Ensures("result != null") + private LinkedList flattenByAlignmentStart(final List> grouped) { + final LinkedList flat = new LinkedList(); + for ( final List l : grouped ) + flat.addAll(l); + return flat; + } + + /** + * Test that the reads are ordered by their alignment starts + * @return true if well ordered, false otherwise + */ + private boolean readStartsAreWellOrdered() { + int lastStart = -1; + for ( final AlignmentStateMachine machine : readStatesByAlignmentStart ) { + if ( lastStart > machine.getRead().getAlignmentStart() ) + return false; + lastStart = machine.getRead().getAlignmentStart(); + } + return true; + } 
+ + /** + * Assumes it can just keep the states linked lists without making a copy + * @param states the new states to add to this manager + * @return The change in the number of states, after including states and potentially downsampling. Note + * that this return result might be negative, if downsampling is enabled, as we might drop + * more sites than have been added by the downsampler + */ + @Requires("states != null") + public int addStatesAtNextAlignmentStart(final LinkedList states) { + if ( states.isEmpty() ) { + return 0; + } + + readStatesByAlignmentStart.addAll(states); + int nStatesAdded = states.size(); + + if ( isDownsampling() && readStatesByAlignmentStart.size() > downsamplingTarget ) { + // only go into the downsampling branch if we are downsampling and the coverage > the target + captureDownsamplingStats(); + levelingDownsampler.submit(groupByAlignmentStart()); + levelingDownsampler.signalEndOfInput(); + + nStatesAdded -= levelingDownsampler.getNumberOfDiscardedItems(); + + // use returned List directly rather than make a copy, for efficiency's sake + readStatesByAlignmentStart = flattenByAlignmentStart(levelingDownsampler.consumeFinalizedItems()); + levelingDownsampler.reset(); + } + + return nStatesAdded; + } + + /** + * Is downsampling enabled for this manager? + * @return true if we are downsampling, false otherwise + */ + private boolean isDownsampling() { + return levelingDownsampler != null; + } + + /** + * Get the leftmost alignment state machine, or null if the read states is empty + * @return a potentially null AlignmentStateMachine + */ + public AlignmentStateMachine getFirst() { + return isEmpty() ? 
null : readStatesByAlignmentStart.getFirst(); + } + + /** + * Capture some statistics about the behavior of the downsampling, but only if CAPTURE_DOWNSAMPLING_STATS is true + */ + @Requires("isDownsampling()") + private void captureDownsamplingStats() { + if ( CAPTURE_DOWNSAMPLING_STATS ) { + nSites++; + final int loc = getFirst().getGenomePosition(); + String message = "Pass through"; + final boolean downsampling = size() > downsamplingTarget; + if ( downsampling ) { + nSitesNeedingDownsampling++; + message = "Downsampling"; + } + + if ( downsampling || nSites % 10000 == 0 ) + logger.info(String.format("%20s at %s: coverage=%d, max=%d, fraction of downsampled sites=%.2e", + message, loc, size(), downsamplingTarget, (1.0 * nSitesNeedingDownsampling / nSites))); + } + } + + /** + * Is there at least one alignment for this sample in this manager? + * @return true if there's at least one alignment, false otherwise + */ + public boolean isEmpty() { + return readStatesByAlignmentStart.isEmpty(); + } + + /** + * Get the number of read states currently in this manager + * @return the number of read states + */ + @Ensures("result >= 0") + public int size() { + return readStatesByAlignmentStart.size(); + } + + /** + * Advances all read states forward by one element, removing states that are + * no long aligned to the current position. + * @return the number of states we're removed after advancing + */ + public int updateReadStates() { + int nRemoved = 0; + final Iterator it = iterator(); + while (it.hasNext()) { + final AlignmentStateMachine state = it.next(); + final CigarOperator op = state.stepForwardOnGenome(); + if (op == null) { + // we discard the read only when we are past its end AND indel at the end of the read (if any) was + // already processed. Keeping the read state that returned null upon stepForwardOnGenome() is safe + // as the next call to stepForwardOnGenome() will return null again AND will clear hadIndel() flag. 
+ it.remove(); // we've stepped off the end of the object + nRemoved++; + } + } + + return nRemoved; + } + + /** + * Iterate over the AlignmentStateMachine in this manager in alignment start order. + * @return a valid iterator + */ + @Ensures("result != null") + public Iterator iterator() { + return readStatesByAlignmentStart.iterator(); + } +} diff --git a/public/java/src/org/broadinstitute/sting/utils/locusiterator/ReadStateManager.java b/public/java/src/org/broadinstitute/sting/utils/locusiterator/ReadStateManager.java new file mode 100644 index 000000000..09ec3b264 --- /dev/null +++ b/public/java/src/org/broadinstitute/sting/utils/locusiterator/ReadStateManager.java @@ -0,0 +1,290 @@ +/* + * Copyright (c) 2012 The Broad Institute + * + * Permission is hereby granted, free of charge, to any person + * obtaining a copy of this software and associated documentation + * files (the "Software"), to deal in the Software without + * restriction, including without limitation the rights to use, + * copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following + * conditions: + * + * The above copyright notice and this permission notice shall be + * included in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES + * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT + * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, + * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR + * THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ */ + +package org.broadinstitute.sting.utils.locusiterator; + +import com.google.java.contract.Ensures; +import com.google.java.contract.Requires; +import net.sf.picard.util.PeekableIterator; +import org.apache.log4j.Logger; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; + +import java.util.*; + +/** + * Manages and updates mapping from sample -> List of SAMRecordAlignmentState + * + * Optionally can keep track of all of the reads pulled off the iterator and + * that appeared at any point in the list of SAMRecordAlignmentState for any reads. + * This functionaly is only possible at this stage, as this object does the popping of + * reads off the underlying source iterator, and presents only a pileup-like interface + * of samples -> SAMRecordAlignmentStates. Reconstructing the unique set of reads + * used across all pileups is extremely expensive from that data structure. + * + * User: depristo + * Date: 1/5/13 + * Time: 2:02 PM + */ +final class ReadStateManager implements Iterable> { + private final List samples; + private final PeekableIterator iterator; + private final SamplePartitioner samplePartitioner; + + /** + * A mapping from sample name -> the per sample read state manager that manages + * + * IT IS CRITICAL THAT THIS BE A LINKED HASH MAP, SO THAT THE ITERATION OF THE MAP OCCURS IN THE SAME + * ORDER AS THE ORIGINL SAMPLES + */ + private final Map readStatesBySample = new LinkedHashMap(); + + private LinkedList submittedReads; + private final boolean keepSubmittedReads; + + private int totalReadStates = 0; + + public ReadStateManager(final Iterator source, + final List samples, + final LIBSDownsamplingInfo LIBSDownsamplingInfo, + final boolean keepSubmittedReads) { + this.samples = samples; + this.iterator = new PeekableIterator(source); + + this.keepSubmittedReads = keepSubmittedReads; + this.submittedReads = new LinkedList(); + + for (final String sample : samples) { + // because this is a linked hash map the order of iteration will be in 
sample order + readStatesBySample.put(sample, new PerSampleReadStateManager(LIBSDownsamplingInfo)); + } + + samplePartitioner = new SamplePartitioner(LIBSDownsamplingInfo, samples); + } + + /** + * Returns a iterator over all the sample -> per-sample read state managers with each sample in this read state manager. + * + * The order of iteration is the same as the order of the samples provided upon construction to this + * ReadStateManager. + * + * @return Iterator over sample + per sample read state manager pairs for this read state manager. + */ + @Override + public Iterator> iterator() { + return readStatesBySample.entrySet().iterator(); + } + + public boolean isEmpty() { + return totalReadStates == 0; + } + + /** + * Retrieves the total number of reads in the manager across all samples. + * + * @return Total number of reads over all samples. + */ + public int size() { + return totalReadStates; + } + + /** + * Retrieves the total number of reads in the manager in the given sample. + * + * @param sample The sample. + * @return Total number of reads in the given sample. + */ + public int size(final String sample) { + return readStatesBySample.get(sample).size(); + } + + public AlignmentStateMachine getFirst() { + for ( final PerSampleReadStateManager manager : readStatesBySample.values() ) { + if ( ! manager.isEmpty() ) + return manager.getFirst(); + } + return null; + } + + public boolean hasNext() { + return totalReadStates > 0 || iterator.hasNext(); + } + + /** + * Advances all fo the read states by one bp. After this call the read states are reflective + * of the next pileup. + */ + public void updateReadStates() { + for (final PerSampleReadStateManager perSampleReadStateManager : readStatesBySample.values() ) { + totalReadStates -= perSampleReadStateManager.updateReadStates(); + } + } + + /** + * Does read start at the same position as described by currentContextIndex and currentAlignmentStart? 
+ * + * @param read the read we want to test + * @param currentContigIndex the contig index (from the read's getReferenceIndex) of the reads in this state manager + * @param currentAlignmentStart the alignment start of the of the left-most position on the + * genome of the reads in this read state manager + * @return true if read has contig index and start equal to the current ones + */ + private boolean readStartsAtCurrentPosition(final GATKSAMRecord read, final int currentContigIndex, final int currentAlignmentStart) { + return read.getAlignmentStart() == currentAlignmentStart && read.getReferenceIndex() == currentContigIndex; + } + + /** + * Pull all of the reads off the iterator that overlap the left-most position among all + * reads this ReadStateManager + */ + public void collectPendingReads() { + if (!iterator.hasNext()) + return; + + // determine the left-most boundary that determines which reads to keep in this new pileup + final int firstContigIndex; + final int firstAlignmentStart; + if ( isEmpty() ) { + // there are no reads here, so our next state is the next read in the stream + firstContigIndex = iterator.peek().getReferenceIndex(); + firstAlignmentStart = iterator.peek().getAlignmentStart(); + } else { + // there's a read in the system, so it's our targeted first read + final AlignmentStateMachine firstState = getFirst(); + firstContigIndex = firstState.getReferenceIndex(); + // note this isn't the alignment start of the read, but rather the alignment start position + firstAlignmentStart = firstState.getGenomePosition(); + } + + while ( iterator.hasNext() && readStartsAtCurrentPosition(iterator.peek(), firstContigIndex, firstAlignmentStart) ) { + submitRead(iterator.next()); + } + + samplePartitioner.doneSubmittingReads(); + + for (final String sample : samples) { + final Collection newReads = samplePartitioner.getReadsForSample(sample); + + // if we're keeping reads, take the (potentially downsampled) list of new reads for this sample + // and add 
to the list of reads. Note this may reorder the list of reads someone (it groups them + // by sample, but it cannot change their absolute position on the genome as they all must + // start at the current location + if ( keepSubmittedReads ) + submittedReads.addAll(newReads); + + final PerSampleReadStateManager statesBySample = readStatesBySample.get(sample); + addReadsToSample(statesBySample, newReads); + } + + samplePartitioner.reset(); + } + + /** + * Add a read to the sample partitioner, potentially adding it to all submitted reads, if appropriate + * @param read a non-null read + */ + @Requires("read != null") + protected void submitRead(final GATKSAMRecord read) { + samplePartitioner.submitRead(read); + } + + /** + * Transfer current list of submitted reads, clearing old list + * + * Takes the maintained list of submitted reads, and transfers it to the caller of this + * function. The old list of set to a new, cleanly allocated list so the caller officially + * owns the list returned by this call. This is the only way to clear the tracking + * of submitted reads, if enabled. + * + * How to use this function: + * + * while ( doing some work unit, such as creating pileup at some locus ): + * interact with ReadStateManager in some way to make work unit + * readsUsedInPileup = transferSubmittedReads) + * + * @throws UnsupportedOperationException if called when keepSubmittedReads is false + * + * @return the current list of submitted reads + */ + @Ensures({ + "result != null", + "result != submittedReads" // result and previous submitted reads are not == objects + }) + public List transferSubmittedReads() { + if ( ! keepSubmittedReads ) throw new UnsupportedOperationException("cannot transferSubmittedReads if you aren't keeping them"); + + final List prevSubmittedReads = submittedReads; + this.submittedReads = new LinkedList(); + + return prevSubmittedReads; + } + + /** + * Are we keeping submitted reads, or not? 
+ * @return true if we are keeping them, false otherwise + */ + public boolean isKeepingSubmittedReads() { + return keepSubmittedReads; + } + + /** + * Obtain a pointer to the list of submitted reads. + * + * This is not a copy of the list; it is shared with this ReadStateManager. It should + * not be modified. Updates to this ReadStateManager may change the contains of the + * list entirely. + * + * For testing purposes only. + * + * Will always be empty if we are are not keepSubmittedReads + * + * @return a non-null list of reads that have been submitted to this ReadStateManager + */ + @Ensures({"result != null","keepSubmittedReads || result.isEmpty()"}) + protected List getSubmittedReads() { + return submittedReads; + } + + /** + * Add reads with the given sample name to the given hanger entry. + * + * @param readStates The list of read states to add this collection of reads. + * @param reads Reads to add. Selected reads will be pulled from this source. + */ + private void addReadsToSample(final PerSampleReadStateManager readStates, final Collection reads) { + if (reads.isEmpty()) + return; + + final LinkedList newReadStates = new LinkedList(); + + for (final GATKSAMRecord read : reads) { + final AlignmentStateMachine state = new AlignmentStateMachine(read); + if ( state.stepForwardOnGenome() != null ) // todo -- should be an assertion not a skip + // explicitly filter out reads that are all insertions / soft clips + newReadStates.add(state); + } + + totalReadStates += readStates.addStatesAtNextAlignmentStart(newReadStates); + } +} diff --git a/public/java/src/org/broadinstitute/sting/utils/locusiterator/SamplePartitioner.java b/public/java/src/org/broadinstitute/sting/utils/locusiterator/SamplePartitioner.java new file mode 100644 index 000000000..9bb474e4d --- /dev/null +++ b/public/java/src/org/broadinstitute/sting/utils/locusiterator/SamplePartitioner.java @@ -0,0 +1,172 @@ +/* + * Copyright (c) 2012 The Broad Institute + * + * Permission is hereby granted, 
free of charge, to any person + * obtaining a copy of this software and associated documentation + * files (the "Software"), to deal in the Software without + * restriction, including without limitation the rights to use, + * copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following + * conditions: + * + * The above copyright notice and this permission notice shall be + * included in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES + * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT + * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, + * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR + * THE USE OR OTHER DEALINGS IN THE SOFTWARE. + */ + +package org.broadinstitute.sting.utils.locusiterator; + +import com.google.java.contract.Ensures; +import com.google.java.contract.Requires; +import net.sf.samtools.SAMRecord; +import org.broadinstitute.sting.gatk.downsampling.Downsampler; +import org.broadinstitute.sting.gatk.downsampling.PassThroughDownsampler; +import org.broadinstitute.sting.gatk.downsampling.ReservoirDownsampler; + +import java.util.*; + +/** + * Divides reads by sample and (if requested) does a preliminary downsampling pass + * with a ReservoirDownsampler. + * + * Note: stores reads by sample ID string, not by sample object + */ +class SamplePartitioner { + /** + * Map from sample name (as a string) to a downsampler of reads for that sample + */ + final private Map> readsBySample; + + /** + * Are we in a state where we're done submitting reads and have semi-finalized the + * underlying per sample downsampler? 
+ */ + boolean doneSubmittingReads = false; + + /** + * Create a new SamplePartitioner capable of splitting reads up into buckets of reads for + * each sample in samples, and perform a preliminary downsampling of these reads + * (separately for each sample) if downsampling is requested in LIBSDownsamplingInfo + * + * Note that samples must be comprehensive, in that all reads every submitted to this + * partitioner must come from one of the samples provided here. If not, submitRead + * will throw an exception. Duplicates in the list of samples will be ignored + * + * @param LIBSDownsamplingInfo do we want to downsample, and if so to what coverage? + * @param samples the complete list of samples we're going to partition reads into. Can be + * empty, but in that case this code cannot function properly if you + * attempt to add data to it. + */ + @Ensures({ + "readsBySample != null", + "readsBySample.size() == new HashSet(samples).size()" + }) + public SamplePartitioner(final LIBSDownsamplingInfo LIBSDownsamplingInfo, final List samples) { + if ( LIBSDownsamplingInfo == null ) throw new IllegalArgumentException("LIBSDownsamplingInfo cannot be null"); + if ( samples == null ) throw new IllegalArgumentException("samples must be a non-null list"); + + readsBySample = new LinkedHashMap>(samples.size()); + for ( final String sample : samples ) { + readsBySample.put(sample, createDownsampler(LIBSDownsamplingInfo)); + } + } + + /** + * Create a new, ready to use downsampler based on the parameters in LIBSDownsamplingInfo + * @param LIBSDownsamplingInfo the parameters to use in creating the downsampler + * @return a downsampler appropriate for LIBSDownsamplingInfo. If no downsampling is requested, + * uses the PassThroughDownsampler, which does nothing at all. 
+ */ + @Requires("LIBSDownsamplingInfo != null") + @Ensures("result != null") + private Downsampler createDownsampler(final LIBSDownsamplingInfo LIBSDownsamplingInfo) { + return LIBSDownsamplingInfo.isPerformDownsampling() + ? new ReservoirDownsampler(LIBSDownsamplingInfo.getToCoverage(), true) + : new PassThroughDownsampler(); + } + + /** + * Offer this read to the partitioner, putting it into the bucket of reads for the sample + * of read (obtained via the read's read group). + * + * If the read group is missing, uses the special "null" read group + * + * @throws IllegalStateException if the sample of read wasn't present in the original + * set of samples provided to this SamplePartitioner at construction + * + * @param read the read to add to the sample's list of reads + */ + @Requires("read != null") + @Ensures("doneSubmittingReads == false") + public void submitRead(final T read) { + final String sampleName = read.getReadGroup() != null ? read.getReadGroup().getSample() : null; + final Downsampler downsampler = readsBySample.get(sampleName); + if ( downsampler == null ) + throw new IllegalStateException("Offered read with sample name " + sampleName + " to SamplePartitioner " + + "but this sample wasn't provided as one of possible samples at construction"); + + downsampler.submit(read); + doneSubmittingReads = false; + } + + /** + * Tell this partitioner that all reads in this cycle have been submitted, so that we + * can finalize whatever downsampling is required by each sample. + * + * Note that we *must* call this function before getReadsForSample, or else that + * function will exception out. 
+ */ + @Ensures("doneSubmittingReads == true") + public void doneSubmittingReads() { + for ( final Downsampler downsampler : readsBySample.values() ) { + downsampler.signalEndOfInput(); + } + doneSubmittingReads = true; + } + + /** + * Get the final collection of reads for this sample for this cycle + * + * The cycle is defined as all of the reads that occur between + * the first call to submitRead until doneSubmittingReads is called. At that + * point additional downsampling may occur (depending on construction arguments) + * and that set of reads is returned here. + * + * Note that this function can only be called once per cycle, as underlying + * collection of reads is cleared. + * + * @param sampleName the sample we want reads for, must be present in the original samples + * @return a non-null collection of reads for sample in this cycle + */ + @Ensures("result != null") + public Collection getReadsForSample(final String sampleName) { + if ( ! doneSubmittingReads ) throw new IllegalStateException("getReadsForSample called before doneSubmittingReads was called"); + + final Downsampler downsampler = readsBySample.get(sampleName); + if ( downsampler == null ) throw new NoSuchElementException("Sample name not found"); + + return downsampler.consumeFinalizedItems(); + } + + /** + * Resets this SamplePartitioner, indicating that we're starting a new + * cycle of adding reads to each underlying downsampler. 
+ */ + @Ensures("doneSubmittingReads == false") + public void reset() { + for ( final Downsampler downsampler : readsBySample.values() ) { + downsampler.clear(); + downsampler.reset(); + } + doneSubmittingReads = false; + } +} diff --git a/public/java/src/org/broadinstitute/sting/utils/pileup/AbstractReadBackedPileup.java b/public/java/src/org/broadinstitute/sting/utils/pileup/AbstractReadBackedPileup.java deleted file mode 100644 index 3687732ec..000000000 --- a/public/java/src/org/broadinstitute/sting/utils/pileup/AbstractReadBackedPileup.java +++ /dev/null @@ -1,1065 +0,0 @@ -/* -* Copyright (c) 2012 The Broad Institute -* -* Permission is hereby granted, free of charge, to any person -* obtaining a copy of this software and associated documentation -* files (the "Software"), to deal in the Software without -* restriction, including without limitation the rights to use, -* copy, modify, merge, publish, distribute, sublicense, and/or sell -* copies of the Software, and to permit persons to whom the -* Software is furnished to do so, subject to the following -* conditions: -* -* The above copyright notice and this permission notice shall be -* included in all copies or substantial portions of the Software. -* -* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR -* THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-*/ - -package org.broadinstitute.sting.utils.pileup; - -import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; -import org.broadinstitute.variant.utils.BaseUtils; -import org.broadinstitute.sting.utils.GenomeLoc; -import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; -import org.broadinstitute.sting.utils.fragments.FragmentCollection; -import org.broadinstitute.sting.utils.fragments.FragmentUtils; -import org.broadinstitute.sting.utils.sam.GATKSAMRecord; - -import java.util.*; - -/** - * A generic implementation of read-backed pileups. - * - * @author mhanna - * @version 0.1 - */ -public abstract class AbstractReadBackedPileup, PE extends PileupElement> implements ReadBackedPileup { - protected final GenomeLoc loc; - protected final PileupElementTracker pileupElementTracker; - - protected int size = 0; // cached value of the size of the pileup - protected int abstractSize = -1; // cached value of the abstract size of the pileup - protected int nDeletions = 0; // cached value of the number of deletions - protected int nMQ0Reads = 0; // cached value of the number of MQ0 reads - - /** - * Create a new version of a read backed pileup at loc, using the reads and their corresponding - * offsets. This pileup will contain a list, in order of the reads, of the piled bases at - * reads[i] for all i in offsets. Does not make a copy of the data, so it's not safe to - * go changing the reads. 
- * - * @param loc The genome loc to associate reads wotj - * @param reads - * @param offsets - */ - public AbstractReadBackedPileup(GenomeLoc loc, List reads, List offsets) { - this.loc = loc; - this.pileupElementTracker = readsOffsets2Pileup(reads, offsets); - } - - - /** - * Create a new version of a read backed pileup at loc without any aligned reads - */ - public AbstractReadBackedPileup(GenomeLoc loc) { - this(loc, new UnifiedPileupElementTracker()); - } - - /** - * Create a new version of a read backed pileup at loc, using the reads and their corresponding - * offsets. This lower level constructure assumes pileup is well-formed and merely keeps a - * pointer to pileup. Don't go changing the data in pileup. - */ - public AbstractReadBackedPileup(GenomeLoc loc, List pileup) { - if (loc == null) throw new ReviewedStingException("Illegal null genomeloc in ReadBackedPileup"); - if (pileup == null) throw new ReviewedStingException("Illegal null pileup in ReadBackedPileup"); - - this.loc = loc; - this.pileupElementTracker = new UnifiedPileupElementTracker(pileup); - calculateCachedData(); - } - - /** - * Optimization of above constructor where all of the cached data is provided - * - * @param loc - * @param pileup - */ - public AbstractReadBackedPileup(GenomeLoc loc, List pileup, int size, int nDeletions, int nMQ0Reads) { - if (loc == null) throw new ReviewedStingException("Illegal null genomeloc in UnifiedReadBackedPileup"); - if (pileup == null) throw new ReviewedStingException("Illegal null pileup in UnifiedReadBackedPileup"); - - this.loc = loc; - this.pileupElementTracker = new UnifiedPileupElementTracker(pileup); - this.size = size; - this.nDeletions = nDeletions; - this.nMQ0Reads = nMQ0Reads; - } - - - protected AbstractReadBackedPileup(GenomeLoc loc, PileupElementTracker tracker) { - this.loc = loc; - this.pileupElementTracker = tracker; - calculateCachedData(); - } - - protected AbstractReadBackedPileup(GenomeLoc loc, Map> pileupsBySample) { - this.loc = 
loc; - PerSamplePileupElementTracker tracker = new PerSamplePileupElementTracker(); - for (Map.Entry> pileupEntry : pileupsBySample.entrySet()) { - tracker.addElements(pileupEntry.getKey(), pileupEntry.getValue().pileupElementTracker); - addPileupToCumulativeStats(pileupEntry.getValue()); - } - this.pileupElementTracker = tracker; - } - - public AbstractReadBackedPileup(GenomeLoc loc, List reads, int offset) { - this.loc = loc; - this.pileupElementTracker = readsOffsets2Pileup(reads, offset); - } - - /** - * Calculate cached sizes, nDeletion, and base counts for the pileup. This calculation is done upfront, - * so you pay the cost at the start, but it's more efficient to do this rather than pay the cost of calling - * sizes, nDeletion, etc. over and over potentially. - */ - protected void calculateCachedData() { - size = 0; - nDeletions = 0; - nMQ0Reads = 0; - - for (PileupElement p : pileupElementTracker) { - size++; - if (p.isDeletion()) { - nDeletions++; - } - if (p.getRead().getMappingQuality() == 0) { - nMQ0Reads++; - } - } - } - - protected void calculateAbstractSize() { - abstractSize = 0; - for (PileupElement p : pileupElementTracker) { - abstractSize += p.getRepresentativeCount(); - } - } - - protected void addPileupToCumulativeStats(AbstractReadBackedPileup pileup) { - size += pileup.getNumberOfElements(); - abstractSize = pileup.depthOfCoverage() + (abstractSize == -1 ? 0 : abstractSize); - nDeletions += pileup.getNumberOfDeletions(); - nMQ0Reads += pileup.getNumberOfMappingQualityZeroReads(); - } - - /** - * Helper routine for converting reads and offset lists to a PileupElement list. 
- * - * @param reads - * @param offsets - * @return - */ - private PileupElementTracker readsOffsets2Pileup(List reads, List offsets) { - if (reads == null) throw new ReviewedStingException("Illegal null read list in UnifiedReadBackedPileup"); - if (offsets == null) throw new ReviewedStingException("Illegal null offsets list in UnifiedReadBackedPileup"); - if (reads.size() != offsets.size()) - throw new ReviewedStingException("Reads and offset lists have different sizes!"); - - UnifiedPileupElementTracker pileup = new UnifiedPileupElementTracker(); - for (int i = 0; i < reads.size(); i++) { - GATKSAMRecord read = reads.get(i); - int offset = offsets.get(i); - pileup.add(createNewPileupElement(read, offset, false, false, false, false, false, false)); // only used to create fake pileups for testing so ancillary information is not important - } - - return pileup; - } - - /** - * Helper routine for converting reads and a single offset to a PileupElement list. - * - * @param reads - * @param offset - * @return - */ - private PileupElementTracker readsOffsets2Pileup(List reads, int offset) { - if (reads == null) throw new ReviewedStingException("Illegal null read list in UnifiedReadBackedPileup"); - if (offset < 0) throw new ReviewedStingException("Illegal offset < 0 UnifiedReadBackedPileup"); - - UnifiedPileupElementTracker pileup = new UnifiedPileupElementTracker(); - for (GATKSAMRecord read : reads) { - pileup.add(createNewPileupElement(read, offset, false, false, false, false, false, false)); // only used to create fake pileups for testing so ancillary information is not important - } - - return pileup; - } - - protected abstract AbstractReadBackedPileup createNewPileup(GenomeLoc loc, PileupElementTracker pileupElementTracker); - - protected abstract PE createNewPileupElement(final GATKSAMRecord read, final int offset, final boolean isDeletion, final boolean isBeforeDeletion, final boolean isAfterDeletion, final boolean isBeforeInsertion, final boolean 
isAfterInsertion, final boolean isNextToSoftClip); - protected abstract PE createNewPileupElement(final GATKSAMRecord read, final int offset, final boolean isDeletion, final boolean isBeforeDeletion, final boolean isAfterDeletion, final boolean isBeforeInsertion, final boolean isAfterInsertion, final boolean isNextToSoftClip, final String nextEventBases, final int nextEventLength ); - - // -------------------------------------------------------- - // - // Special 'constructors' - // - // -------------------------------------------------------- - - /** - * Returns a new ReadBackedPileup that is free of deletion spanning reads in this pileup. Note that this - * does not copy the data, so both ReadBackedPileups should not be changed. Doesn't make an unnecessary copy - * of the pileup (just returns this) if there are no deletions in the pileup. - * - * @return - */ - @Override - public RBP getPileupWithoutDeletions() { - if (getNumberOfDeletions() > 0) { - if (pileupElementTracker instanceof PerSamplePileupElementTracker) { - PerSamplePileupElementTracker tracker = (PerSamplePileupElementTracker) pileupElementTracker; - PerSamplePileupElementTracker filteredTracker = new PerSamplePileupElementTracker(); - - for (final String sample : tracker.getSamples()) { - PileupElementTracker perSampleElements = tracker.getElements(sample); - AbstractReadBackedPileup pileup = createNewPileup(loc, perSampleElements).getPileupWithoutDeletions(); - filteredTracker.addElements(sample, pileup.pileupElementTracker); - } - return (RBP) createNewPileup(loc, filteredTracker); - - } else { - UnifiedPileupElementTracker tracker = (UnifiedPileupElementTracker) pileupElementTracker; - UnifiedPileupElementTracker filteredTracker = new UnifiedPileupElementTracker(); - - for (PE p : tracker) { - if (!p.isDeletion()) { - filteredTracker.add(p); - } - } - return (RBP) createNewPileup(loc, filteredTracker); - } - } else { - return (RBP) this; - } - } - - /** - * Returns a new ReadBackedPileup where 
only one read from an overlapping read - * pair is retained. If the two reads in question disagree to their basecall, - * neither read is retained. If they agree on the base, the read with the higher - * base quality observation is retained - * - * @return the newly filtered pileup - */ - @Override - public ReadBackedPileup getOverlappingFragmentFilteredPileup() { - return getOverlappingFragmentFilteredPileup(true, true); - } - - /** - * Returns a new ReadBackedPileup where only one read from an overlapping read - * pair is retained. If discardDiscordant and the two reads in question disagree to their basecall, - * neither read is retained. Otherwise, the read with the higher - * quality (base or mapping, depending on baseQualNotMapQual) observation is retained - * - * @return the newly filtered pileup - */ - @Override - public RBP getOverlappingFragmentFilteredPileup(boolean discardDiscordant, boolean baseQualNotMapQual) { - if (pileupElementTracker instanceof PerSamplePileupElementTracker) { - PerSamplePileupElementTracker tracker = (PerSamplePileupElementTracker) pileupElementTracker; - PerSamplePileupElementTracker filteredTracker = new PerSamplePileupElementTracker(); - - for (final String sample : tracker.getSamples()) { - PileupElementTracker perSampleElements = tracker.getElements(sample); - AbstractReadBackedPileup pileup = createNewPileup(loc, perSampleElements).getOverlappingFragmentFilteredPileup(discardDiscordant, baseQualNotMapQual); - filteredTracker.addElements(sample, pileup.pileupElementTracker); - } - return (RBP) createNewPileup(loc, filteredTracker); - } else { - Map filteredPileup = new HashMap(); - - for (PE p : pileupElementTracker) { - String readName = p.getRead().getReadName(); - - // if we've never seen this read before, life is good - if (!filteredPileup.containsKey(readName)) { - filteredPileup.put(readName, p); - } else { - PileupElement existing = filteredPileup.get(readName); - - // if the reads disagree at this position, throw them 
both out. Otherwise - // keep the element with the higher quality score - if (discardDiscordant && existing.getBase() != p.getBase()) { - filteredPileup.remove(readName); - } else { - if (baseQualNotMapQual) { - if (existing.getQual() < p.getQual()) - filteredPileup.put(readName, p); - } - else { - if (existing.getMappingQual() < p.getMappingQual()) - filteredPileup.put(readName, p); - } - } - } - } - - UnifiedPileupElementTracker filteredTracker = new UnifiedPileupElementTracker(); - for (PE filteredElement : filteredPileup.values()) - filteredTracker.add(filteredElement); - - return (RBP) createNewPileup(loc, filteredTracker); - } - } - - - /** - * Returns a new ReadBackedPileup that is free of mapping quality zero reads in this pileup. Note that this - * does not copy the data, so both ReadBackedPileups should not be changed. Doesn't make an unnecessary copy - * of the pileup (just returns this) if there are no MQ0 reads in the pileup. - * - * @return - */ - @Override - public RBP getPileupWithoutMappingQualityZeroReads() { - if (getNumberOfMappingQualityZeroReads() > 0) { - if (pileupElementTracker instanceof PerSamplePileupElementTracker) { - PerSamplePileupElementTracker tracker = (PerSamplePileupElementTracker) pileupElementTracker; - PerSamplePileupElementTracker filteredTracker = new PerSamplePileupElementTracker(); - - for (final String sample : tracker.getSamples()) { - PileupElementTracker perSampleElements = tracker.getElements(sample); - AbstractReadBackedPileup pileup = createNewPileup(loc, perSampleElements).getPileupWithoutMappingQualityZeroReads(); - filteredTracker.addElements(sample, pileup.pileupElementTracker); - } - return (RBP) createNewPileup(loc, filteredTracker); - - } else { - UnifiedPileupElementTracker tracker = (UnifiedPileupElementTracker) pileupElementTracker; - UnifiedPileupElementTracker filteredTracker = new UnifiedPileupElementTracker(); - - for (PE p : tracker) { - if (p.getRead().getMappingQuality() > 0) { - 
filteredTracker.add(p); - } - } - return (RBP) createNewPileup(loc, filteredTracker); - } - } else { - return (RBP) this; - } - } - - public RBP getPositiveStrandPileup() { - if (pileupElementTracker instanceof PerSamplePileupElementTracker) { - PerSamplePileupElementTracker tracker = (PerSamplePileupElementTracker) pileupElementTracker; - PerSamplePileupElementTracker filteredTracker = new PerSamplePileupElementTracker(); - - for (final String sample : tracker.getSamples()) { - PileupElementTracker perSampleElements = tracker.getElements(sample); - AbstractReadBackedPileup pileup = createNewPileup(loc, perSampleElements).getPositiveStrandPileup(); - filteredTracker.addElements(sample, pileup.pileupElementTracker); - } - return (RBP) createNewPileup(loc, filteredTracker); - } else { - UnifiedPileupElementTracker tracker = (UnifiedPileupElementTracker) pileupElementTracker; - UnifiedPileupElementTracker filteredTracker = new UnifiedPileupElementTracker(); - - for (PE p : tracker) { - if (!p.getRead().getReadNegativeStrandFlag()) { - filteredTracker.add(p); - } - } - return (RBP) createNewPileup(loc, filteredTracker); - } - } - - /** - * Gets the pileup consisting of only reads on the negative strand. - * - * @return A read-backed pileup consisting only of reads on the negative strand. 
- */ - public RBP getNegativeStrandPileup() { - if (pileupElementTracker instanceof PerSamplePileupElementTracker) { - PerSamplePileupElementTracker tracker = (PerSamplePileupElementTracker) pileupElementTracker; - PerSamplePileupElementTracker filteredTracker = new PerSamplePileupElementTracker(); - - for (final String sample : tracker.getSamples()) { - PileupElementTracker perSampleElements = tracker.getElements(sample); - AbstractReadBackedPileup pileup = createNewPileup(loc, perSampleElements).getNegativeStrandPileup(); - filteredTracker.addElements(sample, pileup.pileupElementTracker); - } - return (RBP) createNewPileup(loc, filteredTracker); - } else { - UnifiedPileupElementTracker tracker = (UnifiedPileupElementTracker) pileupElementTracker; - UnifiedPileupElementTracker filteredTracker = new UnifiedPileupElementTracker(); - - for (PE p : tracker) { - if (p.getRead().getReadNegativeStrandFlag()) { - filteredTracker.add(p); - } - } - return (RBP) createNewPileup(loc, filteredTracker); - } - } - - /** - * Gets a pileup consisting of all those elements passed by a given filter. - * - * @param filter Filter to use when testing for elements. - * @return a pileup without the given filtered elements. 
- */ - public RBP getFilteredPileup(PileupElementFilter filter) { - if (pileupElementTracker instanceof PerSamplePileupElementTracker) { - PerSamplePileupElementTracker tracker = (PerSamplePileupElementTracker) pileupElementTracker; - PerSamplePileupElementTracker filteredTracker = new PerSamplePileupElementTracker(); - - for (final String sample : tracker.getSamples()) { - PileupElementTracker perSampleElements = tracker.getElements(sample); - AbstractReadBackedPileup pileup = createNewPileup(loc, perSampleElements).getFilteredPileup(filter); - filteredTracker.addElements(sample, pileup.pileupElementTracker); - } - - return (RBP) createNewPileup(loc, filteredTracker); - } else { - UnifiedPileupElementTracker filteredTracker = new UnifiedPileupElementTracker(); - - for (PE p : pileupElementTracker) { - if (filter.allow(p)) - filteredTracker.add(p); - } - - return (RBP) createNewPileup(loc, filteredTracker); - } - } - - /** - * Returns subset of this pileup that contains only bases with quality >= minBaseQ, coming from - * reads with mapping qualities >= minMapQ. This method allocates and returns a new instance of ReadBackedPileup. 
- * - * @param minBaseQ - * @param minMapQ - * @return - */ - @Override - public RBP getBaseAndMappingFilteredPileup(int minBaseQ, int minMapQ) { - if (pileupElementTracker instanceof PerSamplePileupElementTracker) { - PerSamplePileupElementTracker tracker = (PerSamplePileupElementTracker) pileupElementTracker; - PerSamplePileupElementTracker filteredTracker = new PerSamplePileupElementTracker(); - - for (final String sample : tracker.getSamples()) { - PileupElementTracker perSampleElements = tracker.getElements(sample); - AbstractReadBackedPileup pileup = createNewPileup(loc, perSampleElements).getBaseAndMappingFilteredPileup(minBaseQ, minMapQ); - filteredTracker.addElements(sample, pileup.pileupElementTracker); - } - - return (RBP) createNewPileup(loc, filteredTracker); - } else { - UnifiedPileupElementTracker filteredTracker = new UnifiedPileupElementTracker(); - - for (PE p : pileupElementTracker) { - if (p.getRead().getMappingQuality() >= minMapQ && (p.isDeletion() || p.getQual() >= minBaseQ)) { - filteredTracker.add(p); - } - } - - return (RBP) createNewPileup(loc, filteredTracker); - } - } - - /** - * Returns subset of this pileup that contains only bases with quality >= minBaseQ. - * This method allocates and returns a new instance of ReadBackedPileup. - * - * @param minBaseQ - * @return - */ - @Override - public RBP getBaseFilteredPileup(int minBaseQ) { - return getBaseAndMappingFilteredPileup(minBaseQ, -1); - } - - /** - * Returns subset of this pileup that contains only bases coming from reads with mapping quality >= minMapQ. - * This method allocates and returns a new instance of ReadBackedPileup. - * - * @param minMapQ - * @return - */ - @Override - public RBP getMappingFilteredPileup(int minMapQ) { - return getBaseAndMappingFilteredPileup(-1, minMapQ); - } - - /** - * Gets a list of the read groups represented in this pileup. 
- * - * @return - */ - @Override - public Collection getReadGroups() { - Set readGroups = new HashSet(); - for (PileupElement pileupElement : this) - readGroups.add(pileupElement.getRead().getReadGroup().getReadGroupId()); - return readGroups; - } - - /** - * Gets the pileup for a given read group. Horrendously inefficient at this point. - * - * @param targetReadGroupId Identifier for the read group. - * @return A read-backed pileup containing only the reads in the given read group. - */ - @Override - public RBP getPileupForReadGroup(String targetReadGroupId) { - if (pileupElementTracker instanceof PerSamplePileupElementTracker) { - PerSamplePileupElementTracker tracker = (PerSamplePileupElementTracker) pileupElementTracker; - PerSamplePileupElementTracker filteredTracker = new PerSamplePileupElementTracker(); - - for (final String sample : tracker.getSamples()) { - PileupElementTracker perSampleElements = tracker.getElements(sample); - AbstractReadBackedPileup pileup = createNewPileup(loc, perSampleElements).getPileupForReadGroup(targetReadGroupId); - if (pileup != null) - filteredTracker.addElements(sample, pileup.pileupElementTracker); - } - return filteredTracker.size() > 0 ? (RBP) createNewPileup(loc, filteredTracker) : null; - } else { - UnifiedPileupElementTracker filteredTracker = new UnifiedPileupElementTracker(); - for (PE p : pileupElementTracker) { - GATKSAMRecord read = p.getRead(); - if (targetReadGroupId != null) { - if (read.getReadGroup() != null && targetReadGroupId.equals(read.getReadGroup().getReadGroupId())) - filteredTracker.add(p); - } else { - if (read.getReadGroup() == null || read.getReadGroup().getReadGroupId() == null) - filteredTracker.add(p); - } - } - return filteredTracker.size() > 0 ? (RBP) createNewPileup(loc, filteredTracker) : null; - } - } - - /** - * Gets the pileup for a set of read groups. Horrendously inefficient at this point. - * - * @param rgSet List of identifiers for the read groups. 
- * @return A read-backed pileup containing only the reads in the given read groups. - */ - @Override - public RBP getPileupForReadGroups(final HashSet rgSet) { - if (pileupElementTracker instanceof PerSamplePileupElementTracker) { - PerSamplePileupElementTracker tracker = (PerSamplePileupElementTracker) pileupElementTracker; - PerSamplePileupElementTracker filteredTracker = new PerSamplePileupElementTracker(); - - for (final String sample : tracker.getSamples()) { - PileupElementTracker perSampleElements = tracker.getElements(sample); - AbstractReadBackedPileup pileup = createNewPileup(loc, perSampleElements).getPileupForReadGroups(rgSet); - if (pileup != null) - filteredTracker.addElements(sample, pileup.pileupElementTracker); - } - return filteredTracker.size() > 0 ? (RBP) createNewPileup(loc, filteredTracker) : null; - } else { - UnifiedPileupElementTracker filteredTracker = new UnifiedPileupElementTracker(); - for (PE p : pileupElementTracker) { - GATKSAMRecord read = p.getRead(); - if (rgSet != null && !rgSet.isEmpty()) { - if (read.getReadGroup() != null && rgSet.contains(read.getReadGroup().getReadGroupId())) - filteredTracker.add(p); - } else { - if (read.getReadGroup() == null || read.getReadGroup().getReadGroupId() == null) - filteredTracker.add(p); - } - } - return filteredTracker.size() > 0 ? 
(RBP) createNewPileup(loc, filteredTracker) : null; - } - } - - @Override - public RBP getPileupForLane(String laneID) { - if (pileupElementTracker instanceof PerSamplePileupElementTracker) { - PerSamplePileupElementTracker tracker = (PerSamplePileupElementTracker) pileupElementTracker; - PerSamplePileupElementTracker filteredTracker = new PerSamplePileupElementTracker(); - - for (final String sample : tracker.getSamples()) { - PileupElementTracker perSampleElements = tracker.getElements(sample); - AbstractReadBackedPileup pileup = createNewPileup(loc, perSampleElements).getPileupForLane(laneID); - if (pileup != null) - filteredTracker.addElements(sample, pileup.pileupElementTracker); - } - return filteredTracker.size() > 0 ? (RBP) createNewPileup(loc, filteredTracker) : null; - } else { - UnifiedPileupElementTracker filteredTracker = new UnifiedPileupElementTracker(); - for (PE p : pileupElementTracker) { - GATKSAMRecord read = p.getRead(); - if (laneID != null) { - if (read.getReadGroup() != null && - (read.getReadGroup().getReadGroupId().startsWith(laneID + ".")) || // lane is the same, but sample identifier is different - (read.getReadGroup().getReadGroupId().equals(laneID))) // in case there is no sample identifier, they have to be exactly the same - filteredTracker.add(p); - } else { - if (read.getReadGroup() == null || read.getReadGroup().getReadGroupId() == null) - filteredTracker.add(p); - } - } - return filteredTracker.size() > 0 ? (RBP) createNewPileup(loc, filteredTracker) : null; - } - } - - public Collection getSamples() { - if (pileupElementTracker instanceof PerSamplePileupElementTracker) { - PerSamplePileupElementTracker tracker = (PerSamplePileupElementTracker) pileupElementTracker; - return new HashSet(tracker.getSamples()); - } else { - Collection sampleNames = new HashSet(); - for (PileupElement p : this) { - GATKSAMRecord read = p.getRead(); - String sampleName = read.getReadGroup() != null ? 
read.getReadGroup().getSample() : null; - sampleNames.add(sampleName); - } - return sampleNames; - } - } - - /** - * Returns a pileup randomly downsampled to the desiredCoverage. - * - * TODO: delete this once the experimental downsampler stabilizes - * - * @param desiredCoverage - * @return - */ - @Override - public RBP getDownsampledPileup(int desiredCoverage) { - if (getNumberOfElements() <= desiredCoverage) - return (RBP) this; - - // randomly choose numbers corresponding to positions in the reads list - TreeSet positions = new TreeSet(); - for (int i = 0; i < desiredCoverage; /* no update */) { - if (positions.add(GenomeAnalysisEngine.getRandomGenerator().nextInt(size))) - i++; - } - - if (pileupElementTracker instanceof PerSamplePileupElementTracker) { - PerSamplePileupElementTracker tracker = (PerSamplePileupElementTracker) pileupElementTracker; - PerSamplePileupElementTracker filteredTracker = new PerSamplePileupElementTracker(); - - - for (final String sample : tracker.getSamples()) { - PileupElementTracker perSampleElements = tracker.getElements(sample); - - int current = 0; - UnifiedPileupElementTracker filteredPileup = new UnifiedPileupElementTracker(); - for (PE p : perSampleElements) { - if (positions.contains(current)) - filteredPileup.add(p); - current++; - - } - filteredTracker.addElements(sample, filteredPileup); - } - - return (RBP) createNewPileup(loc, filteredTracker); - } else { - UnifiedPileupElementTracker tracker = (UnifiedPileupElementTracker) pileupElementTracker; - UnifiedPileupElementTracker filteredTracker = new UnifiedPileupElementTracker(); - - Iterator positionIter = positions.iterator(); - - while (positionIter.hasNext()) { - int nextReadToKeep = (Integer) positionIter.next(); - filteredTracker.add(tracker.get(nextReadToKeep)); - } - - return (RBP) createNewPileup(getLocation(), filteredTracker); - } - } - - @Override - public RBP getPileupForSamples(Collection sampleNames) { - if (pileupElementTracker instanceof 
PerSamplePileupElementTracker) { - PerSamplePileupElementTracker tracker = (PerSamplePileupElementTracker) pileupElementTracker; - PileupElementTracker filteredElements = tracker.getElements(sampleNames); - return filteredElements != null ? (RBP) createNewPileup(loc, filteredElements) : null; - } else { - HashSet hashSampleNames = new HashSet(sampleNames); // to speed up the "contains" access in the for loop - UnifiedPileupElementTracker filteredTracker = new UnifiedPileupElementTracker(); - for (PE p : pileupElementTracker) { - GATKSAMRecord read = p.getRead(); - if (sampleNames != null) { // still checking on sampleNames because hashSampleNames will never be null. And empty means something else. - if (read.getReadGroup() != null && hashSampleNames.contains(read.getReadGroup().getSample())) - filteredTracker.add(p); - } else { - if (read.getReadGroup() == null || read.getReadGroup().getSample() == null) - filteredTracker.add(p); - } - } - return filteredTracker.size() > 0 ? (RBP) createNewPileup(loc, filteredTracker) : null; - } - } - - @Override - public Map getPileupsForSamples(Collection sampleNames) { - Map result = new HashMap(); - if (pileupElementTracker instanceof PerSamplePileupElementTracker) { - PerSamplePileupElementTracker tracker = (PerSamplePileupElementTracker) pileupElementTracker; - for (String sample : sampleNames) { - PileupElementTracker filteredElements = tracker.getElements(sample); - if (filteredElements != null) - result.put(sample, createNewPileup(loc, filteredElements)); - } - } else { - Map> trackerMap = new HashMap>(); - - for (String sample : sampleNames) { // initialize pileups for each sample - UnifiedPileupElementTracker filteredTracker = new UnifiedPileupElementTracker(); - trackerMap.put(sample, filteredTracker); - } - for (PE p : pileupElementTracker) { // go through all pileup elements only once and add them to the respective sample's pileup - GATKSAMRecord read = p.getRead(); - if (read.getReadGroup() != null) { - String 
sample = read.getReadGroup().getSample(); - UnifiedPileupElementTracker tracker = trackerMap.get(sample); - if (tracker != null) // we only add the pileup the requested samples. Completely ignore the rest - tracker.add(p); - } - } - for (Map.Entry> entry : trackerMap.entrySet()) // create the RBP for each sample - result.put(entry.getKey(), createNewPileup(loc, entry.getValue())); - } - return result; - } - - - @Override - public RBP getPileupForSample(String sampleName) { - if (pileupElementTracker instanceof PerSamplePileupElementTracker) { - PerSamplePileupElementTracker tracker = (PerSamplePileupElementTracker) pileupElementTracker; - PileupElementTracker filteredElements = tracker.getElements(sampleName); - return filteredElements != null ? (RBP) createNewPileup(loc, filteredElements) : null; - } else { - UnifiedPileupElementTracker filteredTracker = new UnifiedPileupElementTracker(); - for (PE p : pileupElementTracker) { - GATKSAMRecord read = p.getRead(); - if (sampleName != null) { - if (read.getReadGroup() != null && sampleName.equals(read.getReadGroup().getSample())) - filteredTracker.add(p); - } else { - if (read.getReadGroup() == null || read.getReadGroup().getSample() == null) - filteredTracker.add(p); - } - } - return filteredTracker.size() > 0 ? (RBP) createNewPileup(loc, filteredTracker) : null; - } - } - - // -------------------------------------------------------- - // - // iterators - // - // -------------------------------------------------------- - - /** - * The best way to access PileupElements where you only care about the bases and quals in the pileup. - *

- * for (PileupElement p : this) { doSomething(p); } - *

- * Provides efficient iteration of the data. - * - * @return - */ - @Override - public Iterator iterator() { - return new Iterator() { - private final Iterator wrappedIterator = pileupElementTracker.iterator(); - - public boolean hasNext() { - return wrappedIterator.hasNext(); - } - - public PileupElement next() { - return wrappedIterator.next(); - } - - public void remove() { - throw new UnsupportedOperationException("Cannot remove from a pileup element iterator"); - } - }; - } - - /** - * The best way to access PileupElements where you only care not only about bases and quals in the pileup - * but also need access to the index of the pileup element in the pile. - * - * for (ExtendedPileupElement p : this) { doSomething(p); } - * - * Provides efficient iteration of the data. - * - * @return - */ - - /** - * Simple useful routine to count the number of deletion bases in this pileup - * - * @return - */ - @Override - public int getNumberOfDeletions() { - return nDeletions; - } - - @Override - public int getNumberOfMappingQualityZeroReads() { - return nMQ0Reads; - } - - /** - * @return the number of physical elements in this pileup - */ - @Override - public int getNumberOfElements() { - return size; - } - - /** - * @return the number of abstract elements in this pileup - */ - @Override - public int depthOfCoverage() { - if (abstractSize == -1) - calculateAbstractSize(); - return abstractSize; - } - - /** - * @return true if there are 0 elements in the pileup, false otherwise - */ - @Override - public boolean isEmpty() { - return size == 0; - } - - - /** - * @return the location of this pileup - */ - @Override - public GenomeLoc getLocation() { - return loc; - } - - /** - * Get counts of A, C, G, T in order, which returns a int[4] vector with counts according - * to BaseUtils.simpleBaseToBaseIndex for each base. 
- * - * @return - */ - @Override - public int[] getBaseCounts() { - int[] counts = new int[4]; - - if (pileupElementTracker instanceof PerSamplePileupElementTracker) { - PerSamplePileupElementTracker tracker = (PerSamplePileupElementTracker) pileupElementTracker; - for (final String sample : tracker.getSamples()) { - int[] countsBySample = createNewPileup(loc, tracker.getElements(sample)).getBaseCounts(); - for (int i = 0; i < counts.length; i++) - counts[i] += countsBySample[i]; - } - } else { - for (PileupElement pile : this) { - // skip deletion sites - if (!pile.isDeletion()) { - int index = BaseUtils.simpleBaseToBaseIndex((char) pile.getBase()); - if (index != -1) - counts[index]++; - } - } - } - - return counts; - } - - @Override - public String getPileupString(Character ref) { - // In the pileup format, each line represents a genomic position, consisting of chromosome name, - // coordinate, reference base, read bases, read qualities and alignment mapping qualities. - return String.format("%s %s %c %s %s", - getLocation().getContig(), getLocation().getStart(), // chromosome name and coordinate - ref, // reference base - new String(getBases()), - getQualsString()); - } - - // -------------------------------------------------------- - // - // Convenience functions that may be slow - // - // -------------------------------------------------------- - - /** - * Returns a list of the reads in this pileup. 
Note this call costs O(n) and allocates fresh lists each time - * - * @return - */ - @Override - public List getReads() { - List reads = new ArrayList(getNumberOfElements()); - for (PileupElement pile : this) { - reads.add(pile.getRead()); - } - return reads; - } - - @Override - public int getNumberOfDeletionsAfterThisElement() { - int count = 0; - for (PileupElement p : this) { - if (p.isBeforeDeletionStart()) - count++; - } - return count; - } - - @Override - public int getNumberOfInsertionsAfterThisElement() { - int count = 0; - for (PileupElement p : this) { - if (p.isBeforeInsertion()) - count++; - } - return count; - - } - /** - * Returns a list of the offsets in this pileup. Note this call costs O(n) and allocates fresh lists each time - * - * @return - */ - @Override - public List getOffsets() { - List offsets = new ArrayList(getNumberOfElements()); - for (PileupElement pile : this) { - offsets.add(pile.getOffset()); - } - return offsets; - } - - /** - * Returns an array of the bases in this pileup. Note this call costs O(n) and allocates fresh array each time - * - * @return - */ - @Override - public byte[] getBases() { - byte[] v = new byte[getNumberOfElements()]; - int pos = 0; - for (PileupElement pile : pileupElementTracker) { - v[pos++] = pile.getBase(); - } - return v; - } - - /** - * Returns an array of the quals in this pileup. 
Note this call costs O(n) and allocates fresh array each time - * - * @return - */ - @Override - public byte[] getQuals() { - byte[] v = new byte[getNumberOfElements()]; - int pos = 0; - for (PileupElement pile : pileupElementTracker) { - v[pos++] = pile.getQual(); - } - return v; - } - - /** - * Get an array of the mapping qualities - * - * @return - */ - @Override - public byte[] getMappingQuals() { - byte[] v = new byte[getNumberOfElements()]; - int pos = 0; - for (PileupElement pile : pileupElementTracker) { - v[pos++] = (byte) pile.getRead().getMappingQuality(); - } - return v; - } - - static String quals2String(byte[] quals) { - StringBuilder qualStr = new StringBuilder(); - for (int qual : quals) { - qual = Math.min(qual, 63); // todo: fixme, this isn't a good idea - char qualChar = (char) (33 + qual); // todo: warning, this is illegal for qual > 63 - qualStr.append(qualChar); - } - - return qualStr.toString(); - } - - private String getQualsString() { - return quals2String(getQuals()); - } - - /** - * Returns a new ReadBackedPileup that is sorted by start coordinate of the reads. - * - * @return - */ - @Override - public RBP getStartSortedPileup() { - - final TreeSet sortedElements = new TreeSet(new Comparator() { - @Override - public int compare(PE element1, PE element2) { - final int difference = element1.getRead().getAlignmentStart() - element2.getRead().getAlignmentStart(); - return difference != 0 ? 
difference : element1.getRead().getReadName().compareTo(element2.getRead().getReadName()); - } - }); - - if (pileupElementTracker instanceof PerSamplePileupElementTracker) { - PerSamplePileupElementTracker tracker = (PerSamplePileupElementTracker) pileupElementTracker; - - for (final String sample : tracker.getSamples()) { - PileupElementTracker perSampleElements = tracker.getElements(sample); - for (PE pile : perSampleElements) - sortedElements.add(pile); - } - } - else { - UnifiedPileupElementTracker tracker = (UnifiedPileupElementTracker) pileupElementTracker; - for (PE pile : tracker) - sortedElements.add(pile); - } - - UnifiedPileupElementTracker sortedTracker = new UnifiedPileupElementTracker(); - for (PE pile : sortedElements) - sortedTracker.add(pile); - - return (RBP) createNewPileup(loc, sortedTracker); - } - - @Override - public FragmentCollection toFragments() { - return FragmentUtils.create(this); - } - - @Override - public ReadBackedPileup copy() { - return new ReadBackedPileupImpl(loc, (PileupElementTracker) pileupElementTracker.copy()); - } -} - - diff --git a/public/java/src/org/broadinstitute/sting/utils/pileup/PileupElement.java b/public/java/src/org/broadinstitute/sting/utils/pileup/PileupElement.java index 5fdd9fe62..5a5358208 100644 --- a/public/java/src/org/broadinstitute/sting/utils/pileup/PileupElement.java +++ b/public/java/src/org/broadinstitute/sting/utils/pileup/PileupElement.java @@ -27,12 +27,18 @@ package org.broadinstitute.sting.utils.pileup; import com.google.java.contract.Ensures; import com.google.java.contract.Requires; +import net.sf.samtools.CigarElement; +import net.sf.samtools.CigarOperator; import org.broadinstitute.variant.utils.BaseUtils; import org.broadinstitute.sting.utils.MathUtils; -import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; import org.broadinstitute.sting.utils.exceptions.UserException; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; +import java.util.Arrays; +import 
java.util.EnumSet; +import java.util.LinkedList; +import java.util.List; + /** * Created by IntelliJ IDEA. * User: depristo @@ -40,7 +46,12 @@ import org.broadinstitute.sting.utils.sam.GATKSAMRecord; * Time: 8:54:05 AM */ public class PileupElement implements Comparable { - public static final byte DELETION_BASE = BaseUtils.D; + private final static LinkedList EMPTY_LINKED_LIST = new LinkedList(); + + private final static EnumSet ON_GENOME_OPERATORS = + EnumSet.of(CigarOperator.M, CigarOperator.EQ, CigarOperator.X, CigarOperator.D); + + public static final byte DELETION_BASE = BaseUtils.Base.D.base; public static final byte DELETION_QUAL = (byte) 16; public static final byte A_FOLLOWED_BY_INSERTION_BASE = (byte) 87; public static final byte C_FOLLOWED_BY_INSERTION_BASE = (byte) 88; @@ -49,138 +60,213 @@ public class PileupElement implements Comparable { protected final GATKSAMRecord read; // the read this base belongs to protected final int offset; // the offset in the bases array for this base - protected final boolean isDeletion; // is this base a deletion - protected final boolean isBeforeDeletedBase; // is the base to the right of this base an deletion - protected final boolean isAfterDeletedBase; // is the base to the left of this base a deletion - protected final boolean isBeforeInsertion; // is the base to the right of this base an insertion - protected final boolean isAfterInsertion; // is the base to the left of this base an insertion - protected final boolean isNextToSoftClip; // is this base either before or after a soft clipped base - protected final int eventLength; // what is the length of the event (insertion or deletion) *after* this base - protected final String eventBases; // if it is a deletion, we do not have information about the actual deleted bases in the read itself, so we fill the string with D's; for insertions we keep actual inserted bases + + private final CigarElement currentCigarElement; + private final int currentCigarOffset; + 
private final int offsetInCurrentCigar; /** - * Creates a new pileup element. + * Create a new pileup element * - * @param read the read we are adding to the pileup - * @param offset the position in the read for this base. All deletions must be left aligned! (-1 is only allowed for reads starting with insertions) - * @param isDeletion whether or not this base is a deletion - * @param isBeforeDeletion whether or not this base is before a deletion - * @param isAfterDeletion whether or not this base is after a deletion - * @param isBeforeInsertion whether or not this base is before an insertion - * @param isAfterInsertion whether or not this base is after an insertion - * @param isNextToSoftClip whether or not this base is next to a soft clipped base - * @param nextEventBases bases in event in case element comes before insertion or deletion - * @param nextEventLength length of next event in case it's insertion or deletion + * @param read a non-null read to pileup + * @param baseOffset the offset into the read's base / qual vector aligned to this position on the genome. If the + * current cigar element is a deletion, offset should be the offset of the last M/=/X position. + * @param currentElement a non-null CigarElement that indicates the cigar element aligning the read to the genome + * @param currentCigarOffset the offset of currentElement in read.getCigar().getElement(currentCigarOffset) == currentElement) + * @param offsetInCurrentCigar how far into the currentElement are we in our alignment to the genome? 
*/ - @Requires({ - "read != null", - "offset >= -1", - "offset <= read.getReadLength()"}) - public PileupElement(final GATKSAMRecord read, final int offset, final boolean isDeletion, final boolean isBeforeDeletion, final boolean isAfterDeletion, final boolean isBeforeInsertion, final boolean isAfterInsertion, final boolean isNextToSoftClip, final String nextEventBases, final int nextEventLength) { - if (offset < 0 && isDeletion) - throw new ReviewedStingException("Pileup Element cannot create a deletion with a negative offset"); + public PileupElement(final GATKSAMRecord read, final int baseOffset, + final CigarElement currentElement, final int currentCigarOffset, + final int offsetInCurrentCigar) { + assert currentElement != null; this.read = read; - this.offset = offset; - this.isDeletion = isDeletion; - this.isBeforeDeletedBase = isBeforeDeletion; - this.isAfterDeletedBase = isAfterDeletion; - this.isBeforeInsertion = isBeforeInsertion; - this.isAfterInsertion = isAfterInsertion; - this.isNextToSoftClip = isNextToSoftClip; - if (isBeforeInsertion) - eventBases = nextEventBases; - else - eventBases = null; // ignore argument in any other case - if (isBeforeDeletion || isBeforeInsertion) - eventLength = nextEventLength; - else - eventLength = -1; + this.offset = baseOffset; + this.currentCigarElement = currentElement; + this.currentCigarOffset = currentCigarOffset; + this.offsetInCurrentCigar = offsetInCurrentCigar; + + // for performance regions these are assertions + assert this.read != null; + assert this.offset >= 0 && this.offset < this.read.getReadLength(); + assert this.currentCigarOffset >= 0; + assert this.currentCigarOffset < read.getCigarLength(); + assert this.offsetInCurrentCigar >= 0; + assert this.offsetInCurrentCigar < currentElement.getLength(); } - public PileupElement(final GATKSAMRecord read, final int offset, final boolean isDeletion, final boolean isBeforeDeletion, final boolean isAfterDeletion, final boolean isBeforeInsertion, final boolean 
isAfterInsertion, final boolean isNextToSoftClip) { - this(read, offset, isDeletion, isBeforeDeletion, isAfterDeletion, isBeforeInsertion, isAfterInsertion, isNextToSoftClip, null, -1); + /** + * Create a new PileupElement that's a copy of toCopy + * @param toCopy the element we want to copy + */ + public PileupElement(final PileupElement toCopy) { + this(toCopy.read, toCopy.offset, toCopy.currentCigarElement, toCopy.currentCigarOffset, toCopy.offsetInCurrentCigar); } + + /** + * Is this element a deletion w.r.t. the reference genome? + * + * @return true if this is a deletion, false otherwise + */ public boolean isDeletion() { - return isDeletion; - } - - public boolean isBeforeDeletedBase() { - return isBeforeDeletedBase; - } - - public boolean isAfterDeletedBase() { - return isAfterDeletedBase; + return currentCigarElement.getOperator() == CigarOperator.D; } + /** + * Is the current element immediately before a deletion, but itself not a deletion? + * + * Suppose we are aligning a read with cigar 3M2D1M. This function is true + * if we are in the last cigar position of the 3M, but not if we are in the 2D itself. + * + * @return true if the next alignment position is a deletion w.r.t. the reference genome + */ public boolean isBeforeDeletionStart() { - return isBeforeDeletedBase && !isDeletion; + return ! isDeletion() && atEndOfCurrentCigar() && hasOperator(getNextOnGenomeCigarElement(), CigarOperator.D); } + /** + * Is the current element immediately after a deletion, but itself not a deletion? + * + * Suppose we are aligning a read with cigar 1M2D3M. This function is true + * if we are in the first cigar position of the 3M, but not if we are in the 2D itself or + * in any but the first position of the 3M. + * + * @return true if the previous alignment position is a deletion w.r.t. 
the reference genome + */ public boolean isAfterDeletionEnd() { - return isAfterDeletedBase && !isDeletion; - } - - public boolean isBeforeInsertion() { - return isBeforeInsertion; - } - - public boolean isAfterInsertion() { - return isAfterInsertion; - } - - public boolean isNextToSoftClip() { - return isNextToSoftClip; - } - - public boolean isInsertionAtBeginningOfRead() { - return offset == -1; + return ! isDeletion() && atStartOfCurrentCigar() && hasOperator(getPreviousOnGenomeCigarElement(), CigarOperator.D); } + /** + * Get the read for this pileup element + * @return a non-null GATKSAMRecord + */ @Ensures("result != null") public GATKSAMRecord getRead() { return read; } - @Ensures("result == offset") + /** + * Get the offset of the this element into the read that aligns that read's base to this genomic position. + * + * If the current element is a deletion then offset is the offset of the last base containing offset. + * + * @return a valid offset into the read's bases + */ + @Ensures({"result >= 0", "result <= read.getReadLength()"}) public int getOffset() { return offset; } - public byte getBase() { - return getBase(offset); - } - - public int getBaseIndex() { - return getBaseIndex(offset); - } - - public byte getQual() { - return getQual(offset); - } - - public byte getBaseInsertionQual() { - return getBaseInsertionQual(offset); - } - - public byte getBaseDeletionQual() { - return getBaseDeletionQual(offset); - } - /** - * @return length of the event (number of inserted or deleted bases + * Get the base aligned to the genome at this location + * + * If the current element is a deletion returns DELETION_BASE + * + * @return a base encoded as a byte */ - public int getEventLength() { - return eventLength; + @Ensures("result != DELETION_BASE || (isDeletion() && result == DELETION_BASE)") + public byte getBase() { + return isDeletion() ? 
DELETION_BASE : read.getReadBases()[offset]; + } + + @Deprecated + public int getBaseIndex() { + return BaseUtils.simpleBaseToBaseIndex(getBase()); } /** + * Get the base quality score of the base at this aligned position on the genome + * @return a phred-scaled quality score as a byte + */ + public byte getQual() { + return isDeletion() ? DELETION_QUAL : read.getBaseQualities()[offset]; + } + + /** + * Get the Base Insertion quality at this pileup position + * @return a phred-scaled quality score as a byte + */ + public byte getBaseInsertionQual() { + return isDeletion() ? DELETION_QUAL : read.getBaseInsertionQualities()[offset]; + } + + /** + * Get the Base Deletion quality at this pileup position + * @return a phred-scaled quality score as a byte + */ + public byte getBaseDeletionQual() { + return isDeletion() ? DELETION_QUAL : read.getBaseDeletionQualities()[offset]; + } + + /** + * Get the length of an immediately following insertion or deletion event, or 0 if no such event exists + * + * Only returns a positive value when this pileup element is immediately before an indel. Being + * immediately before a deletion means that this pileup element isn't an deletion, and that the + * next genomic alignment for this read is a deletion. For the insertion case, this means + * that an insertion cigar occurs immediately after this element, between this one and the + * next genomic position. + * + * Note this function may be expensive, so multiple uses should be cached by the caller + * + * @return length of the event (number of inserted or deleted bases), or 0 + */ + @Ensures("result >= 0") + public int getLengthOfImmediatelyFollowingIndel() { + final CigarElement element = getNextIndelCigarElement(); + return element == null ? 
0 : element.getLength(); + } + + /** + * Helpful function to get the immediately following cigar element, for an insertion or deletion + * + * if this state precedes a deletion (i.e., next position on genome) or insertion (immediately between + * this and the next position) returns the CigarElement corresponding to this event. Otherwise returns + * null. + * + * @return a CigarElement, or null if the next alignment state ins't an insertion or deletion. + */ + private CigarElement getNextIndelCigarElement() { + if ( isBeforeDeletionStart() ) { + final CigarElement element = getNextOnGenomeCigarElement(); + if ( element == null || element.getOperator() != CigarOperator.D ) + throw new IllegalStateException("Immediately before deletion but the next cigar element isn't a deletion " + element); + return element; + } else if ( isBeforeInsertion() ) { + final CigarElement element = getBetweenNextPosition().get(0); + if ( element.getOperator() != CigarOperator.I ) + throw new IllegalStateException("Immediately before insertion but the next cigar element isn't an insertion " + element); + return element; + } else { + return null; + } + } + + /** + * Get the bases for an insertion that immediately follows this alignment state, or null if none exists + * + * @see #getLengthOfImmediatelyFollowingIndel() for details on the meaning of immediately. + * + * If the immediately following state isn't an insertion, returns null + * * @return actual sequence of inserted bases, or a null if the event is a deletion or if there is no event in the associated read. 
*/ - public String getEventBases() { - return eventBases; + @Ensures("result == null || result.length() == getLengthOfImmediatelyFollowingIndel()") + public String getBasesOfImmediatelyFollowingInsertion() { + final CigarElement element = getNextIndelCigarElement(); + if ( element != null && element.getOperator() == CigarOperator.I ) { + final int getFrom = offset + 1; + final byte[] bases = Arrays.copyOfRange(read.getReadBases(), getFrom, getFrom + element.getLength()); + return new String(bases); + } else + return null; } + /** + * Get the mapping quality of the read of this element + * @return the mapping quality of the underlying SAM record + */ public int getMappingQual() { return read.getMappingQuality(); } @@ -190,26 +276,6 @@ public class PileupElement implements Comparable { return String.format("%s @ %d = %c Q%d", getRead().getReadName(), getOffset(), (char) getBase(), getQual()); } - protected byte getBase(final int offset) { - return (isDeletion() || isInsertionAtBeginningOfRead()) ? DELETION_BASE : read.getReadBases()[offset]; - } - - protected int getBaseIndex(final int offset) { - return BaseUtils.simpleBaseToBaseIndex((isDeletion() || isInsertionAtBeginningOfRead()) ? DELETION_BASE : read.getReadBases()[offset]); - } - - protected byte getQual(final int offset) { - return (isDeletion() || isInsertionAtBeginningOfRead()) ? DELETION_QUAL : read.getBaseQualities()[offset]; - } - - protected byte getBaseInsertionQual(final int offset) { - return (isDeletion() || isInsertionAtBeginningOfRead()) ? DELETION_QUAL : read.getBaseInsertionQualities()[offset]; - } - - protected byte getBaseDeletionQual(final int offset) { - return (isDeletion() || isInsertionAtBeginningOfRead()) ? 
DELETION_QUAL : read.getBaseDeletionQualities()[offset]; - } - @Override public int compareTo(final PileupElement pileupElement) { if (offset < pileupElement.offset) @@ -240,15 +306,258 @@ public class PileupElement implements Comparable { * @return */ public int getRepresentativeCount() { - int representativeCount = 1; - - if (read.isReducedRead() && !isInsertionAtBeginningOfRead()) { + if (read.isReducedRead()) { if (isDeletion() && (offset + 1 >= read.getReadLength()) ) // deletion in the end of the read throw new UserException.MalformedBAM(read, String.format("Adjacent I/D events in read %s -- cigar: %s", read.getReadName(), read.getCigarString())); - representativeCount = (isDeletion()) ? MathUtils.fastRound((read.getReducedCount(offset) + read.getReducedCount(offset + 1)) / 2.0) : read.getReducedCount(offset); + return isDeletion() + ? MathUtils.fastRound((read.getReducedCount(offset) + read.getReducedCount(offset + 1)) / 2.0) + : read.getReducedCount(offset); + } else { + return 1; } - return representativeCount; } + /** + * Get the cigar element aligning this element to the genome + * @return a non-null CigarElement + */ + @Ensures("result != null") + public CigarElement getCurrentCigarElement() { + return currentCigarElement; + } + + /** + * Get the offset of this cigar element in the Cigar of the current read (0-based) + * + * Suppose the cigar is 1M2D3I4D. If we are in the 1M state this function returns + * 0. If we are in 2D, the result is 1. If we are in the 4D, the result is 3. + * + * @return an offset into the read.getCigar() that brings us to the current cigar element + */ + public int getCurrentCigarOffset() { + return currentCigarOffset; + } + + /** + * Get the offset into the *current* cigar element for this alignment position + * + * We can be anywhere from offset 0 (first position) to length - 1 of the current + * cigar element aligning us to this genomic position. 
+ * + * @return a valid offset into the current cigar element + */ + @Ensures({"result >= 0", "result < getCurrentCigarElement().getLength()"}) + public int getOffsetInCurrentCigar() { + return offsetInCurrentCigar; + } + + /** + * Get the cigar elements that occur before the current position but after the previous position on the genome + * + * For example, if we are in the 3M state of 1M2I3M state then 2I occurs before this position. + * + * Note that this function does not care where we are in the current cigar element. In the previous + * example this list of elements contains the 2I state regardless of where you are in the 3M. + * + * Note this returns the list of all elements that occur between this and the prev site, so for + * example we might have 5S10I2M and this function would return [5S, 10I]. + * + * @return a non-null list of CigarElements + */ + @Ensures("result != null") + public LinkedList getBetweenPrevPosition() { + return atStartOfCurrentCigar() ? getBetween(Direction.PREV) : EMPTY_LINKED_LIST; + } + + /** + * Get the cigar elements that occur after the current position but before the next position on the genome + * + * @see #getBetweenPrevPosition() for more details + * + * @return a non-null list of CigarElements + */ + @Ensures("result != null") + public LinkedList getBetweenNextPosition() { + return atEndOfCurrentCigar() ? getBetween(Direction.NEXT) : EMPTY_LINKED_LIST; + } + + /** for some helper functions */ + private enum Direction { PREV, NEXT } + + /** + * Helper function to get cigar elements between this and either the prev or next genomic position + * + * @param direction PREVIOUS if we want before, NEXT if we want after + * @return a non-null list of cigar elements between this and the neighboring position in direction + */ + @Ensures("result != null") + private LinkedList getBetween(final Direction direction) { + final int increment = direction == Direction.NEXT ? 
1 : -1; + LinkedList elements = null; + final int nCigarElements = read.getCigarLength(); + for ( int i = currentCigarOffset + increment; i >= 0 && i < nCigarElements; i += increment) { + final CigarElement elt = read.getCigar().getCigarElement(i); + if ( ON_GENOME_OPERATORS.contains(elt.getOperator()) ) + break; + else { + // optimization: don't allocate list if not necessary + if ( elements == null ) + elements = new LinkedList(); + + if ( increment > 0 ) + // to keep the list in the right order, if we are incrementing positively add to the end + elements.add(elt); + else + // counting down => add to front + elements.addFirst(elt); + } + } + + // optimization: elements is null because nothing got added, just return the empty list + return elements == null ? EMPTY_LINKED_LIST : elements; + } + + /** + * Get the cigar element of the previous genomic aligned position + * + * For example, we might have 1M2I3M, and be sitting at the someone in the 3M. This + * function would return 1M, as the 2I isn't on the genome. Note this function skips + * all of the positions that would occur in the current element. So the result + * is always 1M regardless of whether we're in the first, second, or third position of the 3M + * cigar. 
+ * + * @return a CigarElement, or null (indicating that no previous element exists) + */ + @Ensures("result == null || ON_GENOME_OPERATORS.contains(result.getOperator())") + public CigarElement getPreviousOnGenomeCigarElement() { + return getNeighboringOnGenomeCigarElement(Direction.PREV); + } + + /** + * Get the cigar element of the next genomic aligned position + * + * @see #getPreviousOnGenomeCigarElement() for more details + * + * @return a CigarElement, or null (indicating that no next element exists) + */ + @Ensures("result == null || ON_GENOME_OPERATORS.contains(result.getOperator())") + public CigarElement getNextOnGenomeCigarElement() { + return getNeighboringOnGenomeCigarElement(Direction.NEXT); + } + + /** + * Helper function to get the cigar element of the next or previous genomic position + * @param direction the direction to look in + * @return a CigarElement, or null if no such element exists + */ + @Ensures("result == null || ON_GENOME_OPERATORS.contains(result.getOperator())") + private CigarElement getNeighboringOnGenomeCigarElement(final Direction direction) { + final int increment = direction == Direction.NEXT ? 1 : -1; + final int nCigarElements = read.getCigarLength(); + + for ( int i = currentCigarOffset + increment; i >= 0 && i < nCigarElements; i += increment) { + final CigarElement elt = read.getCigar().getCigarElement(i); + if ( ON_GENOME_OPERATORS.contains(elt.getOperator()) ) + return elt; + } + + // getting here means that you didn't find anything + return null; + } + + /** + * Does the cigar element (which may be null) have operation toMatch? 
+ * + * @param maybeCigarElement a CigarElement that might be null + * @param toMatch a CigarOperator we want to match against the one in maybeCigarElement + * @return true if maybeCigarElement isn't null and has operator toMatch + */ + @Requires("toMatch != null") + private boolean hasOperator(final CigarElement maybeCigarElement, final CigarOperator toMatch) { + return maybeCigarElement != null && maybeCigarElement.getOperator() == toMatch; + } + + /** + * Does an insertion occur immediately before the current position on the genome? + * + * @return true if yes, false if no + */ + public boolean isAfterInsertion() { return isAfter(getBetweenPrevPosition(), CigarOperator.I); } + + /** + * Does an insertion occur immediately after the current position on the genome? + * + * @return true if yes, false if no + */ + public boolean isBeforeInsertion() { return isBefore(getBetweenNextPosition(), CigarOperator.I); } + + /** + * Does a soft-clipping event occur immediately before the current position on the genome? + * + * @return true if yes, false if no + */ + public boolean isAfterSoftClip() { return isAfter(getBetweenPrevPosition(), CigarOperator.S); } + + /** + * Does a soft-clipping event occur immediately after the current position on the genome? + * + * @return true if yes, false if no + */ + public boolean isBeforeSoftClip() { return isBefore(getBetweenNextPosition(), CigarOperator.S); } + + /** + * Does a soft-clipping event occur immediately before or after the current position on the genome? + * + * @return true if yes, false if no + */ + public boolean isNextToSoftClip() { return isAfterSoftClip() || isBeforeSoftClip(); } + + /** + * Is the current position at the end of the current cigar? + * + * For example, if we are in element 3M, this function returns true if we are at offsetInCurrentCigar + * of 2, but not 0 or 1. 
+ * + * @return true if we're at the end of the current cigar + */ + public boolean atEndOfCurrentCigar() { + return offsetInCurrentCigar == currentCigarElement.getLength() - 1; + } + + /** + * Is the current position at the start of the current cigar? + * + * For example, if we are in element 3M, this function returns true if we are at offsetInCurrentCigar + * of 0, but not 1 or 2. + * + * @return true if we're at the start of the current cigar + */ + public boolean atStartOfCurrentCigar() { + return offsetInCurrentCigar == 0; + } + + /** + * Is op the last element in the list of elements? + * + * @param elements the elements to examine + * @param op the op we want the last element's op to equal + * @return true if op == last(elements).op + */ + @Requires({"elements != null", "op != null"}) + private boolean isAfter(final LinkedList elements, final CigarOperator op) { + return ! elements.isEmpty() && elements.peekLast().getOperator() == op; + } + + /** + * Is op the first element in the list of elements? + * + * @param elements the elements to examine + * @param op the op we want the last element's op to equal + * @return true if op == first(elements).op + */ + @Requires({"elements != null", "op != null"}) + private boolean isBefore(final List elements, final CigarOperator op) { + return ! 
elements.isEmpty() && elements.get(0).getOperator() == op; + } } \ No newline at end of file diff --git a/public/java/src/org/broadinstitute/sting/utils/pileup/PileupElementTracker.java b/public/java/src/org/broadinstitute/sting/utils/pileup/PileupElementTracker.java index 0a0d4ab9c..288b033cb 100644 --- a/public/java/src/org/broadinstitute/sting/utils/pileup/PileupElementTracker.java +++ b/public/java/src/org/broadinstitute/sting/utils/pileup/PileupElementTracker.java @@ -25,6 +25,8 @@ package org.broadinstitute.sting.utils.pileup; +import org.apache.commons.collections.iterators.IteratorChain; + import java.util.*; /** @@ -35,6 +37,20 @@ import java.util.*; */ abstract class PileupElementTracker implements Iterable { public abstract int size(); + + /** + * Iterate through the PEs here, but in any order, which may improve performance + * if you don't care about the underlying order the reads are coming to you in. + * @return an iteratable over all pileup elements in this tracker + */ + public abstract Iterable unorderedIterable(); + + /** + * Same as @see #unorderedIterable but the actual iterator itself + * @return + */ + public Iterator unorderedIterator() { return unorderedIterable().iterator(); } + public abstract PileupElementTracker copy(); } @@ -65,6 +81,7 @@ class UnifiedPileupElementTracker extends PileupElemen } public Iterator iterator() { return pileup.iterator(); } + public Iterable unorderedIterable() { return this; } } class PerSamplePileupElementTracker extends PileupElementTracker { @@ -113,4 +130,25 @@ class PerSamplePileupElementTracker extends PileupElem public int size() { return size; } + + + public Iterable unorderedIterable() { + return new Iterable() { + @Override + public Iterator iterator() { + return new Iterator() { + final private IteratorChain chain = new IteratorChain(); + + { // initialize the chain with the unordered iterators of the per sample pileups + for ( PileupElementTracker pet : pileup.values() ) { + 
chain.addIterator(pet.unorderedIterator()); + } + } + @Override public boolean hasNext() { return chain.hasNext(); } + @Override public PE next() { return (PE)chain.next(); } + @Override public void remove() { throw new UnsupportedOperationException("Cannot remove"); } + }; + } + }; + } } \ No newline at end of file diff --git a/public/java/src/org/broadinstitute/sting/utils/pileup/ReadBackedPileupImpl.java b/public/java/src/org/broadinstitute/sting/utils/pileup/ReadBackedPileupImpl.java index b34f61f31..fe43f85bd 100644 --- a/public/java/src/org/broadinstitute/sting/utils/pileup/ReadBackedPileupImpl.java +++ b/public/java/src/org/broadinstitute/sting/utils/pileup/ReadBackedPileupImpl.java @@ -25,32 +25,64 @@ package org.broadinstitute.sting.utils.pileup; +import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; import org.broadinstitute.sting.utils.GenomeLoc; +import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; +import org.broadinstitute.sting.utils.fragments.FragmentCollection; +import org.broadinstitute.sting.utils.fragments.FragmentUtils; +import org.broadinstitute.sting.utils.locusiterator.LocusIteratorByState; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; +import org.broadinstitute.variant.utils.BaseUtils; -import java.util.List; -import java.util.Map; +import java.util.*; -public class ReadBackedPileupImpl extends AbstractReadBackedPileup implements ReadBackedPileup { +public class ReadBackedPileupImpl implements ReadBackedPileup { + protected final GenomeLoc loc; + protected final PileupElementTracker pileupElementTracker; - public ReadBackedPileupImpl(GenomeLoc loc) { - super(loc); - } + private final static int UNINITIALIZED_CACHED_INT_VALUE = -1; + /** + * Different then number of elements due to reduced reads + */ + private int depthOfCoverage = UNINITIALIZED_CACHED_INT_VALUE; + private int nDeletions = UNINITIALIZED_CACHED_INT_VALUE; // cached value of the number of deletions + private int nMQ0Reads = 
UNINITIALIZED_CACHED_INT_VALUE; // cached value of the number of MQ0 reads + + /** + * Create a new version of a read backed pileup at loc, using the reads and their corresponding + * offsets. This pileup will contain a list, in order of the reads, of the piled bases at + * reads[i] for all i in offsets. Does not make a copy of the data, so it's not safe to + * go changing the reads. + * + * @param loc The genome loc to associate reads wotj + * @param reads + * @param offsets + */ public ReadBackedPileupImpl(GenomeLoc loc, List reads, List offsets) { - super(loc, reads, offsets); + this.loc = loc; + this.pileupElementTracker = readsOffsets2Pileup(reads, offsets); } - public ReadBackedPileupImpl(GenomeLoc loc, List reads, int offset) { - super(loc, reads, offset); + + /** + * Create a new version of a read backed pileup at loc without any aligned reads + */ + public ReadBackedPileupImpl(GenomeLoc loc) { + this(loc, new UnifiedPileupElementTracker()); } - public ReadBackedPileupImpl(GenomeLoc loc, List pileupElements) { - super(loc, pileupElements); - } + /** + * Create a new version of a read backed pileup at loc, using the reads and their corresponding + * offsets. This lower level constructure assumes pileup is well-formed and merely keeps a + * pointer to pileup. Don't go changing the data in pileup. 
+ */ + public ReadBackedPileupImpl(GenomeLoc loc, List pileup) { + if (loc == null) throw new ReviewedStingException("Illegal null genomeloc in ReadBackedPileup"); + if (pileup == null) throw new ReviewedStingException("Illegal null pileup in ReadBackedPileup"); - public ReadBackedPileupImpl(GenomeLoc loc, Map pileupElementsBySample) { - super(loc, pileupElementsBySample); + this.loc = loc; + this.pileupElementTracker = new UnifiedPileupElementTracker(pileup); } /** @@ -58,32 +90,954 @@ public class ReadBackedPileupImpl extends AbstractReadBackedPileup pileup, int size, int nDeletions, int nMQ0Reads) { - super(loc, pileup, size, nDeletions, nMQ0Reads); + this(loc, pileup); } protected ReadBackedPileupImpl(GenomeLoc loc, PileupElementTracker tracker) { - super(loc, tracker); + this.loc = loc; + this.pileupElementTracker = tracker; + } + + public ReadBackedPileupImpl(GenomeLoc loc, Map pileupsBySample) { + this.loc = loc; + PerSamplePileupElementTracker tracker = new PerSamplePileupElementTracker(); + for (Map.Entry pileupEntry : pileupsBySample.entrySet()) { + tracker.addElements(pileupEntry.getKey(), pileupEntry.getValue().pileupElementTracker); + } + this.pileupElementTracker = tracker; + } + + public ReadBackedPileupImpl(GenomeLoc loc, List reads, int offset) { + this.loc = loc; + this.pileupElementTracker = readsOffsets2Pileup(reads, offset); + } + + /** + * Helper routine for converting reads and offset lists to a PileupElement list. 
+ * + * @param reads + * @param offsets + * @return + */ + private PileupElementTracker readsOffsets2Pileup(List reads, List offsets) { + if (reads == null) throw new ReviewedStingException("Illegal null read list in UnifiedReadBackedPileup"); + if (offsets == null) throw new ReviewedStingException("Illegal null offsets list in UnifiedReadBackedPileup"); + if (reads.size() != offsets.size()) + throw new ReviewedStingException("Reads and offset lists have different sizes!"); + + UnifiedPileupElementTracker pileup = new UnifiedPileupElementTracker(); + for (int i = 0; i < reads.size(); i++) { + GATKSAMRecord read = reads.get(i); + int offset = offsets.get(i); + pileup.add(createNewPileupElement(read, offset)); // only used to create fake pileups for testing so ancillary information is not important + } + + return pileup; + } + + /** + * Helper routine for converting reads and a single offset to a PileupElement list. + * + * @param reads + * @param offset + * @return + */ + private PileupElementTracker readsOffsets2Pileup(List reads, int offset) { + if (reads == null) throw new ReviewedStingException("Illegal null read list in UnifiedReadBackedPileup"); + if (offset < 0) throw new ReviewedStingException("Illegal offset < 0 UnifiedReadBackedPileup"); + + UnifiedPileupElementTracker pileup = new UnifiedPileupElementTracker(); + for (GATKSAMRecord read : reads) { + pileup.add(createNewPileupElement(read, offset)); // only used to create fake pileups for testing so ancillary information is not important + } + + return pileup; } - @Override protected ReadBackedPileupImpl createNewPileup(GenomeLoc loc, PileupElementTracker tracker) { return new ReadBackedPileupImpl(loc, tracker); } + protected PileupElement createNewPileupElement(GATKSAMRecord read, int offset) { + return LocusIteratorByState.createPileupForReadAndOffset(read, offset); + } + + // -------------------------------------------------------- + // + // Special 'constructors' + // + // 
-------------------------------------------------------- + + /** + * Returns a new ReadBackedPileup that is free of deletion spanning reads in this pileup. Note that this + * does not copy the data, so both ReadBackedPileups should not be changed. Doesn't make an unnecessary copy + * of the pileup (just returns this) if there are no deletions in the pileup. + * + * @return + */ @Override - protected PileupElement createNewPileupElement(final GATKSAMRecord read, final int offset, final boolean isDeletion, final boolean isBeforeDeletion, final boolean isAfterDeletion, final boolean isBeforeInsertion, final boolean isAfterInsertion, final boolean isNextToSoftClip) { - return new PileupElement(read, offset, isDeletion, isBeforeDeletion, isAfterDeletion, isBeforeInsertion, isAfterInsertion, isNextToSoftClip, null, 0); + public ReadBackedPileupImpl getPileupWithoutDeletions() { + if (getNumberOfDeletions() > 0) { + if (pileupElementTracker instanceof PerSamplePileupElementTracker) { + PerSamplePileupElementTracker tracker = (PerSamplePileupElementTracker) pileupElementTracker; + PerSamplePileupElementTracker filteredTracker = new PerSamplePileupElementTracker(); + + for (final String sample : tracker.getSamples()) { + PileupElementTracker perSampleElements = tracker.getElements(sample); + ReadBackedPileupImpl pileup = createNewPileup(loc, perSampleElements).getPileupWithoutDeletions(); + filteredTracker.addElements(sample, pileup.pileupElementTracker); + } + return createNewPileup(loc, filteredTracker); + + } else { + UnifiedPileupElementTracker tracker = (UnifiedPileupElementTracker) pileupElementTracker; + UnifiedPileupElementTracker filteredTracker = new UnifiedPileupElementTracker(); + + for (PileupElement p : tracker) { + if (!p.isDeletion()) { + filteredTracker.add(p); + } + } + return createNewPileup(loc, filteredTracker); + } + } else { + return this; + } } + /** + * Returns a new ReadBackedPileup where only one read from an overlapping read + * pair is retained. 
If the two reads in question disagree to their basecall, + * neither read is retained. If they agree on the base, the read with the higher + * base quality observation is retained + * + * @return the newly filtered pileup + */ @Override - protected PileupElement createNewPileupElement(final GATKSAMRecord read, final int offset, final boolean isDeletion, final boolean isBeforeDeletion, final boolean isAfterDeletion, final boolean isBeforeInsertion, final boolean isAfterInsertion, final boolean isNextToSoftClip, final String nextEventBases, final int nextEventLength ) { - return new PileupElement(read, offset, isDeletion, isBeforeDeletion, isAfterDeletion, isBeforeInsertion, isAfterInsertion, isNextToSoftClip, nextEventBases, nextEventLength); + public ReadBackedPileup getOverlappingFragmentFilteredPileup() { + return getOverlappingFragmentFilteredPileup(true, true); + } + + /** + * Returns a new ReadBackedPileup where only one read from an overlapping read + * pair is retained. If discardDiscordant and the two reads in question disagree to their basecall, + * neither read is retained. 
Otherwise, the read with the higher + * quality (base or mapping, depending on baseQualNotMapQual) observation is retained + * + * @return the newly filtered pileup + */ + @Override + public ReadBackedPileupImpl getOverlappingFragmentFilteredPileup(boolean discardDiscordant, boolean baseQualNotMapQual) { + if (pileupElementTracker instanceof PerSamplePileupElementTracker) { + PerSamplePileupElementTracker tracker = (PerSamplePileupElementTracker) pileupElementTracker; + PerSamplePileupElementTracker filteredTracker = new PerSamplePileupElementTracker(); + + for (final String sample : tracker.getSamples()) { + PileupElementTracker perSampleElements = tracker.getElements(sample); + ReadBackedPileupImpl pileup = createNewPileup(loc, perSampleElements).getOverlappingFragmentFilteredPileup(discardDiscordant, baseQualNotMapQual); + filteredTracker.addElements(sample, pileup.pileupElementTracker); + } + return createNewPileup(loc, filteredTracker); + } else { + Map filteredPileup = new HashMap(); + + for (PileupElement p : pileupElementTracker) { + String readName = p.getRead().getReadName(); + + // if we've never seen this read before, life is good + if (!filteredPileup.containsKey(readName)) { + filteredPileup.put(readName, p); + } else { + PileupElement existing = filteredPileup.get(readName); + + // if the reads disagree at this position, throw them both out. 
Otherwise + // keep the element with the higher quality score + if (discardDiscordant && existing.getBase() != p.getBase()) { + filteredPileup.remove(readName); + } else { + if (baseQualNotMapQual) { + if (existing.getQual() < p.getQual()) + filteredPileup.put(readName, p); + } + else { + if (existing.getMappingQual() < p.getMappingQual()) + filteredPileup.put(readName, p); + } + } + } + } + + UnifiedPileupElementTracker filteredTracker = new UnifiedPileupElementTracker(); + for (PileupElement filteredElement : filteredPileup.values()) + filteredTracker.add(filteredElement); + + return createNewPileup(loc, filteredTracker); + } } + /** + * Returns a new ReadBackedPileup that is free of mapping quality zero reads in this pileup. Note that this + * does not copy the data, so both ReadBackedPileups should not be changed. Doesn't make an unnecessary copy + * of the pileup (just returns this) if there are no MQ0 reads in the pileup. + * + * @return + */ + @Override + public ReadBackedPileupImpl getPileupWithoutMappingQualityZeroReads() { + if (getNumberOfMappingQualityZeroReads() > 0) { + if (pileupElementTracker instanceof PerSamplePileupElementTracker) { + PerSamplePileupElementTracker tracker = (PerSamplePileupElementTracker) pileupElementTracker; + PerSamplePileupElementTracker filteredTracker = new PerSamplePileupElementTracker(); + + for (final String sample : tracker.getSamples()) { + PileupElementTracker perSampleElements = tracker.getElements(sample); + ReadBackedPileupImpl pileup = createNewPileup(loc, perSampleElements).getPileupWithoutMappingQualityZeroReads(); + filteredTracker.addElements(sample, pileup.pileupElementTracker); + } + return createNewPileup(loc, filteredTracker); + + } else { + UnifiedPileupElementTracker tracker = (UnifiedPileupElementTracker) pileupElementTracker; + UnifiedPileupElementTracker filteredTracker = new UnifiedPileupElementTracker(); + + for (PileupElement p : tracker) { + if (p.getRead().getMappingQuality() > 0) { + 
filteredTracker.add(p); + } + } + return createNewPileup(loc, filteredTracker); + } + } else { + return this; + } + } + + public ReadBackedPileupImpl getPositiveStrandPileup() { + if (pileupElementTracker instanceof PerSamplePileupElementTracker) { + PerSamplePileupElementTracker tracker = (PerSamplePileupElementTracker) pileupElementTracker; + PerSamplePileupElementTracker filteredTracker = new PerSamplePileupElementTracker(); + + for (final String sample : tracker.getSamples()) { + PileupElementTracker perSampleElements = tracker.getElements(sample); + ReadBackedPileupImpl pileup = createNewPileup(loc, perSampleElements).getPositiveStrandPileup(); + filteredTracker.addElements(sample, pileup.pileupElementTracker); + } + return createNewPileup(loc, filteredTracker); + } else { + UnifiedPileupElementTracker tracker = (UnifiedPileupElementTracker) pileupElementTracker; + UnifiedPileupElementTracker filteredTracker = new UnifiedPileupElementTracker(); + + for (PileupElement p : tracker) { + if (!p.getRead().getReadNegativeStrandFlag()) { + filteredTracker.add(p); + } + } + return createNewPileup(loc, filteredTracker); + } + } + + /** + * Gets the pileup consisting of only reads on the negative strand. + * + * @return A read-backed pileup consisting only of reads on the negative strand. 
+ */ + public ReadBackedPileupImpl getNegativeStrandPileup() { + if (pileupElementTracker instanceof PerSamplePileupElementTracker) { + PerSamplePileupElementTracker tracker = (PerSamplePileupElementTracker) pileupElementTracker; + PerSamplePileupElementTracker filteredTracker = new PerSamplePileupElementTracker(); + + for (final String sample : tracker.getSamples()) { + PileupElementTracker perSampleElements = tracker.getElements(sample); + ReadBackedPileupImpl pileup = createNewPileup(loc, perSampleElements).getNegativeStrandPileup(); + filteredTracker.addElements(sample, pileup.pileupElementTracker); + } + return createNewPileup(loc, filteredTracker); + } else { + UnifiedPileupElementTracker tracker = (UnifiedPileupElementTracker) pileupElementTracker; + UnifiedPileupElementTracker filteredTracker = new UnifiedPileupElementTracker(); + + for (PileupElement p : tracker) { + if (p.getRead().getReadNegativeStrandFlag()) { + filteredTracker.add(p); + } + } + return createNewPileup(loc, filteredTracker); + } + } + + /** + * Gets a pileup consisting of all those elements passed by a given filter. + * + * @param filter Filter to use when testing for elements. + * @return a pileup without the given filtered elements. 
+ */ + public ReadBackedPileupImpl getFilteredPileup(PileupElementFilter filter) { + if (pileupElementTracker instanceof PerSamplePileupElementTracker) { + PerSamplePileupElementTracker tracker = (PerSamplePileupElementTracker) pileupElementTracker; + PerSamplePileupElementTracker filteredTracker = new PerSamplePileupElementTracker(); + + for (final String sample : tracker.getSamples()) { + PileupElementTracker perSampleElements = tracker.getElements(sample); + ReadBackedPileupImpl pileup = createNewPileup(loc, perSampleElements).getFilteredPileup(filter); + filteredTracker.addElements(sample, pileup.pileupElementTracker); + } + + return createNewPileup(loc, filteredTracker); + } else { + UnifiedPileupElementTracker filteredTracker = new UnifiedPileupElementTracker(); + + for (PileupElement p : pileupElementTracker) { + if (filter.allow(p)) + filteredTracker.add(p); + } + + return createNewPileup(loc, filteredTracker); + } + } + + /** + * Returns subset of this pileup that contains only bases with quality >= minBaseQ, coming from + * reads with mapping qualities >= minMapQ. This method allocates and returns a new instance of ReadBackedPileup. 
+ * + * @param minBaseQ + * @param minMapQ + * @return + */ + @Override + public ReadBackedPileupImpl getBaseAndMappingFilteredPileup(int minBaseQ, int minMapQ) { + if (pileupElementTracker instanceof PerSamplePileupElementTracker) { + PerSamplePileupElementTracker tracker = (PerSamplePileupElementTracker) pileupElementTracker; + PerSamplePileupElementTracker filteredTracker = new PerSamplePileupElementTracker(); + + for (final String sample : tracker.getSamples()) { + PileupElementTracker perSampleElements = tracker.getElements(sample); + ReadBackedPileupImpl pileup = createNewPileup(loc, perSampleElements).getBaseAndMappingFilteredPileup(minBaseQ, minMapQ); + filteredTracker.addElements(sample, pileup.pileupElementTracker); + } + + return createNewPileup(loc, filteredTracker); + } else { + UnifiedPileupElementTracker filteredTracker = new UnifiedPileupElementTracker(); + + for (PileupElement p : pileupElementTracker) { + if (p.getRead().getMappingQuality() >= minMapQ && (p.isDeletion() || p.getQual() >= minBaseQ)) { + filteredTracker.add(p); + } + } + + return createNewPileup(loc, filteredTracker); + } + } + + /** + * Returns subset of this pileup that contains only bases with quality >= minBaseQ. + * This method allocates and returns a new instance of ReadBackedPileup. + * + * @param minBaseQ + * @return + */ + @Override + public ReadBackedPileup getBaseFilteredPileup(int minBaseQ) { + return getBaseAndMappingFilteredPileup(minBaseQ, -1); + } + + /** + * Returns subset of this pileup that contains only bases coming from reads with mapping quality >= minMapQ. + * This method allocates and returns a new instance of ReadBackedPileup. + * + * @param minMapQ + * @return + */ + @Override + public ReadBackedPileup getMappingFilteredPileup(int minMapQ) { + return getBaseAndMappingFilteredPileup(-1, minMapQ); + } + + /** + * Gets a list of the read groups represented in this pileup. 
+ * + * @return + */ + @Override + public Collection getReadGroups() { + Set readGroups = new HashSet(); + for (PileupElement pileupElement : this) + readGroups.add(pileupElement.getRead().getReadGroup().getReadGroupId()); + return readGroups; + } + + /** + * Gets the pileup for a given read group. Horrendously inefficient at this point. + * + * @param targetReadGroupId Identifier for the read group. + * @return A read-backed pileup containing only the reads in the given read group. + */ + @Override + public ReadBackedPileupImpl getPileupForReadGroup(String targetReadGroupId) { + if (pileupElementTracker instanceof PerSamplePileupElementTracker) { + PerSamplePileupElementTracker tracker = (PerSamplePileupElementTracker) pileupElementTracker; + PerSamplePileupElementTracker filteredTracker = new PerSamplePileupElementTracker(); + + for (final String sample : tracker.getSamples()) { + PileupElementTracker perSampleElements = tracker.getElements(sample); + ReadBackedPileupImpl pileup = createNewPileup(loc, perSampleElements).getPileupForReadGroup(targetReadGroupId); + if (pileup != null) + filteredTracker.addElements(sample, pileup.pileupElementTracker); + } + return filteredTracker.size() > 0 ? createNewPileup(loc, filteredTracker) : null; + } else { + UnifiedPileupElementTracker filteredTracker = new UnifiedPileupElementTracker(); + for (PileupElement p : pileupElementTracker) { + GATKSAMRecord read = p.getRead(); + if (targetReadGroupId != null) { + if (read.getReadGroup() != null && targetReadGroupId.equals(read.getReadGroup().getReadGroupId())) + filteredTracker.add(p); + } else { + if (read.getReadGroup() == null || read.getReadGroup().getReadGroupId() == null) + filteredTracker.add(p); + } + } + return filteredTracker.size() > 0 ? createNewPileup(loc, filteredTracker) : null; + } + } + + /** + * Gets the pileup for a set of read groups. Horrendously inefficient at this point. + * + * @param rgSet List of identifiers for the read groups. 
+ * @return A read-backed pileup containing only the reads in the given read groups. + */ + @Override + public ReadBackedPileupImpl getPileupForReadGroups(final HashSet rgSet) { + if (pileupElementTracker instanceof PerSamplePileupElementTracker) { + PerSamplePileupElementTracker tracker = (PerSamplePileupElementTracker) pileupElementTracker; + PerSamplePileupElementTracker filteredTracker = new PerSamplePileupElementTracker(); + + for (final String sample : tracker.getSamples()) { + PileupElementTracker perSampleElements = tracker.getElements(sample); + ReadBackedPileupImpl pileup = createNewPileup(loc, perSampleElements).getPileupForReadGroups(rgSet); + if (pileup != null) + filteredTracker.addElements(sample, pileup.pileupElementTracker); + } + return filteredTracker.size() > 0 ? createNewPileup(loc, filteredTracker) : null; + } else { + UnifiedPileupElementTracker filteredTracker = new UnifiedPileupElementTracker(); + for (PileupElement p : pileupElementTracker) { + GATKSAMRecord read = p.getRead(); + if (rgSet != null && !rgSet.isEmpty()) { + if (read.getReadGroup() != null && rgSet.contains(read.getReadGroup().getReadGroupId())) + filteredTracker.add(p); + } else { + if (read.getReadGroup() == null || read.getReadGroup().getReadGroupId() == null) + filteredTracker.add(p); + } + } + return filteredTracker.size() > 0 ? 
createNewPileup(loc, filteredTracker) : null; + } + } + + @Override + public ReadBackedPileupImpl getPileupForLane(String laneID) { + if (pileupElementTracker instanceof PerSamplePileupElementTracker) { + PerSamplePileupElementTracker tracker = (PerSamplePileupElementTracker) pileupElementTracker; + PerSamplePileupElementTracker filteredTracker = new PerSamplePileupElementTracker(); + + for (final String sample : tracker.getSamples()) { + PileupElementTracker perSampleElements = tracker.getElements(sample); + ReadBackedPileupImpl pileup = createNewPileup(loc, perSampleElements).getPileupForLane(laneID); + if (pileup != null) + filteredTracker.addElements(sample, pileup.pileupElementTracker); + } + return filteredTracker.size() > 0 ? createNewPileup(loc, filteredTracker) : null; + } else { + UnifiedPileupElementTracker filteredTracker = new UnifiedPileupElementTracker(); + for (PileupElement p : pileupElementTracker) { + GATKSAMRecord read = p.getRead(); + if (laneID != null) { + if (read.getReadGroup() != null && + (read.getReadGroup().getReadGroupId().startsWith(laneID + ".")) || // lane is the same, but sample identifier is different + (read.getReadGroup().getReadGroupId().equals(laneID))) // in case there is no sample identifier, they have to be exactly the same + filteredTracker.add(p); + } else { + if (read.getReadGroup() == null || read.getReadGroup().getReadGroupId() == null) + filteredTracker.add(p); + } + } + return filteredTracker.size() > 0 ? createNewPileup(loc, filteredTracker) : null; + } + } + + public Collection getSamples() { + if (pileupElementTracker instanceof PerSamplePileupElementTracker) { + PerSamplePileupElementTracker tracker = (PerSamplePileupElementTracker) pileupElementTracker; + return new HashSet(tracker.getSamples()); + } else { + Collection sampleNames = new HashSet(); + for (PileupElement p : this) { + GATKSAMRecord read = p.getRead(); + String sampleName = read.getReadGroup() != null ? 
read.getReadGroup().getSample() : null; + sampleNames.add(sampleName); + } + return sampleNames; + } + } + + /** + * Returns a pileup randomly downsampled to the desiredCoverage. + * + * TODO: delete this once the experimental downsampler stabilizes + * + * @param desiredCoverage + * @return + */ + @Override + public ReadBackedPileup getDownsampledPileup(int desiredCoverage) { + if (getNumberOfElements() <= desiredCoverage) + return this; + + // randomly choose numbers corresponding to positions in the reads list + TreeSet positions = new TreeSet(); + for (int i = 0; i < desiredCoverage; /* no update */) { + if (positions.add(GenomeAnalysisEngine.getRandomGenerator().nextInt(getNumberOfElements()))) + i++; + } + + if (pileupElementTracker instanceof PerSamplePileupElementTracker) { + PerSamplePileupElementTracker tracker = (PerSamplePileupElementTracker) pileupElementTracker; + PerSamplePileupElementTracker filteredTracker = new PerSamplePileupElementTracker(); + + + for (final String sample : tracker.getSamples()) { + PileupElementTracker perSampleElements = tracker.getElements(sample); + + int current = 0; + UnifiedPileupElementTracker filteredPileup = new UnifiedPileupElementTracker(); + for (PileupElement p : perSampleElements) { + if (positions.contains(current)) + filteredPileup.add(p); + current++; + + } + filteredTracker.addElements(sample, filteredPileup); + } + + return createNewPileup(loc, filteredTracker); + } else { + UnifiedPileupElementTracker tracker = (UnifiedPileupElementTracker) pileupElementTracker; + UnifiedPileupElementTracker filteredTracker = new UnifiedPileupElementTracker(); + + Iterator positionIter = positions.iterator(); + + while (positionIter.hasNext()) { + int nextReadToKeep = (Integer) positionIter.next(); + filteredTracker.add(tracker.get(nextReadToKeep)); + } + + return createNewPileup(getLocation(), filteredTracker); + } + } + + @Override + public ReadBackedPileup getPileupForSamples(Collection sampleNames) { + if 
(pileupElementTracker instanceof PerSamplePileupElementTracker) { + PerSamplePileupElementTracker tracker = (PerSamplePileupElementTracker) pileupElementTracker; + PileupElementTracker filteredElements = tracker.getElements(sampleNames); + return filteredElements != null ? createNewPileup(loc, filteredElements) : null; + } else { + HashSet hashSampleNames = new HashSet(sampleNames); // to speed up the "contains" access in the for loop + UnifiedPileupElementTracker filteredTracker = new UnifiedPileupElementTracker(); + for (PileupElement p : pileupElementTracker) { + GATKSAMRecord read = p.getRead(); + if (sampleNames != null) { // still checking on sampleNames because hashSampleNames will never be null. And empty means something else. + if (read.getReadGroup() != null && hashSampleNames.contains(read.getReadGroup().getSample())) + filteredTracker.add(p); + } else { + if (read.getReadGroup() == null || read.getReadGroup().getSample() == null) + filteredTracker.add(p); + } + } + return filteredTracker.size() > 0 ? 
createNewPileup(loc, filteredTracker) : null; + } + } + + @Override + public Map getPileupsForSamples(Collection sampleNames) { + Map result = new HashMap(); + if (pileupElementTracker instanceof PerSamplePileupElementTracker) { + PerSamplePileupElementTracker tracker = (PerSamplePileupElementTracker) pileupElementTracker; + for (String sample : sampleNames) { + PileupElementTracker filteredElements = tracker.getElements(sample); + if (filteredElements != null) + result.put(sample, createNewPileup(loc, filteredElements)); + } + } else { + Map> trackerMap = new HashMap>(); + + for (String sample : sampleNames) { // initialize pileups for each sample + UnifiedPileupElementTracker filteredTracker = new UnifiedPileupElementTracker(); + trackerMap.put(sample, filteredTracker); + } + for (PileupElement p : pileupElementTracker) { // go through all pileup elements only once and add them to the respective sample's pileup + GATKSAMRecord read = p.getRead(); + if (read.getReadGroup() != null) { + String sample = read.getReadGroup().getSample(); + UnifiedPileupElementTracker tracker = trackerMap.get(sample); + if (tracker != null) // we only add the pileup the requested samples. Completely ignore the rest + tracker.add(p); + } + } + for (Map.Entry> entry : trackerMap.entrySet()) // create the ReadBackedPileup for each sample + result.put(entry.getKey(), createNewPileup(loc, entry.getValue())); + } + return result; + } + + + @Override + public ReadBackedPileup getPileupForSample(String sampleName) { + if (pileupElementTracker instanceof PerSamplePileupElementTracker) { + PerSamplePileupElementTracker tracker = (PerSamplePileupElementTracker) pileupElementTracker; + PileupElementTracker filteredElements = tracker.getElements(sampleName); + return filteredElements != null ? 
createNewPileup(loc, filteredElements) : null; + } else { + UnifiedPileupElementTracker filteredTracker = new UnifiedPileupElementTracker(); + for (PileupElement p : pileupElementTracker) { + GATKSAMRecord read = p.getRead(); + if (sampleName != null) { + if (read.getReadGroup() != null && sampleName.equals(read.getReadGroup().getSample())) + filteredTracker.add(p); + } else { + if (read.getReadGroup() == null || read.getReadGroup().getSample() == null) + filteredTracker.add(p); + } + } + return filteredTracker.size() > 0 ? createNewPileup(loc, filteredTracker) : null; + } + } + + // -------------------------------------------------------- + // + // iterators + // + // -------------------------------------------------------- + + /** + * The best way to access PileupElements where you only care about the bases and quals in the pileup. + *

+ * for (PileupElement p : this) { doSomething(p); } + *

+ * Provides efficient iteration of the data. + * + * @return + */ + @Override + public Iterator iterator() { + return new Iterator() { + private final Iterator wrappedIterator = pileupElementTracker.iterator(); + + public boolean hasNext() { + return wrappedIterator.hasNext(); + } + + public PileupElement next() { + return wrappedIterator.next(); + } + + public void remove() { + throw new UnsupportedOperationException("Cannot remove from a pileup element iterator"); + } + }; + } + + /** + * The best way to access PileupElements where you only care not only about bases and quals in the pileup + * but also need access to the index of the pileup element in the pile. + * + * for (ExtendedPileupElement p : this) { doSomething(p); } + * + * Provides efficient iteration of the data. + * + * @return + */ + + /** + * Simple useful routine to count the number of deletion bases in this pileup + * + * @return + */ + @Override + public int getNumberOfDeletions() { + if ( nDeletions == UNINITIALIZED_CACHED_INT_VALUE ) { + nDeletions = 0; + for (PileupElement p : pileupElementTracker.unorderedIterable() ) { + if (p.isDeletion()) { + nDeletions++; + } + } + } + return nDeletions; + } + + @Override + public int getNumberOfMappingQualityZeroReads() { + if ( nMQ0Reads == UNINITIALIZED_CACHED_INT_VALUE ) { + nMQ0Reads = 0; + + for (PileupElement p : pileupElementTracker.unorderedIterable()) { + if (p.getRead().getMappingQuality() == 0) { + nMQ0Reads++; + } + } + } + + return nMQ0Reads; + } + + /** + * @return the number of physical elements in this pileup + */ + @Override + public int getNumberOfElements() { + return pileupElementTracker.size(); + } + + /** + * @return the number of abstract elements in this pileup + */ + @Override + public int depthOfCoverage() { + if (depthOfCoverage == UNINITIALIZED_CACHED_INT_VALUE) { + depthOfCoverage = 0; + for (PileupElement p : pileupElementTracker.unorderedIterable()) { + depthOfCoverage += p.getRepresentativeCount(); + } + } + return 
depthOfCoverage; + } + + /** + * @return true if there are 0 elements in the pileup, false otherwise + */ + @Override + public boolean isEmpty() { + return getNumberOfElements() == 0; + } + + + /** + * @return the location of this pileup + */ + @Override + public GenomeLoc getLocation() { + return loc; + } + + /** + * Get counts of A, C, G, T in order, which returns a int[4] vector with counts according + * to BaseUtils.simpleBaseToBaseIndex for each base. + * + * @return + */ + @Override + public int[] getBaseCounts() { + int[] counts = new int[4]; + + // TODO -- can be optimized with .unorderedIterable() + if (pileupElementTracker instanceof PerSamplePileupElementTracker) { + PerSamplePileupElementTracker tracker = (PerSamplePileupElementTracker) pileupElementTracker; + for (final String sample : tracker.getSamples()) { + int[] countsBySample = createNewPileup(loc, tracker.getElements(sample)).getBaseCounts(); + for (int i = 0; i < counts.length; i++) + counts[i] += countsBySample[i]; + } + } else { + for (PileupElement pile : this) { + // skip deletion sites + if (!pile.isDeletion()) { + int index = BaseUtils.simpleBaseToBaseIndex((char) pile.getBase()); + if (index != -1) + counts[index]++; + } + } + } + + return counts; + } + + @Override + public String getPileupString(Character ref) { + // In the pileup format, each line represents a genomic position, consisting of chromosome name, + // coordinate, reference base, read bases, read qualities and alignment mapping qualities. + return String.format("%s %s %c %s %s", + getLocation().getContig(), getLocation().getStart(), // chromosome name and coordinate + ref, // reference base + new String(getBases()), + getQualsString()); + } + + // -------------------------------------------------------- + // + // Convenience functions that may be slow + // + // -------------------------------------------------------- + + /** + * Returns a list of the reads in this pileup. 
Note this call costs O(n) and allocates fresh lists each time + * + * @return + */ + @Override + public List getReads() { + List reads = new ArrayList(getNumberOfElements()); + for (PileupElement pile : this) { + reads.add(pile.getRead()); + } + return reads; + } + + @Override + public int getNumberOfDeletionsAfterThisElement() { + int count = 0; + for (PileupElement p : pileupElementTracker.unorderedIterable()) { + if (p.isBeforeDeletionStart()) + count++; + } + return count; + } + + @Override + public int getNumberOfInsertionsAfterThisElement() { + int count = 0; + for (PileupElement p : pileupElementTracker.unorderedIterable()) { + if (p.isBeforeInsertion()) + count++; + } + return count; + + } + /** + * Returns a list of the offsets in this pileup. Note this call costs O(n) and allocates fresh lists each time + * + * @return + */ + @Override + public List getOffsets() { + List offsets = new ArrayList(getNumberOfElements()); + for (PileupElement pile : pileupElementTracker.unorderedIterable()) { + offsets.add(pile.getOffset()); + } + return offsets; + } + + /** + * Returns an array of the bases in this pileup. Note this call costs O(n) and allocates fresh array each time + * + * @return + */ + @Override + public byte[] getBases() { + byte[] v = new byte[getNumberOfElements()]; + int pos = 0; + for (PileupElement pile : pileupElementTracker) { + v[pos++] = pile.getBase(); + } + return v; + } + + /** + * Returns an array of the quals in this pileup. 
Note this call costs O(n) and allocates fresh array each time + * + * @return + */ + @Override + public byte[] getQuals() { + byte[] v = new byte[getNumberOfElements()]; + int pos = 0; + for (PileupElement pile : pileupElementTracker) { + v[pos++] = pile.getQual(); + } + return v; + } + + /** + * Get an array of the mapping qualities + * + * @return + */ + @Override + public byte[] getMappingQuals() { + byte[] v = new byte[getNumberOfElements()]; + int pos = 0; + for (PileupElement pile : pileupElementTracker) { + v[pos++] = (byte) pile.getRead().getMappingQuality(); + } + return v; + } + + static String quals2String(byte[] quals) { + StringBuilder qualStr = new StringBuilder(); + for (int qual : quals) { + qual = Math.min(qual, 63); // todo: fixme, this isn't a good idea + char qualChar = (char) (33 + qual); // todo: warning, this is illegal for qual > 63 + qualStr.append(qualChar); + } + + return qualStr.toString(); + } + + private String getQualsString() { + return quals2String(getQuals()); + } + + /** + * Returns a new ReadBackedPileup that is sorted by start coordinate of the reads. + * + * @return + */ + @Override + public ReadBackedPileup getStartSortedPileup() { + + final TreeSet sortedElements = new TreeSet(new Comparator() { + @Override + public int compare(PileupElement element1, PileupElement element2) { + final int difference = element1.getRead().getAlignmentStart() - element2.getRead().getAlignmentStart(); + return difference != 0 ? 
difference : element1.getRead().getReadName().compareTo(element2.getRead().getReadName()); + } + }); + + if (pileupElementTracker instanceof PerSamplePileupElementTracker) { + PerSamplePileupElementTracker tracker = (PerSamplePileupElementTracker) pileupElementTracker; + + for (final String sample : tracker.getSamples()) { + PileupElementTracker perSampleElements = tracker.getElements(sample); + for (PileupElement pile : perSampleElements) + sortedElements.add(pile); + } + } + else { + UnifiedPileupElementTracker tracker = (UnifiedPileupElementTracker) pileupElementTracker; + for (PileupElement pile : tracker) + sortedElements.add(pile); + } + + UnifiedPileupElementTracker sortedTracker = new UnifiedPileupElementTracker(); + for (PileupElement pile : sortedElements) + sortedTracker.add(pile); + + return createNewPileup(loc, sortedTracker); + } + + @Override + public FragmentCollection toFragments() { + return FragmentUtils.create(this); + } + + @Override + public ReadBackedPileup copy() { + return new ReadBackedPileupImpl(loc, pileupElementTracker.copy()); + } } diff --git a/public/java/src/org/broadinstitute/sting/utils/sam/AlignmentUtils.java b/public/java/src/org/broadinstitute/sting/utils/sam/AlignmentUtils.java index ca48b7327..b7a813ec2 100644 --- a/public/java/src/org/broadinstitute/sting/utils/sam/AlignmentUtils.java +++ b/public/java/src/org/broadinstitute/sting/utils/sam/AlignmentUtils.java @@ -31,7 +31,6 @@ import net.sf.samtools.CigarOperator; import net.sf.samtools.SAMRecord; import org.broadinstitute.sting.gatk.contexts.ReferenceContext; import org.broadinstitute.variant.utils.BaseUtils; -import org.broadinstitute.sting.utils.Utils; import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; import org.broadinstitute.sting.utils.pileup.PileupElement; import org.broadinstitute.sting.utils.pileup.ReadBackedPileup; @@ -297,7 +296,7 @@ public class AlignmentUtils { } public static int calcAlignmentByteArrayOffset(final Cigar cigar, final 
PileupElement pileupElement, final int alignmentStart, final int refLocus) { - return calcAlignmentByteArrayOffset( cigar, pileupElement.getOffset(), pileupElement.isInsertionAtBeginningOfRead(), pileupElement.isDeletion(), alignmentStart, refLocus ); + return calcAlignmentByteArrayOffset( cigar, pileupElement.getOffset(), false, pileupElement.isDeletion(), alignmentStart, refLocus ); } public static int calcAlignmentByteArrayOffset(final Cigar cigar, final int offset, final boolean isInsertionAtBeginningOfRead, final boolean isDeletion, final int alignmentStart, final int refLocus) { @@ -402,13 +401,13 @@ public class AlignmentUtils { switch (ce.getOperator()) { case I: if (alignPos > 0) { - if (alignment[alignPos - 1] == BaseUtils.A) { + if (alignment[alignPos - 1] == BaseUtils.Base.A.base) { alignment[alignPos - 1] = PileupElement.A_FOLLOWED_BY_INSERTION_BASE; - } else if (alignment[alignPos - 1] == BaseUtils.C) { + } else if (alignment[alignPos - 1] == BaseUtils.Base.C.base) { alignment[alignPos - 1] = PileupElement.C_FOLLOWED_BY_INSERTION_BASE; - } else if (alignment[alignPos - 1] == BaseUtils.T) { + } else if (alignment[alignPos - 1] == BaseUtils.Base.T.base) { alignment[alignPos - 1] = PileupElement.T_FOLLOWED_BY_INSERTION_BASE; - } else if (alignment[alignPos - 1] == BaseUtils.G) { + } else if (alignment[alignPos - 1] == BaseUtils.Base.G.base) { alignment[alignPos - 1] = PileupElement.G_FOLLOWED_BY_INSERTION_BASE; } } diff --git a/public/java/src/org/broadinstitute/sting/utils/sam/ArtificialBAMBuilder.java b/public/java/src/org/broadinstitute/sting/utils/sam/ArtificialBAMBuilder.java new file mode 100644 index 000000000..ab539c9dc --- /dev/null +++ b/public/java/src/org/broadinstitute/sting/utils/sam/ArtificialBAMBuilder.java @@ -0,0 +1,225 @@ +/* + * Copyright (c) 2012 The Broad Institute + * + * Permission is hereby granted, free of charge, to any person + * obtaining a copy of this software and associated documentation + * files (the "Software"), to deal 
in the Software without + * restriction, including without limitation the rights to use, + * copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following + * conditions: + * + * The above copyright notice and this permission notice shall be + * included in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES + * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT + * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, + * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR + * THE USE OR OTHER DEALINGS IN THE SOFTWARE. + */ + +package org.broadinstitute.sting.utils.sam; + +import net.sf.picard.reference.IndexedFastaSequenceFile; +import net.sf.samtools.*; +import org.broadinstitute.sting.utils.GenomeLocParser; +import org.broadinstitute.sting.utils.NGSPlatform; + +import java.io.File; +import java.io.IOException; +import java.util.*; + +/** + * Easy to use creator of artificial BAM files for testing + * + * Allows us to make a stream of reads or an index BAM file with read having the following properties + * + * - coming from n samples + * - of fixed read length and aligned to the genome with M operator + * - having N reads per alignment start + * - skipping N bases between each alignment start + * - starting at a given alignment start + * + * User: depristo + * Date: 1/15/13 + * Time: 9:22 AM + */ +public class ArtificialBAMBuilder { + public final static int BAM_SHARD_SIZE = 16384; + + private final IndexedFastaSequenceFile reference; + private final GenomeLocParser parser; + + final int nReadsPerLocus; + final int nLoci; + + int skipNLoci = 0; + int 
alignmentStart = 1; + int readLength = 10; + private final ArrayList samples = new ArrayList(); + + private LinkedList additionalReads = new LinkedList(); + + final SAMFileWriterFactory factory = new SAMFileWriterFactory(); + { + factory.setCreateIndex(true); + } + + SAMFileHeader header; + + public ArtificialBAMBuilder(final IndexedFastaSequenceFile reference, int nReadsPerLocus, int nLoci) { + this.nReadsPerLocus = nReadsPerLocus; + this.nLoci = nLoci; + + this.reference = reference; + this.parser = new GenomeLocParser(reference); + createAndSetHeader(1); + } + + public ArtificialBAMBuilder(int nReadsPerLocus, int nLoci) { + this(ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000000).getSequenceDictionary(), nReadsPerLocus, nLoci); + } + + public ArtificialBAMBuilder(final SAMSequenceDictionary dict, int nReadsPerLocus, int nLoci) { + this.nReadsPerLocus = nReadsPerLocus; + this.nLoci = nLoci; + this.reference = null; + this.parser = new GenomeLocParser(dict); + createAndSetHeader(1); + } + + public IndexedFastaSequenceFile getReference() { + return reference; + } + + public GenomeLocParser getGenomeLocParser() { + return parser; + } + + public ArtificialBAMBuilder createAndSetHeader(final int nSamples) { + this.header = new SAMFileHeader(); + header.setSortOrder(SAMFileHeader.SortOrder.coordinate); + header.setSequenceDictionary(parser.getContigs()); + samples.clear(); + + for ( int i = 0; i < nSamples; i++ ) { + final GATKSAMReadGroupRecord rg = new GATKSAMReadGroupRecord("rg" + i); + final String sample = "sample" + i; + samples.add(sample); + rg.setSample(sample); + rg.setPlatform(NGSPlatform.ILLUMINA.getDefaultPlatform()); + header.addReadGroup(rg); + } + + return this; + } + + public void addReads(final GATKSAMRecord readToAdd) { + additionalReads.add(readToAdd); + } + + public void addReads(final Collection readsToAdd) { + additionalReads.addAll(readsToAdd); + } + + public List getSamples() { + return samples; + } + + /** + * Create a read stream 
based on the parameters. The cigar string for each + * read will be *M, where * is the length of the read. + * + * Useful for testing things like LocusIteratorBystate + * + * @return a ordered list of reads + */ + public List makeReads() { + final String baseName = "read"; + List reads = new ArrayList(nReadsPerLocus*nLoci); + for ( int locusI = 0; locusI < nLoci; locusI++) { + final int locus = locusI * (skipNLoci + 1); + for ( int readI = 0; readI < nReadsPerLocus; readI++ ) { + for ( final SAMReadGroupRecord rg : header.getReadGroups() ) { + final String readName = String.format("%s.%d.%d.%s", baseName, locus, readI, rg.getId()); + final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, readName, 0, alignmentStart + locus, readLength); + read.setReadGroup(new GATKSAMReadGroupRecord(rg)); + reads.add(read); + } + } + } + + if ( ! additionalReads.isEmpty() ) { + reads.addAll(additionalReads); + Collections.sort(reads, new SAMRecordCoordinateComparator()); + } + + return reads; + } + + /** + * Make an indexed BAM file contains the reads in the builder, marking it for deleteOnExit() + * @return the BAM file + */ + public File makeTemporarilyBAMFile() { + try { + final File file = File.createTempFile("tempBAM", ".bam"); + file.deleteOnExit(); + return makeBAMFile(file); + } catch ( IOException e ) { + throw new RuntimeException(e); + } + } + + /** + * Write the reads from this builder to output, creating an index as well + * @param output the output BAM file we want to use + * @return + */ + public File makeBAMFile(final File output) { + final SAMFileWriter writer = factory.makeBAMWriter(header, true, output, 0); + for ( final GATKSAMRecord read : makeReads() ) + writer.addAlignment(read); + writer.close(); + return output; + } + + public int getnReadsPerLocus() { return nReadsPerLocus; } + public int getnLoci() { return nLoci; } + public int getSkipNLoci() { return skipNLoci; } + public ArtificialBAMBuilder setSkipNLoci(int skipNLoci) { 
this.skipNLoci = skipNLoci; return this; } + public int getAlignmentStart() { return alignmentStart; } + public ArtificialBAMBuilder setAlignmentStart(int alignmentStart) { this.alignmentStart = alignmentStart; return this; } + public int getReadLength() { return readLength; } + public ArtificialBAMBuilder setReadLength(int readLength) { this.readLength = readLength; return this; } + public SAMFileHeader getHeader() { return header; } + public ArtificialBAMBuilder setHeader(SAMFileHeader header) { this.header = header; return this; } + + public int getAlignmentEnd() { + return alignmentStart + nLoci * (skipNLoci + 1) + readLength; + } + + + public int getNSamples() { return samples.size(); } + + public int expectedNumberOfReads() { + return nLoci * nReadsPerLocus * header.getReadGroups().size(); + } + + @Override + public String toString() { + return "ArtificialBAMBuilder{" + + "samples=" + samples + + ", readLength=" + readLength + + ", alignmentStart=" + alignmentStart + + ", skipNLoci=" + skipNLoci + + ", nLoci=" + nLoci + + ", nReadsPerLocus=" + nReadsPerLocus + + '}'; + } +} diff --git a/public/java/src/org/broadinstitute/sting/utils/sam/ArtificialSAMUtils.java b/public/java/src/org/broadinstitute/sting/utils/sam/ArtificialSAMUtils.java index c6d5fc0d4..0f5d6a2f7 100644 --- a/public/java/src/org/broadinstitute/sting/utils/sam/ArtificialSAMUtils.java +++ b/public/java/src/org/broadinstitute/sting/utils/sam/ArtificialSAMUtils.java @@ -30,6 +30,7 @@ import org.broadinstitute.sting.gatk.iterators.StingSAMIterator; import org.broadinstitute.sting.utils.GenomeLoc; import org.broadinstitute.sting.utils.Utils; import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; +import org.broadinstitute.sting.utils.locusiterator.LocusIteratorByState; import org.broadinstitute.sting.utils.pileup.PileupElement; import org.broadinstitute.sting.utils.pileup.ReadBackedPileup; import org.broadinstitute.sting.utils.pileup.ReadBackedPileupImpl; @@ -449,10 +450,10 @@ 
public class ArtificialSAMUtils { final GATKSAMRecord left = pair.get(0); final GATKSAMRecord right = pair.get(1); - pileupElements.add(new PileupElement(left, pos - leftStart, false, false, false, false, false, false)); + pileupElements.add(LocusIteratorByState.createPileupForReadAndOffset(left, pos - leftStart)); if (pos >= right.getAlignmentStart() && pos <= right.getAlignmentEnd()) { - pileupElements.add(new PileupElement(right, pos - rightStart, false, false, false, false, false, false)); + pileupElements.add(LocusIteratorByState.createPileupForReadAndOffset(right, pos - rightStart)); } } diff --git a/public/java/src/org/broadinstitute/sting/utils/sam/ReadUtils.java b/public/java/src/org/broadinstitute/sting/utils/sam/ReadUtils.java index b61628d4d..1488f7269 100644 --- a/public/java/src/org/broadinstitute/sting/utils/sam/ReadUtils.java +++ b/public/java/src/org/broadinstitute/sting/utils/sam/ReadUtils.java @@ -169,8 +169,8 @@ public class ReadUtils { * @return whether or not the base is in the adaptor */ public static boolean isBaseInsideAdaptor(final GATKSAMRecord read, long basePos) { - Integer adaptorBoundary = getAdaptorBoundary(read); - if (adaptorBoundary == null || read.getInferredInsertSize() > DEFAULT_ADAPTOR_SIZE) + final int adaptorBoundary = getAdaptorBoundary(read); + if (adaptorBoundary == CANNOT_COMPUTE_ADAPTOR_BOUNDARY || read.getInferredInsertSize() > DEFAULT_ADAPTOR_SIZE) return false; return read.getReadNegativeStrandFlag() ? basePos <= adaptorBoundary : basePos >= adaptorBoundary; @@ -199,26 +199,28 @@ public class ReadUtils { * in these cases the adaptor boundary is at the start of the read plus the inferred insert size (plus one) * * @param read the read being tested for the adaptor boundary - * @return the reference coordinate for the adaptor boundary (effectively the first base IN the adaptor, closest to the read. NULL if the read is unmapped or the mate is mapped to another contig. 
+ * @return the reference coordinate for the adaptor boundary (effectively the first base IN the adaptor, closest to the read. + * CANNOT_COMPUTE_ADAPTOR_BOUNDARY if the read is unmapped or the mate is mapped to another contig. */ - public static Integer getAdaptorBoundary(final SAMRecord read) { + public static int getAdaptorBoundary(final SAMRecord read) { final int MAXIMUM_ADAPTOR_LENGTH = 8; final int insertSize = Math.abs(read.getInferredInsertSize()); // the inferred insert size can be negative if the mate is mapped before the read (so we take the absolute value) if (insertSize == 0 || read.getReadUnmappedFlag()) // no adaptors in reads with mates in another chromosome or unmapped pairs - return null; + return CANNOT_COMPUTE_ADAPTOR_BOUNDARY; - Integer adaptorBoundary; // the reference coordinate for the adaptor boundary (effectively the first base IN the adaptor, closest to the read) + int adaptorBoundary; // the reference coordinate for the adaptor boundary (effectively the first base IN the adaptor, closest to the read) if (read.getReadNegativeStrandFlag()) adaptorBoundary = read.getMateAlignmentStart() - 1; // case 1 (see header) else adaptorBoundary = read.getAlignmentStart() + insertSize + 1; // case 2 (see header) if ( (adaptorBoundary < read.getAlignmentStart() - MAXIMUM_ADAPTOR_LENGTH) || (adaptorBoundary > read.getAlignmentEnd() + MAXIMUM_ADAPTOR_LENGTH) ) - adaptorBoundary = null; // we are being conservative by not allowing the adaptor boundary to go beyond what we belive is the maximum size of an adaptor + adaptorBoundary = CANNOT_COMPUTE_ADAPTOR_BOUNDARY; // we are being conservative by not allowing the adaptor boundary to go beyond what we belive is the maximum size of an adaptor return adaptorBoundary; } + public static int CANNOT_COMPUTE_ADAPTOR_BOUNDARY = Integer.MIN_VALUE; /** * is the read a 454 read? 
@@ -392,6 +394,11 @@ public class ReadUtils { return getReadCoordinateForReferenceCoordinate(read.getSoftStart(), read.getCigar(), refCoord, tail, false); } + public static int getReadCoordinateForReferenceCoordinateUpToEndOfRead(GATKSAMRecord read, int refCoord, ClippingTail tail) { + final int leftmostSafeVariantPosition = Math.max(read.getSoftStart(), refCoord); + return getReadCoordinateForReferenceCoordinate(read.getSoftStart(), read.getCigar(), leftmostSafeVariantPosition, tail, false); + } + public static int getReadCoordinateForReferenceCoordinate(final int alignmentStart, final Cigar cigar, final int refCoord, final ClippingTail tail, final boolean allowGoalNotReached) { Pair result = getReadCoordinateForReferenceCoordinate(alignmentStart, cigar, refCoord, allowGoalNotReached); int readCoord = result.getFirst(); diff --git a/public/java/src/org/broadinstitute/variant/utils/BaseUtils.java b/public/java/src/org/broadinstitute/variant/utils/BaseUtils.java index 819041a3e..a6ac2ca53 100644 --- a/public/java/src/org/broadinstitute/variant/utils/BaseUtils.java +++ b/public/java/src/org/broadinstitute/variant/utils/BaseUtils.java @@ -26,6 +26,7 @@ package org.broadinstitute.variant.utils; import net.sf.samtools.util.StringUtil; +import org.broadinstitute.sting.utils.exceptions.UserException; import java.util.Arrays; import java.util.Random; @@ -34,42 +35,66 @@ import java.util.Random; * BaseUtils contains some basic utilities for manipulating nucleotides. */ public class BaseUtils { - public final static byte A = (byte) 'A'; - public final static byte C = (byte) 'C'; - public final static byte G = (byte) 'G'; - public final static byte T = (byte) 'T'; - public final static byte N = (byte) 'N'; - public final static byte D = (byte) 'D'; + public enum Base { + A ((byte)'A'), + C ((byte)'C'), + G ((byte)'G'), + T ((byte)'T'), + N ((byte)'N'), + D ((byte)'D'); - // - // todo -- we need a generalized base abstraction using the Base enum. 
- // + public byte base; + + private Base(final byte base) { + this.base = base; + } + } + + // todo -- add this to the generalized base abstraction using the Base enum. public final static byte[] BASES = {'A', 'C', 'G', 'T'}; public final static byte[] EXTENDED_BASES = {'A', 'C', 'G', 'T', 'N', 'D'}; static private final int[] baseIndexMap = new int[256]; static { Arrays.fill(baseIndexMap, -1); - baseIndexMap['A'] = 0; - baseIndexMap['a'] = 0; - baseIndexMap['*'] = 0; // the wildcard character counts as an A - baseIndexMap['C'] = 1; - baseIndexMap['c'] = 1; - baseIndexMap['G'] = 2; - baseIndexMap['g'] = 2; - baseIndexMap['T'] = 3; - baseIndexMap['t'] = 3; + baseIndexMap['A'] = Base.A.ordinal(); + baseIndexMap['a'] = Base.A.ordinal(); + baseIndexMap['*'] = Base.A.ordinal(); // the wildcard character counts as an A + baseIndexMap['C'] = Base.C.ordinal(); + baseIndexMap['c'] = Base.C.ordinal(); + baseIndexMap['G'] = Base.G.ordinal(); + baseIndexMap['g'] = Base.G.ordinal(); + baseIndexMap['T'] = Base.T.ordinal(); + baseIndexMap['t'] = Base.T.ordinal(); } - // todo -- fix me (enums?) 
- public static final byte DELETION_INDEX = 4; - public static final byte NO_CALL_INDEX = 5; // (this is 'N') - - public static final int aIndex = BaseUtils.simpleBaseToBaseIndex((byte) 'A'); - public static final int cIndex = BaseUtils.simpleBaseToBaseIndex((byte) 'C'); - public static final int gIndex = BaseUtils.simpleBaseToBaseIndex((byte) 'G'); - public static final int tIndex = BaseUtils.simpleBaseToBaseIndex((byte) 'T'); + static private final int[] baseIndexWithIupacMap = baseIndexMap.clone(); + static { + baseIndexWithIupacMap['*'] = -1; // the wildcard character is bad + baseIndexWithIupacMap['N'] = Base.N.ordinal(); + baseIndexWithIupacMap['n'] = Base.N.ordinal(); + baseIndexWithIupacMap['R'] = Base.N.ordinal(); + baseIndexWithIupacMap['r'] = Base.N.ordinal(); + baseIndexWithIupacMap['Y'] = Base.N.ordinal(); + baseIndexWithIupacMap['y'] = Base.N.ordinal(); + baseIndexWithIupacMap['M'] = Base.N.ordinal(); + baseIndexWithIupacMap['m'] = Base.N.ordinal(); + baseIndexWithIupacMap['K'] = Base.N.ordinal(); + baseIndexWithIupacMap['k'] = Base.N.ordinal(); + baseIndexWithIupacMap['W'] = Base.N.ordinal(); + baseIndexWithIupacMap['w'] = Base.N.ordinal(); + baseIndexWithIupacMap['S'] = Base.N.ordinal(); + baseIndexWithIupacMap['s'] = Base.N.ordinal(); + baseIndexWithIupacMap['B'] = Base.N.ordinal(); + baseIndexWithIupacMap['b'] = Base.N.ordinal(); + baseIndexWithIupacMap['D'] = Base.N.ordinal(); + baseIndexWithIupacMap['d'] = Base.N.ordinal(); + baseIndexWithIupacMap['H'] = Base.N.ordinal(); + baseIndexWithIupacMap['h'] = Base.N.ordinal(); + baseIndexWithIupacMap['V'] = Base.N.ordinal(); + baseIndexWithIupacMap['v'] = Base.N.ordinal(); + } // Use a fixed random seed to allow for deterministic results when using random bases private static final Random randomNumberGen = new Random(47382911L); @@ -96,10 +121,10 @@ public class BaseUtils { } public static boolean isTransition(byte base1, byte base2) { - int b1 = simpleBaseToBaseIndex(base1); - int b2 = 
simpleBaseToBaseIndex(base2); - return b1 == 0 && b2 == 2 || b1 == 2 && b2 == 0 || - b1 == 1 && b2 == 3 || b1 == 3 && b2 == 1; + final int b1 = simpleBaseToBaseIndex(base1); + final int b2 = simpleBaseToBaseIndex(base2); + return b1 == Base.A.ordinal() && b2 == Base.G.ordinal() || b1 == Base.G.ordinal() && b2 == Base.A.ordinal() || + b1 == Base.C.ordinal() && b2 == Base.T.ordinal() || b1 == Base.T.ordinal() && b2 == Base.C.ordinal(); } public static boolean isTransversion(byte base1, byte base2) { @@ -141,6 +166,21 @@ public class BaseUtils { return base >= 'A' && base <= 'Z'; } + public static byte[] convertIUPACtoN(final byte[] bases, final boolean errorOnBadReferenceBase, final boolean ignoreConversionOfFirstByte) { + final int length = bases.length; + final int start = ignoreConversionOfFirstByte ? 1 : 0; + + for ( int i = start; i < length; i++ ) { + final int baseIndex = baseIndexWithIupacMap[bases[i]]; + if ( baseIndex == Base.N.ordinal() ) { + bases[i] = 'N'; + } else if ( errorOnBadReferenceBase && baseIndex == -1 ) { + throw new UserException.BadInput("We encountered a non-standard non-IUPAC base in the provided reference: '" + bases[i] + "'"); + } + } + return bases; + } + /** * Converts a IUPAC nucleotide code to a pair of bases * @@ -231,10 +271,10 @@ public class BaseUtils { switch (base) { case 'd': case 'D': - return DELETION_INDEX; + return Base.D.ordinal(); case 'n': case 'N': - return NO_CALL_INDEX; + return Base.N.ordinal(); default: return simpleBaseToBaseIndex(base); diff --git a/public/java/src/org/broadinstitute/variant/variantcontext/Allele.java b/public/java/src/org/broadinstitute/variant/variantcontext/Allele.java index 33bca1a8a..0a0b4d0b7 100644 --- a/public/java/src/org/broadinstitute/variant/variantcontext/Allele.java +++ b/public/java/src/org/broadinstitute/variant/variantcontext/Allele.java @@ -111,7 +111,7 @@ public class Allele implements Comparable { /** A generic static NO_CALL allele for use */ // no public way to create an 
allele - private Allele(byte[] bases, boolean isRef) { + protected Allele(byte[] bases, boolean isRef) { // null alleles are no longer allowed if ( wouldBeNullAllele(bases) ) { throw new IllegalArgumentException("Null alleles are not supported"); @@ -140,7 +140,7 @@ public class Allele implements Comparable { throw new IllegalArgumentException("Unexpected base in allele bases \'" + new String(bases)+"\'"); } - private Allele(String bases, boolean isRef) { + protected Allele(String bases, boolean isRef) { this(bases.getBytes(), isRef); } diff --git a/public/java/src/org/broadinstitute/variant/vcf/VCFHeader.java b/public/java/src/org/broadinstitute/variant/vcf/VCFHeader.java index 583a01417..9bdb86a48 100644 --- a/public/java/src/org/broadinstitute/variant/vcf/VCFHeader.java +++ b/public/java/src/org/broadinstitute/variant/vcf/VCFHeader.java @@ -73,6 +73,10 @@ public class VCFHeader { public static final String REFERENCE_KEY = "reference"; public static final String CONTIG_KEY = "contig"; public static final String INTERVALS_KEY = "intervals"; + public static final String EXCLUDE_INTERVALS_KEY = "excludeIntervals"; + public static final String INTERVAL_MERGING_KEY = "interval_merging"; + public static final String INTERVAL_SET_RULE_KEY = "interval_set_rule"; + public static final String INTERVAL_PADDING_KEY = "interval_padding"; // were the input samples sorted originally (or are we sorting them)? 
private boolean samplesWereAlreadySorted = true; diff --git a/public/java/test/org/broadinstitute/sting/gatk/datasources/reads/DownsamplerBenchmark.java b/public/java/test/org/broadinstitute/sting/gatk/datasources/reads/DownsamplerBenchmark.java index 6f7a6391c..d960177d9 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/datasources/reads/DownsamplerBenchmark.java +++ b/public/java/test/org/broadinstitute/sting/gatk/datasources/reads/DownsamplerBenchmark.java @@ -26,23 +26,8 @@ package org.broadinstitute.sting.gatk.datasources.reads; import com.google.caliper.Param; -import net.sf.picard.filter.FilteringIterator; -import net.sf.samtools.SAMFileHeader; -import net.sf.samtools.SAMFileReader; -import net.sf.samtools.SAMRecord; -import org.broadinstitute.sting.commandline.Tags; import org.broadinstitute.sting.gatk.downsampling.DownsamplingMethod; -import org.broadinstitute.sting.gatk.ReadProperties; -import org.broadinstitute.sting.gatk.arguments.ValidationExclusion; -import org.broadinstitute.sting.gatk.filters.ReadFilter; -import org.broadinstitute.sting.gatk.filters.UnmappedReadFilter; -import org.broadinstitute.sting.gatk.iterators.LegacyLocusIteratorByState; -import org.broadinstitute.sting.gatk.iterators.ReadTransformer; import org.broadinstitute.sting.gatk.walkers.qc.CountLoci; -import org.broadinstitute.sting.utils.GenomeLocParser; - -import java.util.Collections; -import java.util.Iterator; /** * Created by IntelliJ IDEA. 
@@ -67,31 +52,32 @@ public class DownsamplerBenchmark extends ReadProcessingBenchmark { @Param private Downsampling downsampling; - public void timeDownsampling(int reps) { - for(int i = 0; i < reps; i++) { - SAMFileReader reader = new SAMFileReader(inputFile); - ReadProperties readProperties = new ReadProperties(Collections.singletonList(new SAMReaderID(inputFile,new Tags())), - reader.getFileHeader(), - SAMFileHeader.SortOrder.coordinate, - false, - SAMFileReader.ValidationStringency.SILENT, - downsampling.create(), - new ValidationExclusion(Collections.singletonList(ValidationExclusion.TYPE.ALL)), - Collections.emptyList(), - Collections.emptyList(), - false, - (byte)0); - - GenomeLocParser genomeLocParser = new GenomeLocParser(reader.getFileHeader().getSequenceDictionary()); - // Filter unmapped reads. TODO: is this always strictly necessary? Who in the GATK normally filters these out? - Iterator readIterator = new FilteringIterator(reader.iterator(),new UnmappedReadFilter()); - LegacyLocusIteratorByState locusIteratorByState = new LegacyLocusIteratorByState(readIterator,readProperties,genomeLocParser, LegacyLocusIteratorByState.sampleListForSAMWithoutReadGroups()); - while(locusIteratorByState.hasNext()) { - locusIteratorByState.next().getLocation(); - } - reader.close(); - } - } +// public void timeDownsampling(int reps) { +// for(int i = 0; i < reps; i++) { +// SAMFileReader reader = new SAMFileReader(inputFile); +// ReadProperties readProperties = new ReadProperties(Collections.singletonList(new SAMReaderID(inputFile,new Tags())), +// reader.getFileHeader(), +// SAMFileHeader.SortOrder.coordinate, +// false, +// SAMFileReader.ValidationStringency.SILENT, +// downsampling.create(), +// new ValidationExclusion(Collections.singletonList(ValidationExclusion.TYPE.ALL)), +// Collections.emptyList(), +// Collections.emptyList(), +// false, +// (byte)0, +// false); +// +// GenomeLocParser genomeLocParser = new 
GenomeLocParser(reader.getFileHeader().getSequenceDictionary()); +// // Filter unmapped reads. TODO: is this always strictly necessary? Who in the GATK normally filters these out? +// Iterator readIterator = new FilteringIterator(reader.iterator(),new UnmappedReadFilter()); +// LegacyLocusIteratorByState locusIteratorByState = new LegacyLocusIteratorByState(readIterator,readProperties,genomeLocParser, LegacyLocusIteratorByState.sampleListForSAMWithoutReadGroups()); +// while(locusIteratorByState.hasNext()) { +// locusIteratorByState.next().getLocation(); +// } +// reader.close(); +// } +// } private enum Downsampling { NONE { diff --git a/public/java/test/org/broadinstitute/sting/gatk/datasources/reads/SAMDataSourceUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/datasources/reads/SAMDataSourceUnitTest.java index 15e86f30e..23720e60d 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/datasources/reads/SAMDataSourceUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/datasources/reads/SAMDataSourceUnitTest.java @@ -182,7 +182,8 @@ public class SAMDataSourceUnitTest extends BaseTest { Collections.emptyList(), false, (byte) -1, - removeProgramRecords); + removeProgramRecords, + false); List dontRemoveProgramRecords = data.getHeader().getProgramRecords(); assertEquals(dontRemoveProgramRecords, defaultProgramRecords, "testRemoveProgramRecords: default program records differ from removeProgramRecords = false"); @@ -201,7 +202,8 @@ public class SAMDataSourceUnitTest extends BaseTest { Collections.emptyList(), false, (byte) -1, - removeProgramRecords); + removeProgramRecords, + false); List doRemoveProgramRecords = data.getHeader().getProgramRecords(); assertTrue(doRemoveProgramRecords.isEmpty(), "testRemoveProgramRecords: program records not cleared when removeProgramRecords = true"); diff --git a/public/java/test/org/broadinstitute/sting/gatk/iterators/LegacyLocusIteratorByStateUnitTest.java 
b/public/java/test/org/broadinstitute/sting/gatk/iterators/LegacyLocusIteratorByStateUnitTest.java deleted file mode 100644 index f350bcab4..000000000 --- a/public/java/test/org/broadinstitute/sting/gatk/iterators/LegacyLocusIteratorByStateUnitTest.java +++ /dev/null @@ -1,505 +0,0 @@ -/* -* Copyright (c) 2012 The Broad Institute -* -* Permission is hereby granted, free of charge, to any person -* obtaining a copy of this software and associated documentation -* files (the "Software"), to deal in the Software without -* restriction, including without limitation the rights to use, -* copy, modify, merge, publish, distribute, sublicense, and/or sell -* copies of the Software, and to permit persons to whom the -* Software is furnished to do so, subject to the following -* conditions: -* -* The above copyright notice and this permission notice shall be -* included in all copies or substantial portions of the Software. -* -* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR -* THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-*/ - -package org.broadinstitute.sting.gatk.iterators; - -import net.sf.samtools.*; -import net.sf.samtools.util.CloseableIterator; -import org.broadinstitute.sting.BaseTest; -import org.broadinstitute.sting.gatk.ReadProperties; -import org.broadinstitute.sting.gatk.arguments.ValidationExclusion; -import org.broadinstitute.sting.gatk.contexts.AlignmentContext; -import org.broadinstitute.sting.gatk.datasources.reads.SAMReaderID; -import org.broadinstitute.sting.gatk.filters.ReadFilter; -import org.broadinstitute.sting.utils.GenomeLocParser; -import org.broadinstitute.sting.utils.Utils; -import org.broadinstitute.sting.utils.pileup.PileupElement; -import org.broadinstitute.sting.utils.pileup.ReadBackedPileup; -import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; -import org.broadinstitute.sting.utils.sam.GATKSAMRecord; -import org.testng.Assert; -import org.testng.annotations.BeforeClass; -import org.testng.annotations.DataProvider; -import org.testng.annotations.Test; - -import java.util.Arrays; -import java.util.Collections; -import java.util.Iterator; -import java.util.List; - -/** - * testing of the LEGACY version of LocusIteratorByState - */ -public class LegacyLocusIteratorByStateUnitTest extends BaseTest { - private static SAMFileHeader header; - private LegacyLocusIteratorByState li; - private GenomeLocParser genomeLocParser; - - @BeforeClass - public void beforeClass() { - header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000); - genomeLocParser = new GenomeLocParser(header.getSequenceDictionary()); - } - - private LegacyLocusIteratorByState makeLTBS(List reads, ReadProperties readAttributes) { - return new LegacyLocusIteratorByState(new FakeCloseableIterator(reads.iterator()), readAttributes, genomeLocParser, LegacyLocusIteratorByState.sampleListForSAMWithoutReadGroups()); - } - - @Test - public void testXandEQOperators() { - final byte[] bases1 = new byte[] {'A','A','A','A','A','A','A','A','A','A'}; - final byte[] bases2 = new byte[] 
{'A','A','A','C','A','A','A','A','A','C'}; - - // create a test version of the Reads object - ReadProperties readAttributes = createTestReadProperties(); - - SAMRecord r1 = ArtificialSAMUtils.createArtificialRead(header,"r1",0,1,10); - r1.setReadBases(bases1); - r1.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20}); - r1.setCigarString("10M"); - - SAMRecord r2 = ArtificialSAMUtils.createArtificialRead(header,"r2",0,1,10); - r2.setReadBases(bases2); - r2.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20,20,20}); - r2.setCigarString("3=1X5=1X"); - - SAMRecord r3 = ArtificialSAMUtils.createArtificialRead(header,"r3",0,1,10); - r3.setReadBases(bases2); - r3.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20,20,20}); - r3.setCigarString("3=1X5M1X"); - - SAMRecord r4 = ArtificialSAMUtils.createArtificialRead(header,"r4",0,1,10); - r4.setReadBases(bases2); - r4.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20}); - r4.setCigarString("10M"); - - List reads = Arrays.asList(r1, r2, r3, r4); - - // create the iterator by state with the fake reads and fake records - li = makeLTBS(reads,readAttributes); - - while (li.hasNext()) { - AlignmentContext context = li.next(); - ReadBackedPileup pileup = context.getBasePileup(); - Assert.assertEquals(pileup.depthOfCoverage(), 4); - } - } - - @Test - public void testIndelsInRegularPileup() { - final byte[] bases = new byte[] {'A','A','A','A','A','A','A','A','A','A'}; - final byte[] indelBases = new byte[] {'A','A','A','A','C','T','A','A','A','A','A','A'}; - - // create a test version of the Reads object - ReadProperties readAttributes = createTestReadProperties(); - - SAMRecord before = ArtificialSAMUtils.createArtificialRead(header,"before",0,1,10); - before.setReadBases(bases); - before.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20}); - before.setCigarString("10M"); - - SAMRecord during = ArtificialSAMUtils.createArtificialRead(header,"during",0,2,10); - during.setReadBases(indelBases); 
- during.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20,20,20}); - during.setCigarString("4M2I6M"); - - SAMRecord after = ArtificialSAMUtils.createArtificialRead(header,"after",0,3,10); - after.setReadBases(bases); - after.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20}); - after.setCigarString("10M"); - - List reads = Arrays.asList(before, during, after); - - // create the iterator by state with the fake reads and fake records - li = makeLTBS(reads,readAttributes); - - boolean foundIndel = false; - while (li.hasNext()) { - AlignmentContext context = li.next(); - ReadBackedPileup pileup = context.getBasePileup().getBaseFilteredPileup(10); - for (PileupElement p : pileup) { - if (p.isBeforeInsertion()) { - foundIndel = true; - Assert.assertEquals(p.getEventLength(), 2, "Wrong event length"); - Assert.assertEquals(p.getEventBases(), "CT", "Inserted bases are incorrect"); - break; - } - } - - } - - Assert.assertTrue(foundIndel,"Indel in pileup not found"); - } - - @Test - public void testWholeIndelReadInIsolation() { - final int firstLocus = 44367789; - - // create a test version of the Reads object - ReadProperties readAttributes = createTestReadProperties(); - - SAMRecord indelOnlyRead = ArtificialSAMUtils.createArtificialRead(header, "indelOnly", 0, firstLocus, 76); - indelOnlyRead.setReadBases(Utils.dupBytes((byte)'A',76)); - indelOnlyRead.setBaseQualities(Utils.dupBytes((byte) '@', 76)); - indelOnlyRead.setCigarString("76I"); - - List reads = Arrays.asList(indelOnlyRead); - - // create the iterator by state with the fake reads and fake records - li = makeLTBS(reads, readAttributes); - - // Traditionally, reads that end with indels bleed into the pileup at the following locus. Verify that the next pileup contains this read - // and considers it to be an indel-containing read. 
- Assert.assertTrue(li.hasNext(),"Should have found a whole-indel read in the normal base pileup without extended events enabled"); - AlignmentContext alignmentContext = li.next(); - Assert.assertEquals(alignmentContext.getLocation().getStart(), firstLocus, "Base pileup is at incorrect location."); - ReadBackedPileup basePileup = alignmentContext.getBasePileup(); - Assert.assertEquals(basePileup.getReads().size(),1,"Pileup is of incorrect size"); - Assert.assertSame(basePileup.getReads().get(0), indelOnlyRead, "Read in pileup is incorrect"); - } - - /** - * Test to make sure that reads supporting only an indel (example cigar string: 76I) do - * not negatively influence the ordering of the pileup. - */ - @Test - public void testWholeIndelRead() { - final int firstLocus = 44367788, secondLocus = firstLocus + 1; - - SAMRecord leadingRead = ArtificialSAMUtils.createArtificialRead(header,"leading",0,firstLocus,76); - leadingRead.setReadBases(Utils.dupBytes((byte)'A',76)); - leadingRead.setBaseQualities(Utils.dupBytes((byte)'@',76)); - leadingRead.setCigarString("1M75I"); - - SAMRecord indelOnlyRead = ArtificialSAMUtils.createArtificialRead(header,"indelOnly",0,secondLocus,76); - indelOnlyRead.setReadBases(Utils.dupBytes((byte) 'A', 76)); - indelOnlyRead.setBaseQualities(Utils.dupBytes((byte)'@',76)); - indelOnlyRead.setCigarString("76I"); - - SAMRecord fullMatchAfterIndel = ArtificialSAMUtils.createArtificialRead(header,"fullMatch",0,secondLocus,76); - fullMatchAfterIndel.setReadBases(Utils.dupBytes((byte)'A',76)); - fullMatchAfterIndel.setBaseQualities(Utils.dupBytes((byte)'@',76)); - fullMatchAfterIndel.setCigarString("75I1M"); - - List reads = Arrays.asList(leadingRead, indelOnlyRead, fullMatchAfterIndel); - - // create the iterator by state with the fake reads and fake records - li = makeLTBS(reads, createTestReadProperties()); - int currentLocus = firstLocus; - int numAlignmentContextsFound = 0; - - while(li.hasNext()) { - AlignmentContext alignmentContext = 
li.next(); - Assert.assertEquals(alignmentContext.getLocation().getStart(),currentLocus,"Current locus returned by alignment context is incorrect"); - - if(currentLocus == firstLocus) { - List readsAtLocus = alignmentContext.getBasePileup().getReads(); - Assert.assertEquals(readsAtLocus.size(),1,"Wrong number of reads at locus " + currentLocus); - Assert.assertSame(readsAtLocus.get(0),leadingRead,"leadingRead absent from pileup at locus " + currentLocus); - } - else if(currentLocus == secondLocus) { - List readsAtLocus = alignmentContext.getBasePileup().getReads(); - Assert.assertEquals(readsAtLocus.size(),2,"Wrong number of reads at locus " + currentLocus); - Assert.assertSame(readsAtLocus.get(0),indelOnlyRead,"indelOnlyRead absent from pileup at locus " + currentLocus); - Assert.assertSame(readsAtLocus.get(1),fullMatchAfterIndel,"fullMatchAfterIndel absent from pileup at locus " + currentLocus); - } - - currentLocus++; - numAlignmentContextsFound++; - } - - Assert.assertEquals(numAlignmentContextsFound, 2, "Found incorrect number of alignment contexts"); - } - - /** - * Test to make sure that reads supporting only an indel (example cigar string: 76I) are represented properly - */ - @Test - public void testWholeIndelReadRepresentedTest() { - final int firstLocus = 44367788, secondLocus = firstLocus + 1; - - SAMRecord read1 = ArtificialSAMUtils.createArtificialRead(header,"read1",0,secondLocus,1); - read1.setReadBases(Utils.dupBytes((byte) 'A', 1)); - read1.setBaseQualities(Utils.dupBytes((byte) '@', 1)); - read1.setCigarString("1I"); - - List reads = Arrays.asList(read1); - - // create the iterator by state with the fake reads and fake records - li = makeLTBS(reads, createTestReadProperties()); - - while(li.hasNext()) { - AlignmentContext alignmentContext = li.next(); - ReadBackedPileup p = alignmentContext.getBasePileup(); - Assert.assertTrue(p.getNumberOfElements() == 1); - PileupElement pe = p.iterator().next(); - Assert.assertTrue(pe.isBeforeInsertion()); - 
Assert.assertFalse(pe.isAfterInsertion()); - Assert.assertEquals(pe.getEventBases(), "A"); - } - - SAMRecord read2 = ArtificialSAMUtils.createArtificialRead(header,"read2",0,secondLocus,10); - read2.setReadBases(Utils.dupBytes((byte) 'A', 10)); - read2.setBaseQualities(Utils.dupBytes((byte) '@', 10)); - read2.setCigarString("10I"); - - reads = Arrays.asList(read2); - - // create the iterator by state with the fake reads and fake records - li = makeLTBS(reads, createTestReadProperties()); - - while(li.hasNext()) { - AlignmentContext alignmentContext = li.next(); - ReadBackedPileup p = alignmentContext.getBasePileup(); - Assert.assertTrue(p.getNumberOfElements() == 1); - PileupElement pe = p.iterator().next(); - Assert.assertTrue(pe.isBeforeInsertion()); - Assert.assertFalse(pe.isAfterInsertion()); - Assert.assertEquals(pe.getEventBases(), "AAAAAAAAAA"); - } - } - - //////////////////////////////////////////// - // comprehensive LIBS/PileupElement tests // - //////////////////////////////////////////// - - private static class LIBSTest { - - - final String cigar; - final int readLength; - - private LIBSTest(final String cigar, final int readLength) { - this.cigar = cigar; - this.readLength = readLength; - } - } - - @DataProvider(name = "LIBSTest") - public Object[][] createLIBSTestData() { - - //TODO -- when LIBS is fixed this should be replaced to provide all possible permutations of CIGAR strings - - return new Object[][]{ - {new LIBSTest("1I", 1)}, - {new LIBSTest("10I", 10)}, - {new LIBSTest("2M2I2M", 6)}, - {new LIBSTest("2M2I", 4)}, - //TODO -- uncomment these when LIBS is fixed - //{new LIBSTest("2I2M", 4, Arrays.asList(2,3), Arrays.asList(IS_AFTER_INSERTION_FLAG,0))}, - //{new LIBSTest("1I1M1D1M", 3, Arrays.asList(0,1), Arrays.asList(IS_AFTER_INSERTION_FLAG | IS_BEFORE_DELETION_START_FLAG | IS_BEFORE_DELETED_BASE_FLAG,IS_AFTER_DELETED_BASE_FLAG | IS_AFTER_DELETION_END_FLAG))}, - //{new LIBSTest("1S1I1M", 3, Arrays.asList(2), 
Arrays.asList(IS_AFTER_INSERTION_FLAG))}, - //{new LIBSTest("1M2D2M", 3)}, - {new LIBSTest("1S1M", 2)}, - {new LIBSTest("1M1S", 2)}, - {new LIBSTest("1S1M1I", 3)} - }; - } - - @Test(dataProvider = "LIBSTest") - public void testLIBS(LIBSTest params) { - final int locus = 44367788; - - SAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "read", 0, locus, params.readLength); - read.setReadBases(Utils.dupBytes((byte) 'A', params.readLength)); - read.setBaseQualities(Utils.dupBytes((byte) '@', params.readLength)); - read.setCigarString(params.cigar); - - // create the iterator by state with the fake reads and fake records - li = makeLTBS(Arrays.asList(read), createTestReadProperties()); - final LIBS_position tester = new LIBS_position(read); - - while ( li.hasNext() ) { - AlignmentContext alignmentContext = li.next(); - ReadBackedPileup p = alignmentContext.getBasePileup(); - Assert.assertTrue(p.getNumberOfElements() == 1); - PileupElement pe = p.iterator().next(); - - tester.stepForwardOnGenome(); - - Assert.assertEquals(pe.isBeforeDeletedBase(), tester.isBeforeDeletedBase); - Assert.assertEquals(pe.isBeforeDeletionStart(), tester.isBeforeDeletionStart); - Assert.assertEquals(pe.isAfterDeletedBase(), tester.isAfterDeletedBase); - Assert.assertEquals(pe.isAfterDeletionEnd(), tester.isAfterDeletionEnd); - Assert.assertEquals(pe.isBeforeInsertion(), tester.isBeforeInsertion); - Assert.assertEquals(pe.isAfterInsertion(), tester.isAfterInsertion); - Assert.assertEquals(pe.isNextToSoftClip(), tester.isNextToSoftClip); - Assert.assertEquals(pe.getOffset(), tester.getCurrentReadOffset()); - } - } - - //////////////////////////////////////////////// - // End comprehensive LIBS/PileupElement tests // - //////////////////////////////////////////////// - - private static ReadProperties createTestReadProperties() { - return new ReadProperties( - Collections.emptyList(), - new SAMFileHeader(), - SAMFileHeader.SortOrder.coordinate, - false, - 
SAMFileReader.ValidationStringency.STRICT, - null, - new ValidationExclusion(), - Collections.emptyList(), - Collections.emptyList(), - false, - (byte) -1 - ); - } -} - -class FakeCloseableIterator implements CloseableIterator { - Iterator iterator; - - public FakeCloseableIterator(Iterator it) { - iterator = it; - } - - @Override - public void close() {} - - @Override - public boolean hasNext() { - return iterator.hasNext(); - } - - @Override - public T next() { - return iterator.next(); - } - - @Override - public void remove() { - throw new UnsupportedOperationException("Don't remove!"); - } -} - - -final class LIBS_position { - - SAMRecord read; - - final int numOperators; - int currentOperatorIndex = 0; - int currentPositionOnOperator = 0; - int currentReadOffset = 0; - - boolean isBeforeDeletionStart = false; - boolean isBeforeDeletedBase = false; - boolean isAfterDeletionEnd = false; - boolean isAfterDeletedBase = false; - boolean isBeforeInsertion = false; - boolean isAfterInsertion = false; - boolean isNextToSoftClip = false; - - boolean sawMop = false; - - public LIBS_position(final SAMRecord read) { - this.read = read; - numOperators = read.getCigar().numCigarElements(); - } - - public int getCurrentReadOffset() { - return Math.max(0, currentReadOffset - 1); - } - - /** - * Steps forward on the genome. Returns false when done reading the read, true otherwise. - */ - public boolean stepForwardOnGenome() { - if ( currentOperatorIndex == numOperators ) - return false; - - CigarElement curElement = read.getCigar().getCigarElement(currentOperatorIndex); - if ( currentPositionOnOperator >= curElement.getLength() ) { - if ( ++currentOperatorIndex == numOperators ) - return false; - - curElement = read.getCigar().getCigarElement(currentOperatorIndex); - currentPositionOnOperator = 0; - } - - switch ( curElement.getOperator() ) { - case I: // insertion w.r.t. 
the reference - if ( !sawMop ) - break; - case S: // soft clip - currentReadOffset += curElement.getLength(); - case H: // hard clip - case P: // padding - currentOperatorIndex++; - return stepForwardOnGenome(); - - case D: // deletion w.r.t. the reference - case N: // reference skip (looks and gets processed just like a "deletion", just different logical meaning) - currentPositionOnOperator++; - break; - - case M: - case EQ: - case X: - sawMop = true; - currentReadOffset++; - currentPositionOnOperator++; - break; - default: - throw new IllegalStateException("No support for cigar op: " + curElement.getOperator()); - } - - final boolean isFirstOp = currentOperatorIndex == 0; - final boolean isLastOp = currentOperatorIndex == numOperators - 1; - final boolean isFirstBaseOfOp = currentPositionOnOperator == 1; - final boolean isLastBaseOfOp = currentPositionOnOperator == curElement.getLength(); - - isBeforeDeletionStart = isBeforeOp(read.getCigar(), currentOperatorIndex, CigarOperator.D, isLastOp, isLastBaseOfOp); - isBeforeDeletedBase = isBeforeDeletionStart || (!isLastBaseOfOp && curElement.getOperator() == CigarOperator.D); - isAfterDeletionEnd = isAfterOp(read.getCigar(), currentOperatorIndex, CigarOperator.D, isFirstOp, isFirstBaseOfOp); - isAfterDeletedBase = isAfterDeletionEnd || (!isFirstBaseOfOp && curElement.getOperator() == CigarOperator.D); - isBeforeInsertion = isBeforeOp(read.getCigar(), currentOperatorIndex, CigarOperator.I, isLastOp, isLastBaseOfOp) - || (!sawMop && curElement.getOperator() == CigarOperator.I); - isAfterInsertion = isAfterOp(read.getCigar(), currentOperatorIndex, CigarOperator.I, isFirstOp, isFirstBaseOfOp); - isNextToSoftClip = isBeforeOp(read.getCigar(), currentOperatorIndex, CigarOperator.S, isLastOp, isLastBaseOfOp) - || isAfterOp(read.getCigar(), currentOperatorIndex, CigarOperator.S, isFirstOp, isFirstBaseOfOp); - - return true; - } - - private static boolean isBeforeOp(final Cigar cigar, - final int currentOperatorIndex, - final 
CigarOperator op, - final boolean isLastOp, - final boolean isLastBaseOfOp) { - return !isLastOp && isLastBaseOfOp && cigar.getCigarElement(currentOperatorIndex+1).getOperator() == op; - } - - private static boolean isAfterOp(final Cigar cigar, - final int currentOperatorIndex, - final CigarOperator op, - final boolean isFirstOp, - final boolean isFirstBaseOfOp) { - return !isFirstOp && isFirstBaseOfOp && cigar.getCigarElement(currentOperatorIndex-1).getOperator() == op; - } -} diff --git a/public/java/test/org/broadinstitute/sting/gatk/iterators/LocusIteratorByStateUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/iterators/LocusIteratorByStateUnitTest.java deleted file mode 100644 index 0cd576cbd..000000000 --- a/public/java/test/org/broadinstitute/sting/gatk/iterators/LocusIteratorByStateUnitTest.java +++ /dev/null @@ -1,673 +0,0 @@ -/* -* Copyright (c) 2012 The Broad Institute -* -* Permission is hereby granted, free of charge, to any person -* obtaining a copy of this software and associated documentation -* files (the "Software"), to deal in the Software without -* restriction, including without limitation the rights to use, -* copy, modify, merge, publish, distribute, sublicense, and/or sell -* copies of the Software, and to permit persons to whom the -* Software is furnished to do so, subject to the following -* conditions: -* -* The above copyright notice and this permission notice shall be -* included in all copies or substantial portions of the Software. -* -* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR -* THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-*/ - -package org.broadinstitute.sting.gatk.iterators; - -import net.sf.samtools.*; -import net.sf.samtools.util.CloseableIterator; -import org.broadinstitute.sting.BaseTest; -import org.broadinstitute.sting.gatk.ReadProperties; -import org.broadinstitute.sting.gatk.arguments.ValidationExclusion; -import org.broadinstitute.sting.gatk.contexts.AlignmentContext; -import org.broadinstitute.sting.gatk.datasources.reads.SAMReaderID; -import org.broadinstitute.sting.gatk.downsampling.DownsamplingMethod; -import org.broadinstitute.sting.gatk.filters.ReadFilter; -import org.broadinstitute.sting.utils.GenomeLocParser; -import org.broadinstitute.sting.utils.MathUtils; -import org.broadinstitute.sting.utils.Utils; -import org.broadinstitute.sting.utils.pileup.PileupElement; -import org.broadinstitute.sting.utils.pileup.ReadBackedPileup; -import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; -import org.broadinstitute.sting.utils.sam.GATKSAMRecord; -import org.testng.Assert; -import org.testng.annotations.BeforeClass; -import org.testng.annotations.DataProvider; -import org.testng.annotations.Test; - -import java.util.*; - -/** - * testing of the new (non-legacy) version of LocusIteratorByState - */ -public class LocusIteratorByStateUnitTest extends BaseTest { - private static SAMFileHeader header; - private LocusIteratorByState li; - private GenomeLocParser genomeLocParser; - - @BeforeClass - public void beforeClass() { - header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000); - genomeLocParser = new GenomeLocParser(header.getSequenceDictionary()); - } - - private LocusIteratorByState makeLTBS(List reads, ReadProperties readAttributes) { - return new LocusIteratorByState(new FakeCloseableIterator(reads.iterator()), readAttributes, genomeLocParser, LocusIteratorByState.sampleListForSAMWithoutReadGroups()); - } - - @Test - public void testXandEQOperators() { - final byte[] bases1 = new byte[] {'A','A','A','A','A','A','A','A','A','A'}; - final byte[] bases2 = 
new byte[] {'A','A','A','C','A','A','A','A','A','C'}; - - // create a test version of the Reads object - ReadProperties readAttributes = createTestReadProperties(); - - SAMRecord r1 = ArtificialSAMUtils.createArtificialRead(header,"r1",0,1,10); - r1.setReadBases(bases1); - r1.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20}); - r1.setCigarString("10M"); - - SAMRecord r2 = ArtificialSAMUtils.createArtificialRead(header,"r2",0,1,10); - r2.setReadBases(bases2); - r2.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20,20,20}); - r2.setCigarString("3=1X5=1X"); - - SAMRecord r3 = ArtificialSAMUtils.createArtificialRead(header,"r3",0,1,10); - r3.setReadBases(bases2); - r3.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20,20,20}); - r3.setCigarString("3=1X5M1X"); - - SAMRecord r4 = ArtificialSAMUtils.createArtificialRead(header,"r4",0,1,10); - r4.setReadBases(bases2); - r4.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20}); - r4.setCigarString("10M"); - - List reads = Arrays.asList(r1, r2, r3, r4); - - // create the iterator by state with the fake reads and fake records - li = makeLTBS(reads,readAttributes); - - while (li.hasNext()) { - AlignmentContext context = li.next(); - ReadBackedPileup pileup = context.getBasePileup(); - Assert.assertEquals(pileup.depthOfCoverage(), 4); - } - } - - @Test - public void testIndelsInRegularPileup() { - final byte[] bases = new byte[] {'A','A','A','A','A','A','A','A','A','A'}; - final byte[] indelBases = new byte[] {'A','A','A','A','C','T','A','A','A','A','A','A'}; - - // create a test version of the Reads object - ReadProperties readAttributes = createTestReadProperties(); - - SAMRecord before = ArtificialSAMUtils.createArtificialRead(header,"before",0,1,10); - before.setReadBases(bases); - before.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20}); - before.setCigarString("10M"); - - SAMRecord during = ArtificialSAMUtils.createArtificialRead(header,"during",0,2,10); - 
during.setReadBases(indelBases); - during.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20,20,20}); - during.setCigarString("4M2I6M"); - - SAMRecord after = ArtificialSAMUtils.createArtificialRead(header,"after",0,3,10); - after.setReadBases(bases); - after.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20}); - after.setCigarString("10M"); - - List reads = Arrays.asList(before, during, after); - - // create the iterator by state with the fake reads and fake records - li = makeLTBS(reads,readAttributes); - - boolean foundIndel = false; - while (li.hasNext()) { - AlignmentContext context = li.next(); - ReadBackedPileup pileup = context.getBasePileup().getBaseFilteredPileup(10); - for (PileupElement p : pileup) { - if (p.isBeforeInsertion()) { - foundIndel = true; - Assert.assertEquals(p.getEventLength(), 2, "Wrong event length"); - Assert.assertEquals(p.getEventBases(), "CT", "Inserted bases are incorrect"); - break; - } - } - - } - - Assert.assertTrue(foundIndel,"Indel in pileup not found"); - } - - @Test - public void testWholeIndelReadInIsolation() { - final int firstLocus = 44367789; - - // create a test version of the Reads object - ReadProperties readAttributes = createTestReadProperties(); - - SAMRecord indelOnlyRead = ArtificialSAMUtils.createArtificialRead(header, "indelOnly", 0, firstLocus, 76); - indelOnlyRead.setReadBases(Utils.dupBytes((byte)'A',76)); - indelOnlyRead.setBaseQualities(Utils.dupBytes((byte) '@', 76)); - indelOnlyRead.setCigarString("76I"); - - List reads = Arrays.asList(indelOnlyRead); - - // create the iterator by state with the fake reads and fake records - li = makeLTBS(reads, readAttributes); - - // Traditionally, reads that end with indels bleed into the pileup at the following locus. Verify that the next pileup contains this read - // and considers it to be an indel-containing read. 
- Assert.assertTrue(li.hasNext(),"Should have found a whole-indel read in the normal base pileup without extended events enabled"); - AlignmentContext alignmentContext = li.next(); - Assert.assertEquals(alignmentContext.getLocation().getStart(), firstLocus, "Base pileup is at incorrect location."); - ReadBackedPileup basePileup = alignmentContext.getBasePileup(); - Assert.assertEquals(basePileup.getReads().size(),1,"Pileup is of incorrect size"); - Assert.assertSame(basePileup.getReads().get(0), indelOnlyRead, "Read in pileup is incorrect"); - } - - /** - * Test to make sure that reads supporting only an indel (example cigar string: 76I) do - * not negatively influence the ordering of the pileup. - */ - @Test - public void testWholeIndelRead() { - final int firstLocus = 44367788, secondLocus = firstLocus + 1; - - SAMRecord leadingRead = ArtificialSAMUtils.createArtificialRead(header,"leading",0,firstLocus,76); - leadingRead.setReadBases(Utils.dupBytes((byte)'A',76)); - leadingRead.setBaseQualities(Utils.dupBytes((byte)'@',76)); - leadingRead.setCigarString("1M75I"); - - SAMRecord indelOnlyRead = ArtificialSAMUtils.createArtificialRead(header,"indelOnly",0,secondLocus,76); - indelOnlyRead.setReadBases(Utils.dupBytes((byte) 'A', 76)); - indelOnlyRead.setBaseQualities(Utils.dupBytes((byte)'@',76)); - indelOnlyRead.setCigarString("76I"); - - SAMRecord fullMatchAfterIndel = ArtificialSAMUtils.createArtificialRead(header,"fullMatch",0,secondLocus,76); - fullMatchAfterIndel.setReadBases(Utils.dupBytes((byte)'A',76)); - fullMatchAfterIndel.setBaseQualities(Utils.dupBytes((byte)'@',76)); - fullMatchAfterIndel.setCigarString("75I1M"); - - List reads = Arrays.asList(leadingRead, indelOnlyRead, fullMatchAfterIndel); - - // create the iterator by state with the fake reads and fake records - li = makeLTBS(reads, createTestReadProperties()); - int currentLocus = firstLocus; - int numAlignmentContextsFound = 0; - - while(li.hasNext()) { - AlignmentContext alignmentContext = 
li.next(); - Assert.assertEquals(alignmentContext.getLocation().getStart(),currentLocus,"Current locus returned by alignment context is incorrect"); - - if(currentLocus == firstLocus) { - List readsAtLocus = alignmentContext.getBasePileup().getReads(); - Assert.assertEquals(readsAtLocus.size(),1,"Wrong number of reads at locus " + currentLocus); - Assert.assertSame(readsAtLocus.get(0),leadingRead,"leadingRead absent from pileup at locus " + currentLocus); - } - else if(currentLocus == secondLocus) { - List readsAtLocus = alignmentContext.getBasePileup().getReads(); - Assert.assertEquals(readsAtLocus.size(),2,"Wrong number of reads at locus " + currentLocus); - Assert.assertSame(readsAtLocus.get(0),indelOnlyRead,"indelOnlyRead absent from pileup at locus " + currentLocus); - Assert.assertSame(readsAtLocus.get(1),fullMatchAfterIndel,"fullMatchAfterIndel absent from pileup at locus " + currentLocus); - } - - currentLocus++; - numAlignmentContextsFound++; - } - - Assert.assertEquals(numAlignmentContextsFound, 2, "Found incorrect number of alignment contexts"); - } - - /** - * Test to make sure that reads supporting only an indel (example cigar string: 76I) are represented properly - */ - @Test - public void testWholeIndelReadRepresentedTest() { - final int firstLocus = 44367788, secondLocus = firstLocus + 1; - - SAMRecord read1 = ArtificialSAMUtils.createArtificialRead(header,"read1",0,secondLocus,1); - read1.setReadBases(Utils.dupBytes((byte) 'A', 1)); - read1.setBaseQualities(Utils.dupBytes((byte) '@', 1)); - read1.setCigarString("1I"); - - List reads = Arrays.asList(read1); - - // create the iterator by state with the fake reads and fake records - li = makeLTBS(reads, createTestReadProperties()); - - while(li.hasNext()) { - AlignmentContext alignmentContext = li.next(); - ReadBackedPileup p = alignmentContext.getBasePileup(); - Assert.assertTrue(p.getNumberOfElements() == 1); - PileupElement pe = p.iterator().next(); - Assert.assertTrue(pe.isBeforeInsertion()); - 
Assert.assertFalse(pe.isAfterInsertion()); - Assert.assertEquals(pe.getEventBases(), "A"); - } - - SAMRecord read2 = ArtificialSAMUtils.createArtificialRead(header,"read2",0,secondLocus,10); - read2.setReadBases(Utils.dupBytes((byte) 'A', 10)); - read2.setBaseQualities(Utils.dupBytes((byte) '@', 10)); - read2.setCigarString("10I"); - - reads = Arrays.asList(read2); - - // create the iterator by state with the fake reads and fake records - li = makeLTBS(reads, createTestReadProperties()); - - while(li.hasNext()) { - AlignmentContext alignmentContext = li.next(); - ReadBackedPileup p = alignmentContext.getBasePileup(); - Assert.assertTrue(p.getNumberOfElements() == 1); - PileupElement pe = p.iterator().next(); - Assert.assertTrue(pe.isBeforeInsertion()); - Assert.assertFalse(pe.isAfterInsertion()); - Assert.assertEquals(pe.getEventBases(), "AAAAAAAAAA"); - } - } - - //////////////////////////////////////////// - // comprehensive LIBS/PileupElement tests // - //////////////////////////////////////////// - - private static class LIBSTest { - - - final String cigar; - final int readLength; - - private LIBSTest(final String cigar, final int readLength) { - this.cigar = cigar; - this.readLength = readLength; - } - } - - @DataProvider(name = "LIBSTest") - public Object[][] createLIBSTestData() { - - //TODO -- when LIBS is fixed this should be replaced to provide all possible permutations of CIGAR strings - - return new Object[][]{ - {new LIBSTest("1I", 1)}, - {new LIBSTest("10I", 10)}, - {new LIBSTest("2M2I2M", 6)}, - {new LIBSTest("2M2I", 4)}, - //TODO -- uncomment these when LIBS is fixed - //{new LIBSTest("2I2M", 4, Arrays.asList(2,3), Arrays.asList(IS_AFTER_INSERTION_FLAG,0))}, - //{new LIBSTest("1I1M1D1M", 3, Arrays.asList(0,1), Arrays.asList(IS_AFTER_INSERTION_FLAG | IS_BEFORE_DELETION_START_FLAG | IS_BEFORE_DELETED_BASE_FLAG,IS_AFTER_DELETED_BASE_FLAG | IS_AFTER_DELETION_END_FLAG))}, - //{new LIBSTest("1S1I1M", 3, Arrays.asList(2), 
Arrays.asList(IS_AFTER_INSERTION_FLAG))}, - //{new LIBSTest("1M2D2M", 3)}, - {new LIBSTest("1S1M", 2)}, - {new LIBSTest("1M1S", 2)}, - {new LIBSTest("1S1M1I", 3)} - }; - } - - @Test(dataProvider = "LIBSTest") - public void testLIBS(LIBSTest params) { - final int locus = 44367788; - - SAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "read", 0, locus, params.readLength); - read.setReadBases(Utils.dupBytes((byte) 'A', params.readLength)); - read.setBaseQualities(Utils.dupBytes((byte) '@', params.readLength)); - read.setCigarString(params.cigar); - - // create the iterator by state with the fake reads and fake records - li = makeLTBS(Arrays.asList(read), createTestReadProperties()); - final LIBS_position tester = new LIBS_position(read); - - while ( li.hasNext() ) { - AlignmentContext alignmentContext = li.next(); - ReadBackedPileup p = alignmentContext.getBasePileup(); - Assert.assertTrue(p.getNumberOfElements() == 1); - PileupElement pe = p.iterator().next(); - - tester.stepForwardOnGenome(); - - Assert.assertEquals(pe.isBeforeDeletedBase(), tester.isBeforeDeletedBase); - Assert.assertEquals(pe.isBeforeDeletionStart(), tester.isBeforeDeletionStart); - Assert.assertEquals(pe.isAfterDeletedBase(), tester.isAfterDeletedBase); - Assert.assertEquals(pe.isAfterDeletionEnd(), tester.isAfterDeletionEnd); - Assert.assertEquals(pe.isBeforeInsertion(), tester.isBeforeInsertion); - Assert.assertEquals(pe.isAfterInsertion(), tester.isAfterInsertion); - Assert.assertEquals(pe.isNextToSoftClip(), tester.isNextToSoftClip); - Assert.assertEquals(pe.getOffset(), tester.getCurrentReadOffset()); - } - } - - //////////////////////////////////////////////// - // End comprehensive LIBS/PileupElement tests // - //////////////////////////////////////////////// - - - /////////////////////////////////////// - // Read State Manager Tests // - /////////////////////////////////////// - - private class PerSampleReadStateManagerTest extends TestDataProvider { - private List 
readCountsPerAlignmentStart; - private List reads; - private List> recordStatesByAlignmentStart; - private int removalInterval; - - public PerSampleReadStateManagerTest( List readCountsPerAlignmentStart, int removalInterval ) { - super(PerSampleReadStateManagerTest.class); - - this.readCountsPerAlignmentStart = readCountsPerAlignmentStart; - this.removalInterval = removalInterval; - - reads = new ArrayList(); - recordStatesByAlignmentStart = new ArrayList>(); - - setName(String.format("%s: readCountsPerAlignmentStart: %s removalInterval: %d", - getClass().getSimpleName(), readCountsPerAlignmentStart, removalInterval)); - } - - public void run() { - LocusIteratorByState libs = makeLTBS(new ArrayList(), createTestReadProperties()); - LocusIteratorByState.ReadStateManager readStateManager = - libs.new ReadStateManager(new ArrayList().iterator()); - LocusIteratorByState.ReadStateManager.PerSampleReadStateManager perSampleReadStateManager = - readStateManager.new PerSampleReadStateManager(); - - makeReads(); - - for ( ArrayList stackRecordStates : recordStatesByAlignmentStart ) { - perSampleReadStateManager.addStatesAtNextAlignmentStart(stackRecordStates); - } - - // read state manager should have the right number of reads - Assert.assertEquals(reads.size(), perSampleReadStateManager.size()); - - Iterator originalReadsIterator = reads.iterator(); - Iterator recordStateIterator = perSampleReadStateManager.iterator(); - int recordStateCount = 0; - int numReadStatesRemoved = 0; - - // Do a first-pass validation of the record state iteration by making sure we get back everything we - // put in, in the same order, doing any requested removals of read states along the way - while ( recordStateIterator.hasNext() ) { - LocusIteratorByState.SAMRecordState readState = recordStateIterator.next(); - recordStateCount++; - SAMRecord readFromPerSampleReadStateManager = readState.getRead(); - - Assert.assertTrue(originalReadsIterator.hasNext()); - SAMRecord originalRead = 
originalReadsIterator.next(); - - // The read we get back should be literally the same read in memory as we put in - Assert.assertTrue(originalRead == readFromPerSampleReadStateManager); - - // If requested, remove a read state every removalInterval states - if ( removalInterval > 0 && recordStateCount % removalInterval == 0 ) { - recordStateIterator.remove(); - numReadStatesRemoved++; - } - } - - Assert.assertFalse(originalReadsIterator.hasNext()); - - // If we removed any read states, do a second pass through the read states to make sure the right - // states were removed - if ( numReadStatesRemoved > 0 ) { - Assert.assertEquals(perSampleReadStateManager.size(), reads.size() - numReadStatesRemoved); - - originalReadsIterator = reads.iterator(); - recordStateIterator = perSampleReadStateManager.iterator(); - int readCount = 0; - int readStateCount = 0; - - // Match record states with the reads that should remain after removal - while ( recordStateIterator.hasNext() ) { - LocusIteratorByState.SAMRecordState readState = recordStateIterator.next(); - readStateCount++; - SAMRecord readFromPerSampleReadStateManager = readState.getRead(); - - Assert.assertTrue(originalReadsIterator.hasNext()); - - SAMRecord originalRead = originalReadsIterator.next(); - readCount++; - - if ( readCount % removalInterval == 0 ) { - originalRead = originalReadsIterator.next(); // advance to next read, since the previous one should have been discarded - readCount++; - } - - // The read we get back should be literally the same read in memory as we put in (after accounting for removals) - Assert.assertTrue(originalRead == readFromPerSampleReadStateManager); - } - - Assert.assertEquals(readStateCount, reads.size() - numReadStatesRemoved); - } - - // Allow memory used by this test to be reclaimed - readCountsPerAlignmentStart = null; - reads = null; - recordStatesByAlignmentStart = null; - } - - private void makeReads() { - int alignmentStart = 1; - - for ( int readsThisStack : 
readCountsPerAlignmentStart ) { - ArrayList stackReads = new ArrayList(ArtificialSAMUtils.createStackOfIdenticalArtificialReads(readsThisStack, header, "foo", 0, alignmentStart, MathUtils.randomIntegerInRange(50, 100))); - ArrayList stackRecordStates = new ArrayList(); - - for ( SAMRecord read : stackReads ) { - stackRecordStates.add(new LocusIteratorByState.SAMRecordState(read)); - } - - reads.addAll(stackReads); - recordStatesByAlignmentStart.add(stackRecordStates); - } - } - } - - @DataProvider(name = "PerSampleReadStateManagerTestDataProvider") - public Object[][] createPerSampleReadStateManagerTests() { - for ( List thisTestReadStateCounts : Arrays.asList( Arrays.asList(1), - Arrays.asList(2), - Arrays.asList(10), - Arrays.asList(1, 1), - Arrays.asList(2, 2), - Arrays.asList(10, 10), - Arrays.asList(1, 10), - Arrays.asList(10, 1), - Arrays.asList(1, 1, 1), - Arrays.asList(2, 2, 2), - Arrays.asList(10, 10, 10), - Arrays.asList(1, 1, 1, 1, 1, 1), - Arrays.asList(10, 10, 10, 10, 10, 10), - Arrays.asList(1, 2, 10, 1, 2, 10) - ) ) { - - for ( int removalInterval : Arrays.asList(0, 2, 3) ) { - new PerSampleReadStateManagerTest(thisTestReadStateCounts, removalInterval); - } - } - - return PerSampleReadStateManagerTest.getTests(PerSampleReadStateManagerTest.class); - } - - @Test(dataProvider = "PerSampleReadStateManagerTestDataProvider") - public void runPerSampleReadStateManagerTest( PerSampleReadStateManagerTest test ) { - logger.warn("Running test: " + test); - - test.run(); - } - - /////////////////////////////////////// - // End Read State Manager Tests // - /////////////////////////////////////// - - - - /////////////////////////////////////// - // Helper methods / classes // - /////////////////////////////////////// - - private static ReadProperties createTestReadProperties() { - return createTestReadProperties(null); - } - - private static ReadProperties createTestReadProperties( DownsamplingMethod downsamplingMethod ) { - return new ReadProperties( - 
Collections.emptyList(), - new SAMFileHeader(), - SAMFileHeader.SortOrder.coordinate, - false, - SAMFileReader.ValidationStringency.STRICT, - downsamplingMethod, - new ValidationExclusion(), - Collections.emptyList(), - Collections.emptyList(), - false, - (byte) -1 - ); - } - - private static class FakeCloseableIterator implements CloseableIterator { - Iterator iterator; - - public FakeCloseableIterator(Iterator it) { - iterator = it; - } - - @Override - public void close() {} - - @Override - public boolean hasNext() { - return iterator.hasNext(); - } - - @Override - public T next() { - return iterator.next(); - } - - @Override - public void remove() { - throw new UnsupportedOperationException("Don't remove!"); - } - } - - private static final class LIBS_position { - - SAMRecord read; - - final int numOperators; - int currentOperatorIndex = 0; - int currentPositionOnOperator = 0; - int currentReadOffset = 0; - - boolean isBeforeDeletionStart = false; - boolean isBeforeDeletedBase = false; - boolean isAfterDeletionEnd = false; - boolean isAfterDeletedBase = false; - boolean isBeforeInsertion = false; - boolean isAfterInsertion = false; - boolean isNextToSoftClip = false; - - boolean sawMop = false; - - public LIBS_position(final SAMRecord read) { - this.read = read; - numOperators = read.getCigar().numCigarElements(); - } - - public int getCurrentReadOffset() { - return Math.max(0, currentReadOffset - 1); - } - - /** - * Steps forward on the genome. Returns false when done reading the read, true otherwise. 
- */ - public boolean stepForwardOnGenome() { - if ( currentOperatorIndex == numOperators ) - return false; - - CigarElement curElement = read.getCigar().getCigarElement(currentOperatorIndex); - if ( currentPositionOnOperator >= curElement.getLength() ) { - if ( ++currentOperatorIndex == numOperators ) - return false; - - curElement = read.getCigar().getCigarElement(currentOperatorIndex); - currentPositionOnOperator = 0; - } - - switch ( curElement.getOperator() ) { - case I: // insertion w.r.t. the reference - if ( !sawMop ) - break; - case S: // soft clip - currentReadOffset += curElement.getLength(); - case H: // hard clip - case P: // padding - currentOperatorIndex++; - return stepForwardOnGenome(); - - case D: // deletion w.r.t. the reference - case N: // reference skip (looks and gets processed just like a "deletion", just different logical meaning) - currentPositionOnOperator++; - break; - - case M: - case EQ: - case X: - sawMop = true; - currentReadOffset++; - currentPositionOnOperator++; - break; - default: - throw new IllegalStateException("No support for cigar op: " + curElement.getOperator()); - } - - final boolean isFirstOp = currentOperatorIndex == 0; - final boolean isLastOp = currentOperatorIndex == numOperators - 1; - final boolean isFirstBaseOfOp = currentPositionOnOperator == 1; - final boolean isLastBaseOfOp = currentPositionOnOperator == curElement.getLength(); - - isBeforeDeletionStart = isBeforeOp(read.getCigar(), currentOperatorIndex, CigarOperator.D, isLastOp, isLastBaseOfOp); - isBeforeDeletedBase = isBeforeDeletionStart || (!isLastBaseOfOp && curElement.getOperator() == CigarOperator.D); - isAfterDeletionEnd = isAfterOp(read.getCigar(), currentOperatorIndex, CigarOperator.D, isFirstOp, isFirstBaseOfOp); - isAfterDeletedBase = isAfterDeletionEnd || (!isFirstBaseOfOp && curElement.getOperator() == CigarOperator.D); - isBeforeInsertion = isBeforeOp(read.getCigar(), currentOperatorIndex, CigarOperator.I, isLastOp, isLastBaseOfOp) - || 
(!sawMop && curElement.getOperator() == CigarOperator.I); - isAfterInsertion = isAfterOp(read.getCigar(), currentOperatorIndex, CigarOperator.I, isFirstOp, isFirstBaseOfOp); - isNextToSoftClip = isBeforeOp(read.getCigar(), currentOperatorIndex, CigarOperator.S, isLastOp, isLastBaseOfOp) - || isAfterOp(read.getCigar(), currentOperatorIndex, CigarOperator.S, isFirstOp, isFirstBaseOfOp); - - return true; - } - - private static boolean isBeforeOp(final Cigar cigar, - final int currentOperatorIndex, - final CigarOperator op, - final boolean isLastOp, - final boolean isLastBaseOfOp) { - return !isLastOp && isLastBaseOfOp && cigar.getCigarElement(currentOperatorIndex+1).getOperator() == op; - } - - private static boolean isAfterOp(final Cigar cigar, - final int currentOperatorIndex, - final CigarOperator op, - final boolean isFirstOp, - final boolean isFirstBaseOfOp) { - return !isFirstOp && isFirstBaseOfOp && cigar.getCigarElement(currentOperatorIndex-1).getOperator() == op; - } - } -} diff --git a/public/java/test/org/broadinstitute/sting/gatk/traversals/DummyActiveRegionWalker.java b/public/java/test/org/broadinstitute/sting/gatk/traversals/DummyActiveRegionWalker.java new file mode 100644 index 000000000..bc1e1d7b0 --- /dev/null +++ b/public/java/test/org/broadinstitute/sting/gatk/traversals/DummyActiveRegionWalker.java @@ -0,0 +1,104 @@ +/* + * Copyright (c) 2012 The Broad Institute + * + * Permission is hereby granted, free of charge, to any person + * obtaining a copy of this software and associated documentation + * files (the "Software"), to deal in the Software without + * restriction, including without limitation the rights to use, + * copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following + * conditions: + * + * The above copyright notice and this permission notice shall be + * included in all copies or substantial portions of 
the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES + * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT + * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, + * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR + * THE USE OR OTHER DEALINGS IN THE SOFTWARE. + */ + +package org.broadinstitute.sting.gatk.traversals; + +import org.broadinstitute.sting.gatk.contexts.AlignmentContext; +import org.broadinstitute.sting.gatk.contexts.ReferenceContext; +import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; +import org.broadinstitute.sting.gatk.walkers.ActiveRegionWalker; +import org.broadinstitute.sting.utils.GenomeLoc; +import org.broadinstitute.sting.utils.GenomeLocSortedSet; +import org.broadinstitute.sting.utils.activeregion.ActiveRegion; +import org.broadinstitute.sting.utils.activeregion.ActiveRegionReadState; +import org.broadinstitute.sting.utils.activeregion.ActivityProfileResult; + +import java.util.*; + +/** + * ActiveRegionWalker for unit testing + * + * User: depristo + * Date: 1/15/13 + * Time: 1:28 PM + */ +class DummyActiveRegionWalker extends ActiveRegionWalker { + private final double prob; + private EnumSet states = super.desiredReadStates(); + private GenomeLocSortedSet activeRegions = null; + + protected List isActiveCalls = new ArrayList(); + protected Map mappedActiveRegions = new LinkedHashMap(); + + public DummyActiveRegionWalker() { + this(1.0); + } + + public DummyActiveRegionWalker(double constProb) { + this.prob = constProb; + } + + public DummyActiveRegionWalker(EnumSet wantStates) { + this(1.0); + this.states = wantStates; + } + + public DummyActiveRegionWalker(GenomeLocSortedSet activeRegions) { + this(1.0); + this.activeRegions = activeRegions; + } + + public void setStates(EnumSet 
states) { + this.states = states; + } + + @Override + public EnumSet desiredReadStates() { + return states; + } + + @Override + public ActivityProfileResult isActive(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) { + isActiveCalls.add(ref.getLocus()); + final double p = activeRegions == null || activeRegions.overlaps(ref.getLocus()) ? prob : 0.0; + return new ActivityProfileResult(ref.getLocus(), p); + } + + @Override + public Integer map(ActiveRegion activeRegion, RefMetaDataTracker metaDataTracker) { + mappedActiveRegions.put(activeRegion.getLocation(), activeRegion); + return 0; + } + + @Override + public Integer reduceInit() { + return 0; + } + + @Override + public Integer reduce(Integer value, Integer sum) { + return 0; + } +} diff --git a/public/java/test/org/broadinstitute/sting/gatk/traversals/TraverseActiveRegionsUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/traversals/TraverseActiveRegionsUnitTest.java index be1e310ae..319af5ec5 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/traversals/TraverseActiveRegionsUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/traversals/TraverseActiveRegionsUnitTest.java @@ -1,59 +1,58 @@ /* -* Copyright (c) 2012 The Broad Institute -* -* Permission is hereby granted, free of charge, to any person -* obtaining a copy of this software and associated documentation -* files (the "Software"), to deal in the Software without -* restriction, including without limitation the rights to use, -* copy, modify, merge, publish, distribute, sublicense, and/or sell -* copies of the Software, and to permit persons to whom the -* Software is furnished to do so, subject to the following -* conditions: -* -* The above copyright notice and this permission notice shall be -* included in all copies or substantial portions of the Software. 
-* -* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR -* THE USE OR OTHER DEALINGS IN THE SOFTWARE. -*/ + * Copyright (c) 2012 The Broad Institute + * + * Permission is hereby granted, free of charge, to any person + * obtaining a copy of this software and associated documentation + * files (the "Software"), to deal in the Software without + * restriction, including without limitation the rights to use, + * copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following + * conditions: + * + * The above copyright notice and this permission notice shall be + * included in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES + * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT + * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, + * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR + * THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ */ package org.broadinstitute.sting.gatk.traversals; import com.google.java.contract.PreconditionError; import net.sf.samtools.*; import org.broadinstitute.sting.commandline.Tags; +import org.broadinstitute.sting.gatk.arguments.ValidationExclusion; import org.broadinstitute.sting.gatk.datasources.reads.*; +import org.broadinstitute.sting.gatk.filters.ReadFilter; +import org.broadinstitute.sting.gatk.iterators.ReadTransformer; import org.broadinstitute.sting.gatk.resourcemanagement.ThreadAllocation; +import org.broadinstitute.sting.gatk.walkers.Walker; import org.broadinstitute.sting.utils.GenomeLocSortedSet; +import org.broadinstitute.sting.utils.SampleUtils; import org.broadinstitute.sting.utils.activeregion.ActiveRegionReadState; import org.broadinstitute.sting.utils.interval.IntervalMergingRule; import org.broadinstitute.sting.utils.interval.IntervalUtils; -import org.broadinstitute.sting.utils.sam.GATKSAMRecord; +import org.broadinstitute.sting.utils.sam.*; import net.sf.picard.reference.IndexedFastaSequenceFile; import org.broadinstitute.sting.BaseTest; import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; -import org.broadinstitute.sting.gatk.contexts.AlignmentContext; -import org.broadinstitute.sting.gatk.contexts.ReferenceContext; import org.broadinstitute.sting.gatk.datasources.providers.LocusShardDataProvider; import org.broadinstitute.sting.gatk.datasources.rmd.ReferenceOrderedDataSource; import org.broadinstitute.sting.gatk.executive.WindowMaker; -import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; -import org.broadinstitute.sting.gatk.walkers.ActiveRegionWalker; import org.broadinstitute.sting.utils.GenomeLoc; import org.broadinstitute.sting.utils.GenomeLocParser; import org.broadinstitute.sting.utils.activeregion.ActiveRegion; -import org.broadinstitute.sting.utils.activeregion.ActivityProfileResult; import org.broadinstitute.sting.utils.fasta.CachingIndexedFastaSequenceFile; -import 
org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; -import org.broadinstitute.sting.utils.sam.ReadUtils; import org.testng.Assert; import org.testng.annotations.BeforeClass; +import org.testng.annotations.DataProvider; import org.testng.annotations.Test; @@ -71,57 +70,16 @@ import java.util.*; * http://iwww.broadinstitute.org/gsa/wiki/index.php/Active_Region_Traversal_Contract */ public class TraverseActiveRegionsUnitTest extends BaseTest { + private final static boolean ENFORCE_CONTRACTS = false; + private final static boolean DEBUG = false; - private class DummyActiveRegionWalker extends ActiveRegionWalker { - private final double prob; - private EnumSet states = super.desiredReadStates(); - - protected List isActiveCalls = new ArrayList(); - protected Map mappedActiveRegions = new HashMap(); - - public DummyActiveRegionWalker() { - this.prob = 1.0; - } - - public DummyActiveRegionWalker(double constProb) { - this.prob = constProb; - } - - public DummyActiveRegionWalker(EnumSet wantStates) { - this.prob = 1.0; - this.states = wantStates; - } - - @Override - public EnumSet desiredReadStates() { - return states; - } - - @Override - public ActivityProfileResult isActive(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) { - isActiveCalls.add(ref.getLocus()); - return new ActivityProfileResult(ref.getLocus(), prob); - } - - @Override - public Integer map(ActiveRegion activeRegion, RefMetaDataTracker metaDataTracker) { - mappedActiveRegions.put(activeRegion.getLocation(), activeRegion); - return 0; - } - - @Override - public Integer reduceInit() { - return 0; - } - - @Override - public Integer reduce(Integer value, Integer sum) { - return 0; - } + @DataProvider(name = "TraversalEngineProvider") + public Object[][] makeTraversals() { + final List traversals = new LinkedList(); + traversals.add(new Object[]{new TraverseActiveRegions()}); + return traversals.toArray(new Object[][]{}); } - private final TraverseActiveRegions t = new 
TraverseActiveRegions(); - private IndexedFastaSequenceFile reference; private SAMSequenceDictionary dictionary; private GenomeLocParser genomeLocParser; @@ -133,6 +91,7 @@ public class TraverseActiveRegionsUnitTest extends BaseTest { @BeforeClass private void init() throws FileNotFoundException { + //reference = new CachingIndexedFastaSequenceFile(new File("/Users/depristo/Desktop/broadLocal/localData/human_g1k_v37.fasta")); // hg19Reference)); reference = new CachingIndexedFastaSequenceFile(new File(hg19Reference)); dictionary = reference.getSequenceDictionary(); genomeLocParser = new GenomeLocParser(dictionary); @@ -187,18 +146,18 @@ public class TraverseActiveRegionsUnitTest extends BaseTest { out.close(); } - @Test - public void testAllBasesSeen() { + @Test(enabled = true && ! DEBUG, dataProvider = "TraversalEngineProvider") + public void testAllBasesSeen(TraverseActiveRegions t) { DummyActiveRegionWalker walker = new DummyActiveRegionWalker(); - List activeIntervals = getIsActiveIntervals(walker, intervals); + List activeIntervals = getIsActiveIntervals(t, walker, intervals); // Contract: Every genome position in the analysis interval(s) is processed by the walker's isActive() call verifyEqualIntervals(intervals, activeIntervals); } - private List getIsActiveIntervals(DummyActiveRegionWalker walker, List intervals) { + private List getIsActiveIntervals(final TraverseActiveRegions t, DummyActiveRegionWalker walker, List intervals) { List activeIntervals = new ArrayList(); - for (LocusShardDataProvider dataProvider : createDataProviders(intervals, testBAM)) { + for (LocusShardDataProvider dataProvider : createDataProviders(t, walker, intervals, testBAM)) { t.traverse(walker, dataProvider, 0); activeIntervals.addAll(walker.isActiveCalls); } @@ -206,23 +165,23 @@ public class TraverseActiveRegionsUnitTest extends BaseTest { return activeIntervals; } - @Test (expectedExceptions = PreconditionError.class) - public void testIsActiveRangeLow () { + @Test (enabled = 
ENFORCE_CONTRACTS, dataProvider = "TraversalEngineProvider", expectedExceptions = PreconditionError.class) + public void testIsActiveRangeLow (TraverseActiveRegions t) { DummyActiveRegionWalker walker = new DummyActiveRegionWalker(-0.1); - getActiveRegions(walker, intervals).values(); + getActiveRegions(t, walker, intervals).values(); } - @Test (expectedExceptions = PreconditionError.class) - public void testIsActiveRangeHigh () { + @Test (enabled = ENFORCE_CONTRACTS, dataProvider = "TraversalEngineProvider", expectedExceptions = PreconditionError.class) + public void testIsActiveRangeHigh (TraverseActiveRegions t) { DummyActiveRegionWalker walker = new DummyActiveRegionWalker(1.1); - getActiveRegions(walker, intervals).values(); + getActiveRegions(t, walker, intervals).values(); } - @Test - public void testActiveRegionCoverage() { + @Test(enabled = true && ! DEBUG, dataProvider = "TraversalEngineProvider") + public void testActiveRegionCoverage(TraverseActiveRegions t) { DummyActiveRegionWalker walker = new DummyActiveRegionWalker(); - Collection activeRegions = getActiveRegions(walker, intervals).values(); + Collection activeRegions = getActiveRegions(t, walker, intervals).values(); verifyActiveRegionCoverage(intervals, activeRegions); } @@ -268,11 +227,11 @@ public class TraverseActiveRegionsUnitTest extends BaseTest { Assert.assertEquals(intervalStops.size(), 0, "Interval stop location does not match an active region stop location"); } - @Test - public void testActiveRegionExtensionOnContig() { + @Test(enabled = true && ! 
DEBUG, dataProvider = "TraversalEngineProvider") + public void testActiveRegionExtensionOnContig(TraverseActiveRegions t) { DummyActiveRegionWalker walker = new DummyActiveRegionWalker(); - Collection activeRegions = getActiveRegions(walker, intervals).values(); + Collection activeRegions = getActiveRegions(t, walker, intervals).values(); for (ActiveRegion activeRegion : activeRegions) { GenomeLoc loc = activeRegion.getExtendedLoc(); @@ -283,8 +242,8 @@ public class TraverseActiveRegionsUnitTest extends BaseTest { } } - @Test - public void testPrimaryReadMapping() { + @Test(enabled = true, dataProvider = "TraversalEngineProvider") + public void testPrimaryReadMapping(TraverseActiveRegions t) { DummyActiveRegionWalker walker = new DummyActiveRegionWalker(); // Contract: Each read has the Primary state in a single region (or none) @@ -293,41 +252,41 @@ public class TraverseActiveRegionsUnitTest extends BaseTest { // simple: Primary in 1:1-999 // overlap_equal: Primary in 1:1-999 // overlap_unequal: Primary in 1:1-999 - // boundary_equal: Non-Primary in 1:1000-1999, Primary in 1:2000-2999 + // boundary_equal: Primary in 1:1000-1999, Non-Primary in 1:2000-2999 // boundary_unequal: Primary in 1:1000-1999, Non-Primary in 1:2000-2999 // boundary_1_pre: Primary in 1:1000-1999, Non-Primary in 1:2000-2999 - // boundary_1_post: Non-Primary in 1:1000-1999, Primary in 1:2000-2999 - // extended_and_np: Non-Primary in 1:1-999, Primary in 1:1000-1999, Extended in 1:2000-2999 + // boundary_1_post: Primary in 1:1000-1999, Non-Primary in 1:2000-2999 + // extended_and_np: Primary in 1:1-999, Non-Primary in 1:1000-1999, Extended in 1:2000-2999 // outside_intervals: none // shard_boundary_1_pre: Primary in 1:14908-16384, Non-Primary in 1:16385-16927 - // shard_boundary_1_post: Non-Primary in 1:14908-16384, Primary in 1:16385-16927 - // shard_boundary_equal: Non-Primary in 1:14908-16384, Primary in 1:16385-16927 + // shard_boundary_1_post: Primary in 1:14908-16384, Non-Primary in 
1:16385-16927 + // shard_boundary_equal: Primary in 1:14908-16384, Non-Primary in 1:16385-16927 // simple20: Primary in 20:10000-10100 - Map activeRegions = getActiveRegions(walker, intervals); + Map activeRegions = getActiveRegions(t, walker, intervals); ActiveRegion region; region = activeRegions.get(genomeLocParser.createGenomeLoc("1", 1, 999)); - verifyReadMapping(region, "simple", "overlap_equal", "overlap_unequal"); + verifyReadMapping(region, "simple", "overlap_equal", "overlap_unequal", "extended_and_np"); region = activeRegions.get(genomeLocParser.createGenomeLoc("1", 1000, 1999)); - verifyReadMapping(region, "boundary_unequal", "extended_and_np", "boundary_1_pre"); + verifyReadMapping(region, "boundary_unequal", "boundary_1_pre", "boundary_equal", "boundary_1_post"); region = activeRegions.get(genomeLocParser.createGenomeLoc("1", 2000, 2999)); - verifyReadMapping(region, "boundary_equal", "boundary_1_post"); + verifyReadMapping(region); region = activeRegions.get(genomeLocParser.createGenomeLoc("1", 14908, 16384)); - verifyReadMapping(region, "shard_boundary_1_pre"); + verifyReadMapping(region, "shard_boundary_1_pre", "shard_boundary_1_post", "shard_boundary_equal"); region = activeRegions.get(genomeLocParser.createGenomeLoc("1", 16385, 16927)); - verifyReadMapping(region, "shard_boundary_1_post", "shard_boundary_equal"); + verifyReadMapping(region); region = activeRegions.get(genomeLocParser.createGenomeLoc("20", 10000, 10100)); verifyReadMapping(region, "simple20"); } - @Test - public void testNonPrimaryReadMapping() { + @Test(enabled = true && ! 
DEBUG, dataProvider = "TraversalEngineProvider") + public void testNonPrimaryReadMapping(TraverseActiveRegions t) { DummyActiveRegionWalker walker = new DummyActiveRegionWalker( EnumSet.of(ActiveRegionReadState.PRIMARY, ActiveRegionReadState.NONPRIMARY)); @@ -339,18 +298,18 @@ public class TraverseActiveRegionsUnitTest extends BaseTest { // simple: Primary in 1:1-999 // overlap_equal: Primary in 1:1-999 // overlap_unequal: Primary in 1:1-999 - // boundary_equal: Non-Primary in 1:1000-1999, Primary in 1:2000-2999 + // boundary_equal: Primary in 1:1000-1999, Non-Primary in 1:2000-2999 // boundary_unequal: Primary in 1:1000-1999, Non-Primary in 1:2000-2999 // boundary_1_pre: Primary in 1:1000-1999, Non-Primary in 1:2000-2999 - // boundary_1_post: Non-Primary in 1:1000-1999, Primary in 1:2000-2999 - // extended_and_np: Non-Primary in 1:1-999, Primary in 1:1000-1999, Extended in 1:2000-2999 + // boundary_1_post: Primary in 1:1000-1999, Non-Primary in 1:2000-2999 + // extended_and_np: Primary in 1:1-999, Non-Primary in 1:1000-1999, Extended in 1:2000-2999 // outside_intervals: none // shard_boundary_1_pre: Primary in 1:14908-16384, Non-Primary in 1:16385-16927 - // shard_boundary_1_post: Non-Primary in 1:14908-16384, Primary in 1:16385-16927 - // shard_boundary_equal: Non-Primary in 1:14908-16384, Primary in 1:16385-16927 + // shard_boundary_1_post: Primary in 1:14908-16384, Non-Primary in 1:16385-16927 + // shard_boundary_equal: Primary in 1:14908-16384, Non-Primary in 1:16385-16927 // simple20: Primary in 20:10000-10100 - Map activeRegions = getActiveRegions(walker, intervals); + Map activeRegions = getActiveRegions(t, walker, intervals); ActiveRegion region; region = activeRegions.get(genomeLocParser.createGenomeLoc("1", 1, 999)); @@ -372,8 +331,8 @@ public class TraverseActiveRegionsUnitTest extends BaseTest { verifyReadMapping(region, "simple20"); } - @Test - public void testExtendedReadMapping() { + @Test(enabled = true && ! 
DEBUG, dataProvider = "TraversalEngineProvider") + public void testExtendedReadMapping(TraverseActiveRegions t) { DummyActiveRegionWalker walker = new DummyActiveRegionWalker( EnumSet.of(ActiveRegionReadState.PRIMARY, ActiveRegionReadState.NONPRIMARY, ActiveRegionReadState.EXTENDED)); @@ -397,7 +356,7 @@ public class TraverseActiveRegionsUnitTest extends BaseTest { // shard_boundary_equal: Non-Primary in 1:14908-16384, Primary in 1:16385-16927 // simple20: Primary in 20:10000-10100 - Map activeRegions = getActiveRegions(walker, intervals); + Map activeRegions = getActiveRegions(t, walker, intervals); ActiveRegion region; region = activeRegions.get(genomeLocParser.createGenomeLoc("1", 1, 999)); @@ -419,24 +378,34 @@ public class TraverseActiveRegionsUnitTest extends BaseTest { verifyReadMapping(region, "simple20"); } - @Test - public void testUnmappedReads() { + @Test(enabled = true && ! DEBUG, dataProvider = "TraversalEngineProvider") + public void testUnmappedReads(TraverseActiveRegions t) { // TODO } private void verifyReadMapping(ActiveRegion region, String... 
reads) { + final Set regionReads = new HashSet(); + for (SAMRecord read : region.getReads()) { + Assert.assertFalse(regionReads.contains(read.getReadName()), "Duplicate reads detected in region " + region + " read " + read.getReadName()); + regionReads.add(read.getReadName()); + } + Collection wantReads = new ArrayList(Arrays.asList(reads)); for (SAMRecord read : region.getReads()) { String regionReadName = read.getReadName(); - Assert.assertTrue(wantReads.contains(regionReadName), "Read " + regionReadName + " assigned to active region " + region); + Assert.assertTrue(wantReads.contains(regionReadName), "Read " + regionReadName + " incorrectly assigned to active region " + region); wantReads.remove(regionReadName); } - Assert.assertTrue(wantReads.isEmpty(), "Reads missing in active region " + region); + Assert.assertTrue(wantReads.isEmpty(), "Reads missing in active region " + region + ", wanted " + (wantReads.isEmpty() ? "" : wantReads.iterator().next())); } - private Map getActiveRegions(DummyActiveRegionWalker walker, List intervals) { - for (LocusShardDataProvider dataProvider : createDataProviders(intervals, testBAM)) + private Map getActiveRegions(TraverseActiveRegions t, DummyActiveRegionWalker walker, List intervals) { + return getActiveRegions(t, walker, intervals, testBAM); + } + + private Map getActiveRegions(TraverseActiveRegions t, DummyActiveRegionWalker walker, List intervals, final String bam) { + for (LocusShardDataProvider dataProvider : createDataProviders(t, walker, intervals, bam)) t.traverse(walker, dataProvider, 0); t.endTraversal(walker, 0); @@ -496,28 +465,223 @@ public class TraverseActiveRegionsUnitTest extends BaseTest { record.setCigar(cigar); record.setReadString(new String(new char[len]).replace("\0", "A")); record.setBaseQualities(new byte[len]); + record.setReadGroup(new GATKSAMReadGroupRecord(header.getReadGroup("test"))); return record; } - private List createDataProviders(List intervals, String bamFile) { + private List 
createDataProviders(TraverseActiveRegions traverseActiveRegions, final Walker walker, List intervals, String bamFile) { GenomeAnalysisEngine engine = new GenomeAnalysisEngine(); engine.setGenomeLocParser(genomeLocParser); - t.initialize(engine); + traverseActiveRegions.initialize(engine, walker); Collection samFiles = new ArrayList(); SAMReaderID readerID = new SAMReaderID(new File(bamFile), new Tags()); samFiles.add(readerID); - SAMDataSource dataSource = new SAMDataSource(samFiles, new ThreadAllocation(), null, genomeLocParser); + SAMDataSource dataSource = new SAMDataSource(samFiles, new ThreadAllocation(), null, genomeLocParser, + false, + SAMFileReader.ValidationStringency.STRICT, + null, + null, + new ValidationExclusion(), + new ArrayList(), + new ArrayList(), + false, (byte)30, false, true); + + final Set samples = SampleUtils.getSAMFileSamples(dataSource.getHeader()); List providers = new ArrayList(); for (Shard shard : dataSource.createShardIteratorOverIntervals(new GenomeLocSortedSet(genomeLocParser, intervals), new LocusShardBalancer())) { - for (WindowMaker.WindowMakerIterator window : new WindowMaker(shard, genomeLocParser, dataSource.seek(shard), shard.getGenomeLocs())) { + for (WindowMaker.WindowMakerIterator window : new WindowMaker(shard, genomeLocParser, dataSource.seek(shard), shard.getGenomeLocs(), samples)) { providers.add(new LocusShardDataProvider(shard, shard.getReadProperties(), genomeLocParser, window.getLocus(), window, reference, new ArrayList())); } } return providers; } + + // --------------------------------------------------------------------------------------------------------- + // + // Combinatorial tests to ensure reads are going into the right regions + // + // --------------------------------------------------------------------------------------------------------- + + @DataProvider(name = "CombinatorialARTTilingProvider") + public Object[][] makeCombinatorialARTTilingProvider() { + final List tests = new LinkedList(); + + 
final List starts = Arrays.asList( + 1, // very start of the chromosome + ArtificialBAMBuilder.BAM_SHARD_SIZE - 100, // right before the shard boundary + ArtificialBAMBuilder.BAM_SHARD_SIZE + 100 // right after the shard boundary + ); + + final List> allReadStates = Arrays.asList( + EnumSet.of(ActiveRegionReadState.PRIMARY), + EnumSet.of(ActiveRegionReadState.PRIMARY, ActiveRegionReadState.NONPRIMARY), + EnumSet.of(ActiveRegionReadState.PRIMARY, ActiveRegionReadState.NONPRIMARY, ActiveRegionReadState.EXTENDED) + ); + + final int maxTests = Integer.MAX_VALUE; + int nTests = 0; + for ( final int readLength : Arrays.asList(10, 100) ) { + for ( final int skips : Arrays.asList(0, 1, 10) ) { + for ( final int start : starts ) { + for ( final int nReadsPerLocus : Arrays.asList(1, 2) ) { + for ( final int nLoci : Arrays.asList(1, 1000) ) { + for ( EnumSet readStates : allReadStates ) { + final ArtificialBAMBuilder bamBuilder = new ArtificialBAMBuilder(reference, nReadsPerLocus, nLoci); + bamBuilder.setReadLength(readLength); + bamBuilder.setSkipNLoci(skips); + bamBuilder.setAlignmentStart(start); + + for ( final GenomeLocSortedSet activeRegions : enumerateActiveRegions(bamBuilder.getAlignmentStart(), bamBuilder.getAlignmentEnd())) { + nTests++; + if ( nTests < maxTests ) // && nTests == 1238 ) + tests.add(new Object[]{nTests, activeRegions, readStates, bamBuilder}); + } + } + } + } + } + } + } + + return tests.toArray(new Object[][]{}); + } + + private Collection enumerateActiveRegions(final int start, final int stop) { + // should basically cut up entire region into equal sized chunks, of + // size 10, 20, 50, 100, etc, alternating skipping pieces so they are inactive + // Need to make sure we include some edge cases: + final List activeRegions = new LinkedList(); + + for ( final int stepSize : Arrays.asList(11, 29, 53, 97) ) { + for ( final boolean startWithActive : Arrays.asList(true, false) ) { + activeRegions.add(makeActiveRegionMask(start, stop, stepSize, 
startWithActive)); + } + } + + // active region is the whole interval + activeRegions.add(new GenomeLocSortedSet(genomeLocParser, genomeLocParser.createGenomeLoc("1", start, stop))); + + // active region extends up to the end of the data, but doesn't include start + activeRegions.add(new GenomeLocSortedSet(genomeLocParser, genomeLocParser.createGenomeLoc("1", start+10, stop))); + + return activeRegions; + } + + private GenomeLocSortedSet makeActiveRegionMask(final int start, final int stop, final int stepSize, final boolean startWithActive) { + final GenomeLocSortedSet active = new GenomeLocSortedSet(genomeLocParser); + + boolean includeRegion = startWithActive; + for ( int left = start; left < stop; left += stepSize) { + final int right = left + stepSize; + final GenomeLoc region = genomeLocParser.createGenomeLoc("1", left, right); + if ( includeRegion ) + active.add(region); + includeRegion = ! includeRegion; + } + + return active; + } + + + @Test(enabled = true && ! DEBUG, dataProvider = "CombinatorialARTTilingProvider") + public void testARTReadsInActiveRegions(final int id, final GenomeLocSortedSet activeRegions, final EnumSet readStates, final ArtificialBAMBuilder bamBuilder) { + logger.warn("Running testARTReadsInActiveRegions id=" + id + " locs " + activeRegions + " against bam " + bamBuilder); + final List intervals = Arrays.asList( + genomeLocParser.createGenomeLoc("1", bamBuilder.getAlignmentStart(), bamBuilder.getAlignmentEnd()) + ); + + final DummyActiveRegionWalker walker = new DummyActiveRegionWalker(activeRegions); + walker.setStates(readStates); + + final TraverseActiveRegions traversal = new TraverseActiveRegions(); + final Map activeRegionsMap = getActiveRegions(traversal, walker, intervals, bamBuilder.makeTemporarilyBAMFile().toString()); + + final Set alreadySeenReads = new HashSet(); // for use with the primary / non-primary + for ( final ActiveRegion region : activeRegionsMap.values() ) { + final Set readNamesInRegion = 
readNamesInRegion(region); + int nReadsExpectedInRegion = 0; + for ( final GATKSAMRecord read : bamBuilder.makeReads() ) { + final GenomeLoc readLoc = genomeLocParser.createGenomeLoc(read); + + boolean shouldBeInRegion = readStates.contains(ActiveRegionReadState.EXTENDED) + ? region.getExtendedLoc().overlapsP(readLoc) + : region.getLocation().overlapsP(readLoc); + + if ( ! readStates.contains(ActiveRegionReadState.NONPRIMARY) ) { + if ( alreadySeenReads.contains(read.getReadName()) ) + shouldBeInRegion = false; + else if ( shouldBeInRegion ) + alreadySeenReads.add(read.getReadName()); + } + + Assert.assertEquals(readNamesInRegion.contains(read.getReadName()), shouldBeInRegion, "Region " + region + + " failed contains read check: read " + read + " with span " + readLoc + " should be in region is " + shouldBeInRegion + " but I got the opposite"); + + nReadsExpectedInRegion += shouldBeInRegion ? 1 : 0; + } + + Assert.assertEquals(region.size(), nReadsExpectedInRegion, "There are more reads in active region " + region + "than expected"); + } + } + + private Set readNamesInRegion(final ActiveRegion region) { + final Set readNames = new LinkedHashSet(region.getReads().size()); + for ( final SAMRecord read : region.getReads() ) + readNames.add(read.getReadName()); + return readNames; + } + + // --------------------------------------------------------------------------------------------------------- + // + // Make sure all insertion reads are properly included in the active regions + // + // --------------------------------------------------------------------------------------------------------- + + @Test + public void ensureAllInsertionReadsAreInActiveRegions() { + + final int readLength = 10; + final int start = 20; + final int nReadsPerLocus = 10; + final int nLoci = 3; + + final ArtificialBAMBuilder bamBuilder = new ArtificialBAMBuilder(reference, nReadsPerLocus, nLoci); + bamBuilder.setReadLength(readLength); + bamBuilder.setAlignmentStart(start); + + // note that the 
position must be +1 as the read's all I cigar puts the end 1 bp before start, leaving it out of the region + GATKSAMRecord allI = ArtificialSAMUtils.createArtificialRead(bamBuilder.getHeader(),"allI",0,start+1,readLength); + allI.setCigarString(readLength + "I"); + allI.setReadGroup(new GATKSAMReadGroupRecord(bamBuilder.getHeader().getReadGroups().get(0))); + + bamBuilder.addReads(allI); + + final GenomeLocSortedSet activeRegions = new GenomeLocSortedSet(bamBuilder.getGenomeLocParser()); + activeRegions.add(bamBuilder.getGenomeLocParser().createGenomeLoc("1", 10, 30)); + final List intervals = Arrays.asList( + genomeLocParser.createGenomeLoc("1", bamBuilder.getAlignmentStart(), bamBuilder.getAlignmentEnd()) + ); + + final DummyActiveRegionWalker walker = new DummyActiveRegionWalker(activeRegions); + + final TraverseActiveRegions traversal = new TraverseActiveRegions(); + final Map activeRegionsMap = getActiveRegions(traversal, walker, intervals, bamBuilder.makeTemporarilyBAMFile().toString()); + + final ActiveRegion region = activeRegionsMap.values().iterator().next(); + int nReadsExpectedInRegion = 0; + + final Set readNamesInRegion = readNamesInRegion(region); + for ( final GATKSAMRecord read : bamBuilder.makeReads() ) { + Assert.assertTrue(readNamesInRegion.contains(read.getReadName()), + "Region " + region + " should contain read " + read + " with cigar " + read.getCigarString() + " but it wasn't"); + nReadsExpectedInRegion++; + } + + Assert.assertEquals(region.size(), nReadsExpectedInRegion, "There are more reads in active region " + region + "than expected"); + } } diff --git a/public/java/test/org/broadinstitute/sting/gatk/traversals/TraverseDuplicatesUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/traversals/TraverseDuplicatesUnitTest.java index ee6c6d1d4..fd9e46a06 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/traversals/TraverseDuplicatesUnitTest.java +++ 
b/public/java/test/org/broadinstitute/sting/gatk/traversals/TraverseDuplicatesUnitTest.java @@ -68,7 +68,7 @@ public class TraverseDuplicatesUnitTest extends BaseTest { engine.setReferenceDataSource(refFile); engine.setGenomeLocParser(genomeLocParser); - obj.initialize(engine); + obj.initialize(engine, null); } @Test diff --git a/public/java/test/org/broadinstitute/sting/gatk/traversals/TraverseReadsUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/traversals/TraverseReadsUnitTest.java index 3866990b2..4328e3047 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/traversals/TraverseReadsUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/traversals/TraverseReadsUnitTest.java @@ -132,7 +132,7 @@ public class TraverseReadsUnitTest extends BaseTest { countReadWalker = new CountReads(); traversalEngine = new TraverseReadsNano(1); - traversalEngine.initialize(engine); + traversalEngine.initialize(engine, countReadWalker); } /** Test out that we can shard the file and iterate over every read */ diff --git a/public/java/test/org/broadinstitute/sting/utils/fasta/CachingIndexedFastaSequenceFileUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/fasta/CachingIndexedFastaSequenceFileUnitTest.java index c67e52f2e..0c1b5b069 100644 --- a/public/java/test/org/broadinstitute/sting/utils/fasta/CachingIndexedFastaSequenceFileUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/utils/fasta/CachingIndexedFastaSequenceFileUnitTest.java @@ -32,8 +32,10 @@ package org.broadinstitute.sting.utils.fasta; import net.sf.picard.reference.IndexedFastaSequenceFile; import net.sf.picard.reference.ReferenceSequence; import net.sf.samtools.SAMSequenceRecord; +import org.apache.commons.lang.StringUtils; import org.apache.log4j.Priority; import org.broadinstitute.sting.BaseTest; +import org.broadinstitute.sting.utils.exceptions.UserException; import org.testng.Assert; import org.testng.annotations.DataProvider; import 
org.testng.annotations.Test; @@ -49,7 +51,7 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; /** - * Basic unit test for GenomeLoc + * Basic unit test for CachingIndexedFastaSequenceFile */ public class CachingIndexedFastaSequenceFileUnitTest extends BaseTest { private File simpleFasta = new File(publicTestDir + "/exampleFASTA.fasta"); @@ -80,7 +82,7 @@ public class CachingIndexedFastaSequenceFileUnitTest extends BaseTest { @Test(dataProvider = "fastas", enabled = true && ! DEBUG) public void testCachingIndexedFastaReaderSequential1(File fasta, int cacheSize, int querySize) throws FileNotFoundException { - final CachingIndexedFastaSequenceFile caching = new CachingIndexedFastaSequenceFile(fasta, getCacheSize(cacheSize), true); + final CachingIndexedFastaSequenceFile caching = new CachingIndexedFastaSequenceFile(fasta, getCacheSize(cacheSize), true, false); SAMSequenceRecord contig = caching.getSequenceDictionary().getSequence(0); logger.warn(String.format("Checking contig %s length %d with cache size %d and query size %d", @@ -122,7 +124,7 @@ public class CachingIndexedFastaSequenceFileUnitTest extends BaseTest { @Test(dataProvider = "fastas", enabled = true && ! DEBUG) public void testCachingIndexedFastaReaderTwoStage(File fasta, int cacheSize, int querySize) throws FileNotFoundException { final IndexedFastaSequenceFile uncached = new IndexedFastaSequenceFile(fasta); - final CachingIndexedFastaSequenceFile caching = new CachingIndexedFastaSequenceFile(fasta, getCacheSize(cacheSize), true); + final CachingIndexedFastaSequenceFile caching = new CachingIndexedFastaSequenceFile(fasta, getCacheSize(cacheSize), true, false); SAMSequenceRecord contig = uncached.getSequenceDictionary().getSequence(0); @@ -167,7 +169,7 @@ public class CachingIndexedFastaSequenceFileUnitTest extends BaseTest { @Test(dataProvider = "ParallelFastaTest", enabled = true && ! 
DEBUG, timeOut = 60000) public void testCachingIndexedFastaReaderParallel(final File fasta, final int cacheSize, final int querySize, final int nt) throws FileNotFoundException, InterruptedException { - final CachingIndexedFastaSequenceFile caching = new CachingIndexedFastaSequenceFile(fasta, getCacheSize(cacheSize), true); + final CachingIndexedFastaSequenceFile caching = new CachingIndexedFastaSequenceFile(fasta, getCacheSize(cacheSize), true, false); logger.warn(String.format("Parallel caching index fasta reader test cacheSize %d querySize %d nt %d", caching.getCacheSize(), querySize, nt)); for ( int iterations = 0; iterations < 1; iterations++ ) { @@ -230,4 +232,33 @@ public class CachingIndexedFastaSequenceFileUnitTest extends BaseTest { else return new String(reader.getSubsequenceAt(contig, start, stop).getBases()); } + + @Test(enabled = true) + public void testIupacChanges() throws FileNotFoundException, InterruptedException { + final String testFasta = privateTestDir + "iupacFASTA.fasta"; + final CachingIndexedFastaSequenceFile iupacPreserving = new CachingIndexedFastaSequenceFile(new File(testFasta), CachingIndexedFastaSequenceFile.DEFAULT_CACHE_SIZE, false, true); + final CachingIndexedFastaSequenceFile makeNs = new CachingIndexedFastaSequenceFile(new File(testFasta)); + + int preservingNs = 0; + int changingNs = 0; + for ( SAMSequenceRecord contig : iupacPreserving.getSequenceDictionary().getSequences() ) { + final String sPreserving = fetchBaseString(iupacPreserving, contig.getSequenceName(), 0, 15000); + preservingNs += StringUtils.countMatches(sPreserving, "N"); + + final String sChanging = fetchBaseString(makeNs, contig.getSequenceName(), 0, 15000); + changingNs += StringUtils.countMatches(sChanging, "N"); + } + + Assert.assertEquals(changingNs, preservingNs + 4); + } + + @Test(enabled = true, expectedExceptions = {UserException.class}) + public void testFailOnBadBase() throws FileNotFoundException, InterruptedException { + final String testFasta = 
privateTestDir + "problematicFASTA.fasta"; + final CachingIndexedFastaSequenceFile fasta = new CachingIndexedFastaSequenceFile(new File(testFasta)); + + for ( SAMSequenceRecord contig : fasta.getSequenceDictionary().getSequences() ) { + fetchBaseString(fasta, contig.getSequenceName(), -1, -1); + } + } } diff --git a/public/java/test/org/broadinstitute/sting/utils/fragments/FragmentUtilsBenchmark.java b/public/java/test/org/broadinstitute/sting/utils/fragments/FragmentUtilsBenchmark.java index 7d295c6f0..e06149f67 100644 --- a/public/java/test/org/broadinstitute/sting/utils/fragments/FragmentUtilsBenchmark.java +++ b/public/java/test/org/broadinstitute/sting/utils/fragments/FragmentUtilsBenchmark.java @@ -27,14 +27,14 @@ package org.broadinstitute.sting.utils.fragments; import com.google.caliper.Param; import com.google.caliper.SimpleBenchmark; -import com.google.caliper.runner.CaliperMain; import net.sf.samtools.SAMFileHeader; import org.broadinstitute.sting.utils.GenomeLoc; import org.broadinstitute.sting.utils.GenomeLocParser; import org.broadinstitute.sting.utils.pileup.ReadBackedPileup; import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; -import java.util.*; +import java.util.ArrayList; +import java.util.List; /** * Caliper microbenchmark of fragment pileup @@ -76,6 +76,6 @@ public class FragmentUtilsBenchmark extends SimpleBenchmark { } public static void main(String[] args) { - CaliperMain.main(FragmentUtilsBenchmark.class, args); + com.google.caliper.Runner.main(FragmentUtilsBenchmark.class, args); } } diff --git a/public/java/test/org/broadinstitute/sting/utils/interval/IntervalUtilsUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/interval/IntervalUtilsUnitTest.java index 35f9d4137..2be2745de 100644 --- a/public/java/test/org/broadinstitute/sting/utils/interval/IntervalUtilsUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/utils/interval/IntervalUtilsUnitTest.java @@ -1068,7 +1068,7 @@ public class IntervalUtilsUnitTest 
extends BaseTest { List> intervalArgs = new ArrayList>(1); intervalArgs.add(new IntervalBinding(picardIntervalFile.getAbsolutePath())); - IntervalUtils.loadIntervals(intervalArgs, argCollection.intervalSetRule, argCollection.intervalMerging, argCollection.intervalPadding, genomeLocParser); + IntervalUtils.loadIntervals(intervalArgs, argCollection.intervalArguments.intervalSetRule, argCollection.intervalArguments.intervalMerging, argCollection.intervalArguments.intervalPadding, genomeLocParser); } @Test(expectedExceptions=UserException.class, dataProvider="invalidIntervalTestData") @@ -1081,7 +1081,7 @@ public class IntervalUtilsUnitTest extends BaseTest { List> intervalArgs = new ArrayList>(1); intervalArgs.add(new IntervalBinding(gatkIntervalFile.getAbsolutePath())); - IntervalUtils.loadIntervals(intervalArgs, argCollection.intervalSetRule, argCollection.intervalMerging, argCollection.intervalPadding, genomeLocParser); + IntervalUtils.loadIntervals(intervalArgs, argCollection.intervalArguments.intervalSetRule, argCollection.intervalArguments.intervalMerging, argCollection.intervalArguments.intervalPadding, genomeLocParser); } private File createTempFile( String tempFilePrefix, String tempFileExtension, String... 
lines ) throws Exception { diff --git a/public/java/test/org/broadinstitute/sting/utils/locusiterator/AlignmentStateMachineUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/locusiterator/AlignmentStateMachineUnitTest.java new file mode 100644 index 000000000..2f1e95a1f --- /dev/null +++ b/public/java/test/org/broadinstitute/sting/utils/locusiterator/AlignmentStateMachineUnitTest.java @@ -0,0 +1,110 @@ +/* + * Copyright (c) 2012 The Broad Institute + * + * Permission is hereby granted, free of charge, to any person + * obtaining a copy of this software and associated documentation + * files (the "Software"), to deal in the Software without + * restriction, including without limitation the rights to use, + * copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following + * conditions: + * + * The above copyright notice and this permission notice shall be + * included in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES + * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT + * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, + * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR + * THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ */ + +package org.broadinstitute.sting.utils.locusiterator; + +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; +import org.testng.Assert; +import org.testng.annotations.DataProvider; +import org.testng.annotations.Test; + +import java.util.Arrays; + +/** + * testing of the new (non-legacy) version of LocusIteratorByState + */ +public class AlignmentStateMachineUnitTest extends LocusIteratorByStateBaseTest { + @DataProvider(name = "AlignmentStateMachineTest") + public Object[][] makeAlignmentStateMachineTest() { +// return new Object[][]{{new LIBSTest("2M2D2X", 2)}}; +// return createLIBSTests( +// Arrays.asList(2), +// Arrays.asList(2)); + return createLIBSTests( + Arrays.asList(1, 2), + Arrays.asList(1, 2, 3, 4)); + } + + @Test(dataProvider = "AlignmentStateMachineTest") + public void testAlignmentStateMachineTest(LIBSTest params) { + final GATKSAMRecord read = params.makeRead(); + final AlignmentStateMachine state = new AlignmentStateMachine(read); + final LIBS_position tester = new LIBS_position(read); + + // min is one because always visit something, even for 10I reads + final int expectedBpToVisit = read.getAlignmentEnd() - read.getAlignmentStart() + 1; + + Assert.assertSame(state.getRead(), read); + Assert.assertNotNull(state.toString()); + + int bpVisited = 0; + int lastOffset = -1; + + // TODO -- more tests about test state machine state before first step? 
+ Assert.assertTrue(state.isLeftEdge()); + Assert.assertNull(state.getCigarOperator()); + Assert.assertNotNull(state.toString()); + Assert.assertEquals(state.getReadOffset(), -1); + Assert.assertEquals(state.getGenomeOffset(), -1); + Assert.assertEquals(state.getCurrentCigarElementOffset(), -1); + Assert.assertEquals(state.getCurrentCigarElement(), null); + + while ( state.stepForwardOnGenome() != null ) { + Assert.assertNotNull(state.toString()); + + tester.stepForwardOnGenome(); + + Assert.assertTrue(state.getReadOffset() >= lastOffset, "Somehow read offsets are decreasing: lastOffset " + lastOffset + " current " + state.getReadOffset()); + Assert.assertEquals(state.getReadOffset(), tester.getCurrentReadOffset(), "Read offsets are wrong at " + bpVisited); + + Assert.assertFalse(state.isLeftEdge()); + + Assert.assertEquals(state.getCurrentCigarElement(), read.getCigar().getCigarElement(tester.currentOperatorIndex), "CigarElement index failure"); + Assert.assertEquals(state.getOffsetIntoCurrentCigarElement(), tester.getCurrentPositionOnOperatorBase0(), "CigarElement index failure"); + + Assert.assertEquals(read.getCigar().getCigarElement(state.getCurrentCigarElementOffset()), state.getCurrentCigarElement(), "Current cigar element isn't what we'd get from the read itself"); + + Assert.assertTrue(state.getOffsetIntoCurrentCigarElement() >= 0, "Offset into current cigar too small"); + Assert.assertTrue(state.getOffsetIntoCurrentCigarElement() < state.getCurrentCigarElement().getLength(), "Offset into current cigar too big"); + + Assert.assertEquals(state.getGenomeOffset(), tester.getCurrentGenomeOffsetBase0(), "Offset from alignment start is bad"); + Assert.assertEquals(state.getGenomePosition(), tester.getCurrentGenomeOffsetBase0() + read.getAlignmentStart(), "GenomePosition start is bad"); + Assert.assertEquals(state.getLocation(genomeLocParser).size(), 1, "GenomeLoc position should have size == 1"); + 
Assert.assertEquals(state.getLocation(genomeLocParser).getStart(), state.getGenomePosition(), "GenomeLoc position is bad"); + + // most tests of this functionality are in LIBS + Assert.assertNotNull(state.makePileupElement()); + + lastOffset = state.getReadOffset(); + bpVisited++; + } + + Assert.assertEquals(bpVisited, expectedBpToVisit, "Didn't visit the expected number of bp"); + Assert.assertEquals(state.getReadOffset(), read.getReadLength()); + Assert.assertEquals(state.getCurrentCigarElementOffset(), read.getCigarLength()); + Assert.assertEquals(state.getCurrentCigarElement(), null); + Assert.assertNotNull(state.toString()); + } +} diff --git a/public/java/test/org/broadinstitute/sting/utils/locusiterator/LIBS_position.java b/public/java/test/org/broadinstitute/sting/utils/locusiterator/LIBS_position.java new file mode 100644 index 000000000..31be5a25a --- /dev/null +++ b/public/java/test/org/broadinstitute/sting/utils/locusiterator/LIBS_position.java @@ -0,0 +1,155 @@ +/* + * Copyright (c) 2012 The Broad Institute + * + * Permission is hereby granted, free of charge, to any person + * obtaining a copy of this software and associated documentation + * files (the "Software"), to deal in the Software without + * restriction, including without limitation the rights to use, + * copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following + * conditions: + * + * The above copyright notice and this permission notice shall be + * included in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES + * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + * NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT + * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, + * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR + * THE USE OR OTHER DEALINGS IN THE SOFTWARE. + */ + +package org.broadinstitute.sting.utils.locusiterator; + +import net.sf.samtools.Cigar; +import net.sf.samtools.CigarElement; +import net.sf.samtools.CigarOperator; +import net.sf.samtools.SAMRecord; + +/** +* Created with IntelliJ IDEA. +* User: depristo +* Date: 1/5/13 +* Time: 8:42 PM +* To change this template use File | Settings | File Templates. +*/ +public final class LIBS_position { + + SAMRecord read; + + final int numOperators; + int currentOperatorIndex = 0; + int currentPositionOnOperator = 0; + int currentReadOffset = 0; + int currentGenomeOffset = 0; + + public boolean isBeforeDeletionStart = false; + public boolean isBeforeDeletedBase = false; + public boolean isAfterDeletionEnd = false; + public boolean isAfterDeletedBase = false; + public boolean isBeforeInsertion = false; + public boolean isAfterInsertion = false; + public boolean isNextToSoftClip = false; + + boolean sawMop = false; + + public LIBS_position(final SAMRecord read) { + this.read = read; + numOperators = read.getCigar().numCigarElements(); + } + + public int getCurrentReadOffset() { + return Math.max(0, currentReadOffset - 1); + } + + public int getCurrentPositionOnOperatorBase0() { + return currentPositionOnOperator - 1; + } + + public int getCurrentGenomeOffsetBase0() { + return currentGenomeOffset - 1; + } + + /** + * Steps forward on the genome. Returns false when done reading the read, true otherwise. 
+ */ + public boolean stepForwardOnGenome() { + if ( currentOperatorIndex == numOperators ) + return false; + + CigarElement curElement = read.getCigar().getCigarElement(currentOperatorIndex); + if ( currentPositionOnOperator >= curElement.getLength() ) { + if ( ++currentOperatorIndex == numOperators ) + return false; + + curElement = read.getCigar().getCigarElement(currentOperatorIndex); + currentPositionOnOperator = 0; + } + + switch ( curElement.getOperator() ) { + case I: // insertion w.r.t. the reference +// if ( !sawMop ) +// break; + case S: // soft clip + currentReadOffset += curElement.getLength(); + case H: // hard clip + case P: // padding + currentOperatorIndex++; + return stepForwardOnGenome(); + + case D: // deletion w.r.t. the reference + case N: // reference skip (looks and gets processed just like a "deletion", just different logical meaning) + currentPositionOnOperator++; + currentGenomeOffset++; + break; + + case M: + case EQ: + case X: + sawMop = true; + currentReadOffset++; + currentPositionOnOperator++; + currentGenomeOffset++; + break; + default: + throw new IllegalStateException("No support for cigar op: " + curElement.getOperator()); + } + + final boolean isFirstOp = currentOperatorIndex == 0; + final boolean isLastOp = currentOperatorIndex == numOperators - 1; + final boolean isFirstBaseOfOp = currentPositionOnOperator == 1; + final boolean isLastBaseOfOp = currentPositionOnOperator == curElement.getLength(); + + isBeforeDeletionStart = isBeforeOp(read.getCigar(), currentOperatorIndex, CigarOperator.D, isLastOp, isLastBaseOfOp); + isBeforeDeletedBase = isBeforeDeletionStart || (!isLastBaseOfOp && curElement.getOperator() == CigarOperator.D); + isAfterDeletionEnd = isAfterOp(read.getCigar(), currentOperatorIndex, CigarOperator.D, isFirstOp, isFirstBaseOfOp); + isAfterDeletedBase = isAfterDeletionEnd || (!isFirstBaseOfOp && curElement.getOperator() == CigarOperator.D); + isBeforeInsertion = isBeforeOp(read.getCigar(), currentOperatorIndex, 
CigarOperator.I, isLastOp, isLastBaseOfOp) + || (!sawMop && curElement.getOperator() == CigarOperator.I); + isAfterInsertion = isAfterOp(read.getCigar(), currentOperatorIndex, CigarOperator.I, isFirstOp, isFirstBaseOfOp); + isNextToSoftClip = isBeforeOp(read.getCigar(), currentOperatorIndex, CigarOperator.S, isLastOp, isLastBaseOfOp) + || isAfterOp(read.getCigar(), currentOperatorIndex, CigarOperator.S, isFirstOp, isFirstBaseOfOp); + + return true; + } + + private static boolean isBeforeOp(final Cigar cigar, + final int currentOperatorIndex, + final CigarOperator op, + final boolean isLastOp, + final boolean isLastBaseOfOp) { + return !isLastOp && isLastBaseOfOp && cigar.getCigarElement(currentOperatorIndex+1).getOperator() == op; + } + + private static boolean isAfterOp(final Cigar cigar, + final int currentOperatorIndex, + final CigarOperator op, + final boolean isFirstOp, + final boolean isFirstBaseOfOp) { + return !isFirstOp && isFirstBaseOfOp && cigar.getCigarElement(currentOperatorIndex-1).getOperator() == op; + } +} diff --git a/public/java/test/org/broadinstitute/sting/utils/locusiterator/LocusIteratorBenchmark.java b/public/java/test/org/broadinstitute/sting/utils/locusiterator/LocusIteratorBenchmark.java new file mode 100644 index 000000000..c0938676e --- /dev/null +++ b/public/java/test/org/broadinstitute/sting/utils/locusiterator/LocusIteratorBenchmark.java @@ -0,0 +1,143 @@ +/* + * Copyright (c) 2012 The Broad Institute + * + * Permission is hereby granted, free of charge, to any person + * obtaining a copy of this software and associated documentation + * files (the "Software"), to deal in the Software without + * restriction, including without limitation the rights to use, + * copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following + * conditions: + * + * The above copyright notice and this permission notice shall be + 
* included in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES + * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT + * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, + * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR + * THE USE OR OTHER DEALINGS IN THE SOFTWARE. + */ + +package org.broadinstitute.sting.utils.locusiterator; + +import com.google.caliper.Param; +import com.google.caliper.SimpleBenchmark; +import net.sf.samtools.SAMFileHeader; +import net.sf.samtools.SAMRecord; +import org.broadinstitute.sting.gatk.contexts.AlignmentContext; +import org.broadinstitute.sting.utils.GenomeLocParser; +import org.broadinstitute.sting.utils.QualityUtils; +import org.broadinstitute.sting.utils.Utils; +import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; + +import java.util.LinkedList; +import java.util.List; + +/** + * Caliper microbenchmark of fragment pileup + */ +public class LocusIteratorBenchmark extends SimpleBenchmark { + protected SAMFileHeader header; + protected GenomeLocParser genomeLocParser; + + List reads = new LinkedList(); + final int readLength = 101; + final int nReads = 10000; + final int locus = 1; + + @Param({"101M", "50M10I40M", "50M10D40M"}) + String cigar; // set automatically by framework + + @Override protected void setUp() { + header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000); + genomeLocParser = new GenomeLocParser(header.getSequenceDictionary()); + + for ( int j = 0; j < nReads; j++ ) { + GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "read", 0, locus, readLength); + read.setReadBases(Utils.dupBytes((byte) 'A', readLength)); + final byte[] quals = 
new byte[readLength]; + for ( int i = 0; i < readLength; i++ ) + quals[i] = (byte)(i % QualityUtils.MAX_QUAL_SCORE); + read.setBaseQualities(quals); + read.setCigarString(cigar); + reads.add(read); + } + } + +// public void timeOriginalLIBS(int rep) { +// for ( int i = 0; i < rep; i++ ) { +// final org.broadinstitute.sting.utils.locusiterator.old.LocusIteratorByState libs = +// new org.broadinstitute.sting.utils.locusiterator.old.LocusIteratorByState( +// new LocusIteratorByStateBaseTest.FakeCloseableIterator(reads.iterator()), +// LocusIteratorByStateBaseTest.createTestReadProperties(), +// genomeLocParser, +// LocusIteratorByState.sampleListForSAMWithoutReadGroups()); +// +// while ( libs.hasNext() ) { +// AlignmentContext context = libs.next(); +// } +// } +// } +// +// public void timeLegacyLIBS(int rep) { +// for ( int i = 0; i < rep; i++ ) { +// final org.broadinstitute.sting.utils.locusiterator.legacy.LegacyLocusIteratorByState libs = +// new org.broadinstitute.sting.utils.locusiterator.legacy.LegacyLocusIteratorByState( +// new LocusIteratorByStateBaseTest.FakeCloseableIterator(reads.iterator()), +// LocusIteratorByStateBaseTest.createTestReadProperties(), +// genomeLocParser, +// LocusIteratorByState.sampleListForSAMWithoutReadGroups()); +// +// while ( libs.hasNext() ) { +// AlignmentContext context = libs.next(); +// } +// } +// } + + public void timeNewLIBS(int rep) { + for ( int i = 0; i < rep; i++ ) { + final org.broadinstitute.sting.utils.locusiterator.LocusIteratorByState libs = + new org.broadinstitute.sting.utils.locusiterator.LocusIteratorByState( + new LocusIteratorByStateBaseTest.FakeCloseableIterator(reads.iterator()), + LocusIteratorByStateBaseTest.createTestReadProperties(), + genomeLocParser, + LocusIteratorByState.sampleListForSAMWithoutReadGroups()); + + while ( libs.hasNext() ) { + AlignmentContext context = libs.next(); + } + } + } + +// public void timeOriginalLIBSStateMachine(int rep) { +// for ( int i = 0; i < rep; i++ ) { +// for ( 
final SAMRecord read : reads ) { +// final SAMRecordAlignmentState alignmentStateMachine = new SAMRecordAlignmentState(read); +// while ( alignmentStateMachine.stepForwardOnGenome() != null ) { +// alignmentStateMachine.getGenomeOffset(); +// } +// } +// } +// } + + public void timeAlignmentStateMachine(int rep) { + for ( int i = 0; i < rep; i++ ) { + for ( final GATKSAMRecord read : reads ) { + final AlignmentStateMachine alignmentStateMachine = new AlignmentStateMachine(read); + while ( alignmentStateMachine.stepForwardOnGenome() != null ) { + ; + } + } + } + } + + public static void main(String[] args) { + com.google.caliper.Runner.main(LocusIteratorBenchmark.class, args); + } +} diff --git a/public/java/test/org/broadinstitute/sting/utils/locusiterator/LocusIteratorByStateBaseTest.java b/public/java/test/org/broadinstitute/sting/utils/locusiterator/LocusIteratorByStateBaseTest.java new file mode 100644 index 000000000..7c8c6108c --- /dev/null +++ b/public/java/test/org/broadinstitute/sting/utils/locusiterator/LocusIteratorByStateBaseTest.java @@ -0,0 +1,252 @@ +/* + * Copyright (c) 2012 The Broad Institute + * + * Permission is hereby granted, free of charge, to any person + * obtaining a copy of this software and associated documentation + * files (the "Software"), to deal in the Software without + * restriction, including without limitation the rights to use, + * copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following + * conditions: + * + * The above copyright notice and this permission notice shall be + * included in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES + * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + * NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT + * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, + * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR + * THE USE OR OTHER DEALINGS IN THE SOFTWARE. + */ + +package org.broadinstitute.sting.utils.locusiterator; + +import net.sf.samtools.*; +import net.sf.samtools.util.CloseableIterator; +import org.broadinstitute.sting.BaseTest; +import org.broadinstitute.sting.gatk.ReadProperties; +import org.broadinstitute.sting.gatk.arguments.ValidationExclusion; +import org.broadinstitute.sting.gatk.datasources.reads.SAMReaderID; +import org.broadinstitute.sting.gatk.downsampling.DownsamplingMethod; +import org.broadinstitute.sting.gatk.filters.ReadFilter; +import org.broadinstitute.sting.gatk.iterators.ReadTransformer; +import org.broadinstitute.sting.utils.GenomeLocParser; +import org.broadinstitute.sting.utils.QualityUtils; +import org.broadinstitute.sting.utils.Utils; +import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; +import org.testng.annotations.BeforeClass; +import org.testng.annotations.DataProvider; + +import java.util.*; + +/** + * testing of the new (non-legacy) version of LocusIteratorByState + */ +public class LocusIteratorByStateBaseTest extends BaseTest { + protected static SAMFileHeader header; + protected GenomeLocParser genomeLocParser; + + @BeforeClass + public void beforeClass() { + header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000); + genomeLocParser = new GenomeLocParser(header.getSequenceDictionary()); + } + + protected LocusIteratorByState makeLTBS(List reads, + ReadProperties readAttributes) { + return new LocusIteratorByState(new FakeCloseableIterator(reads.iterator()), + readAttributes, + genomeLocParser, + LocusIteratorByState.sampleListForSAMWithoutReadGroups()); + } + + public static ReadProperties createTestReadProperties() { + return 
createTestReadProperties(null, false); + } + + public static ReadProperties createTestReadProperties( DownsamplingMethod downsamplingMethod, final boolean keepReads ) { + return new ReadProperties( + Collections.emptyList(), + new SAMFileHeader(), + SAMFileHeader.SortOrder.coordinate, + false, + SAMFileReader.ValidationStringency.STRICT, + downsamplingMethod, + new ValidationExclusion(), + Collections.emptyList(), + Collections.emptyList(), + true, + (byte) -1, + keepReads); + } + + public static class FakeCloseableIterator implements CloseableIterator { + Iterator iterator; + + public FakeCloseableIterator(Iterator it) { + iterator = it; + } + + @Override + public void close() {} + + @Override + public boolean hasNext() { + return iterator.hasNext(); + } + + @Override + public T next() { + return iterator.next(); + } + + @Override + public void remove() { + throw new UnsupportedOperationException("Don't remove!"); + } + } + + protected static class LIBSTest { + public static final int locus = 44367788; + final String cigarString; + final int readLength; + final private List elements; + + public LIBSTest(final String cigarString) { + final Cigar cigar = TextCigarCodec.getSingleton().decode(cigarString); + this.cigarString = cigarString; + this.elements = cigar.getCigarElements(); + this.readLength = cigar.getReadLength(); + } + + @Override + public String toString() { + return "LIBSTest{" + + "cigar='" + cigarString + '\'' + + ", readLength=" + readLength + + '}'; + } + + public List getElements() { + return elements; + } + + public GATKSAMRecord makeRead() { + GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "read", 0, locus, readLength); + read.setReadBases(Utils.dupBytes((byte) 'A', readLength)); + final byte[] quals = new byte[readLength]; + for ( int i = 0; i < readLength; i++ ) + quals[i] = (byte)(i % QualityUtils.MAX_QUAL_SCORE); + read.setBaseQualities(quals); + read.setCigarString(cigarString); + return read; + } + } + + private boolean 
isIndel(final CigarElement ce) { + return ce.getOperator() == CigarOperator.D || ce.getOperator() == CigarOperator.I; + } + + private boolean startsWithDeletion(final List elements) { + for ( final CigarElement element : elements ) { + switch ( element.getOperator() ) { + case M: + case I: + case EQ: + case X: + return false; + case D: + return true; + default: + // keep looking + } + } + + return false; + } + + private LIBSTest makePermutationTest(final List elements) { + CigarElement last = null; + boolean hasMatch = false; + + // starts with D => bad + if ( startsWithDeletion(elements) ) + return null; + + // ends with D => bad + if ( elements.get(elements.size()-1).getOperator() == CigarOperator.D ) + return null; + + // make sure it's valid + String cigar = ""; + int len = 0; + for ( final CigarElement ce : elements ) { + if ( ce.getOperator() == CigarOperator.N ) + return null; // TODO -- don't support N + + // abort on a bad cigar + if ( last != null ) { + if ( ce.getOperator() == last.getOperator() ) + return null; + if ( isIndel(ce) && isIndel(last) ) + return null; + } + + cigar += ce.getLength() + ce.getOperator().toString(); + len += ce.getLength(); + last = ce; + hasMatch = hasMatch || ce.getOperator() == CigarOperator.M; + } + + if ( ! hasMatch && elements.size() == 1 && + ! 
(last.getOperator() == CigarOperator.I || last.getOperator() == CigarOperator.S)) + return null; + + return new LIBSTest(cigar); + } + + @DataProvider(name = "LIBSTest") + public Object[][] createLIBSTests(final List cigarLengths, final List combinations) { + final List tests = new LinkedList(); + + final List allOps = Arrays.asList(CigarOperator.values()); + + final List singleCigars = new LinkedList(); + for ( final int len : cigarLengths ) + for ( final CigarOperator op : allOps ) + singleCigars.add(new CigarElement(len, op)); + + for ( final int complexity : combinations ) { + for ( final List elements : Utils.makePermutations(singleCigars, complexity, true) ) { + final LIBSTest test = makePermutationTest(elements); + if ( test != null ) tests.add(new Object[]{test}); + } + } + + return tests.toArray(new Object[][]{}); + } + + /** + * Work around inadequate tests that aren't worth fixing. + * + * Look at the CIGAR 2M2P2D2P2M. Both M states border a deletion, separated by P (padding elements). So + * the right answer for deletions here is true for isBeforeDeletion() and isAfterDeletion() for the first + * and second M. But the LIBS_position doesn't say so. 
+ * + * @param elements + * @return + */ + protected static boolean hasNeighboringPaddedOps(final List elements, final int elementI) { + return (elementI - 1 >= 0 && isPadding(elements.get(elementI-1))) || + (elementI + 1 < elements.size() && isPadding(elements.get(elementI+1))); + } + + private static boolean isPadding(final CigarElement elt) { + return elt.getOperator() == CigarOperator.P || elt.getOperator() == CigarOperator.H || elt.getOperator() == CigarOperator.S; + } +} diff --git a/public/java/test/org/broadinstitute/sting/utils/locusiterator/LocusIteratorByStateUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/locusiterator/LocusIteratorByStateUnitTest.java new file mode 100644 index 000000000..e5e28e1f6 --- /dev/null +++ b/public/java/test/org/broadinstitute/sting/utils/locusiterator/LocusIteratorByStateUnitTest.java @@ -0,0 +1,689 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. +*/ + +package org.broadinstitute.sting.utils.locusiterator; + +import net.sf.samtools.CigarOperator; +import net.sf.samtools.SAMFileHeader; +import net.sf.samtools.SAMReadGroupRecord; +import org.broadinstitute.sting.gatk.ReadProperties; +import org.broadinstitute.sting.gatk.contexts.AlignmentContext; +import org.broadinstitute.sting.gatk.downsampling.DownsampleType; +import org.broadinstitute.sting.gatk.downsampling.DownsamplingMethod; +import org.broadinstitute.sting.utils.NGSPlatform; +import org.broadinstitute.sting.utils.QualityUtils; +import org.broadinstitute.sting.utils.Utils; +import org.broadinstitute.sting.utils.pileup.PileupElement; +import org.broadinstitute.sting.utils.pileup.ReadBackedPileup; +import org.broadinstitute.sting.utils.sam.ArtificialBAMBuilder; +import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; +import org.broadinstitute.sting.utils.sam.GATKSAMReadGroupRecord; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; +import org.testng.Assert; +import org.testng.annotations.DataProvider; +import org.testng.annotations.Test; + +import java.util.*; + +/** + * testing of the new (non-legacy) version of LocusIteratorByState + */ +public class LocusIteratorByStateUnitTest extends LocusIteratorByStateBaseTest { + private static final boolean DEBUG = false; + protected LocusIteratorByState li; + + @Test(enabled = true) + public void testUnmappedAndAllIReadsPassThrough() { + final int readLength = 10; + GATKSAMRecord mapped1 = ArtificialSAMUtils.createArtificialRead(header,"mapped1",0,1,readLength); + GATKSAMRecord mapped2 = ArtificialSAMUtils.createArtificialRead(header,"mapped2",0,1,readLength); + GATKSAMRecord unmapped = 
ArtificialSAMUtils.createArtificialRead(header,"unmapped",0,1,readLength); + GATKSAMRecord allI = ArtificialSAMUtils.createArtificialRead(header,"allI",0,1,readLength); + + unmapped.setReadUnmappedFlag(true); + unmapped.setCigarString("*"); + allI.setCigarString(readLength + "I"); + + List reads = Arrays.asList(mapped1, unmapped, allI, mapped2); + + // create the iterator by state with the fake reads and fake records + li = makeLTBS(reads,createTestReadProperties(DownsamplingMethod.NONE, true)); + + Assert.assertTrue(li.hasNext()); + AlignmentContext context = li.next(); + ReadBackedPileup pileup = context.getBasePileup(); + Assert.assertEquals(pileup.depthOfCoverage(), 2, "Should see only 2 reads in pileup, even with unmapped and all I reads"); + + final List rawReads = li.transferReadsFromAllPreviousPileups(); + Assert.assertEquals(rawReads, reads, "Input and transferred read lists should be the same, and include the unmapped and all I reads"); + } + + @Test(enabled = true && ! DEBUG) + public void testXandEQOperators() { + final byte[] bases1 = new byte[] {'A','A','A','A','A','A','A','A','A','A'}; + final byte[] bases2 = new byte[] {'A','A','A','C','A','A','A','A','A','C'}; + + // create a test version of the Reads object + ReadProperties readAttributes = createTestReadProperties(); + + GATKSAMRecord r1 = ArtificialSAMUtils.createArtificialRead(header,"r1",0,1,10); + r1.setReadBases(bases1); + r1.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20}); + r1.setCigarString("10M"); + + GATKSAMRecord r2 = ArtificialSAMUtils.createArtificialRead(header,"r2",0,1,10); + r2.setReadBases(bases2); + r2.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20,20,20}); + r2.setCigarString("3=1X5=1X"); + + GATKSAMRecord r3 = ArtificialSAMUtils.createArtificialRead(header,"r3",0,1,10); + r3.setReadBases(bases2); + r3.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20,20,20}); + r3.setCigarString("3=1X5M1X"); + + GATKSAMRecord r4 = 
ArtificialSAMUtils.createArtificialRead(header,"r4",0,1,10); + r4.setReadBases(bases2); + r4.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20}); + r4.setCigarString("10M"); + + List reads = Arrays.asList(r1, r2, r3, r4); + + // create the iterator by state with the fake reads and fake records + li = makeLTBS(reads,readAttributes); + + while (li.hasNext()) { + AlignmentContext context = li.next(); + ReadBackedPileup pileup = context.getBasePileup(); + Assert.assertEquals(pileup.depthOfCoverage(), 4); + } + } + + @Test(enabled = true && ! DEBUG) + public void testIndelsInRegularPileup() { + final byte[] bases = new byte[] {'A','A','A','A','A','A','A','A','A','A'}; + final byte[] indelBases = new byte[] {'A','A','A','A','C','T','A','A','A','A','A','A'}; + + // create a test version of the Reads object + ReadProperties readAttributes = createTestReadProperties(); + + GATKSAMRecord before = ArtificialSAMUtils.createArtificialRead(header,"before",0,1,10); + before.setReadBases(bases); + before.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20}); + before.setCigarString("10M"); + + GATKSAMRecord during = ArtificialSAMUtils.createArtificialRead(header,"during",0,2,10); + during.setReadBases(indelBases); + during.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20,20,20}); + during.setCigarString("4M2I6M"); + + GATKSAMRecord after = ArtificialSAMUtils.createArtificialRead(header,"after",0,3,10); + after.setReadBases(bases); + after.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20}); + after.setCigarString("10M"); + + List reads = Arrays.asList(before, during, after); + + // create the iterator by state with the fake reads and fake records + li = makeLTBS(reads,readAttributes); + + boolean foundIndel = false; + while (li.hasNext()) { + AlignmentContext context = li.next(); + ReadBackedPileup pileup = context.getBasePileup().getBaseFilteredPileup(10); + for (PileupElement p : pileup) { + if (p.isBeforeInsertion()) { + foundIndel = true; + 
Assert.assertEquals(p.getLengthOfImmediatelyFollowingIndel(), 2, "Wrong event length"); + Assert.assertEquals(p.getBasesOfImmediatelyFollowingInsertion(), "CT", "Inserted bases are incorrect"); + break; + } + } + + } + + Assert.assertTrue(foundIndel,"Indel in pileup not found"); + } + + @Test(enabled = false && ! DEBUG) + public void testWholeIndelReadInIsolation() { + final int firstLocus = 44367789; + + // create a test version of the Reads object + ReadProperties readAttributes = createTestReadProperties(); + + GATKSAMRecord indelOnlyRead = ArtificialSAMUtils.createArtificialRead(header, "indelOnly", 0, firstLocus, 76); + indelOnlyRead.setReadBases(Utils.dupBytes((byte)'A',76)); + indelOnlyRead.setBaseQualities(Utils.dupBytes((byte) '@', 76)); + indelOnlyRead.setCigarString("76I"); + + List reads = Arrays.asList(indelOnlyRead); + + // create the iterator by state with the fake reads and fake records + li = makeLTBS(reads, readAttributes); + + // Traditionally, reads that end with indels bleed into the pileup at the following locus. Verify that the next pileup contains this read + // and considers it to be an indel-containing read. + Assert.assertTrue(li.hasNext(),"Should have found a whole-indel read in the normal base pileup without extended events enabled"); + AlignmentContext alignmentContext = li.next(); + Assert.assertEquals(alignmentContext.getLocation().getStart(), firstLocus, "Base pileup is at incorrect location."); + ReadBackedPileup basePileup = alignmentContext.getBasePileup(); + Assert.assertEquals(basePileup.getReads().size(),1,"Pileup is of incorrect size"); + Assert.assertSame(basePileup.getReads().get(0), indelOnlyRead, "Read in pileup is incorrect"); + } + + /** + * Test to make sure that reads supporting only an indel (example cigar string: 76I) do + * not negatively influence the ordering of the pileup. + */ + @Test(enabled = true && ! 
DEBUG) + public void testWholeIndelRead() { + final int firstLocus = 44367788, secondLocus = firstLocus + 1; + + GATKSAMRecord leadingRead = ArtificialSAMUtils.createArtificialRead(header,"leading",0,firstLocus,76); + leadingRead.setReadBases(Utils.dupBytes((byte)'A',76)); + leadingRead.setBaseQualities(Utils.dupBytes((byte)'@',76)); + leadingRead.setCigarString("1M75I"); + + GATKSAMRecord indelOnlyRead = ArtificialSAMUtils.createArtificialRead(header,"indelOnly",0,secondLocus,76); + indelOnlyRead.setReadBases(Utils.dupBytes((byte) 'A', 76)); + indelOnlyRead.setBaseQualities(Utils.dupBytes((byte)'@',76)); + indelOnlyRead.setCigarString("76I"); + + GATKSAMRecord fullMatchAfterIndel = ArtificialSAMUtils.createArtificialRead(header,"fullMatch",0,secondLocus,76); + fullMatchAfterIndel.setReadBases(Utils.dupBytes((byte)'A',76)); + fullMatchAfterIndel.setBaseQualities(Utils.dupBytes((byte)'@',76)); + fullMatchAfterIndel.setCigarString("75I1M"); + + List reads = Arrays.asList(leadingRead, indelOnlyRead, fullMatchAfterIndel); + + // create the iterator by state with the fake reads and fake records + li = makeLTBS(reads, createTestReadProperties()); + int currentLocus = firstLocus; + int numAlignmentContextsFound = 0; + + while(li.hasNext()) { + AlignmentContext alignmentContext = li.next(); + Assert.assertEquals(alignmentContext.getLocation().getStart(),currentLocus,"Current locus returned by alignment context is incorrect"); + + if(currentLocus == firstLocus) { + List readsAtLocus = alignmentContext.getBasePileup().getReads(); + Assert.assertEquals(readsAtLocus.size(),1,"Wrong number of reads at locus " + currentLocus); + Assert.assertSame(readsAtLocus.get(0),leadingRead,"leadingRead absent from pileup at locus " + currentLocus); + } + else if(currentLocus == secondLocus) { + List readsAtLocus = alignmentContext.getBasePileup().getReads(); + Assert.assertEquals(readsAtLocus.size(),1,"Wrong number of reads at locus " + currentLocus); + 
Assert.assertSame(readsAtLocus.get(0),fullMatchAfterIndel,"fullMatchAfterIndel absent from pileup at locus " + currentLocus); + } + + currentLocus++; + numAlignmentContextsFound++; + } + + Assert.assertEquals(numAlignmentContextsFound, 2, "Found incorrect number of alignment contexts"); + } + + /** + * Test to make sure that reads supporting only an indel (example cigar string: 76I) are represented properly + */ + @Test(enabled = false && ! DEBUG) + public void testWholeIndelReadRepresentedTest() { + final int firstLocus = 44367788, secondLocus = firstLocus + 1; + + GATKSAMRecord read1 = ArtificialSAMUtils.createArtificialRead(header,"read1",0,secondLocus,1); + read1.setReadBases(Utils.dupBytes((byte) 'A', 1)); + read1.setBaseQualities(Utils.dupBytes((byte) '@', 1)); + read1.setCigarString("1I"); + + List reads = Arrays.asList(read1); + + // create the iterator by state with the fake reads and fake records + li = makeLTBS(reads, createTestReadProperties()); + + while(li.hasNext()) { + AlignmentContext alignmentContext = li.next(); + ReadBackedPileup p = alignmentContext.getBasePileup(); + Assert.assertTrue(p.getNumberOfElements() == 1); + // TODO -- fix tests +// PileupElement pe = p.iterator().next(); +// Assert.assertTrue(pe.isBeforeInsertion()); +// Assert.assertFalse(pe.isAfterInsertion()); +// Assert.assertEquals(pe.getBasesOfImmediatelyFollowingInsertion(), "A"); + } + + GATKSAMRecord read2 = ArtificialSAMUtils.createArtificialRead(header,"read2",0,secondLocus,10); + read2.setReadBases(Utils.dupBytes((byte) 'A', 10)); + read2.setBaseQualities(Utils.dupBytes((byte) '@', 10)); + read2.setCigarString("10I"); + + reads = Arrays.asList(read2); + + // create the iterator by state with the fake reads and fake records + li = makeLTBS(reads, createTestReadProperties()); + + while(li.hasNext()) { + AlignmentContext alignmentContext = li.next(); + ReadBackedPileup p = alignmentContext.getBasePileup(); + Assert.assertTrue(p.getNumberOfElements() == 1); + // TODO -- fix 
tests +// PileupElement pe = p.iterator().next(); +// Assert.assertTrue(pe.isBeforeInsertion()); +// Assert.assertFalse(pe.isAfterInsertion()); +// Assert.assertEquals(pe.getBasesOfImmediatelyFollowingInsertion(), "AAAAAAAAAA"); + } + } + + + ///////////////////////////////////////////// + // get event length and bases calculations // + ///////////////////////////////////////////// + + @DataProvider(name = "IndelLengthAndBasesTest") + public Object[][] makeIndelLengthAndBasesTest() { + final String EVENT_BASES = "ACGTACGTACGT"; + final List tests = new LinkedList(); + + for ( int eventSize = 1; eventSize < 10; eventSize++ ) { + for ( final CigarOperator indel : Arrays.asList(CigarOperator.D, CigarOperator.I) ) { + final String cigar = String.format("2M%d%s1M", eventSize, indel.toString()); + final String eventBases = indel == CigarOperator.D ? "" : EVENT_BASES.substring(0, eventSize); + final int readLength = 3 + eventBases.length(); + + GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "read", 0, 1, readLength); + read.setReadBases(("TT" + eventBases + "A").getBytes()); + final byte[] quals = new byte[readLength]; + for ( int i = 0; i < readLength; i++ ) + quals[i] = (byte)(i % QualityUtils.MAX_QUAL_SCORE); + read.setBaseQualities(quals); + read.setCigarString(cigar); + + tests.add(new Object[]{read, indel, eventSize, eventBases.equals("") ? null : eventBases}); + } + } + + return tests.toArray(new Object[][]{}); + } + + @Test(enabled = true && ! 
DEBUG, dataProvider = "IndelLengthAndBasesTest") + public void testIndelLengthAndBasesTest(GATKSAMRecord read, final CigarOperator op, final int eventSize, final String eventBases) { + // create the iterator by state with the fake reads and fake records + li = makeLTBS(Arrays.asList((GATKSAMRecord)read), createTestReadProperties()); + + Assert.assertTrue(li.hasNext()); + + final PileupElement firstMatch = getFirstPileupElement(li.next()); + + Assert.assertEquals(firstMatch.getLengthOfImmediatelyFollowingIndel(), 0, "Length != 0 for site not adjacent to indel"); + Assert.assertEquals(firstMatch.getBasesOfImmediatelyFollowingInsertion(), null, "Getbases of following event should be null at non-adajenct event"); + + Assert.assertTrue(li.hasNext()); + + final PileupElement pe = getFirstPileupElement(li.next()); + + if ( op == CigarOperator.D ) + Assert.assertTrue(pe.isBeforeDeletionStart()); + else + Assert.assertTrue(pe.isBeforeInsertion()); + + Assert.assertEquals(pe.getLengthOfImmediatelyFollowingIndel(), eventSize, "Length of event failed"); + Assert.assertEquals(pe.getBasesOfImmediatelyFollowingInsertion(), eventBases, "Getbases of following event failed"); + } + + private PileupElement getFirstPileupElement(final AlignmentContext context) { + final ReadBackedPileup p = context.getBasePileup(); + Assert.assertEquals(p.getNumberOfElements(), 1); + return p.iterator().next(); + } + + //////////////////////////////////////////// + // comprehensive LIBS/PileupElement tests // + //////////////////////////////////////////// + + @DataProvider(name = "LIBSTest") + public Object[][] makeLIBSTest() { + final List tests = new LinkedList(); + +// tests.add(new Object[]{new LIBSTest("2=2D2=2X", 1)}); +// return tests.toArray(new Object[][]{}); + + return createLIBSTests( + Arrays.asList(1, 2), + Arrays.asList(1, 2, 3, 4)); + +// return createLIBSTests( +// Arrays.asList(2), +// Arrays.asList(3)); + } + + @Test(enabled = true && ! 
DEBUG, dataProvider = "LIBSTest") + public void testLIBS(LIBSTest params) { + // create the iterator by state with the fake reads and fake records + final GATKSAMRecord read = params.makeRead(); + li = makeLTBS(Arrays.asList((GATKSAMRecord)read), createTestReadProperties()); + final LIBS_position tester = new LIBS_position(read); + + int bpVisited = 0; + int lastOffset = 0; + while ( li.hasNext() ) { + bpVisited++; + + AlignmentContext alignmentContext = li.next(); + ReadBackedPileup p = alignmentContext.getBasePileup(); + Assert.assertEquals(p.getNumberOfElements(), 1); + PileupElement pe = p.iterator().next(); + + Assert.assertEquals(p.getNumberOfDeletions(), pe.isDeletion() ? 1 : 0, "wrong number of deletions in the pileup"); + Assert.assertEquals(p.getNumberOfMappingQualityZeroReads(), pe.getRead().getMappingQuality() == 0 ? 1 : 0, "wront number of mapq reads in the pileup"); + + tester.stepForwardOnGenome(); + + if ( ! hasNeighboringPaddedOps(params.getElements(), pe.getCurrentCigarOffset()) ) { + Assert.assertEquals(pe.isBeforeDeletionStart(), tester.isBeforeDeletionStart, "before deletion start failure"); + Assert.assertEquals(pe.isAfterDeletionEnd(), tester.isAfterDeletionEnd, "after deletion end failure"); + } + + Assert.assertEquals(pe.isBeforeInsertion(), tester.isBeforeInsertion, "before insertion failure"); + Assert.assertEquals(pe.isAfterInsertion(), tester.isAfterInsertion, "after insertion failure"); + Assert.assertEquals(pe.isNextToSoftClip(), tester.isNextToSoftClip, "next to soft clip failure"); + + Assert.assertTrue(pe.getOffset() >= lastOffset, "Somehow read offsets are decreasing: lastOffset " + lastOffset + " current " + pe.getOffset()); + Assert.assertEquals(pe.getOffset(), tester.getCurrentReadOffset(), "Read offsets are wrong at " + bpVisited); + + Assert.assertEquals(pe.getCurrentCigarElement(), read.getCigar().getCigarElement(tester.currentOperatorIndex), "CigarElement index failure"); + Assert.assertEquals(pe.getOffsetInCurrentCigar(), 
tester.getCurrentPositionOnOperatorBase0(), "CigarElement index failure"); + + Assert.assertEquals(read.getCigar().getCigarElement(pe.getCurrentCigarOffset()), pe.getCurrentCigarElement(), "Current cigar element isn't what we'd get from the read itself"); + + Assert.assertTrue(pe.getOffsetInCurrentCigar() >= 0, "Offset into current cigar too small"); + Assert.assertTrue(pe.getOffsetInCurrentCigar() < pe.getCurrentCigarElement().getLength(), "Offset into current cigar too big"); + + Assert.assertEquals(pe.getOffset(), tester.getCurrentReadOffset(), "Read offset failure"); + lastOffset = pe.getOffset(); + } + + final int expectedBpToVisit = read.getAlignmentEnd() - read.getAlignmentStart() + 1; + Assert.assertEquals(bpVisited, expectedBpToVisit, "Didn't visit the expected number of bp"); + } + + // ------------------------------------------------------------ + // + // Tests for keeping reads + // + // ------------------------------------------------------------ + + @DataProvider(name = "LIBS_ComplexPileupTests") + public Object[][] makeLIBS_ComplexPileupTests() { + final List tests = new LinkedList(); + + for ( final int downsampleTo : Arrays.asList(-1, 1, 2, 5, 10, 30)) { + for ( final int nReadsPerLocus : Arrays.asList(1, 10, 60) ) { + for ( final int nLoci : Arrays.asList(1, 10, 25) ) { + for ( final int nSamples : Arrays.asList(1, 2, 10) ) { + for ( final boolean keepReads : Arrays.asList(true, false) ) { + for ( final boolean grabReadsAfterEachCycle : Arrays.asList(true, false) ) { +// for ( final int downsampleTo : Arrays.asList(1)) { +// for ( final int nReadsPerLocus : Arrays.asList(1) ) { +// for ( final int nLoci : Arrays.asList(1) ) { +// for ( final int nSamples : Arrays.asList(1) ) { +// for ( final boolean keepReads : Arrays.asList(true) ) { +// for ( final boolean grabReadsAfterEachCycle : Arrays.asList(true) ) { + tests.add(new Object[]{nReadsPerLocus, nLoci, nSamples, + keepReads, grabReadsAfterEachCycle, + downsampleTo}); + } + } + } + } + } + } + + 
return tests.toArray(new Object[][]{}); + } + + @Test(enabled = true && ! DEBUG, dataProvider = "LIBS_ComplexPileupTests") + public void testLIBS_ComplexPileupTests(final int nReadsPerLocus, + final int nLoci, + final int nSamples, + final boolean keepReads, + final boolean grabReadsAfterEachCycle, + final int downsampleTo) { + //logger.warn(String.format("testLIBSKeepSubmittedReads %d %d %d %b %b %b", nReadsPerLocus, nLoci, nSamples, keepReads, grabReadsAfterEachCycle, downsample)); + final int readLength = 10; + + final boolean downsample = downsampleTo != -1; + final DownsamplingMethod downsampler = downsample + ? new DownsamplingMethod(DownsampleType.BY_SAMPLE, downsampleTo, null, false) + : new DownsamplingMethod(DownsampleType.NONE, null, null, false); + + final ArtificialBAMBuilder bamBuilder = new ArtificialBAMBuilder(header.getSequenceDictionary(), nReadsPerLocus, nLoci); + bamBuilder.createAndSetHeader(nSamples).setReadLength(readLength).setAlignmentStart(1); + + final List reads = bamBuilder.makeReads(); + li = new LocusIteratorByState(new FakeCloseableIterator(reads.iterator()), + createTestReadProperties(downsampler, keepReads), + genomeLocParser, + bamBuilder.getSamples()); + + final Set seenSoFar = new HashSet(); + final Set keptReads = new HashSet(); + int bpVisited = 0; + while ( li.hasNext() ) { + bpVisited++; + final AlignmentContext alignmentContext = li.next(); + final ReadBackedPileup p = alignmentContext.getBasePileup(); + + AssertWellOrderedPileup(p); + + if ( downsample ) { + // just not a safe test + //Assert.assertTrue(p.getNumberOfElements() <= maxDownsampledCoverage * nSamples, "Too many reads at locus after downsampling"); + } else { + final int minPileupSize = nReadsPerLocus * nSamples; + Assert.assertTrue(p.getNumberOfElements() >= minPileupSize); + } + + // the number of reads starting here + int nReadsStartingHere = 0; + for ( final GATKSAMRecord read : p.getReads() ) + if ( read.getAlignmentStart() == 
alignmentContext.getPosition() ) + nReadsStartingHere++; + + // we can have no more than maxDownsampledCoverage per sample + final int maxCoveragePerLocus = downsample ? downsampleTo : nReadsPerLocus; + Assert.assertTrue(nReadsStartingHere <= maxCoveragePerLocus * nSamples); + + seenSoFar.addAll(p.getReads()); + if ( keepReads && grabReadsAfterEachCycle ) { + final List locusReads = li.transferReadsFromAllPreviousPileups(); + + + if ( downsample ) { + // with downsampling we might have some reads here that were downsampled away + // in the pileup. We want to ensure that no more than the max coverage per sample is added + Assert.assertTrue(locusReads.size() >= nReadsStartingHere); + Assert.assertTrue(locusReads.size() <= maxCoveragePerLocus * nSamples); + } else { + Assert.assertEquals(locusReads.size(), nReadsStartingHere); + } + keptReads.addAll(locusReads); + + // check that all reads we've seen so far are in our keptReads + for ( final GATKSAMRecord read : seenSoFar ) { + Assert.assertTrue(keptReads.contains(read), "A read that appeared in a pileup wasn't found in the kept reads: " + read); + } + } + + if ( ! keepReads ) + Assert.assertTrue(li.getReadsFromAllPreviousPileups().isEmpty(), "Not keeping reads but the underlying list of reads isn't empty"); + } + + if ( keepReads && ! grabReadsAfterEachCycle ) + keptReads.addAll(li.transferReadsFromAllPreviousPileups()); + + if ( ! downsample ) { // downsampling may drop loci + final int expectedBpToVisit = nLoci + readLength - 1; + Assert.assertEquals(bpVisited, expectedBpToVisit, "Didn't visit the expected number of bp"); + } + + if ( keepReads ) { + // check we have the right number of reads + final int totalReads = nLoci * nReadsPerLocus * nSamples; + if ( ! 
downsample ) { // downsampling may drop reads + Assert.assertEquals(keptReads.size(), totalReads, "LIBS didn't keep the right number of reads during the traversal"); + + // check that the order of reads is the same as in our read list + for ( int i = 0; i < reads.size(); i++ ) { + final GATKSAMRecord inputRead = reads.get(i); + final GATKSAMRecord keptRead = reads.get(i); + Assert.assertSame(keptRead, inputRead, "Input reads and kept reads differ at position " + i); + } + } else { + Assert.assertTrue(keptReads.size() <= totalReads, "LIBS didn't keep the right number of reads during the traversal"); + } + + // check uniqueness + final Set readNames = new HashSet(); + for ( final GATKSAMRecord read : keptReads ) { + Assert.assertFalse(readNames.contains(read.getReadName()), "Found duplicate reads in the kept reads"); + readNames.add(read.getReadName()); + } + + // check that all reads we've seen are in our keptReads + for ( final GATKSAMRecord read : seenSoFar ) { + Assert.assertTrue(keptReads.contains(read), "A read that appeared in a pileup wasn't found in the kept reads: " + read); + } + + if ( ! downsample ) { + // check that every read in the list of keep reads occurred at least once in one of the pileups + for ( final GATKSAMRecord keptRead : keptReads ) { + Assert.assertTrue(seenSoFar.contains(keptRead), "There's a read " + keptRead + " in our keptReads list that never appeared in any pileup"); + } + } + } + } + + private void AssertWellOrderedPileup(final ReadBackedPileup pileup) { + if ( ! 
pileup.isEmpty() ) { + int leftMostPos = -1; + + for ( final PileupElement pe : pileup ) { + Assert.assertTrue(pileup.getLocation().getContig().equals(pe.getRead().getReferenceName()), "ReadBackedPileup contains an element " + pe + " that's on a different contig than the pileup itself"); + Assert.assertTrue(pe.getRead().getAlignmentStart() >= leftMostPos, + "ReadBackedPileup contains an element " + pe + " whose read's alignment start " + pe.getRead().getAlignmentStart() + + " occurs before the leftmost position we've seen previously " + leftMostPos); + } + } + } + + // --------------------------------------------------------------------------- + // make sure that downsampling isn't holding onto a bazillion reads + // + @DataProvider(name = "LIBS_NotHoldingTooManyReads") + public Object[][] makeLIBS_NotHoldingTooManyReads() { + final List tests = new LinkedList(); + + for ( final int downsampleTo : Arrays.asList(1, 10)) { + for ( final int nReadsPerLocus : Arrays.asList(100, 1000, 10000, 100000) ) { + for ( final int payloadInBytes : Arrays.asList(0, 1024, 1024*1024) ) { + tests.add(new Object[]{nReadsPerLocus, downsampleTo, payloadInBytes}); + } + } + } + + return tests.toArray(new Object[][]{}); + } + + @Test(enabled = true && ! 
DEBUG, dataProvider = "LIBS_NotHoldingTooManyReads") +// @Test(enabled = true, dataProvider = "LIBS_NotHoldingTooManyReads", timeOut = 100000) + public void testLIBS_NotHoldingTooManyReads(final int nReadsPerLocus, final int downsampleTo, final int payloadInBytes) { + logger.warn(String.format("testLIBS_NotHoldingTooManyReads %d %d %d", nReadsPerLocus, downsampleTo, payloadInBytes)); + final int readLength = 10; + + final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 100000); + final int nSamples = 1; + final List samples = new ArrayList(nSamples); + for ( int i = 0; i < nSamples; i++ ) { + final GATKSAMReadGroupRecord rg = new GATKSAMReadGroupRecord("rg" + i); + final String sample = "sample" + i; + samples.add(sample); + rg.setSample(sample); + rg.setPlatform(NGSPlatform.ILLUMINA.getDefaultPlatform()); + header.addReadGroup(rg); + } + + final boolean downsample = downsampleTo != -1; + final DownsamplingMethod downsampler = downsample + ? new DownsamplingMethod(DownsampleType.BY_SAMPLE, downsampleTo, null, false) + : new DownsamplingMethod(DownsampleType.NONE, null, null, false); + + // final List reads = ArtificialSAMUtils.createReadStream(nReadsPerLocus, nLoci, header, 1, readLength); + + final WeakReadTrackingIterator iterator = new WeakReadTrackingIterator(nReadsPerLocus, readLength, payloadInBytes, header); + + li = new LocusIteratorByState(iterator, + createTestReadProperties(downsampler, false), + genomeLocParser, + samples); + + while ( li.hasNext() ) { + final AlignmentContext next = li.next(); + Assert.assertTrue(next.getBasePileup().getNumberOfElements() <= downsampleTo, "Too many elements in pileup " + next); + // TODO -- assert that there are <= X reads in memory after GC for some X + } + } + + private static class WeakReadTrackingIterator implements Iterator { + final int nReads, readLength, payloadInBytes; + int readI = 0; + final SAMFileHeader header; + + private WeakReadTrackingIterator(int nReads, int readLength, final 
int payloadInBytes, final SAMFileHeader header) { + this.nReads = nReads; + this.readLength = readLength; + this.header = header; + this.payloadInBytes = payloadInBytes; + } + + @Override public boolean hasNext() { return readI < nReads; } + @Override public void remove() { throw new UnsupportedOperationException("no remove"); } + + @Override + public GATKSAMRecord next() { + readI++; + return makeRead(); + } + + private GATKSAMRecord makeRead() { + final SAMReadGroupRecord rg = header.getReadGroups().get(0); + final String readName = String.format("%s.%d.%s", "read", readI, rg.getId()); + final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, readName, 0, 1, readLength); + read.setReadGroup(new GATKSAMReadGroupRecord(rg)); + if ( payloadInBytes > 0 ) + // add a payload byte array to push memory use per read even higher + read.setAttribute("PL", new byte[payloadInBytes]); + return read; + } + } +} diff --git a/public/java/test/org/broadinstitute/sting/utils/locusiterator/PerSampleReadStateManagerUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/locusiterator/PerSampleReadStateManagerUnitTest.java new file mode 100644 index 000000000..b9f2fb29a --- /dev/null +++ b/public/java/test/org/broadinstitute/sting/utils/locusiterator/PerSampleReadStateManagerUnitTest.java @@ -0,0 +1,188 @@ +/* + * Copyright (c) 2012 The Broad Institute + * + * Permission is hereby granted, free of charge, to any person + * obtaining a copy of this software and associated documentation + * files (the "Software"), to deal in the Software without + * restriction, including without limitation the rights to use, + * copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following + * conditions: + * + * The above copyright notice and this permission notice shall be + * included in all copies or substantial portions of the Software. 
+ * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES + * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT + * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, + * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR + * THE USE OR OTHER DEALINGS IN THE SOFTWARE. + */ + +package org.broadinstitute.sting.utils.locusiterator; + +import net.sf.samtools.SAMRecord; +import org.broadinstitute.sting.utils.MathUtils; +import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; +import org.testng.Assert; +import org.testng.annotations.DataProvider; +import org.testng.annotations.Test; + +import java.util.*; + +/** + * testing of the new (non-legacy) version of LocusIteratorByState + */ +public class PerSampleReadStateManagerUnitTest extends LocusIteratorByStateBaseTest { + private class PerSampleReadStateManagerTest extends TestDataProvider { + private List readCountsPerAlignmentStart; + private List reads; + private List> recordStatesByAlignmentStart; + private int removalInterval; + + public PerSampleReadStateManagerTest( List readCountsPerAlignmentStart, int removalInterval ) { + super(PerSampleReadStateManagerTest.class); + + this.readCountsPerAlignmentStart = readCountsPerAlignmentStart; + this.removalInterval = removalInterval; + + reads = new ArrayList(); + recordStatesByAlignmentStart = new ArrayList>(); + + setName(String.format("%s: readCountsPerAlignmentStart: %s removalInterval: %d", + getClass().getSimpleName(), readCountsPerAlignmentStart, removalInterval)); + } + + public void run() { + PerSampleReadStateManager perSampleReadStateManager = new PerSampleReadStateManager(LIBSDownsamplingInfo.NO_DOWNSAMPLING); + + makeReads(); + + for ( ArrayList stackRecordStates : 
recordStatesByAlignmentStart ) { + perSampleReadStateManager.addStatesAtNextAlignmentStart(new LinkedList(stackRecordStates)); + } + + // read state manager should have the right number of reads + Assert.assertEquals(reads.size(), perSampleReadStateManager.size()); + + Iterator originalReadsIterator = reads.iterator(); + Iterator recordStateIterator = perSampleReadStateManager.iterator(); + int recordStateCount = 0; + int numReadStatesRemoved = 0; + + // Do a first-pass validation of the record state iteration by making sure we get back everything we + // put in, in the same order, doing any requested removals of read states along the way + while ( recordStateIterator.hasNext() ) { + AlignmentStateMachine readState = recordStateIterator.next(); + recordStateCount++; + SAMRecord readFromPerSampleReadStateManager = readState.getRead(); + + Assert.assertTrue(originalReadsIterator.hasNext()); + SAMRecord originalRead = originalReadsIterator.next(); + + // The read we get back should be literally the same read in memory as we put in + Assert.assertTrue(originalRead == readFromPerSampleReadStateManager); + + // If requested, remove a read state every removalInterval states + if ( removalInterval > 0 && recordStateCount % removalInterval == 0 ) { + recordStateIterator.remove(); + numReadStatesRemoved++; + } + } + + Assert.assertFalse(originalReadsIterator.hasNext()); + + // If we removed any read states, do a second pass through the read states to make sure the right + // states were removed + if ( numReadStatesRemoved > 0 ) { + Assert.assertEquals(perSampleReadStateManager.size(), reads.size() - numReadStatesRemoved); + + originalReadsIterator = reads.iterator(); + recordStateIterator = perSampleReadStateManager.iterator(); + int readCount = 0; + int readStateCount = 0; + + // Match record states with the reads that should remain after removal + while ( recordStateIterator.hasNext() ) { + AlignmentStateMachine readState = recordStateIterator.next(); + readStateCount++; + 
SAMRecord readFromPerSampleReadStateManager = readState.getRead(); + + Assert.assertTrue(originalReadsIterator.hasNext()); + + SAMRecord originalRead = originalReadsIterator.next(); + readCount++; + + if ( readCount % removalInterval == 0 ) { + originalRead = originalReadsIterator.next(); // advance to next read, since the previous one should have been discarded + readCount++; + } + + // The read we get back should be literally the same read in memory as we put in (after accounting for removals) + Assert.assertTrue(originalRead == readFromPerSampleReadStateManager); + } + + Assert.assertEquals(readStateCount, reads.size() - numReadStatesRemoved); + } + + // Allow memory used by this test to be reclaimed + readCountsPerAlignmentStart = null; + reads = null; + recordStatesByAlignmentStart = null; + } + + private void makeReads() { + int alignmentStart = 1; + + for ( int readsThisStack : readCountsPerAlignmentStart ) { + ArrayList stackReads = new ArrayList(ArtificialSAMUtils.createStackOfIdenticalArtificialReads(readsThisStack, header, "foo", 0, alignmentStart, MathUtils.randomIntegerInRange(50, 100))); + ArrayList stackRecordStates = new ArrayList(); + + for ( GATKSAMRecord read : stackReads ) { + stackRecordStates.add(new AlignmentStateMachine(read)); + } + + reads.addAll(stackReads); + recordStatesByAlignmentStart.add(stackRecordStates); + } + } + } + + @DataProvider(name = "PerSampleReadStateManagerTestDataProvider") + public Object[][] createPerSampleReadStateManagerTests() { + for ( List thisTestReadStateCounts : Arrays.asList( Arrays.asList(1), + Arrays.asList(2), + Arrays.asList(10), + Arrays.asList(1, 1), + Arrays.asList(2, 2), + Arrays.asList(10, 10), + Arrays.asList(1, 10), + Arrays.asList(10, 1), + Arrays.asList(1, 1, 1), + Arrays.asList(2, 2, 2), + Arrays.asList(10, 10, 10), + Arrays.asList(1, 1, 1, 1, 1, 1), + Arrays.asList(10, 10, 10, 10, 10, 10), + Arrays.asList(1, 2, 10, 1, 2, 10) + ) ) { + + for ( int removalInterval : Arrays.asList(0, 2, 3) ) { + 
new PerSampleReadStateManagerTest(thisTestReadStateCounts, removalInterval); + } + } + + return PerSampleReadStateManagerTest.getTests(PerSampleReadStateManagerTest.class); + } + + @Test(dataProvider = "PerSampleReadStateManagerTestDataProvider") + public void runPerSampleReadStateManagerTest( PerSampleReadStateManagerTest test ) { + logger.warn("Running test: " + test); + + test.run(); + } +} diff --git a/public/java/test/org/broadinstitute/sting/utils/pileup/PileupElementUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/pileup/PileupElementUnitTest.java new file mode 100644 index 000000000..a760833f5 --- /dev/null +++ b/public/java/test/org/broadinstitute/sting/utils/pileup/PileupElementUnitTest.java @@ -0,0 +1,191 @@ +/* + * Copyright (c) 2012 The Broad Institute + * + * Permission is hereby granted, free of charge, to any person + * obtaining a copy of this software and associated documentation + * files (the "Software"), to deal in the Software without + * restriction, including without limitation the rights to use, + * copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following + * conditions: + * + * The above copyright notice and this permission notice shall be + * included in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES + * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT + * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, + * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR + * THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ */ + +package org.broadinstitute.sting.utils.pileup; + +import net.sf.samtools.CigarElement; +import net.sf.samtools.CigarOperator; +import org.broadinstitute.sting.utils.QualityUtils; +import org.broadinstitute.sting.utils.Utils; +import org.broadinstitute.sting.utils.locusiterator.AlignmentStateMachine; +import org.broadinstitute.sting.utils.locusiterator.LIBS_position; +import org.broadinstitute.sting.utils.locusiterator.LocusIteratorByStateBaseTest; +import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; +import org.testng.Assert; +import org.testng.annotations.DataProvider; +import org.testng.annotations.Test; + +import java.util.Arrays; +import java.util.LinkedList; +import java.util.List; + +/** + * testing of the new (non-legacy) version of LocusIteratorByState + */ +public class PileupElementUnitTest extends LocusIteratorByStateBaseTest { + @DataProvider(name = "PileupElementTest") + public Object[][] makePileupElementTest() { +// return new Object[][]{{new LIBSTest("2X2D2P2X")}}; +// return createLIBSTests( +// Arrays.asList(2), +// Arrays.asList(2)); + return createLIBSTests( + Arrays.asList(1, 2), + Arrays.asList(1, 2, 3, 4)); + } + + @Test(dataProvider = "PileupElementTest") + public void testPileupElementTest(LIBSTest params) { + final GATKSAMRecord read = params.makeRead(); + final AlignmentStateMachine state = new AlignmentStateMachine(read); + final LIBS_position tester = new LIBS_position(read); + + while ( state.stepForwardOnGenome() != null ) { + tester.stepForwardOnGenome(); + final PileupElement pe = state.makePileupElement(); + + Assert.assertEquals(pe.getRead(), read); + Assert.assertEquals(pe.getMappingQual(), read.getMappingQuality()); + Assert.assertEquals(pe.getOffset(), state.getReadOffset()); + + Assert.assertEquals(pe.isDeletion(), state.getCigarOperator() == CigarOperator.D); + Assert.assertEquals(pe.isAfterInsertion(), tester.isAfterInsertion); + 
Assert.assertEquals(pe.isBeforeInsertion(), tester.isBeforeInsertion); + Assert.assertEquals(pe.isNextToSoftClip(), tester.isNextToSoftClip); + + if ( ! hasNeighboringPaddedOps(params.getElements(), pe.getCurrentCigarOffset()) ) { + Assert.assertEquals(pe.isAfterDeletionEnd(), tester.isAfterDeletionEnd); + Assert.assertEquals(pe.isBeforeDeletionStart(), tester.isBeforeDeletionStart); + } + + + + Assert.assertEquals(pe.atEndOfCurrentCigar(), state.getOffsetIntoCurrentCigarElement() == state.getCurrentCigarElement().getLength() - 1, "atEndOfCurrentCigar failed"); + Assert.assertEquals(pe.atStartOfCurrentCigar(), state.getOffsetIntoCurrentCigarElement() == 0, "atStartOfCurrentCigar failed"); + + Assert.assertEquals(pe.getBase(), pe.isDeletion() ? PileupElement.DELETION_BASE : read.getReadBases()[state.getReadOffset()]); + Assert.assertEquals(pe.getQual(), pe.isDeletion() ? PileupElement.DELETION_QUAL : read.getBaseQualities()[state.getReadOffset()]); + + Assert.assertEquals(pe.getCurrentCigarElement(), state.getCurrentCigarElement()); + Assert.assertEquals(pe.getCurrentCigarOffset(), state.getCurrentCigarElementOffset()); + + // tested in libs + //pe.getLengthOfImmediatelyFollowingIndel(); + //pe.getBasesOfImmediatelyFollowingInsertion(); + + // Don't test -- pe.getBaseIndex(); + if ( pe.atEndOfCurrentCigar() && state.getCurrentCigarElementOffset() < read.getCigarLength() - 1 ) { + final CigarElement nextElement = read.getCigar().getCigarElement(state.getCurrentCigarElementOffset() + 1); + if ( nextElement.getOperator() == CigarOperator.I ) { + Assert.assertTrue(pe.getBetweenNextPosition().size() >= 1); + Assert.assertEquals(pe.getBetweenNextPosition().get(0), nextElement); + } + if ( nextElement.getOperator() == CigarOperator.M ) { + Assert.assertTrue(pe.getBetweenNextPosition().isEmpty()); + } + } else { + Assert.assertTrue(pe.getBetweenNextPosition().isEmpty()); + } + + if ( pe.atStartOfCurrentCigar() && state.getCurrentCigarElementOffset() > 0 ) { + final 
CigarElement prevElement = read.getCigar().getCigarElement(state.getCurrentCigarElementOffset() - 1); + if ( prevElement.getOperator() == CigarOperator.I ) { + Assert.assertTrue(pe.getBetweenPrevPosition().size() >= 1); + Assert.assertEquals(pe.getBetweenPrevPosition().getLast(), prevElement); + } + if ( prevElement.getOperator() == CigarOperator.M ) { + Assert.assertTrue(pe.getBetweenPrevPosition().isEmpty()); + } + } else { + Assert.assertTrue(pe.getBetweenPrevPosition().isEmpty()); + } + + // TODO -- add meaningful tests + pe.getBaseInsertionQual(); + pe.getBaseDeletionQual(); + pe.getRepresentativeCount(); + } + } + + + @DataProvider(name = "PrevAndNextTest") + public Object[][] makePrevAndNextTest() { + final List tests = new LinkedList(); + + final List operators = Arrays.asList(CigarOperator.I, CigarOperator.P, CigarOperator.S); + + for ( final CigarOperator firstOp : Arrays.asList(CigarOperator.M) ) { + for ( final CigarOperator lastOp : Arrays.asList(CigarOperator.M, CigarOperator.D) ) { + for ( final int nIntermediate : Arrays.asList(1, 2, 3) ) { + for ( final List combination : Utils.makePermutations(operators, nIntermediate, false) ) { + final int readLength = 2 + combination.size(); + GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "read", 0, 1, readLength); + read.setReadBases(Utils.dupBytes((byte) 'A', readLength)); + read.setBaseQualities(Utils.dupBytes((byte) 30, readLength)); + + String cigar = "1" + firstOp; + for ( final CigarOperator op : combination ) cigar += "1" + op; + cigar += "1" + lastOp; + read.setCigarString(cigar); + + tests.add(new Object[]{read, firstOp, lastOp, combination}); + } + } + } + } + + return tests.toArray(new Object[][]{}); + } + + @Test(dataProvider = "PrevAndNextTest") + public void testPrevAndNextTest(final GATKSAMRecord read, final CigarOperator firstOp, final CigarOperator lastOp, final List ops) { + final AlignmentStateMachine state = new AlignmentStateMachine(read); + + 
state.stepForwardOnGenome(); + final PileupElement pe = state.makePileupElement(); + Assert.assertEquals(pe.getBetweenNextPosition().size(), ops.size()); + Assert.assertEquals(pe.getBetweenPrevPosition().size(), 0); + assertEqualsOperators(pe.getBetweenNextPosition(), ops); + Assert.assertEquals(pe.getPreviousOnGenomeCigarElement(), null); + Assert.assertNotNull(pe.getNextOnGenomeCigarElement()); + Assert.assertEquals(pe.getNextOnGenomeCigarElement().getOperator(), lastOp); + + state.stepForwardOnGenome(); + final PileupElement pe2 = state.makePileupElement(); + Assert.assertEquals(pe2.getBetweenPrevPosition().size(), ops.size()); + Assert.assertEquals(pe2.getBetweenNextPosition().size(), 0); + assertEqualsOperators(pe2.getBetweenPrevPosition(), ops); + Assert.assertNotNull(pe2.getPreviousOnGenomeCigarElement()); + Assert.assertEquals(pe2.getPreviousOnGenomeCigarElement().getOperator(), firstOp); + Assert.assertEquals(pe2.getNextOnGenomeCigarElement(), null); + } + + private void assertEqualsOperators(final List elements, final List ops) { + for ( int i = 0; i < elements.size(); i++ ) { + Assert.assertEquals(elements.get(i).getOperator(), ops.get(i), "elements doesn't have expected operator at position " + i); + } + } +} diff --git a/public/java/test/org/broadinstitute/sting/utils/pileup/ReadBackedPileupUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/pileup/ReadBackedPileupUnitTest.java index 3951de93d..18fa8a302 100644 --- a/public/java/test/org/broadinstitute/sting/utils/pileup/ReadBackedPileupUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/utils/pileup/ReadBackedPileupUnitTest.java @@ -25,12 +25,18 @@ package org.broadinstitute.sting.utils.pileup; +import net.sf.samtools.CigarElement; import net.sf.samtools.SAMFileHeader; import net.sf.samtools.SAMReadGroupRecord; +import org.broadinstitute.sting.utils.GenomeLoc; +import org.broadinstitute.sting.utils.GenomeLocParser; +import org.broadinstitute.sting.utils.Utils; import 
org.broadinstitute.sting.utils.sam.GATKSAMRecord; import org.testng.Assert; import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; +import org.testng.annotations.BeforeClass; +import org.testng.annotations.DataProvider; import org.testng.annotations.Test; import java.util.*; @@ -39,6 +45,17 @@ import java.util.*; * Test routines for read-backed pileup. */ public class ReadBackedPileupUnitTest { + protected static SAMFileHeader header; + protected GenomeLocParser genomeLocParser; + private GenomeLoc loc; + + @BeforeClass + public void beforeClass() { + header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000); + genomeLocParser = new GenomeLocParser(header.getSequenceDictionary()); + loc = genomeLocParser.createGenomeLoc("chr1", 1); + } + /** * Ensure that basic read group splitting works. */ @@ -195,4 +212,98 @@ public class ReadBackedPileupUnitTest { missingSamplePileup = pileup.getPileupForSample("not here"); Assert.assertNull(missingSamplePileup,"Pileup for sample 'not here' should be null but isn't"); } -} + + private static int sampleI = 0; + private class RBPCountTest { + final String sample; + final int nReads, nMapq0, nDeletions; + + private RBPCountTest(int nReads, int nMapq0, int nDeletions) { + this.sample = "sample" + sampleI++; + this.nReads = nReads; + this.nMapq0 = nMapq0; + this.nDeletions = nDeletions; + } + + private List makeReads( final int n, final int mapq, final String op ) { + final int readLength = 3; + + final List elts = new LinkedList(); + for ( int i = 0; i < n; i++ ) { + GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "read", 0, 1, readLength); + read.setReadBases(Utils.dupBytes((byte) 'A', readLength)); + read.setBaseQualities(Utils.dupBytes((byte) 30, readLength)); + read.setCigarString("1M1" + op + "1M"); + read.setMappingQuality(mapq); + final int baseOffset = op.equals("M") ? 
1 : 0; + final CigarElement cigarElement = read.getCigar().getCigarElement(1); + elts.add(new PileupElement(read, baseOffset, cigarElement, 1, 0)); + } + + return elts; + } + + private ReadBackedPileupImpl makePileup() { + final List elts = new LinkedList(); + + elts.addAll(makeReads(nMapq0, 0, "M")); + elts.addAll(makeReads(nDeletions, 30, "D")); + elts.addAll(makeReads(nReads - nMapq0 - nDeletions, 30, "M")); + + return new ReadBackedPileupImpl(loc, elts); + } + + @Override + public String toString() { + return "RBPCountTest{" + + "sample='" + sample + '\'' + + ", nReads=" + nReads + + ", nMapq0=" + nMapq0 + + ", nDeletions=" + nDeletions + + '}'; + } + } + + @DataProvider(name = "RBPCountingTest") + public Object[][] makeRBPCountingTest() { + final List tests = new LinkedList(); + + for ( final int nMapq : Arrays.asList(0, 10, 20) ) { + for ( final int nDeletions : Arrays.asList(0, 10, 20) ) { + for ( final int nReg : Arrays.asList(0, 10, 20) ) { + final int total = nMapq + nDeletions + nReg; + if ( total > 0 ) + tests.add(new Object[]{new RBPCountTest(total, nMapq, nDeletions)}); + } + } + } + + return tests.toArray(new Object[][]{}); + } + + @Test(dataProvider = "RBPCountingTest") + public void testRBPCountingTestSinglePileup(RBPCountTest params) { + testRBPCounts(params.makePileup(), params); + } + + @Test(dataProvider = "RBPCountingTest") + public void testRBPCountingTestMultiSample(RBPCountTest params) { + final RBPCountTest newSample = new RBPCountTest(2, 1, 1); + final Map pileupsBySample = new HashMap(); + pileupsBySample.put(newSample.sample, newSample.makePileup()); + pileupsBySample.put(params.sample, params.makePileup()); + final ReadBackedPileup pileup = new ReadBackedPileupImpl(loc, pileupsBySample); + testRBPCounts(pileup, new RBPCountTest(params.nReads + 2, params.nMapq0 + 1, params.nDeletions + 1)); + } + + + private void testRBPCounts(final ReadBackedPileup rbp, RBPCountTest expected) { + for ( int cycles = 0; cycles < 3; cycles++ ) { + // 
multiple cycles to make sure caching is working + Assert.assertEquals(rbp.getNumberOfElements(), expected.nReads); + Assert.assertEquals(rbp.depthOfCoverage(), expected.nReads); + Assert.assertEquals(rbp.getNumberOfDeletions(), expected.nDeletions); + Assert.assertEquals(rbp.getNumberOfMappingQualityZeroReads(), expected.nMapq0); + } + } +} \ No newline at end of file diff --git a/public/java/test/org/broadinstitute/sting/utils/sam/ArtificialBAMBuilderUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/sam/ArtificialBAMBuilderUnitTest.java new file mode 100644 index 000000000..2a638eb69 --- /dev/null +++ b/public/java/test/org/broadinstitute/sting/utils/sam/ArtificialBAMBuilderUnitTest.java @@ -0,0 +1,122 @@ +/* + * Copyright (c) 2012 The Broad Institute + * + * Permission is hereby granted, free of charge, to any person + * obtaining a copy of this software and associated documentation + * files (the "Software"), to deal in the Software without + * restriction, including without limitation the rights to use, + * copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following + * conditions: + * + * The above copyright notice and this permission notice shall be + * included in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES + * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT + * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, + * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR + * THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ */ + +package org.broadinstitute.sting.utils.sam; + +import net.sf.samtools.SAMFileReader; +import net.sf.samtools.SAMRecord; +import org.apache.commons.collections.IteratorUtils; +import org.broadinstitute.sting.BaseTest; +import org.testng.Assert; +import org.testng.annotations.DataProvider; +import org.testng.annotations.Test; + +import java.io.File; +import java.util.Arrays; +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; + +/** + * Created with IntelliJ IDEA. + * User: depristo + * Date: 1/15/13 + * Time: 3:49 PM + * To change this template use File | Settings | File Templates. + */ +public class ArtificialBAMBuilderUnitTest extends BaseTest { + @DataProvider(name = "ArtificialBAMBuilderUnitTestProvider") + public Object[][] makeArtificialBAMBuilderUnitTestProvider() { + final List tests = new LinkedList(); + + final List starts = Arrays.asList( + 1, // very start of the chromosome + ArtificialBAMBuilder.BAM_SHARD_SIZE - 100, // right before the shard boundary + ArtificialBAMBuilder.BAM_SHARD_SIZE + 100 // right after the shard boundary + ); + + for ( final int readLength : Arrays.asList(10, 20) ) { + for ( final int skips : Arrays.asList(0, 1, 10) ) { + for ( final int start : starts ) { + for ( final int nSamples : Arrays.asList(1, 2) ) { + for ( final int nReadsPerLocus : Arrays.asList(1, 10) ) { + for ( final int nLoci : Arrays.asList(10, 100, 1000) ) { + final ArtificialBAMBuilder bamBuilder = new ArtificialBAMBuilder(nReadsPerLocus, nLoci); + bamBuilder.setReadLength(readLength); + bamBuilder.setSkipNLoci(skips); + bamBuilder.setAlignmentStart(start); + bamBuilder.createAndSetHeader(nSamples); + tests.add(new Object[]{bamBuilder, readLength, skips, start, nSamples, nReadsPerLocus, nLoci}); + } + } + } + } + } + } + + return tests.toArray(new Object[][]{}); + } + + @Test(dataProvider = "ArtificialBAMBuilderUnitTestProvider") + public void testBamProvider(final ArtificialBAMBuilder bamBuilder, int readLength, int skips, 
int start, int nSamples, int nReadsPerLocus, int nLoci) { + Assert.assertEquals(bamBuilder.getReadLength(), readLength); + Assert.assertEquals(bamBuilder.getSkipNLoci(), skips); + Assert.assertEquals(bamBuilder.getAlignmentStart(), start); + Assert.assertEquals(bamBuilder.getNSamples(), nSamples); + Assert.assertEquals(bamBuilder.getnReadsPerLocus(), nReadsPerLocus); + Assert.assertEquals(bamBuilder.getnLoci(), nLoci); + + final List reads = bamBuilder.makeReads(); + Assert.assertEquals(reads.size(), bamBuilder.expectedNumberOfReads()); + for ( final GATKSAMRecord read : reads ) { + assertGoodRead(read, bamBuilder); + } + + final File bam = bamBuilder.makeTemporarilyBAMFile(); + final SAMFileReader reader = new SAMFileReader(bam); + Assert.assertTrue(reader.hasIndex()); + final Iterator bamIt = reader.iterator(); + int nReadsFromBam = 0; + int lastStart = -1; + while ( bamIt.hasNext() ) { + final SAMRecord read = bamIt.next(); + assertGoodRead(read, bamBuilder); + nReadsFromBam++; + Assert.assertTrue(read.getAlignmentStart() >= lastStart); + lastStart = read.getAlignmentStart(); + } + Assert.assertEquals(nReadsFromBam, bamBuilder.expectedNumberOfReads()); + } + + private void assertGoodRead(final SAMRecord read, final ArtificialBAMBuilder bamBuilder) { + Assert.assertEquals(read.getReadLength(), bamBuilder.getReadLength()); + Assert.assertEquals(read.getReadBases().length, bamBuilder.getReadLength()); + Assert.assertEquals(read.getBaseQualities().length, bamBuilder.getReadLength()); + Assert.assertTrue(read.getAlignmentStart() >= bamBuilder.getAlignmentStart()); + Assert.assertNotNull(read.getReadGroup()); + } +} + + diff --git a/public/java/test/org/broadinstitute/sting/utils/sam/GATKSAMRecordUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/sam/GATKSAMRecordUnitTest.java index 0bb385d5d..baf4bfbb0 100644 --- a/public/java/test/org/broadinstitute/sting/utils/sam/GATKSAMRecordUnitTest.java +++ 
b/public/java/test/org/broadinstitute/sting/utils/sam/GATKSAMRecordUnitTest.java @@ -27,6 +27,7 @@ package org.broadinstitute.sting.utils.sam; import net.sf.samtools.SAMFileHeader; import org.broadinstitute.sting.BaseTest; +import org.broadinstitute.sting.utils.locusiterator.LocusIteratorByState; import org.broadinstitute.sting.utils.pileup.PileupElement; import org.testng.Assert; import org.testng.annotations.BeforeClass; @@ -67,8 +68,8 @@ public class GATKSAMRecordUnitTest extends BaseTest { @Test public void testReducedReadPileupElement() { - PileupElement readp = new PileupElement(read, 0, false, false, false, false, false, false); - PileupElement reducedreadp = new PileupElement(reducedRead, 0, false, false, false, false, false, false); + PileupElement readp = LocusIteratorByState.createPileupForReadAndOffset(read, 0); + PileupElement reducedreadp = LocusIteratorByState.createPileupForReadAndOffset(reducedRead, 0); Assert.assertFalse(readp.getRead().isReducedRead()); diff --git a/public/java/test/org/broadinstitute/sting/utils/sam/ReadUtilsUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/sam/ReadUtilsUnitTest.java index 71c7d1bb0..4194aa6d5 100644 --- a/public/java/test/org/broadinstitute/sting/utils/sam/ReadUtilsUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/utils/sam/ReadUtilsUnitTest.java @@ -40,7 +40,7 @@ public class ReadUtilsUnitTest extends BaseTest { final int mateStart = 1000; final int BEFORE = mateStart - 2; final int AFTER = mateStart + 2; - Integer myStart, boundary; + int myStart, boundary; GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(bases, quals, cigar); read.setMateAlignmentStart(mateStart); @@ -51,43 +51,43 @@ public class ReadUtilsUnitTest extends BaseTest { read.setAlignmentStart(myStart); read.setReadNegativeStrandFlag(false); boundary = ReadUtils.getAdaptorBoundary(read); - Assert.assertEquals(boundary.intValue(), myStart + fragmentSize + 1); + Assert.assertEquals(boundary, myStart + 
fragmentSize + 1); // Test case 2: positive strand, second read myStart = AFTER; read.setAlignmentStart(myStart); read.setReadNegativeStrandFlag(false); boundary = ReadUtils.getAdaptorBoundary(read); - Assert.assertEquals(boundary.intValue(), myStart + fragmentSize + 1); + Assert.assertEquals(boundary, myStart + fragmentSize + 1); // Test case 3: negative strand, second read myStart = AFTER; read.setAlignmentStart(myStart); read.setReadNegativeStrandFlag(true); boundary = ReadUtils.getAdaptorBoundary(read); - Assert.assertEquals(boundary.intValue(), mateStart - 1); + Assert.assertEquals(boundary, mateStart - 1); // Test case 4: negative strand, first read myStart = BEFORE; read.setAlignmentStart(myStart); read.setReadNegativeStrandFlag(true); boundary = ReadUtils.getAdaptorBoundary(read); - Assert.assertEquals(boundary.intValue(), mateStart - 1); + Assert.assertEquals(boundary, mateStart - 1); // Test case 5: mate is mapped to another chromosome (test both strands) read.setInferredInsertSize(0); read.setReadNegativeStrandFlag(true); boundary = ReadUtils.getAdaptorBoundary(read); - Assert.assertNull(boundary); + Assert.assertEquals(boundary, ReadUtils.CANNOT_COMPUTE_ADAPTOR_BOUNDARY); read.setReadNegativeStrandFlag(false); boundary = ReadUtils.getAdaptorBoundary(read); - Assert.assertNull(boundary); + Assert.assertEquals(boundary, ReadUtils.CANNOT_COMPUTE_ADAPTOR_BOUNDARY); read.setInferredInsertSize(10); // Test case 6: read is unmapped read.setReadUnmappedFlag(true); boundary = ReadUtils.getAdaptorBoundary(read); - Assert.assertNull(boundary); + Assert.assertEquals(boundary, ReadUtils.CANNOT_COMPUTE_ADAPTOR_BOUNDARY); read.setReadUnmappedFlag(false); // Test case 7: reads don't overlap and look like this: @@ -99,7 +99,7 @@ public class ReadUtilsUnitTest extends BaseTest { read.setInferredInsertSize(20); read.setReadNegativeStrandFlag(true); boundary = ReadUtils.getAdaptorBoundary(read); - Assert.assertNull(boundary); + Assert.assertEquals(boundary, 
ReadUtils.CANNOT_COMPUTE_ADAPTOR_BOUNDARY); // second read: myStart = 1000; @@ -107,6 +107,6 @@ public class ReadUtilsUnitTest extends BaseTest { read.setMateAlignmentStart(980); read.setReadNegativeStrandFlag(false); boundary = ReadUtils.getAdaptorBoundary(read); - Assert.assertNull(boundary); + Assert.assertEquals(boundary, ReadUtils.CANNOT_COMPUTE_ADAPTOR_BOUNDARY); } } diff --git a/public/java/test/org/broadinstitute/variant/utils/BaseUtilsUnitTest.java b/public/java/test/org/broadinstitute/variant/utils/BaseUtilsUnitTest.java index 372d13a7a..37627204f 100644 --- a/public/java/test/org/broadinstitute/variant/utils/BaseUtilsUnitTest.java +++ b/public/java/test/org/broadinstitute/variant/utils/BaseUtilsUnitTest.java @@ -50,6 +50,21 @@ public class BaseUtilsUnitTest extends BaseTest { Assert.assertTrue(MathUtils.compareDoubles(fraction, expected) == 0); } + @Test + public void testConvertIUPACtoN() { + + checkBytesAreEqual(BaseUtils.convertIUPACtoN(new byte[]{'A', 'A', 'A'}, false, false), new byte[]{'A', 'A', 'A'}); + checkBytesAreEqual(BaseUtils.convertIUPACtoN(new byte[]{'W', 'A', 'A'}, false, false), new byte[]{'N', 'A', 'A'}); + checkBytesAreEqual(BaseUtils.convertIUPACtoN(new byte[]{'A', 'M', 'A'}, false, false), new byte[]{'A', 'N', 'A'}); + checkBytesAreEqual(BaseUtils.convertIUPACtoN(new byte[]{'A', 'A', 'K'}, false, false), new byte[]{'A', 'A', 'N'}); + checkBytesAreEqual(BaseUtils.convertIUPACtoN(new byte[]{'M', 'M', 'M'}, false, false), new byte[]{'N', 'N', 'N'}); + } + + private void checkBytesAreEqual(final byte[] b1, final byte[] b2) { + for ( int i = 0; i < b1.length; i++ ) + Assert.assertEquals(b1[i], b2[i]); + } + @Test public void testTransitionTransversion() { logger.warn("Executing testTransitionTransversion"); diff --git a/public/java/test/org/broadinstitute/variant/variantcontext/GenotypeLikelihoodsUnitTest.java b/public/java/test/org/broadinstitute/variant/variantcontext/GenotypeLikelihoodsUnitTest.java index 49720d1f6..03d6f457f 100644 
--- a/public/java/test/org/broadinstitute/variant/variantcontext/GenotypeLikelihoodsUnitTest.java +++ b/public/java/test/org/broadinstitute/variant/variantcontext/GenotypeLikelihoodsUnitTest.java @@ -154,9 +154,9 @@ public class GenotypeLikelihoodsUnitTest { public void testGetQualFromLikelihoodsMultiAllelic() { GenotypeLikelihoods gl = GenotypeLikelihoods.fromLog10Likelihoods(triAllelic); - Allele ref = Allele.create(BaseUtils.A,true); - Allele alt1 = Allele.create(BaseUtils.C); - Allele alt2 = Allele.create(BaseUtils.T); + Allele ref = Allele.create(BaseUtils.Base.A.base,true); + Allele alt1 = Allele.create(BaseUtils.Base.C.base); + Allele alt2 = Allele.create(BaseUtils.Base.T.base); List allAlleles = Arrays.asList(ref,alt1,alt2); List gtAlleles = Arrays.asList(alt1,alt2); GenotypeBuilder gtBuilder = new GenotypeBuilder(); diff --git a/public/java/test/org/broadinstitute/variant/variantcontext/VariantContextBenchmark.java b/public/java/test/org/broadinstitute/variant/variantcontext/VariantContextBenchmark.java index e6c67970c..34abe372f 100644 --- a/public/java/test/org/broadinstitute/variant/variantcontext/VariantContextBenchmark.java +++ b/public/java/test/org/broadinstitute/variant/variantcontext/VariantContextBenchmark.java @@ -27,13 +27,15 @@ package org.broadinstitute.variant.variantcontext; import com.google.caliper.Param; import com.google.caliper.SimpleBenchmark; -import com.google.caliper.runner.CaliperMain; import org.broad.tribble.Feature; import org.broad.tribble.FeatureCodec; import org.broadinstitute.sting.utils.GenomeLocParser; import org.broadinstitute.variant.vcf.VCFCodec; -import java.util.*; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Set; /** * Caliper microbenchmark of parsing a VCF file @@ -372,6 +374,6 @@ public class VariantContextBenchmark extends SimpleBenchmark { // } public static void main(String[] args) { - CaliperMain.main(VariantContextBenchmark.class, args); + 
com.google.caliper.Runner.main(VariantContextBenchmark.class, args); } } diff --git a/public/scala/src/org/broadinstitute/sting/queue/util/RemoteFile.scala b/public/scala/src/org/broadinstitute/sting/queue/util/RemoteFile.scala index 28be82136..23a99b586 100644 --- a/public/scala/src/org/broadinstitute/sting/queue/util/RemoteFile.scala +++ b/public/scala/src/org/broadinstitute/sting/queue/util/RemoteFile.scala @@ -27,6 +27,8 @@ package org.broadinstitute.sting.queue.util import java.io.File import org.broadinstitute.sting.utils.io.FileExtension +import java.util.Date +import java.net.URL /** * An extension of java.io.File that can be pulled from or pushed to a remote location. @@ -35,5 +37,6 @@ trait RemoteFile extends File with FileExtension { def pullToLocal() def pushToRemote() def deleteRemote() + def createUrl(expiration: Date): URL def remoteDescription: String }