diff --git a/README.md b/README.md new file mode 100644 index 000000000..13b3c0c6e --- /dev/null +++ b/README.md @@ -0,0 +1,3 @@ +gsa-unstable +============ +See http://www.broadinstitute.org/gatk/ diff --git a/build.xml b/build.xml index 56bf4f0cd..2e9df4d5e 100644 --- a/build.xml +++ b/build.xml @@ -1031,6 +1031,7 @@ + @@ -1043,6 +1044,7 @@ + @@ -1078,6 +1080,7 @@ + diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/BaseQualityRankSumTest.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/BaseQualityRankSumTest.java index a3a9e50e9..534834d0e 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/BaseQualityRankSumTest.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/BaseQualityRankSumTest.java @@ -47,13 +47,11 @@ package org.broadinstitute.sting.gatk.walkers.annotator; import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.StandardAnnotation; -import org.broadinstitute.sting.utils.genotyper.MostLikelyAllele; -import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; +import org.broadinstitute.sting.utils.sam.ReadUtils; import org.broadinstitute.variant.vcf.VCFHeaderLineType; import org.broadinstitute.variant.vcf.VCFInfoHeaderLine; import org.broadinstitute.sting.utils.pileup.PileupElement; -import org.broadinstitute.sting.utils.pileup.ReadBackedPileup; -import org.broadinstitute.variant.variantcontext.Allele; import java.util.*; @@ -71,37 +69,11 @@ public class BaseQualityRankSumTest extends RankSumTest implements StandardAnnot public List getDescriptions() { return Arrays.asList(new VCFInfoHeaderLine("BaseQRankSum", 1, VCFHeaderLineType.Float, "Z-score from Wilcoxon rank sum test of Alt Vs. 
Ref base qualities")); } - protected void fillQualsFromPileup(final List allAlleles, final int refLoc, - final ReadBackedPileup pileup, - final PerReadAlleleLikelihoodMap alleleLikelihoodMap, - final List refQuals, final List altQuals){ - - if (alleleLikelihoodMap == null) { - // use fast SNP-based version if we don't have per-read allele likelihoods - for ( final PileupElement p : pileup ) { - if ( isUsableBase(p) ) { - if ( allAlleles.get(0).equals(Allele.create(p.getBase(),true)) ) { - refQuals.add((double)p.getQual()); - } else if ( allAlleles.contains(Allele.create(p.getBase()))) { - altQuals.add((double)p.getQual()); - } - } - } - return; - } - - for (Map el : alleleLikelihoodMap.getLikelihoodMapValues()) { - final MostLikelyAllele a = PerReadAlleleLikelihoodMap.getMostLikelyAllele(el); - if (! a.isInformative()) - continue; // read is non-informative - if (a.getMostLikelyAllele().isReference()) - refQuals.add(-10.0*(double)el.get(a.getMostLikelyAllele())); - else if (allAlleles.contains(a.getMostLikelyAllele())) - altQuals.add(-10.0*(double)el.get(a.getMostLikelyAllele())); - - - } + protected Double getElementForRead(final GATKSAMRecord read, final int refLoc) { + return (double)read.getBaseQualities()[ReadUtils.getReadCoordinateForReferenceCoordinateUpToEndOfRead(read, refLoc, ReadUtils.ClippingTail.RIGHT_TAIL)]; } - + protected Double getElementForPileupElement(final PileupElement p) { + return (double)p.getQual(); + } } \ No newline at end of file diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/ClippingRankSumTest.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/ClippingRankSumTest.java index 366512119..68e983bb8 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/ClippingRankSumTest.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/ClippingRankSumTest.java @@ -46,14 +46,11 @@ package org.broadinstitute.sting.gatk.walkers.annotator; -import 
org.broadinstitute.sting.utils.genotyper.MostLikelyAllele; -import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; +import org.broadinstitute.sting.utils.pileup.PileupElement; import org.broadinstitute.variant.vcf.VCFHeaderLineType; import org.broadinstitute.variant.vcf.VCFInfoHeaderLine; -import org.broadinstitute.sting.utils.pileup.ReadBackedPileup; import org.broadinstitute.sting.utils.sam.AlignmentUtils; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; -import org.broadinstitute.variant.variantcontext.Allele; import java.util.*; @@ -74,26 +71,12 @@ public class ClippingRankSumTest extends RankSumTest { public List getDescriptions() { return Arrays.asList(new VCFInfoHeaderLine("ClippingRankSum", 1, VCFHeaderLineType.Float, "Z-score From Wilcoxon rank sum test of Alt vs. Ref number of hard clipped bases")); } - - protected void fillQualsFromPileup(final List allAlleles, - final int refLoc, - final ReadBackedPileup pileup, - final PerReadAlleleLikelihoodMap likelihoodMap, final List refQuals, final List altQuals) { - // todo - only support non-pileup case for now, e.g. active-region based version - if (pileup != null || likelihoodMap == null) - return; - - for (Map.Entry> el : likelihoodMap.getLikelihoodReadMap().entrySet()) { - - final MostLikelyAllele a = PerReadAlleleLikelihoodMap.getMostLikelyAllele(el.getValue()); - if (! a.isInformative()) - continue; // read is non-informative - if (a.getMostLikelyAllele().isReference()) - refQuals.add((double)AlignmentUtils.getNumHardClippedBases(el.getKey())); - else if (allAlleles.contains(a.getMostLikelyAllele())) - altQuals.add((double)AlignmentUtils.getNumHardClippedBases(el.getKey())); - - } + protected Double getElementForRead(final GATKSAMRecord read, final int refLoc) { + return (double)AlignmentUtils.getNumHardClippedBases(read); } + protected Double getElementForPileupElement(final PileupElement p) { + // TODO - we only support the non-pileup case for now, e.g. 
an active-region based version + return null; + } } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/DepthPerAlleleBySample.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/DepthPerAlleleBySample.java index 1cf91f181..b22ea7931 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/DepthPerAlleleBySample.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/DepthPerAlleleBySample.java @@ -66,10 +66,7 @@ import org.broadinstitute.variant.variantcontext.Genotype; import org.broadinstitute.variant.variantcontext.GenotypeBuilder; import org.broadinstitute.variant.variantcontext.VariantContext; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; +import java.util.*; /** @@ -135,20 +132,24 @@ public class DepthPerAlleleBySample extends GenotypeAnnotation implements Standa } private void annotateWithLikelihoods(final PerReadAlleleLikelihoodMap perReadAlleleLikelihoodMap, final VariantContext vc, final GenotypeBuilder gb) { - final HashMap alleleCounts = new HashMap(); + final Set alleles = new HashSet<>(vc.getAlleles()); + + // make sure that there's a meaningful relationship between the alleles in the perReadAlleleLikelihoodMap and our VariantContext + if ( ! perReadAlleleLikelihoodMap.getAllelesSet().containsAll(alleles) ) + throw new IllegalStateException("VC alleles " + alleles + " not a strict subset of per read allele map alleles " + perReadAlleleLikelihoodMap.getAllelesSet()); + + final HashMap alleleCounts = new HashMap<>(); + for ( final Allele allele : vc.getAlleles() ) { alleleCounts.put(allele, 0); } - for ( final Allele allele : vc.getAlleles() ) { - alleleCounts.put(allele, 0); - } for (Map.Entry> el : perReadAlleleLikelihoodMap.getLikelihoodReadMap().entrySet()) { + final MostLikelyAllele a = PerReadAlleleLikelihoodMap.getMostLikelyAllele(el.getValue(), alleles); + if (! 
a.isInformative() ) continue; // read is non-informative final GATKSAMRecord read = el.getKey(); - final MostLikelyAllele a = PerReadAlleleLikelihoodMap.getMostLikelyAllele(el.getValue()); - if (! a.isInformative() ) - continue; // read is non-informative - if (!vc.getAlleles().contains(a.getMostLikelyAllele())) - continue; // sanity check - shouldn't be needed - alleleCounts.put(a.getMostLikelyAllele(), alleleCounts.get(a.getMostLikelyAllele()) + (read.isReducedRead() ? read.getReducedCount(ReadUtils.getReadCoordinateForReferenceCoordinateUpToEndOfRead(read, vc.getStart(), ReadUtils.ClippingTail.RIGHT_TAIL)) : 1)); + final int prevCount = alleleCounts.get(a.getMostLikelyAllele()); + final int incCount = read.isReducedRead() ? read.getReducedCount(ReadUtils.getReadCoordinateForReferenceCoordinateUpToEndOfRead(read, vc.getStart(), ReadUtils.ClippingTail.RIGHT_TAIL)) : 1; + alleleCounts.put(a.getMostLikelyAllele(), prevCount + incCount); } + final int[] counts = new int[alleleCounts.size()]; counts[0] = alleleCounts.get(vc.getReference()); for (int i = 0; i < vc.getAlternateAlleles().size(); i++) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/DepthPerSampleHC.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/DepthPerSampleHC.java new file mode 100644 index 000000000..9bd641011 --- /dev/null +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/DepthPerSampleHC.java @@ -0,0 +1,126 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). 
+* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. 
Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. 
INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. +* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. 
LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. 
This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. +*/ + +package org.broadinstitute.sting.gatk.walkers.annotator; + +import org.broadinstitute.sting.gatk.contexts.AlignmentContext; +import org.broadinstitute.sting.gatk.contexts.ReferenceContext; +import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; +import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.AnnotatorCompatible; +import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.GenotypeAnnotation; +import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.StandardAnnotation; +import org.broadinstitute.sting.utils.genotyper.MostLikelyAllele; +import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; +import org.broadinstitute.sting.utils.sam.ReadUtils; +import org.broadinstitute.variant.variantcontext.Allele; +import org.broadinstitute.variant.variantcontext.Genotype; +import org.broadinstitute.variant.variantcontext.GenotypeBuilder; +import org.broadinstitute.variant.variantcontext.VariantContext; +import org.broadinstitute.variant.vcf.VCFConstants; +import org.broadinstitute.variant.vcf.VCFFormatHeaderLine; +import org.broadinstitute.variant.vcf.VCFStandardHeaderLines; + +import java.util.*; + + +/** + * The depth of coverage of each allele per sample + * + * the 
depth for the HC is the sum of the informative alleles at this site. It's not perfect (as we cannot + * differentiate between reads that align over the event but aren't informative vs. those that aren't even + * close) but it's a pretty good proxy and it matches with the AD field (i.e., sum(AD) = DP). + */ +public class DepthPerSampleHC extends GenotypeAnnotation { + public void annotate(final RefMetaDataTracker tracker, + final AnnotatorCompatible walker, + final ReferenceContext ref, + final AlignmentContext stratifiedContext, + final VariantContext vc, + final Genotype g, + final GenotypeBuilder gb, + final PerReadAlleleLikelihoodMap alleleLikelihoodMap) { + if ( g == null || !g.isCalled() || ( stratifiedContext == null && alleleLikelihoodMap == null) ) + return; + + if (alleleLikelihoodMap == null ) + throw new IllegalStateException("DepthPerSampleHC can only be used with likelihood based annotations in the HaplotypeCaller"); + + // the depth for the HC is the sum of the informative alleles at this site. It's not perfect (as we cannot + // differentiate between reads that align over the event but aren't informative vs. those that aren't even + // close) but it's a pretty good proxy and it matches with the AD field (i.e., sum(AD) = DP). + int dp = 0; + + if ( alleleLikelihoodMap.isEmpty() ) { + // there are no reads + } else { + final Set alleles = new HashSet<>(vc.getAlleles()); + + // make sure that there's a meaningful relationship between the alleles in the perReadAlleleLikelihoodMap and our VariantContext + if ( ! 
alleleLikelihoodMap.getAllelesSet().containsAll(alleles) ) + throw new IllegalStateException("VC alleles " + alleles + " not a strict subset of per read allele map alleles " + alleleLikelihoodMap.getAllelesSet()); + + for (Map.Entry> el : alleleLikelihoodMap.getLikelihoodReadMap().entrySet()) { + final MostLikelyAllele a = PerReadAlleleLikelihoodMap.getMostLikelyAllele(el.getValue(), alleles); + if ( a.isInformative() ) { + final GATKSAMRecord read = el.getKey(); + final int incCount = read.isReducedRead() ? read.getReducedCount(ReadUtils.getReadCoordinateForReferenceCoordinateUpToEndOfRead(read, vc.getStart(), ReadUtils.ClippingTail.RIGHT_TAIL)) : 1; + dp += incCount; + } + } + + gb.DP(dp); + } + } + + public List getKeyNames() { + return Collections.singletonList(VCFConstants.DEPTH_KEY); + } + + public List getDescriptions() { + return Collections.singletonList(VCFStandardHeaderLines.getFormatLine(getKeyNames().get(0))); + } +} \ No newline at end of file diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/FisherStrand.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/FisherStrand.java index 957eb1aba..876dbf039 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/FisherStrand.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/FisherStrand.java @@ -300,7 +300,7 @@ public class FisherStrand extends InfoFieldAnnotation implements StandardAnnotat for ( Map.Entry sample : stratifiedContexts.entrySet() ) { for (PileupElement p : sample.getValue().getBasePileup()) { - if ( ! RankSumTest.isUsableBase(p, false) ) // ignore deletions + if ( ! 
isUsableBase(p) ) // ignore deletions and bad MQ continue; if ( p.getQual() < minQScoreToConsider || p.getMappingQual() < minQScoreToConsider ) @@ -313,6 +313,20 @@ public class FisherStrand extends InfoFieldAnnotation implements StandardAnnotat return table; } + /** + * Can the base in this pileup element be used in comparative tests? + * + * @param p the pileup element to consider + * + * @return true if this base is part of a meaningful read for comparison, false otherwise + */ + private static boolean isUsableBase(final PileupElement p) { + return !( p.isDeletion() || + p.getMappingQual() == 0 || + p.getMappingQual() == QualityUtils.MAPPING_QUALITY_UNAVAILABLE || + ((int) p.getQual()) < QualityUtils.MIN_USABLE_Q_SCORE); + } + private static void updateTable(final int[][] table, final Allele allele, final GATKSAMRecord read, final Allele ref, final Allele alt, final int representativeCount) { final boolean matchesRef = allele.equals(ref, true); diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/MappingQualityRankSumTest.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/MappingQualityRankSumTest.java index 3873138a2..0ebb09961 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/MappingQualityRankSumTest.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/MappingQualityRankSumTest.java @@ -47,14 +47,10 @@ package org.broadinstitute.sting.gatk.walkers.annotator; import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.StandardAnnotation; -import org.broadinstitute.sting.utils.genotyper.MostLikelyAllele; -import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; +import org.broadinstitute.sting.utils.pileup.PileupElement; import org.broadinstitute.variant.vcf.VCFHeaderLineType; import org.broadinstitute.variant.vcf.VCFInfoHeaderLine; -import org.broadinstitute.sting.utils.pileup.PileupElement; -import 
org.broadinstitute.sting.utils.pileup.ReadBackedPileup; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; -import org.broadinstitute.variant.variantcontext.Allele; import java.util.*; @@ -73,35 +69,11 @@ public class MappingQualityRankSumTest extends RankSumTest implements StandardAn public List getDescriptions() { return Arrays.asList(new VCFInfoHeaderLine("MQRankSum", 1, VCFHeaderLineType.Float, "Z-score From Wilcoxon rank sum test of Alt vs. Ref read mapping qualities")); } - protected void fillQualsFromPileup(final List allAlleles, - final int refLoc, - final ReadBackedPileup pileup, - final PerReadAlleleLikelihoodMap likelihoodMap, - final List refQuals, final List altQuals) { - - if (pileup != null && likelihoodMap == null) { - // old UG snp-only path through the annotations - for ( final PileupElement p : pileup ) { - if ( isUsableBase(p) ) { - if ( allAlleles.get(0).equals(Allele.create(p.getBase(), true)) ) { - refQuals.add((double)p.getMappingQual()); - } else if ( allAlleles.contains(Allele.create(p.getBase()))) { - altQuals.add((double)p.getMappingQual()); - } - } - } - return; - } - for (Map.Entry> el : likelihoodMap.getLikelihoodReadMap().entrySet()) { - final MostLikelyAllele a = PerReadAlleleLikelihoodMap.getMostLikelyAllele(el.getValue()); - // BUGBUG: There needs to be a comparable isUsableBase check here - if (! 
a.isInformative()) - continue; // read is non-informative - if (a.getMostLikelyAllele().isReference()) - refQuals.add((double)el.getKey().getMappingQuality()); - else if (allAlleles.contains(a.getMostLikelyAllele())) - altQuals.add((double)el.getKey().getMappingQuality()); - } + protected Double getElementForRead(final GATKSAMRecord read, final int refLoc) { + return (double)read.getMappingQuality(); } - } \ No newline at end of file + protected Double getElementForPileupElement(final PileupElement p) { + return (double)p.getRead().getMappingQuality(); + } +} \ No newline at end of file diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/RMSMappingQuality.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/RMSMappingQuality.java index 18348162e..d9bc5966c 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/RMSMappingQuality.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/RMSMappingQuality.java @@ -56,6 +56,7 @@ import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.StandardAnnota import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; import org.broadinstitute.sting.utils.MathUtils; import org.broadinstitute.sting.utils.QualityUtils; +import org.broadinstitute.sting.utils.sam.ReadUtils; import org.broadinstitute.variant.vcf.VCFConstants; import org.broadinstitute.variant.vcf.VCFInfoHeaderLine; import org.broadinstitute.variant.vcf.VCFStandardHeaderLines; @@ -77,55 +78,41 @@ public class RMSMappingQuality extends InfoFieldAnnotation implements StandardAn final Map stratifiedContexts, final VariantContext vc, final Map perReadAlleleLikelihoodMap ) { - int totalSize = 0, index = 0; - int qualities[]; - if (stratifiedContexts != null) { + + final List qualities = new ArrayList<>(); + if ( stratifiedContexts != null ) { if ( stratifiedContexts.size() == 0 ) return null; - for ( AlignmentContext context : stratifiedContexts.values() ) 
- totalSize += context.size(); - - qualities = new int[totalSize]; - - for ( Map.Entry sample : stratifiedContexts.entrySet() ) { - AlignmentContext context = sample.getValue(); - for (PileupElement p : context.getBasePileup() ) - index = fillMappingQualitiesFromPileupAndUpdateIndex(p.getRead(), index, qualities); + for ( final Map.Entry sample : stratifiedContexts.entrySet() ) { + final AlignmentContext context = sample.getValue(); + for ( final PileupElement p : context.getBasePileup() ) + fillMappingQualitiesFromPileup(p.getRead().getMappingQuality(), p.getRepresentativeCount(), qualities); } } else if (perReadAlleleLikelihoodMap != null) { if ( perReadAlleleLikelihoodMap.size() == 0 ) return null; - for ( PerReadAlleleLikelihoodMap perReadLikelihoods : perReadAlleleLikelihoodMap.values() ) - totalSize += perReadLikelihoods.size(); - - qualities = new int[totalSize]; - for ( PerReadAlleleLikelihoodMap perReadLikelihoods : perReadAlleleLikelihoodMap.values() ) { - for (GATKSAMRecord read : perReadLikelihoods.getStoredElements()) - index = fillMappingQualitiesFromPileupAndUpdateIndex(read, index, qualities); - - - } + for ( final PerReadAlleleLikelihoodMap perReadLikelihoods : perReadAlleleLikelihoodMap.values() ) { + for ( final GATKSAMRecord read : perReadLikelihoods.getStoredElements() ) + fillMappingQualitiesFromPileup(read.getMappingQuality(), (read.isReducedRead() ? 
read.getReducedCount(ReadUtils.getReadCoordinateForReferenceCoordinateUpToEndOfRead(read, vc.getStart(), ReadUtils.ClippingTail.RIGHT_TAIL)) : 1), qualities); + } } else return null; - - - double rms = MathUtils.rms(qualities); - Map map = new HashMap(); - map.put(getKeyNames().get(0), String.format("%.2f", rms)); - return map; + final double rms = MathUtils.rms(qualities); + return Collections.singletonMap(getKeyNames().get(0), (Object)String.format("%.2f", rms)); } - private static int fillMappingQualitiesFromPileupAndUpdateIndex(final GATKSAMRecord read, final int inputIdx, final int[] qualities) { - int outputIdx = inputIdx; - if ( read.getMappingQuality() != QualityUtils.MAPPING_QUALITY_UNAVAILABLE ) - qualities[outputIdx++] = read.getMappingQuality(); - - return outputIdx; + private static void fillMappingQualitiesFromPileup(final int mq, final int representativeCount, final List qualities) { + if ( mq != QualityUtils.MAPPING_QUALITY_UNAVAILABLE ) { + if ( representativeCount == 1 ) + qualities.add(mq); + else + qualities.addAll(Collections.nCopies(representativeCount, mq)); + } } public List getKeyNames() { return Arrays.asList(VCFConstants.RMS_MAPPING_QUALITY_KEY); } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/RankSumTest.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/RankSumTest.java index ef456824e..37508fc06 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/RankSumTest.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/RankSumTest.java @@ -53,9 +53,11 @@ import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.ActiveRegionBasedAnnotation; import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.AnnotatorCompatible; import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.InfoFieldAnnotation; +import 
org.broadinstitute.sting.utils.genotyper.MostLikelyAllele; import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; import org.broadinstitute.sting.utils.MannWhitneyU; import org.broadinstitute.sting.utils.QualityUtils; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import org.broadinstitute.variant.vcf.VCFHeaderLine; import org.broadinstitute.sting.utils.collections.Pair; import org.broadinstitute.sting.utils.pileup.PileupElement; @@ -87,31 +89,33 @@ public abstract class RankSumTest extends InfoFieldAnnotation implements ActiveR if (genotypes == null || genotypes.size() == 0) return null; - final ArrayList refQuals = new ArrayList(); - final ArrayList altQuals = new ArrayList(); + final ArrayList refQuals = new ArrayList<>(); + final ArrayList altQuals = new ArrayList<>(); for ( final Genotype genotype : genotypes.iterateInSampleNameOrder() ) { - PerReadAlleleLikelihoodMap indelLikelihoodMap = null; - ReadBackedPileup pileup = null; + boolean usePileup = true; - if (stratifiedContexts != null) { // the old UG SNP-only path through the annotations - final AlignmentContext context = stratifiedContexts.get(genotype.getSampleName()); - if ( context != null ) - pileup = context.getBasePileup(); + if ( stratifiedPerReadAlleleLikelihoodMap != null ) { + final PerReadAlleleLikelihoodMap likelihoodMap = stratifiedPerReadAlleleLikelihoodMap.get(genotype.getSampleName()); + if ( likelihoodMap != null && !likelihoodMap.isEmpty() ) { + fillQualsFromLikelihoodMap(vc.getAlleles(), vc.getStart(), likelihoodMap, refQuals, altQuals); + usePileup = false; + } } - if (stratifiedPerReadAlleleLikelihoodMap != null ) - indelLikelihoodMap = stratifiedPerReadAlleleLikelihoodMap.get(genotype.getSampleName()); - if (indelLikelihoodMap != null && indelLikelihoodMap.isEmpty()) - indelLikelihoodMap = null; - // treat an empty likelihood map as a null reference - will simplify contract with fillQualsFromPileup - if (indelLikelihoodMap == null && pileup == null) - 
continue; - - fillQualsFromPileup(vc.getAlleles(), vc.getStart(), pileup, indelLikelihoodMap, refQuals, altQuals ); + // the old UG SNP-only path through the annotations + if ( usePileup && stratifiedContexts != null ) { + final AlignmentContext context = stratifiedContexts.get(genotype.getSampleName()); + if ( context != null ) { + final ReadBackedPileup pileup = context.getBasePileup(); + if ( pileup != null ) + fillQualsFromPileup(vc.getAlleles(), pileup, refQuals, altQuals); + } + } } - if (refQuals.isEmpty() && altQuals.isEmpty()) + + if ( refQuals.isEmpty() && altQuals.isEmpty() ) return null; final MannWhitneyU mannWhitneyU = new MannWhitneyU(useDithering); @@ -136,18 +140,72 @@ public abstract class RankSumTest extends InfoFieldAnnotation implements ActiveR // we are testing that set1 (the alt bases) have lower quality scores than set2 (the ref bases) final Pair testResults = mannWhitneyU.runOneSidedTest(MannWhitneyU.USet.SET1); - final Map map = new HashMap(); + final Map map = new HashMap<>(); if (!Double.isNaN(testResults.first)) map.put(getKeyNames().get(0), String.format("%.3f", testResults.first)); return map; } - protected abstract void fillQualsFromPileup(final List alleles, - final int refLoc, - final ReadBackedPileup readBackedPileup, - final PerReadAlleleLikelihoodMap alleleLikelihoodMap, - final List refQuals, - final List altQuals); + private void fillQualsFromPileup(final List alleles, + final ReadBackedPileup pileup, + final List refQuals, + final List altQuals) { + for ( final PileupElement p : pileup ) { + if ( isUsableBase(p) ) { + final Double value = getElementForPileupElement(p); + if ( value == null ) + continue; + + if ( alleles.get(0).equals(Allele.create(p.getBase(), true)) ) + refQuals.add(value); + else if ( alleles.contains(Allele.create(p.getBase())) ) + altQuals.add(value); + } + } + } + + private void fillQualsFromLikelihoodMap(final List alleles, + final int refLoc, + final PerReadAlleleLikelihoodMap likelihoodMap, + final 
List refQuals, + final List altQuals) { + for ( final Map.Entry> el : likelihoodMap.getLikelihoodReadMap().entrySet() ) { + final MostLikelyAllele a = PerReadAlleleLikelihoodMap.getMostLikelyAllele(el.getValue()); + if ( ! a.isInformative() ) + continue; // read is non-informative + + final GATKSAMRecord read = el.getKey(); + if ( isUsableRead(read, refLoc) ) { + final Double value = getElementForRead(read, refLoc); + if ( value == null ) + continue; + + if ( a.getMostLikelyAllele().isReference() ) + refQuals.add(value); + else if ( alleles.contains(a.getMostLikelyAllele()) ) + altQuals.add(value); + } + } + } + + /** + * Get the element for the given read at the given reference position + * + * @param read the read + * @param refLoc the reference position + * @return a Double representing the element to be used in the rank sum test, or null if it should not be used + */ + protected abstract Double getElementForRead(final GATKSAMRecord read, final int refLoc); + + // TODO -- until the ReadPosRankSumTest stops treating these differently, we need to have separate methods for GATKSAMRecords and PileupElements. Yuck. + + /** + * Get the element for the given read at the given reference position + * + * @param p the pileup element + * @return a Double representing the element to be used in the rank sum test, or null if it should not be used + */ + protected abstract Double getElementForPileupElement(final PileupElement p); /** * Can the base in this pileup element be used in comparative tests between ref / alt bases? 
@@ -157,30 +215,33 @@ public abstract class RankSumTest extends InfoFieldAnnotation implements ActiveR * @param p the pileup element to consider * @return true if this base is part of a meaningful read for comparison, false otherwise */ - public static boolean isUsableBase(final PileupElement p) { - return isUsableBase(p, false); + protected boolean isUsableBase(final PileupElement p) { + return !(p.isDeletion() || + p.getMappingQual() == 0 || + p.getMappingQual() == QualityUtils.MAPPING_QUALITY_UNAVAILABLE || + ((int) p.getQual()) < QualityUtils.MIN_USABLE_Q_SCORE || // need the unBAQed quality score here + p.getRead().isReducedRead() ); } /** - * Can the base in this pileup element be used in comparative tests between ref / alt bases? + * Can the read be used in comparative tests between ref / alt bases? * - * @param p the pileup element to consider - * @param allowDeletions if true, allow p to be a deletion base - * @return true if this base is part of a meaningful read for comparison, false otherwise + * @param read the read to consider + * @param refLoc the reference location + * @return true if this read is meaningful for comparison, false otherwise */ - public static boolean isUsableBase(final PileupElement p, final boolean allowDeletions) { - return !((! allowDeletions && p.isDeletion()) || - p.getMappingQual() == 0 || - p.getMappingQual() == QualityUtils.MAPPING_QUALITY_UNAVAILABLE || - ((int) p.getQual()) < QualityUtils.MIN_USABLE_Q_SCORE); // need the unBAQed quality score here + protected boolean isUsableRead(final GATKSAMRecord read, final int refLoc) { + return !( read.getMappingQuality() == 0 || + read.getMappingQuality() == QualityUtils.MAPPING_QUALITY_UNAVAILABLE || + read.isReducedRead() ); } /** * Initialize the rank sum test annotation using walker and engine information. Right now this checks to see if * engine randomization is turned off, and if so does not dither. 
- * @param walker - * @param toolkit - * @param headerLines + * @param walker the walker + * @param toolkit the GATK engine + * @param headerLines the header lines */ public void initialize ( AnnotatorCompatible walker, GenomeAnalysisEngine toolkit, Set headerLines ) { useDithering = ! toolkit.getArguments().disableDithering; diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/ReadPosRankSumTest.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/ReadPosRankSumTest.java index 6ce4aab49..37faaed22 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/ReadPosRankSumTest.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/ReadPosRankSumTest.java @@ -51,17 +51,13 @@ import net.sf.samtools.CigarElement; import net.sf.samtools.CigarOperator; import net.sf.samtools.SAMRecord; import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.StandardAnnotation; -import org.broadinstitute.sting.utils.genotyper.MostLikelyAllele; -import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; import org.broadinstitute.sting.gatk.walkers.indels.PairHMMIndelErrorModel; import org.broadinstitute.variant.vcf.VCFHeaderLineType; import org.broadinstitute.variant.vcf.VCFInfoHeaderLine; import org.broadinstitute.sting.utils.pileup.PileupElement; -import org.broadinstitute.sting.utils.pileup.ReadBackedPileup; import org.broadinstitute.sting.utils.sam.AlignmentUtils; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import org.broadinstitute.sting.utils.sam.ReadUtils; -import org.broadinstitute.variant.variantcontext.Allele; import java.util.*; @@ -83,55 +79,34 @@ public class ReadPosRankSumTest extends RankSumTest implements StandardAnnotatio return Arrays.asList(new VCFInfoHeaderLine("ReadPosRankSum", 1, VCFHeaderLineType.Float, "Z-score from Wilcoxon rank sum test of Alt vs. 
Ref read position bias")); } - protected void fillQualsFromPileup(final List allAlleles, - final int refLoc, - final ReadBackedPileup pileup, - final PerReadAlleleLikelihoodMap alleleLikelihoodMap, - final List refQuals, final List altQuals) { + protected Double getElementForRead(final GATKSAMRecord read, final int refLoc) { + final int offset = ReadUtils.getReadCoordinateForReferenceCoordinate( read.getSoftStart(), read.getCigar(), refLoc, ReadUtils.ClippingTail.RIGHT_TAIL, true ); + if ( offset == ReadUtils.CLIPPING_GOAL_NOT_REACHED ) + return null; - if (alleleLikelihoodMap == null) { - // use old UG SNP-based version if we don't have per-read allele likelihoods - for ( final PileupElement p : pileup ) { - if ( isUsableBase(p) && p.getRead().getCigar() != null ) { - int readPos = AlignmentUtils.calcAlignmentByteArrayOffset(p.getRead().getCigar(), p, 0, 0); - - readPos = getFinalReadPosition(p.getRead(),readPos); - - if ( allAlleles.get(0).equals(Allele.create(p.getBase(), true)) ) { - refQuals.add((double)readPos); - } else if ( allAlleles.contains(Allele.create(p.getBase()))) { - altQuals.add((double)readPos); - } - } - } - return; - } - - for (Map.Entry> el : alleleLikelihoodMap.getLikelihoodReadMap().entrySet()) { - final MostLikelyAllele a = PerReadAlleleLikelihoodMap.getMostLikelyAllele(el.getValue()); - if (! 
a.isInformative() ) - continue; // read is non-informative - - final GATKSAMRecord read = el.getKey(); - if ( read.getSoftStart() + read.getCigar().getReadLength() <= refLoc ) { // make sure the read actually covers the requested ref loc - continue; - } - final int offset = ReadUtils.getReadCoordinateForReferenceCoordinate( read.getSoftStart(), read.getCigar(), refLoc, ReadUtils.ClippingTail.RIGHT_TAIL, true ); - if ( offset == ReadUtils.CLIPPING_GOAL_NOT_REACHED || read.getCigar() == null ) - continue; - int readPos = AlignmentUtils.calcAlignmentByteArrayOffset( read.getCigar(), offset, false, 0, 0 ); - final int numAlignedBases = AlignmentUtils.getNumAlignedBasesCountingSoftClips( read ); - if (readPos > numAlignedBases / 2) - readPos = numAlignedBases - (readPos + 1); - - if (a.getMostLikelyAllele().isReference()) - refQuals.add((double)readPos); - else if (allAlleles.contains(a.getMostLikelyAllele())) - altQuals.add((double)readPos); - } + int readPos = AlignmentUtils.calcAlignmentByteArrayOffset( read.getCigar(), offset, false, 0, 0 ); + final int numAlignedBases = AlignmentUtils.getNumAlignedBasesCountingSoftClips( read ); + if (readPos > numAlignedBases / 2) + readPos = numAlignedBases - (readPos + 1); + return (double)readPos; } - int getFinalReadPosition(GATKSAMRecord read, int initialReadPosition) { + protected Double getElementForPileupElement(final PileupElement p) { + final int offset = AlignmentUtils.calcAlignmentByteArrayOffset(p.getRead().getCigar(), p, 0, 0); + return (double)getFinalReadPosition(p.getRead(), offset); + } + + @Override + protected boolean isUsableBase(final PileupElement p) { + return super.isUsableBase(p) && p.getRead().getCigar() != null; + } + + @Override + protected boolean isUsableRead(final GATKSAMRecord read, final int refLoc) { + return super.isUsableRead(read, refLoc) && read.getSoftStart() + read.getCigar().getReadLength() > refLoc; + } + + private int getFinalReadPosition(final GATKSAMRecord read, final int 
initialReadPosition) { final int numAlignedBases = getNumAlignedBases(read); int readPos = initialReadPosition; @@ -141,7 +116,8 @@ public class ReadPosRankSumTest extends RankSumTest implements StandardAnnotatio return readPos; } - int getNumClippedBasesAtStart(SAMRecord read) { + + private int getNumClippedBasesAtStart(final SAMRecord read) { // compute total number of clipped bases (soft or hard clipped) // check for hard clips (never consider these bases): final Cigar c = read.getCigar(); @@ -151,8 +127,8 @@ public class ReadPosRankSumTest extends RankSumTest implements StandardAnnotatio if (first.getOperator() == CigarOperator.H) { numStartClippedBases = first.getLength(); } - byte[] unclippedReadBases = read.getReadBases(); - byte[] unclippedReadQuals = read.getBaseQualities(); + final byte[] unclippedReadBases = read.getReadBases(); + final byte[] unclippedReadQuals = read.getBaseQualities(); // Do a stricter base clipping than provided by CIGAR string, since this one may be too conservative, // and may leave a string of Q2 bases still hanging off the reads. 
@@ -167,11 +143,11 @@ public class ReadPosRankSumTest extends RankSumTest implements StandardAnnotatio return numStartClippedBases; } - int getNumAlignedBases(SAMRecord read) { + private int getNumAlignedBases(final GATKSAMRecord read) { return read.getReadLength() - getNumClippedBasesAtStart(read) - getNumClippedBasesAtEnd(read); } - int getNumClippedBasesAtEnd(SAMRecord read) { + private int getNumClippedBasesAtEnd(final GATKSAMRecord read) { // compute total number of clipped bases (soft or hard clipped) // check for hard clips (never consider these bases): final Cigar c = read.getCigar(); @@ -181,8 +157,8 @@ public class ReadPosRankSumTest extends RankSumTest implements StandardAnnotatio if (last.getOperator() == CigarOperator.H) { numEndClippedBases = last.getLength(); } - byte[] unclippedReadBases = read.getReadBases(); - byte[] unclippedReadQuals = read.getBaseQualities(); + final byte[] unclippedReadBases = read.getReadBases(); + final byte[] unclippedReadQuals = read.getBaseQualities(); // Do a stricter base clipping than provided by CIGAR string, since this one may be too conservative, // and may leave a string of Q2 bases still hanging off the reads. 
@@ -193,11 +169,6 @@ public class ReadPosRankSumTest extends RankSumTest implements StandardAnnotatio break; } - return numEndClippedBases; } - - int getOffsetFromClippedReadStart(SAMRecord read, int offset) { - return offset - getNumClippedBasesAtStart(read); - } } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/AnalyzeCovariates.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/AnalyzeCovariates.java new file mode 100644 index 000000000..7a7527dd1 --- /dev/null +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/AnalyzeCovariates.java @@ -0,0 +1,583 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). +* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. 
Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. 
LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. 
+* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. 
Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. 
+*/ +package org.broadinstitute.sting.gatk.walkers.bqsr; + +import com.google.java.contract.Requires; +import org.broadinstitute.sting.commandline.Argument; +import org.broadinstitute.sting.commandline.Input; +import org.broadinstitute.sting.commandline.Output; +import org.broadinstitute.sting.gatk.contexts.AlignmentContext; +import org.broadinstitute.sting.gatk.contexts.ReferenceContext; +import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; +import org.broadinstitute.sting.gatk.walkers.RodWalker; +import org.broadinstitute.sting.utils.Utils; +import org.broadinstitute.sting.utils.exceptions.UserException; +import org.broadinstitute.sting.utils.recalibration.RecalUtils; +import org.broadinstitute.sting.utils.recalibration.RecalibrationReport; +import org.broadinstitute.sting.utils.recalibration.BaseRecalibration; + +import java.io.File; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; + + +/** + * Tool to analyze and evaluate base recalibration ables. + *

+ * For now it generates a plot report to assess the quality of a recalibration. + * + *

Input

+ * + * The tool can take up to three different sets of recalibration tables. + * The resulting plots will be overlaid on top of each other to make + * comparisons easy. + * + * + * + * + * + * + * + * + * + * + * + * + * + *
SetArgumentLabelColorDescription
Original-beforeBEFOREMaroon1First pass recalibration + * tables obtained from applying {@link BaseRecalibration} + * on the original alignment.
Recalibrated-afterAFTERBlueSecond pass recalibration tables + * results from the application of {@link BaseRecalibration} + * on the alignment recalibrated using the first pass tables
Input-BQSRBQSRBlackAny recalibration table without a specific role
+ *
+ * + * You need to specify one set at least. Multiple sets need to have the same values for the following parameters: + *

+ * covariate (order is not important), no_standard_covs, run_without_dbsnp, solid_recal_mode, + * solid_nocall_strategy, mismatches_context_size, mismatches_default_quality, deletions_default_quality, + * insertions_default_quality, maximum_cycle_value, low_quality_tail, default_platform, force_platform, + * quantizing_levels and binary_tag_name + *

Output

+ * + * Currently this tool generates two outputs: + * + *
+ *
-plots my-report.pdf
+ *
A pdf document that encloses plots to assess the quality of the recalibration.
+ *
-csv my-report.csv
+ *
A csv file that contains a table with all the data required to generate those plots.
+ *
+ * + * You need to specify at least one of them. + * + *

Other Arguments

+ * + *

-ignoreLMT, --ignoreLastModificationTimes

+ * + * when set, no warning message will be displayed in the -before recalibration table file is older than the -after one. + * + *

Examples

+ * + * + *

Plot a single recalibration table

+ *
+ * java -jar GenomeAnalysisTK.jar \
+ *      -T AnalyzeCovariates \
+ *      -R myreference.fasta \
+ *      -BQSR myrecal.table \
+ *      -plots BQSR.pdf
+ * 
+ * + *

Plot before (first pass) and after (second pass) recalibration table to compare them

+ * + *
+ * java -jar GenomeAnalysisTK.jar \
+ *      -T AnalyzeCovariates \
+ *      -R myreference.fasta \
+ *      -before recal2.table \
+ *      -after recal3.table \
+ *      -plots recalQC.pdf
+ * 
+ * + *

Plot up to three recalibration tables for comparison

+ * + *
+ *
+ * # You can ignore the before/after semantics completely if you like (if you do add -ignoreLMT
+ * # to avoid a possible warning), but all tables should have been generated using the same parameters.
+ *
+ * java -jar GenomeAnalysisTK.jar \
+ *      -T AnalyzeCovariates \
+ *      -R myreference.fasta \
+ *      -ignoreLMT \
+ *      -BQSR recal1.table \   # you can discard any two
+ *      -before recal2.table \
+ *      -after recal3.table \
+ *      -plots myrecals.pdf
+ * 
+ * + *

Full BQSR quality assessment pipeline

+ * + *
+ * # Generate the first pass recalibration table file.
+ * java -jar GenomeAnalysisTK.jar \
+ *      -T BaseRecalibrator \
+ *      -R myreference.fasta \
+ *      -I myinput.bam \
+ *      -knownSites bundle/my-trusted-snps.vcf \ # optional but recommended
+ *      -knownSites bundle/my-trusted-indels.vcf \ # optional but recommended
+ *      ... other options
+ *      -o firstpass.table
+ *
+ * # Generate the second pass recalibration table file.
+ * java -jar GenomeAnalysisTK.jar \
+ *      -T BaseRecalibrator \
+ *      -BQSR firstpass.table \
+ *      -R myreference.fasta \
+ *      -I myinput.bam \
+ *      -knownSites bundle/my-trusted-snps.vcf \
+ *      -knownSites bundle/my-trusted-indels.vcf \
+ *      ... other options \
+ *      -o secondpass.table
+ *
+ * # Finally generate the plots report and also keep a copy of the csv (optional).
+ * java -jar GenomeAnalysisTK.jar \
+ *      -T AnalyzeCovariates \
+ *      -R myreference.fasta \
+ *      -before firstpass.table \
+ *      -after secondpass.table \
+ *      -csv BQSR.csv \ # optional
+ *      -plots BQSR.pdf
+ * 
+ * + * @author Valentin Ruano-Rubio <valentin@broadinstitute.org> + * @version 6/16/2013 + * @since 2.6 + */ +public final class AnalyzeCovariates extends RodWalker { + + + // Constants on option short names that are used in some error/warning messages: + + static final String CSV_ARG_SHORT_NAME = "csv"; + static final String PDF_ARG_SHORT_NAME = "plots"; + static final String BEFORE_ARG_SHORT_NAME = "before"; + static final String AFTER_ARG_SHORT_NAME = "after"; + + /** + * File containing the recalibration tables from the first pass. + */ + @Input(shortName=BEFORE_ARG_SHORT_NAME,fullName="beforeReportFile", doc = "file containing the BQSR first-pass report file",required = false) + protected File beforeFile = null; + + /** + * File containing the recalibration tables from the second pass. + */ + @Input(shortName=AFTER_ARG_SHORT_NAME, fullName="afterReportFile", doc = "file containing the BQSR second-pass report file",required = false) + protected File afterFile = null; + + /** + * If true, it won't show a warning if the last-modification time of the before and after input files suggest that they have been reversed. + */ + @Argument(shortName="ignoreLMT", fullName="ignoreLastModificationTimes", doc= "do not emit warning messages related to suspicious last modification time order of inputs", required = false) + protected boolean ignoreLastModificationTime = false; + + /** + * Output report file name. + */ + @Output(shortName=PDF_ARG_SHORT_NAME, fullName="plotsReportFile" ,doc = "location of the output report", required = false) + protected File pdfFile = null; + + /** + * Output csv file name. + */ + @Output(shortName=CSV_ARG_SHORT_NAME,fullName="intermediateCsvFile" ,doc = "location of the csv intermediate file", required = false) + protected File csvFile = null; + + /** + * Convenience reference to the RECAL_BQSR_FILE argument value. + *

+ * This field value is resolved by {@link #initialize()}. + */ + protected File bqsrFile = null; + + /** + * Checks inputs and argument values. + *

+ * Notice that this routine will not validate the content of files. It may have some minor side effects as + * the output of warning messages back to the user. + * + * @throw IllegalStateException there is some required argument value that has not been loaded yet. + * @throw UserException if there is some error caused by or under the end user's control. + */ + private void checkArgumentsValues() { + checkInputReportFile("BQSR",bqsrFile); + checkInputReportFile("before",beforeFile); + checkInputReportFile("after",afterFile); + if (bqsrFile == null && beforeFile == null && afterFile == null) { + throw new UserException("you must provide at least one recalibration report file " + + "(arguments -BQSR, -" + BEFORE_ARG_SHORT_NAME + " or -" + AFTER_ARG_SHORT_NAME); + } + + checkOutputFile(PDF_ARG_SHORT_NAME,pdfFile); + checkOutputFile(CSV_ARG_SHORT_NAME, csvFile); + checkInputReportFileLMT(beforeFile,afterFile); + checkOutputRequested(); + } + + /** + * Checks whether the last-modification-time of the inputs is consistent with their relative roles. + * + * This routine does not thrown an exception but may output a warning message if inconsistencies are spotted. + * + * @param beforeFile the before report file. + * @param afterFile the after report file. + */ + private void checkInputReportFileLMT(final File beforeFile, final File afterFile) { + + if (ignoreLastModificationTime || beforeFile == null || afterFile == null) { + return; // nothing to do here + } else if (beforeFile.lastModified() > afterFile.lastModified()) { + Utils.warnUser("Last modification timestamp for 'Before' and 'After'" + + "recalibration reports are in the wrong order. Perhaps, have they been swapped?"); + } + } + + /** + * Checks that at least one output was requested. + * + * @throw UserException if no output was requested. 
+ */ + private void checkOutputRequested() { + if (pdfFile == null && csvFile == null) { + throw new UserException("you need to request at least one output:" + + " the intermediate csv file (-" + CSV_ARG_SHORT_NAME + " FILE)" + + " or the final plot file (-" + PDF_ARG_SHORT_NAME + " FILE)."); + } + } + + /** + * Checks the value provided to input file arguments. + * + * @throw UserException if there is any problem cause by or under the end user's control + * + * @param name command line argument short name. + * @param value the argument value. + */ + private void checkInputReportFile(final String name,final File value) { + if (value == null) { + return; + } else if (!value.exists()) { + throw new UserException.BadArgumentValue(name, "input report '" + + value + "' does not exist or is unreachable"); + } else if (!value.isFile()) { + throw new UserException.BadArgumentValue(name, "input report '" + + value + "' is not a regular file"); + } else if (!value.canRead()) { + throw new UserException.BadArgumentValue(name, "input report '" + + value + "' cannot be read"); + } + } + + /** + * Checks the value provided for output arguments. + * + * @throw UserException if there is any problem cause by or under the end user's control + * + * @param name command line argument short name. + * @param value the argument value. 
+ */ + private void checkOutputFile(final String name, final File value) { + if (value == null) { + return; + } + if (value.exists() && !value.isFile()) { + throw new UserException.BadArgumentValue(name, "the output file location '" + + value + "' exists as not a file"); + } + final File parent = value.getParentFile(); + if (parent == null) { + return; + } + if (!parent.exists()) { + throw new UserException.BadArgumentValue(name, "the output file parent directory '" + + parent + "' does not exists or is unreachable"); + } else if (!parent.isDirectory()) { + throw new UserException.BadArgumentValue(name, "the output file parent directory '" + + parent + "' is not a directory"); + } else if (!parent.canWrite()) { + throw new UserException.BadArgumentValue(name, "the output file parent directory '" + + parent + "' cannot be written"); + } + + } + + /** + * Generates the plots using the external R script. + * + *

+ * If plotsFile is null, it does not perform any plotting. + * + * @param csvFile the intermediary csv file. + * @param plotsFile the output plot location. + */ + private void generatePlots(final File csvFile, final Map reportFiles, final File plotsFile) { + + if (plotsFile == null) { + return; + } + logger.info("Generating plots file '" + plotsFile + "'"); + final File exampleReportFile = reportFiles.values().iterator().next(); + RecalUtils.generatePlots(csvFile,exampleReportFile,plotsFile); + } + + @Override + public void initialize() { + super.initialize(); + bqsrFile = getToolkit().getArguments().BQSR_RECAL_FILE; + checkArgumentsValues(); + final Map reportFiles = buildReportFileMap(); + final Map reports = buildReportMap(reportFiles); + checkReportConsistency(reports); + final File csvFile = resolveCsvFile(); + generateCsvFile(csvFile,reports); + final File plotFile = resolvePlotFile(); + generatePlots(csvFile, reportFiles, plotFile); + } + + /** + * Returns the plot output file + * @return might be null if the user has not indicated and output file. + */ + private File resolvePlotFile() { + return pdfFile; + } + + /** + * Generates the intermediary Csv file. + * + * @param csvFile where to write the file. + * @param reports the reports to be included. + */ + private void generateCsvFile(final File csvFile, final Map reports) { + try { + logger.info("Generating csv file '" + csvFile + "'"); + RecalUtils.generateCsv(csvFile, reports); + } catch (FileNotFoundException e) { + throw new UserException( + String.format("There is a problem creating the intermediary Csv file '%s': %s", + csvFile,e.getMessage()),e); + } + } + + /** + * Checks whether multiple input recalibration report files argument values are consistent (equal). + * + * @param reports map with report to verify. + * + * @throw UserException if there is any inconsistency. 
+ */ + private void checkReportConsistency(final Map reports) { + final Map.Entry[] reportEntries = + reports.entrySet().toArray((Map.Entry[]) new Map.Entry[reports.size()]); + + final Map.Entry exampleEntry = reportEntries[0]; + + for (int i = 1; i < reportEntries.length; i++) { + final Map diffs = exampleEntry.getValue().getRAC().compareReportArguments( + reportEntries[i].getValue().getRAC(),exampleEntry.getKey(),reportEntries[i].getKey()); + if (diffs.size() != 0) { + throw new UserException.IncompatibleRecalibrationTableParameters("There are differences in relevant arguments of" + + " two or more input recalibration reports. Please make sure" + + " they have been created using the same recalibration parameters." + + " " + Utils.join("// ", reportDifferencesStringArray(diffs))); + } + } + } + + + /** + * Creates a map with all input recalibration files indexed by their "role". + *

+ * The key is the role and the value is the corresponding report file. + *

+ * Roles: "Before" (recalibration), "After" (recalibration), "BQSR" (the tool standard argument recalibration file) + * + * @return never null + */ + private Map buildReportFileMap() { + final Map reports = new LinkedHashMap<>(3); + if (bqsrFile != null) { + reports.put("BQSR",bqsrFile); + } + if (beforeFile != null) { + reports.put("Before",beforeFile); + } + if (afterFile != null) { + reports.put("After",afterFile); + } + return reports; + } + + /** + * Transforms a recalibration file map into a report object map. + * + * @param reportFileMap the file map to transforms. + * @return never null, a new map with the same size as + * reportFileMap and the same key set. + */ + @Requires("reportFileMap != null") + private Map buildReportMap(final Map reportFileMap) { + final Map reports = new LinkedHashMap<>(reportFileMap.size()); + for (final Map.Entry e : reportFileMap.entrySet()) { + reports.put(e.getKey(),new RecalibrationReport(e.getValue())); + } + return reports; + } + + /** + * Generates a flatter String array representation of recalibration argument differences. + * @param diffs the differences to represent. + * + * @return never null, an array of the same length as the size of the input diffs. + */ + @Requires("diffs != null") + private String[] reportDifferencesStringArray(final Map diffs) { + final String[] result = new String[diffs.size()]; + int i = 0; + for (final Map.Entry e : diffs.entrySet()) { + result[i++] = capitalize(e.getKey()) + ": " + e.getValue(); + } + return result; + } + + /** + * Returns the input string capitalizing the first letter. + * + * @param str the string to capitalize + * @return never null. + */ + @Requires("str != null") + private String capitalize(final String str) { + if (str.isEmpty()) { + return str; + } else { + return Character.toUpperCase(str.charAt(0)) + str.substring(1); + } + } + + /** + * Returns the csv file to use. + *

+ * This is the the one specified by the user if any or a temporary file + * that will be deleted as soon as the VM exists by default. + * + * @return never null. + */ + private File resolveCsvFile() { + if (csvFile != null) { + return csvFile; + } else { + try { + final File result = File.createTempFile("AnalyzeCovariates", ".csv"); + result.deleteOnExit(); + return result; + } catch (IOException e) { + throw new UserException("Could not create temporary Csv file",e); + } + } + } + + /** + * Always return true, forcing the immediate termination of the travesal. + * @return + */ + @Override + public boolean isDone() { + return true; + } + + /** + * {@inheritDoc} + */ + @Override + public None reduceInit() { + return new None(); + } + + /** + * Is not supposed to ever be called, thus it always results in an exception. + * + * @throws IllegalStateException always. + */ + @Override + public None reduce(None value, None sum) { + throw new IllegalStateException("AnalyzeCovariates reduce method is not supposed to be invoked ever"); + } + + + /** + * Is not supposed to ever be called, thus it always results in an exception. + * + * @throws IllegalStateException always. + */ + @Override + public None map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) { + throw new IllegalStateException("AnalyzeCovariates map method is not supposed to be invoked ever"); + } + + /** + * Dummy map and reduce types for the {@link AnalyzeCovariates} tool that in fact does not do any traversal. 
+ */ + protected static class None { + private None() { + } + } +} + + diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/BQSRGatherer.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/BQSRGatherer.java index ad97dc008..d6f0e16e8 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/BQSRGatherer.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/BQSRGatherer.java @@ -92,18 +92,6 @@ public class BQSRGatherer extends Gatherer { generalReport.calculateQuantizedQualities(); - RecalibrationArgumentCollection RAC = generalReport.getRAC(); - if ( RAC.RECAL_PDF_FILE != null ) { - RAC.RECAL_TABLE_FILE = output; - if ( RAC.existingRecalibrationReport != null ) { - final RecalibrationReport originalReport = new RecalibrationReport(RAC.existingRecalibrationReport); - RecalUtils.generateRecalibrationPlot(RAC, originalReport.getRecalibrationTables(), generalReport.getRecalibrationTables(), generalReport.getCovariates()); - } - else { - RecalUtils.generateRecalibrationPlot(RAC, generalReport.getRecalibrationTables(), generalReport.getCovariates()); - } - } - generalReport.output(outputFile); } } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/BaseRecalibrator.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/BaseRecalibrator.java index dde49b7db..3882b70fa 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/BaseRecalibrator.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/BaseRecalibrator.java @@ -61,6 +61,7 @@ import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; import org.broadinstitute.sting.gatk.walkers.*; import org.broadinstitute.sting.utils.MathUtils; import org.broadinstitute.sting.utils.BaseUtils; +import org.broadinstitute.sting.utils.Utils; import org.broadinstitute.sting.utils.baq.BAQ; import org.broadinstitute.sting.utils.clipping.ReadClipper; import 
org.broadinstitute.sting.utils.collections.Pair; @@ -124,7 +125,7 @@ import java.util.List; * -R resources/Homo_sapiens_assembly18.fasta \ * -knownSites bundle/hg18/dbsnp_132.hg18.vcf \ * -knownSites another/optional/setOfSitesToMask.vcf \ - * -o recal_data.grp + * -o recal_data.table * */ @@ -366,9 +367,7 @@ public class BaseRecalibrator extends ReadWalker implements NanoSche } protected static int[] calculateIsIndel( final GATKSAMRecord read, final EventType mode ) { - final byte[] readBases = read.getReadBases(); - final int[] indel = new int[readBases.length]; - Arrays.fill(indel, 0); + final int[] indel = new int[read.getReadBases().length]; int readPos = 0; for ( final CigarElement ce : read.getCigar().getCigarElements() ) { final int elementLength = ce.getLength(); @@ -383,21 +382,19 @@ public class BaseRecalibrator extends ReadWalker implements NanoSche } case D: { - final int index = ( read.getReadNegativeStrandFlag() ? readPos : ( readPos > 0 ? readPos - 1 : readPos ) ); - indel[index] = ( mode.equals(EventType.BASE_DELETION) ? 1 : 0 ); + final int index = ( read.getReadNegativeStrandFlag() ? readPos : readPos - 1 ); + updateIndel(indel, index, mode, EventType.BASE_DELETION); break; } case I: { final boolean forwardStrandRead = !read.getReadNegativeStrandFlag(); if( forwardStrandRead ) { - indel[(readPos > 0 ? readPos - 1 : readPos)] = ( mode.equals(EventType.BASE_INSERTION) ? 1 : 0 ); - } - for (int iii = 0; iii < elementLength; iii++) { - readPos++; + updateIndel(indel, readPos - 1, mode, EventType.BASE_INSERTION); } + readPos += elementLength; if( !forwardStrandRead ) { - indel[(readPos < indel.length ? readPos : readPos - 1)] = ( mode.equals(EventType.BASE_INSERTION) ? 
1 : 0 ); + updateIndel(indel, readPos, mode, EventType.BASE_INSERTION); } break; } @@ -412,6 +409,12 @@ public class BaseRecalibrator extends ReadWalker implements NanoSche return indel; } + private static void updateIndel(final int[] indel, final int index, final EventType mode, final EventType requiredMode) { + if ( mode == requiredMode && index >= 0 && index < indel.length ) + // protect ourselves from events at the start or end of the read (1D3M or 3M1D) + indel[index] = 1; + } + protected static double[] calculateFractionalErrorArray( final int[] errorArray, final byte[] baqArray ) { if(errorArray.length != baqArray.length ) { throw new ReviewedStingException("Array length mismatch detected. Malformed read?"); @@ -514,28 +517,13 @@ public class BaseRecalibrator extends ReadWalker implements NanoSche generateReport(); logger.info("...done!"); - if ( RAC.RECAL_PDF_FILE != null ) { - logger.info("Generating recalibration plots..."); - generatePlots(); - } - - logger.info("Processed: " + result + " reads"); + logger.info("BaseRecalibrator was able to recalibrate " + result + " reads"); } private RecalibrationTables getRecalibrationTable() { return recalibrationEngine.getFinalRecalibrationTables(); } - private void generatePlots() { - File recalFile = getToolkit().getArguments().BQSR_RECAL_FILE; - if (recalFile != null) { - RecalibrationReport report = new RecalibrationReport(recalFile); - RecalUtils.generateRecalibrationPlot(RAC, report.getRecalibrationTables(), getRecalibrationTable(), requestedCovariates); - } - else - RecalUtils.generateRecalibrationPlot(RAC, getRecalibrationTable(), requestedCovariates); - } - /** * go through the quality score table and use the # observations and the empirical quality score * to build a quality score histogram for quantization. 
Then use the QuantizeQual algorithm to diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/RecalibrationArgumentCollection.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/RecalibrationArgumentCollection.java index 0a4899f1c..b9f16132c 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/RecalibrationArgumentCollection.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/RecalibrationArgumentCollection.java @@ -46,15 +46,17 @@ package org.broadinstitute.sting.gatk.walkers.bqsr; +import com.google.java.contract.Requires; import org.broad.tribble.Feature; import org.broadinstitute.sting.commandline.*; import org.broadinstitute.sting.gatk.report.GATKReportTable; +import org.broadinstitute.sting.utils.Utils; +import org.broadinstitute.sting.utils.exceptions.StingException; import org.broadinstitute.sting.utils.recalibration.RecalUtils; import java.io.File; import java.io.PrintStream; -import java.util.Collections; -import java.util.List; +import java.util.*; /** * Created by IntelliJ IDEA. @@ -65,7 +67,7 @@ import java.util.List; * This set of arguments will also be passed to the constructor of every Covariate when it is instantiated. */ -public class RecalibrationArgumentCollection { +public class RecalibrationArgumentCollection implements Cloneable { /** * This algorithm treats every reference mismatch as an indication of error. However, real genetic variation is expected to mismatch the reference, @@ -87,21 +89,6 @@ public class RecalibrationArgumentCollection { public File RECAL_TABLE_FILE = null; public PrintStream RECAL_TABLE; - /** - * If not provided, then no plots will be generated (useful for queue scatter/gathering). - * However, we *highly* recommend that users generate these plots whenever possible for QC checking. 
- */ - @Output(fullName = "plot_pdf_file", shortName = "plots", doc = "The output recalibration pdf file to create", required = false, defaultToStdout = false) - public File RECAL_PDF_FILE = null; - - /** - * If not provided, then a temporary file is created and then deleted upon completion. - * For advanced users only. - */ - @Advanced - @Argument(fullName = "intermediate_csv_file", shortName = "intermediate", doc = "The intermediate csv file to create", required = false) - public File RECAL_CSV_FILE = null; - /** * Note that the --list argument requires a fully resolved and correct command-line to work. */ @@ -219,6 +206,10 @@ public class RecalibrationArgumentCollection { @Argument(fullName = "force_platform", shortName = "fP", required = false, doc = "If provided, the platform of EVERY read will be forced to be the provided String. Valid options are illumina, 454, and solid.") public String FORCE_PLATFORM = null; + @Hidden + @Argument(fullName = "force_readgroup", shortName = "fRG", required = false, doc = "If provided, the read group of EVERY read will be forced to be the provided String.") + public String FORCE_READGROUP = null; + @Hidden @Output(fullName = "recal_table_update_log", shortName = "recal_table_update_log", required = false, doc = "If provided, log all updates to the recalibration tables to the given file. For debugging/testing purposes only", defaultToStdout = false) public PrintStream RECAL_TABLE_UPDATE_LOG = null; @@ -278,11 +269,147 @@ public class RecalibrationArgumentCollection { argumentsTable.set("quantizing_levels", RecalUtils.ARGUMENT_VALUE_COLUMN_NAME, QUANTIZING_LEVELS); argumentsTable.addRowID("recalibration_report", true); argumentsTable.set("recalibration_report", RecalUtils.ARGUMENT_VALUE_COLUMN_NAME, existingRecalibrationReport == null ? 
"null" : existingRecalibrationReport.getAbsolutePath()); - argumentsTable.addRowID("plot_pdf_file", true); - argumentsTable.set("plot_pdf_file", RecalUtils.ARGUMENT_VALUE_COLUMN_NAME, RECAL_PDF_FILE == null ? "null" : RECAL_PDF_FILE.getAbsolutePath()); argumentsTable.addRowID("binary_tag_name", true); argumentsTable.set("binary_tag_name", RecalUtils.ARGUMENT_VALUE_COLUMN_NAME, BINARY_TAG_NAME == null ? "null" : BINARY_TAG_NAME); return argumentsTable; } + /** + * Returns a map with the arguments that differ between this an + * another {@link RecalibrationArgumentCollection} instance. + *

+ * The key is the name of that argument in the report file. The value is a message + * that explains the difference to the end user. + *

+ * Thus, an empty map indicates that there are no differences between the two argument collections that + are relevant to report comparison. + *

+ * This method should not throw any exception. + * + * @param other the argument-collection to compare against. + * @param thisRole the name used to refer to this RAC report that makes sense to the end user. + * @param otherRole the name used to refer to the other RAC report that makes sense to the end user. + * + * @return never null, but a zero-size collection if there are no differences. + */ + @Requires("other != null && thisRole != null && otherRole != null && !thisRole.equalsIgnoreCase(otherRole)") + Map compareReportArguments(final RecalibrationArgumentCollection other,final String thisRole, final String otherRole) { + final Map result = new LinkedHashMap<>(15); + compareRequestedCovariates(result, other, thisRole, otherRole); + compareSimpleReportArgument(result,"no_standard_covs", DO_NOT_USE_STANDARD_COVARIATES, other.DO_NOT_USE_STANDARD_COVARIATES, thisRole, otherRole); + compareSimpleReportArgument(result,"run_without_dbsnp",RUN_WITHOUT_DBSNP,other.RUN_WITHOUT_DBSNP,thisRole,otherRole); + compareSimpleReportArgument(result,"solid_recal_mode", SOLID_RECAL_MODE, other.SOLID_RECAL_MODE,thisRole,otherRole); + compareSimpleReportArgument(result,"solid_nocall_strategy", SOLID_NOCALL_STRATEGY, other.SOLID_NOCALL_STRATEGY,thisRole,otherRole); + compareSimpleReportArgument(result,"mismatches_context_size", MISMATCHES_CONTEXT_SIZE,other.MISMATCHES_CONTEXT_SIZE,thisRole,otherRole); + compareSimpleReportArgument(result,"mismatches_default_quality", MISMATCHES_DEFAULT_QUALITY, other.MISMATCHES_DEFAULT_QUALITY,thisRole,otherRole); + compareSimpleReportArgument(result,"deletions_default_quality", DELETIONS_DEFAULT_QUALITY, other.DELETIONS_DEFAULT_QUALITY,thisRole,otherRole); + compareSimpleReportArgument(result,"insertions_default_quality", INSERTIONS_DEFAULT_QUALITY, other.INSERTIONS_DEFAULT_QUALITY,thisRole,otherRole); + compareSimpleReportArgument(result,"maximum_cycle_value", MAXIMUM_CYCLE_VALUE, other.MAXIMUM_CYCLE_VALUE,thisRole,otherRole); + 
compareSimpleReportArgument(result,"low_quality_tail", LOW_QUAL_TAIL, other.LOW_QUAL_TAIL,thisRole,otherRole); + compareSimpleReportArgument(result,"default_platform", DEFAULT_PLATFORM, other.DEFAULT_PLATFORM,thisRole,otherRole); + compareSimpleReportArgument(result,"force_platform", FORCE_PLATFORM, other.FORCE_PLATFORM,thisRole,otherRole); + compareSimpleReportArgument(result,"quantizing_levels", QUANTIZING_LEVELS, other.QUANTIZING_LEVELS,thisRole,otherRole); + compareSimpleReportArgument(result,"binary_tag_name", BINARY_TAG_NAME, other.BINARY_TAG_NAME,thisRole,otherRole); + return result; + } + + + /** + * Compares the covariate report lists. + * + * @param diffs map where to annotate the difference. + * @param other the argument collection to compare against. + * @param thisRole the name for this argument collection that makes sense to the user. + * @param otherRole the name for the other argument collection that makes sense to the end user. + * + * @return true if a difference was found. + */ + @Requires("diffs != null && other != null && thisRole != null && otherRole != null") + private boolean compareRequestedCovariates(final Map diffs, + final RecalibrationArgumentCollection other, final String thisRole, final String otherRole) { + + final Set beforeNames = new HashSet<>(this.COVARIATES.length); + final Set afterNames = new HashSet<>(other.COVARIATES.length); + Utils.addAll(beforeNames, this.COVARIATES); + Utils.addAll(afterNames,other.COVARIATES); + final Set intersect = new HashSet<>(Math.min(beforeNames.size(),afterNames.size())); + intersect.addAll(beforeNames); + intersect.retainAll(afterNames); + + String diffMessage = null; + if (intersect.size() == 0) { // In practice this is not possible due to required covariates but... + diffMessage = String.format("There are no common covariates between '%s' and '%s'" + + " recalibrator reports. Covariates in '%s': {%s}. 
Covariates in '%s': {%s}.",thisRole,otherRole, + thisRole,Utils.join(", ",this.COVARIATES), + otherRole,Utils.join(",",other.COVARIATES)); + } else if (intersect.size() != beforeNames.size() || intersect.size() != afterNames.size()) { + beforeNames.removeAll(intersect); + afterNames.removeAll(intersect); + diffMessage = String.format("There are differences in the set of covariates requested in the" + + " '%s' and '%s' recalibrator reports. " + + " Exclusive to '%s': {%s}. Exclusive to '%s': {%s}.",thisRole,otherRole, + thisRole,Utils.join(", ",beforeNames), + otherRole,Utils.join(", ",afterNames)); + } + if (diffMessage != null) { + diffs.put("covariate",diffMessage); + return true; + } else { + return false; + } + } + + /** + * Annotates a map with any difference encountered in a simple value report argument that differs between this an + * another {@link RecalibrationArgumentCollection} instance. + *

+ * The key of the new entry is the name of that argument in the report file. The value is a message + that explains the difference to the end user. + *

+ * + *

+ * This method should not return any exception. + * + * @param diffs where to annotate the differences. + * @param name the name of the report argument to compare. + * @param thisValue this argument collection value for that argument. + * @param otherValue the other collection value for that argument. + * @param thisRole the name used to refer to this RAC report that makes sense to the end user. + * @param otherRole the name used to refer to the other RAC report that makes sense to the end user. + * + * @type T the argument Object value type. + * + * @return true if a difference has been spotted, thus diff has been modified. + */ + private boolean compareSimpleReportArgument(final Map diffs, + final String name, final T thisValue, final T otherValue, final String thisRole, final String otherRole) { + if (thisValue == null && otherValue == null) { + return false; + } else if (thisValue != null && thisValue.equals(otherValue)) { + return false; + } else { + diffs.put(name, + String.format("differences between '%s' {%s} and '%s' {%s}.", + thisRole,thisValue == null ? "" : thisValue, + otherRole,otherValue == null ? "" : otherValue)); + return true; + } + + } + + /** + * Create a shallow copy of this argument collection. + * + * @return never null. 
+ */ + @Override + public RecalibrationArgumentCollection clone() { + try { + return (RecalibrationArgumentCollection) super.clone(); + } catch (CloneNotSupportedException e) { + throw new StingException("Unreachable code clone not supported thrown when the class " + + this.getClass().getName() + " is cloneable ",e); + } + } + } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/compression/reducereads/HeaderElement.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/compression/reducereads/HeaderElement.java index 38b9e957b..ba2c2ae56 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/compression/reducereads/HeaderElement.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/compression/reducereads/HeaderElement.java @@ -207,7 +207,7 @@ public class HeaderElement { public void removeInsertionToTheRight() { this.insertionsToTheRight--; if (insertionsToTheRight < 0) - throw new ReviewedStingException("Removed too many insertions, header is now negative!"); + throw new ReviewedStingException("Removed too many insertions, header is now negative at position " + location); } public boolean hasInsertionToTheRight() { diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/compression/reducereads/ReduceReads.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/compression/reducereads/ReduceReads.java index 71910e566..e636f8f17 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/compression/reducereads/ReduceReads.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/compression/reducereads/ReduceReads.java @@ -64,6 +64,7 @@ import org.broadinstitute.sting.gatk.io.StingSAMFileWriter; import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; import org.broadinstitute.sting.gatk.walkers.*; import org.broadinstitute.sting.utils.GenomeLoc; +import org.broadinstitute.sting.utils.SampleUtils; import org.broadinstitute.sting.utils.Utils; import 
org.broadinstitute.sting.utils.clipping.ReadClipper; import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; @@ -236,6 +237,15 @@ public class ReduceReads extends ReadWalker, Redu @Argument(fullName = "downsample_coverage", shortName = "ds", doc = "", required = false) public int downsampleCoverage = 250; + /** + * Generally, this tool is not meant to be run for more than 1 sample at a time. The one valid exception + * brought to our attention by colleagues is the specific case of tumor/normal pairs in cancer analysis. + * To prevent users from unintentionally running the tool in a less than ideal manner, we require them + * to explicitly enable multi-sample analysis with this argument. + */ + @Argument(fullName = "cancer_mode", shortName = "cancer_mode", doc = "enable multi-samples reduction for cancer analysis", required = false) + public boolean ALLOW_MULTIPLE_SAMPLES = false; + @Hidden @Argument(fullName = "nwayout", shortName = "nw", doc = "", required = false) public boolean nwayout = false; @@ -263,8 +273,9 @@ public class ReduceReads extends ReadWalker, Redu int nCompressedReads = 0; - Object2LongOpenHashMap readNameHash; // This hash will keep the name of the original read the new compressed name (a number). + private static int READ_NAME_HASH_DEFAULT_SIZE = 1000; Long nextReadNumber = 1L; // The next number to use for the compressed read name. + Object2LongOpenHashMap readNameHash; // This hash will keep the name of the original read the new compressed name (a number). 
ObjectSortedSet intervalList; @@ -294,13 +305,16 @@ public class ReduceReads extends ReadWalker, Redu if ( minAltProportionToTriggerVariant < 0.0 || minAltProportionToTriggerVariant > 1.0 ) throw new UserException.BadArgumentValue("--minimum_alt_proportion_to_trigger_variant", "must be a value between 0 and 1 (inclusive)"); + if ( SampleUtils.getSAMFileSamples(getToolkit().getSAMFileHeader()).size() > 1 && !ALLOW_MULTIPLE_SAMPLES ) + throw new UserException.BadInput("Reduce Reads is not meant to be run for more than 1 sample at a time except for the specific case of tumor/normal pairs in cancer analysis"); + if ( known.isEmpty() ) knownSnpPositions = null; else knownSnpPositions = new ObjectAVLTreeSet(); GenomeAnalysisEngine toolkit = getToolkit(); - readNameHash = new Object2LongOpenHashMap(100000); // prepare the read name hash to keep track of what reads have had their read names compressed + this.resetReadNameHash(); // prepare the read name hash to keep track of what reads have had their read names compressed intervalList = new ObjectAVLTreeSet(); // get the interval list from the engine. If no interval list was provided, the walker will work in WGS mode if (toolkit.getIntervals() != null) @@ -322,6 +336,16 @@ public class ReduceReads extends ReadWalker, Redu } } + /** Initializer for {@link #readNameHash}. */ + private void resetReadNameHash() { + // If the hash grows large, subsequent clear operations can be very expensive, so trim the hash down if it grows beyond its default. + if (readNameHash == null || readNameHash.size() > READ_NAME_HASH_DEFAULT_SIZE) { + readNameHash = new Object2LongOpenHashMap(READ_NAME_HASH_DEFAULT_SIZE); + } else { + readNameHash.clear(); + } + } + /** * Takes in a read and prepares it for the SlidingWindow machinery by performing the * following optional clipping operations: @@ -458,7 +482,7 @@ public class ReduceReads extends ReadWalker, Redu // stash.compress(), the readNameHash can be cleared after the for() loop above. 
// The advantage of clearing the hash is that otherwise it holds all reads that have been encountered, // which can use a lot of memory and cause RR to slow to a crawl and/or run out of memory. - readNameHash.clear(); + this.resetReadNameHash(); } } else diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/compression/reducereads/SlidingWindow.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/compression/reducereads/SlidingWindow.java index d3ca037be..0425af3df 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/compression/reducereads/SlidingWindow.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/compression/reducereads/SlidingWindow.java @@ -877,6 +877,10 @@ public class SlidingWindow { final int start = region.getStart() - windowHeaderStart; int stop = region.getStop() - windowHeaderStart; + // make sure the bitset is complete given the region (it might not be in multi-sample mode) + if ( region.getStop() > markedSites.getStartLocation() + markedSites.getVariantSiteBitSet().length ) + markSites(region.getStop()); + CloseVariantRegionResult closeVariantRegionResult = closeVariantRegion(start, stop, knownSnpPositions); allReads.addAll(closeVariantRegionResult.reads); @@ -1195,7 +1199,7 @@ public class SlidingWindow { } // Special case for leading insertions before the beginning of the sliding read - if ( ReadUtils.readStartsWithInsertion(read).getFirst() && (readStart == headerStart || headerStart < 0) ) { + if ( (readStart == headerStart || headerStart < 0) && ReadUtils.readStartsWithInsertion(read.getCigar(), false) != null ) { // create a new first element to the window header with no bases added header.addFirst(new HeaderElement(readStart - 1)); // this allows the first element (I) to look at locationIndex - 1 when we update the header and do the right thing diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/AbstractStratification.java 
b/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/AbstractStratification.java index dca83af44..ceccdcb2e 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/AbstractStratification.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/AbstractStratification.java @@ -63,6 +63,10 @@ abstract class AbstractStratification { private Map statusTally = null; protected ThresHolder thresholds; + public AbstractStratification(ThresHolder thresholds) { + this.thresholds = thresholds; + } + /** * Calculates the average "good" coverage of this sample. Good means "passes the base and * mapping quality requirements. @@ -116,11 +120,11 @@ abstract class AbstractStratification { * * @return the callable status(es) for the whole object */ - public abstract Iterable callableStatuses(); + public abstract List callableStatuses(); /** - * Tally up all the callable status of all the loci in this sample. + * Tally up all the callable status of all elements of the stratification. 
* * @return a map of callable status and counts */ @@ -136,10 +140,10 @@ abstract class AbstractStratification { return statusTally; } - public static List queryStatus(List statList, AbstractStratification stratification) { + public List queryStatus(List statList) { List output = new LinkedList(); for (Metric stat : statList) { - final CallableStatus status = stat.status(stratification); + final CallableStatus status = stat.status(this); if (status != null) { output.add(status); } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/DiagnoseTargets.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/DiagnoseTargets.java index 32f87b973..bde324e3c 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/DiagnoseTargets.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/DiagnoseTargets.java @@ -52,6 +52,7 @@ import org.broadinstitute.sting.commandline.Output; import org.broadinstitute.sting.gatk.CommandLineGATK; import org.broadinstitute.sting.gatk.contexts.AlignmentContext; import org.broadinstitute.sting.gatk.contexts.ReferenceContext; +import org.broadinstitute.sting.gatk.downsampling.DownsampleType; import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; import org.broadinstitute.sting.gatk.walkers.*; import org.broadinstitute.sting.utils.GenomeLoc; @@ -65,6 +66,7 @@ import org.broadinstitute.variant.variantcontext.*; import org.broadinstitute.variant.variantcontext.writer.VariantContextWriter; import org.broadinstitute.variant.vcf.*; +import java.io.PrintStream; import java.util.*; /** @@ -109,9 +111,13 @@ import java.util.*; @DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_QC, extraDocs = {CommandLineGATK.class} ) @By(value = DataSource.READS) @PartitionBy(PartitionType.INTERVAL) +@Downsample(by = DownsampleType.NONE) public class DiagnoseTargets extends LocusWalker { 
private static final String AVG_INTERVAL_DP_KEY = "IDP"; + private static final String LOW_COVERAGE_LOCI = "LL"; + private static final String ZERO_COVERAGE_LOCI = "ZL"; + @Output(doc = "File to which interval statistics should be written") private VariantContextWriter vcfWriter = null; @@ -119,13 +125,12 @@ public class DiagnoseTargets extends LocusWalker { @ArgumentCollection private ThresHolder thresholds = new ThresHolder(); - private Map intervalMap = null; // maps each interval => statistics + private Map intervalMap = null; // maps each interval => statistics private PeekableIterator intervalListIterator; // an iterator to go over all the intervals provided as we traverse the genome private Set samples = null; // all the samples being processed private static final Allele SYMBOLIC_ALLELE = Allele.create("

", false); // avoid creating the symbolic allele multiple times private static final Allele UNCOVERED_ALLELE = Allele.create("A", true); // avoid creating the 'fake' ref allele for uncovered intervals multiple times - - private static final int INITIAL_HASH_SIZE = 500000; + private static final int INITIAL_HASH_SIZE = 50; // enough room for potential overlapping intervals plus recently finished intervals @Override public void initialize() { @@ -134,7 +139,7 @@ public class DiagnoseTargets extends LocusWalker { if (getToolkit().getIntervals() == null || getToolkit().getIntervals().isEmpty()) throw new UserException("This tool only works if you provide one or more intervals (use the -L argument). If you want to run whole genome, use -T DepthOfCoverage instead."); - intervalMap = new HashMap(INITIAL_HASH_SIZE); + intervalMap = new LinkedHashMap(INITIAL_HASH_SIZE); intervalListIterator = new PeekableIterator(getToolkit().getIntervals().iterator()); // get all of the unique sample names for the VCF Header @@ -146,13 +151,13 @@ public class DiagnoseTargets extends LocusWalker { } @Override - public Long map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) { + public Long map(final RefMetaDataTracker tracker, final ReferenceContext ref, final AlignmentContext context) { GenomeLoc refLocus = ref.getLocus(); // process and remove any intervals in the map that are don't overlap the current locus anymore // and add all new intervals that may overlap this reference locus - outputFinishedIntervals(refLocus, ref.getBase()); addNewOverlappingIntervals(refLocus); + outputFinishedIntervals(refLocus, ref.getBase()); // at this point, all intervals in intervalMap overlap with this locus, so update all of them for (IntervalStratification intervalStratification : intervalMap.values()) @@ -184,7 +189,7 @@ public class DiagnoseTargets extends LocusWalker { * @param result number of loci processed by the walker */ @Override - public void onTraversalDone(Long 
result) { + public void onTraversalDone(final Long result) { for (GenomeLoc interval : intervalMap.keySet()) outputStatsToVCF(intervalMap.get(interval), UNCOVERED_ALLELE); @@ -194,6 +199,10 @@ public class DiagnoseTargets extends LocusWalker { intervalListIterator.next(); interval = intervalListIterator.peek(); } + + if (thresholds.missingTargets != null) { + thresholds.missingTargets.close(); + } } /** @@ -203,24 +212,21 @@ public class DiagnoseTargets extends LocusWalker { * @param refBase the reference allele */ private void outputFinishedIntervals(final GenomeLoc refLocus, final byte refBase) { - GenomeLoc interval = intervalListIterator.peek(); - - // output empty statistics for uncovered intervals - while (interval != null && interval.isBefore(refLocus)) { - final IntervalStratification stats = intervalMap.get(interval); - outputStatsToVCF(stats != null ? stats : createIntervalStatistic(interval), UNCOVERED_ALLELE); - if (stats != null) intervalMap.remove(interval); - intervalListIterator.next(); - interval = intervalListIterator.peek(); - } - - // remove any potential leftover interval in intervalMap (this will only happen when we have overlapping intervals) + // output any intervals that were finished + final List toRemove = new LinkedList(); for (GenomeLoc key : intervalMap.keySet()) { if (key.isBefore(refLocus)) { - outputStatsToVCF(intervalMap.get(key), Allele.create(refBase, true)); - intervalMap.remove(key); + final IntervalStratification intervalStats = intervalMap.get(key); + outputStatsToVCF(intervalStats, Allele.create(refBase, true)); + if (hasMissingLoci(intervalStats)) { + outputMissingInterval(intervalStats); + } + toRemove.add(key); } } + for (GenomeLoc key : toRemove) { + intervalMap.remove(key); + } } /** @@ -228,7 +234,7 @@ public class DiagnoseTargets extends LocusWalker { * * @param refLocus the current reference locus */ - private void addNewOverlappingIntervals(GenomeLoc refLocus) { + private void addNewOverlappingIntervals(final 
GenomeLoc refLocus) { GenomeLoc interval = intervalListIterator.peek(); while (interval != null && !interval.isPast(refLocus)) { intervalMap.put(interval, createIntervalStatistic(interval)); @@ -243,14 +249,24 @@ public class DiagnoseTargets extends LocusWalker { * @param stats The statistics of the interval * @param refAllele the reference allele */ - private void outputStatsToVCF(IntervalStratification stats, Allele refAllele) { + private void outputStatsToVCF(final IntervalStratification stats, final Allele refAllele) { GenomeLoc interval = stats.getInterval(); + final List alleles = new ArrayList(); + final Map attributes = new HashMap(); + final ArrayList genotypes = new ArrayList(); - List alleles = new ArrayList(); - Map attributes = new HashMap(); - ArrayList genotypes = new ArrayList(); + for (String sample : samples) { + final GenotypeBuilder gb = new GenotypeBuilder(sample); + SampleStratification sampleStat = stats.getSampleStatistics(sample); + gb.attribute(AVG_INTERVAL_DP_KEY, sampleStat.averageCoverage(interval.size())); + gb.attribute(LOW_COVERAGE_LOCI, sampleStat.getNLowCoveredLoci()); + gb.attribute(ZERO_COVERAGE_LOCI, sampleStat.getNUncoveredLoci()); + gb.filters(statusToStrings(stats.getSampleStatistics(sample).callableStatuses(), false)); + + genotypes.add(gb.make()); + } alleles.add(refAllele); alleles.add(SYMBOLIC_ALLELE); VariantContextBuilder vcb = new VariantContextBuilder("DiagnoseTargets", interval.getContig(), interval.getStart(), interval.getStop(), alleles); @@ -262,21 +278,56 @@ public class DiagnoseTargets extends LocusWalker { attributes.put(AVG_INTERVAL_DP_KEY, stats.averageCoverage(interval.size())); vcb = vcb.attributes(attributes); - for (String sample : samples) { - final GenotypeBuilder gb = new GenotypeBuilder(sample); - - SampleStratification sampleStat = stats.getSampleStatistics(sample); - gb.attribute(AVG_INTERVAL_DP_KEY, sampleStat.averageCoverage(interval.size())); - - 
gb.filters(statusToStrings(stats.getSampleStatistics(sample).callableStatuses(), false)); - - genotypes.add(gb.make()); - } vcb = vcb.genotypes(genotypes); vcfWriter.add(vcb.make()); } + private boolean hasMissingStatuses(AbstractStratification stats) { + return !stats.callableStatuses().isEmpty(); + } + + private boolean hasMissingLoci(final IntervalStratification stats) { + return thresholds.missingTargets != null && hasMissingStatuses(stats); + } + + private void outputMissingInterval(final IntervalStratification stats) { + final GenomeLoc interval = stats.getInterval(); + final boolean missing[] = new boolean[interval.size()]; + Arrays.fill(missing, true); + for (AbstractStratification sample : stats.getElements()) { + if (hasMissingStatuses(sample)) { + int pos = 0; + for (AbstractStratification locus : sample.getElements()) { + if (locus.callableStatuses().isEmpty()) { + missing[pos] = false; + } + pos++; + } + } + } + int start = -1; + boolean insideMissing = false; + for (int i = 0; i < missing.length; i++) { + if (missing[i] && !insideMissing) { + start = interval.getStart() + i; + insideMissing = true; + } else if (!missing[i] && insideMissing) { + final int stop = interval.getStart() + i - 1; + outputMissingInterval(interval.getContig(), start, stop); + insideMissing = false; + } + } + if (insideMissing) { + outputMissingInterval(interval.getContig(), start, interval.getStop()); + } + } + + private void outputMissingInterval(final String contig, final int start, final int stop) { + final PrintStream out = thresholds.missingTargets; + out.println(String.format("%s:%d-%d", contig, start, stop)); + } + /** * Function that process a set of statuses into strings * @@ -345,6 +396,8 @@ public class DiagnoseTargets extends LocusWalker { // FORMAT fields for each genotype headerLines.add(VCFStandardHeaderLines.getFormatLine(VCFConstants.GENOTYPE_FILTER_KEY)); headerLines.add(new VCFFormatHeaderLine(AVG_INTERVAL_DP_KEY, 1, VCFHeaderLineType.Float, "Average sample 
depth across the interval. Sum of the sample specific depth in all loci divided by interval size.")); + headerLines.add(new VCFFormatHeaderLine(LOW_COVERAGE_LOCI, 1, VCFHeaderLineType.Integer, "Number of loci for this sample, in this interval with low coverage (below the minimum coverage) but not zero.")); + headerLines.add(new VCFFormatHeaderLine(ZERO_COVERAGE_LOCI, 1, VCFHeaderLineType.Integer, "Number of loci for this sample, in this interval with zero coverage.")); // FILTER fields for (CallableStatus stat : CallableStatus.values()) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/IntervalStratification.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/IntervalStratification.java index 6c20403d1..3b5a23d51 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/IntervalStratification.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/IntervalStratification.java @@ -56,11 +56,11 @@ import java.util.*; final class IntervalStratification extends AbstractStratification { private final Map samples; private final GenomeLoc interval; - private final ThresHolder thresholds; + private List callableStatuses; public IntervalStratification(Set samples, GenomeLoc interval, ThresHolder thresholds) { + super(thresholds); this.interval = interval; - this.thresholds = thresholds; this.samples = new HashMap(samples.size()); for (String sample : samples) this.samples.put(sample, new SampleStratification(interval, thresholds)); @@ -114,7 +114,13 @@ final class IntervalStratification extends AbstractStratification { * {@inheritDoc} */ @Override - public Iterable callableStatuses() { + public List callableStatuses() { + if (callableStatuses == null) + callableStatuses = calculateStatus(); + return callableStatuses; + } + + private List calculateStatus() { final List output = new LinkedList(); // check if any 
of the votes pass the threshold @@ -125,7 +131,7 @@ final class IntervalStratification extends AbstractStratification { } } - output.addAll(queryStatus(thresholds.intervalMetricList, this)); + output.addAll(queryStatus(thresholds.intervalMetricList)); return output; } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/LocusStratification.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/LocusStratification.java index d6acaf850..5902fce31 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/LocusStratification.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/LocusStratification.java @@ -46,22 +46,20 @@ package org.broadinstitute.sting.gatk.walkers.diagnostics.diagnosetargets; -import java.util.LinkedList; import java.util.List; final class LocusStratification extends AbstractStratification { private long coverage; private long rawCoverage; - private final List locusStatisticsList; public LocusStratification(ThresHolder thresholds) { this(0,0,thresholds); } protected LocusStratification(int coverage, int rawCoverage, ThresHolder thresholds) { + super(thresholds); this.coverage = coverage; this.rawCoverage = rawCoverage; - this.locusStatisticsList = thresholds.locusMetricList; } @Override @@ -79,14 +77,7 @@ final class LocusStratification extends AbstractStratification { * @return a set of all statuses that apply */ public List callableStatuses() { - List output = new LinkedList(); - for (Metric stats : locusStatisticsList) { - CallableStatus status = stats.status(this); - if (status != null) { - output.add(status); - } - } - return output; + return queryStatus(thresholds.locusMetricList); } @Override diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/PluginUtils.java 
b/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/PluginUtils.java index 1085e8cac..7984ba7e7 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/PluginUtils.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/PluginUtils.java @@ -58,6 +58,6 @@ final class PluginUtils { final Map totals = sampleStratification.getStatusTally(); final int size = sampleStratification.getIntervalSize(); final int statusCount = totals.containsKey(CALL) ? totals.get(CALL) : 0; - return ( (double) statusCount / size) >= threshold ? CALL: null; + return ( (double) statusCount / size) > threshold ? CALL: null; } } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/SampleStratification.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/SampleStratification.java index b9ae1f3cf..0f84c7d22 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/SampleStratification.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/SampleStratification.java @@ -61,15 +61,14 @@ import java.util.List; final class SampleStratification extends AbstractStratification { private final GenomeLoc interval; private final ArrayList loci; - private final ThresHolder thresholds; private int nReads = -1; private int nBadMates = -1; public SampleStratification(final GenomeLoc interval, final ThresHolder thresholds) { + super(thresholds); this.interval = interval; this.loci = new ArrayList(interval.size()); - this.thresholds = thresholds; nReads = 0; nBadMates = 0; @@ -118,10 +117,10 @@ final class SampleStratification extends AbstractStratification { * {@inheritDoc} */ @Override - public Iterable callableStatuses() { + public List callableStatuses() { final List output = new LinkedList(); - // get the tally of all the locus 
callable statuses + // get the sample statuses of all the Loci Metrics for (Metric locusStat : thresholds.locusMetricList) { final CallableStatus status = ((LocusMetric) locusStat).sampleStatus(this); if (status != null) { @@ -130,12 +129,7 @@ final class SampleStratification extends AbstractStratification { } // get the sample specific statitics statuses - for (Metric sampleStat : thresholds.sampleMetricList) { - final CallableStatus status = sampleStat.status(this); - if (status != null) { - output.add(status); - } - } + output.addAll(queryStatus(thresholds.sampleMetricList)); // special case, if there are no reads, then there is no sense reporting coverage gaps. if (output.contains(CallableStatus.NO_READS) && output.contains(CallableStatus.COVERAGE_GAPS)) @@ -159,4 +153,17 @@ final class SampleStratification extends AbstractStratification { read.setTemporaryAttribute("seen", true); } } + + public int getNLowCoveredLoci() { + return getCallableStatusCount(CallableStatus.LOW_COVERAGE); + } + + public int getNUncoveredLoci() { + return getCallableStatusCount(CallableStatus.COVERAGE_GAPS); + } + + private int getCallableStatusCount(CallableStatus status) { + final Integer x = getStatusTally().get(status); + return x == null ? 
0 : x; + } } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/ThresHolder.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/ThresHolder.java index b0c999460..a6cbc1da3 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/ThresHolder.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/ThresHolder.java @@ -47,7 +47,9 @@ package org.broadinstitute.sting.gatk.walkers.diagnostics.diagnosetargets; import org.broadinstitute.sting.commandline.Argument; +import org.broadinstitute.sting.commandline.Output; +import java.io.PrintStream; import java.util.LinkedList; import java.util.List; @@ -114,6 +116,9 @@ final class ThresHolder { @Argument(fullName = "quality_status_threshold", shortName = "stQ", doc = "The proportion of the loci needed for calling POOR_QUALITY", required = false) public double qualityStatusThreshold = 0.50; + @Output(fullName = "missing_intervals", shortName = "missing", defaultToStdout = false, doc ="Produces a file with the intervals that don't pass filters", required = false) + public PrintStream missingTargets = null; + public final List locusMetricList = new LinkedList(); public final List sampleMetricList = new LinkedList(); public final List intervalMetricList = new LinkedList(); diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/missing/Metrics.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/missing/Metrics.java new file mode 100644 index 000000000..5e3da5f4f --- /dev/null +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/missing/Metrics.java @@ -0,0 +1,110 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad 
Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). +* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. 
LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. 
INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. +* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. 
LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. 
This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. +*/ + +package org.broadinstitute.sting.gatk.walkers.diagnostics.missing; + +/** + * Short one line description of the walker. + *

+ *

+ * [Long description of the walker] + *

+ *

+ *

+ *

Input

+ *

+ * [Description of the Input] + *

+ *

+ *

Output

+ *

+ * [Description of the Output] + *

+ *

+ *

Examples

+ *
+ *    java
+ *      -jar GenomeAnalysisTK.jar
+ *      -T [walker name]
+ *  
+ * + * @author Mauricio Carneiro + * @since 5/1/13 + */ +final class Metrics { + private double gccontent; + private double baseQual; + private double mapQual; + private int reads; + private int refs; + + void reads(int reads) {this.reads = reads;} + void refs(int refs) {this.refs = refs;} + + void gccontent(double gccontent) {this.gccontent = gccontent;} + void baseQual(double baseQual) {this.baseQual = baseQual;} + void mapQual(double mapQual) {this.mapQual = mapQual;} + + double gccontent() {return refs > 0 ? gccontent/refs : 0.0;} + double baseQual() {return reads > 0 ? baseQual/reads : 0.0;} + double mapQual() {return reads > 0 ? mapQual/reads : 0.0;} + + /** + * Combines two metrics + * + * @param value the other metric to combine + * @return itself, for simple reduce + */ + public Metrics combine(Metrics value) { + this.gccontent += value.gccontent; + this.baseQual += value.baseQual; + this.mapQual += value.mapQual; + this.reads += value.reads; + this.refs += value.refs; + + return this; + } +} diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/missing/QualifyMissingIntervals.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/missing/QualifyMissingIntervals.java new file mode 100644 index 000000000..62716d6d2 --- /dev/null +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/missing/QualifyMissingIntervals.java @@ -0,0 +1,226 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). 
+* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. 
Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. 
INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. +* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. 
LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. 
This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. +*/ + +package org.broadinstitute.sting.gatk.walkers.diagnostics.missing; + +import org.broadinstitute.sting.commandline.Argument; +import org.broadinstitute.sting.commandline.Output; +import org.broadinstitute.sting.gatk.CommandLineGATK; +import org.broadinstitute.sting.gatk.contexts.AlignmentContext; +import org.broadinstitute.sting.gatk.contexts.ReferenceContext; +import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; +import org.broadinstitute.sting.gatk.report.GATKReport; +import org.broadinstitute.sting.gatk.walkers.By; +import org.broadinstitute.sting.gatk.walkers.DataSource; +import org.broadinstitute.sting.gatk.walkers.LocusWalker; +import org.broadinstitute.sting.gatk.walkers.NanoSchedulable; +import org.broadinstitute.sting.utils.GenomeLoc; +import org.broadinstitute.sting.utils.GenomeLocParser; +import org.broadinstitute.sting.utils.GenomeLocSortedSet; +import org.broadinstitute.sting.utils.collections.Pair; +import org.broadinstitute.sting.utils.exceptions.UserException; +import org.broadinstitute.sting.utils.help.DocumentedGATKFeature; +import org.broadinstitute.sting.utils.help.HelpConstants; +import org.broadinstitute.sting.utils.pileup.ReadBackedPileup; +import org.broadinstitute.sting.utils.text.XReadLines; + +import java.io.File; +import 
java.io.FileNotFoundException; +import java.io.PrintStream; +import java.util.List; + +/** + * Walks along reference and calculates a few metrics for each interval. + * + * Metrics: + *
    + *
  • Average Base Quality
  • + *
  • Average Mapping Quality
  • + *
  • GC Content
  • + *
  • Position in the target
  • + *
  • Coding Sequence / Intron
  • + *
  • Length of the uncovered area
  • + *
+ * + *

Input

+ *

+ * A reference file + *

+ * + *

Output

+ *

+ * GC content calculations per interval. + *

+ * + *

Example

+ *
+ * java -Xmx2g -jar GenomeAnalysisTK.jar \
+ *   -T QualifyMissingIntervals \
+ *   -R ref.fasta \
+ *   -o output.grp \
+ *   -L input.intervals \
+ *   -cds cds.intervals \
+ *   -targets targets.intervals
+ * 
+ * + */ +@DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_QC, extraDocs = {CommandLineGATK.class} ) +@By(DataSource.REFERENCE) +public final class QualifyMissingIntervals extends LocusWalker implements NanoSchedulable { + @Output + protected PrintStream out; + + @Argument(shortName = "targets", required = true) + public File targetsFile; + + @Argument(shortName = "cds", required = false) + public File cdsFile; + + GATKReport simpleReport; + GenomeLocSortedSet target; + GenomeLocSortedSet cds; + + public boolean isReduceByInterval() { + return true; + } + + public void initialize() { + simpleReport = GATKReport.newSimpleReport("QualifyMissingIntervals", "IN", "GC", "BQ", "MQ", "TP", "CD", "LN"); + final GenomeLocParser parser = getToolkit().getGenomeLocParser(); + target = new GenomeLocSortedSet(parser); + cds = new GenomeLocSortedSet(parser); + parseFile(targetsFile, target, parser); + parseFile(cdsFile, cds, parser); + } + + public Metrics reduceInit() { + return new Metrics(); + } + + public Metrics map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) { + if (tracker == null) + return null; + + final Metrics metrics = new Metrics(); + final byte baseIndex = ref.getBase(); + final ReadBackedPileup pileup = context.getBasePileup(); + final int nBases = pileup.getNumberOfElements(); + + double baseQual = 0.0; + for (byte qual : pileup.getQuals()) { + baseQual += qual; + } + double mapQual = 0.0; + for (byte qual : pileup.getMappingQuals()) { + mapQual += qual; + } + + metrics.baseQual(baseQual); + metrics.mapQual(mapQual); + metrics.gccontent(baseIndex == 'C' || baseIndex == 'G' ? 
1.0 : 0.0); + metrics.reads(nBases); + metrics.refs(1); + + return metrics; + } + + @Override + public Metrics reduce(Metrics value, Metrics sum) { + return sum.combine(value); + } + + public void onTraversalDone(List> results) { + for (Pair r : results) { + GenomeLoc interval = r.getFirst(); + Metrics metrics = r.getSecond(); + simpleReport.addRow( + interval.toString(), + metrics.gccontent(), + metrics.baseQual(), + metrics.mapQual(), + getPositionInTarget(interval), + cds.overlaps(interval), + interval.size() + ); + } + simpleReport.print(out); + out.close(); + } + + private static GenomeLoc parseInterval(String s, GenomeLocParser parser) { + if (s.isEmpty()) { + return null; + } + String[] first = s.split(":"); + if (first.length == 2) { + String[] second = first[1].split("\\-"); + return parser.createGenomeLoc(first[0], Integer.decode(second[0]), Integer.decode(second[1])); + } else { + throw new UserException.BadInput("Interval doesn't parse correctly: " + s); + } + } + + private void parseFile(File file, GenomeLocSortedSet set, GenomeLocParser parser) { + try { + for (String s : new XReadLines(file) ) { + GenomeLoc interval = parseInterval(s, parser); + if (interval != null) + set.add(interval, true); + } + } catch (FileNotFoundException e) { + e.printStackTrace(); + } + } + + private int getPositionInTarget(GenomeLoc interval) { + final List hits = target.getOverlapping(interval); + int result = 0; + for (GenomeLoc hit : hits) { + result = interval.getStart() - hit.getStart(); // if there are multiple hits, we'll get the last one. 
+ } + return result; + } +} diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/IndelGenotypeLikelihoodsCalculationModel.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/IndelGenotypeLikelihoodsCalculationModel.java index c6e9ea379..0f3f7739d 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/IndelGenotypeLikelihoodsCalculationModel.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/IndelGenotypeLikelihoodsCalculationModel.java @@ -76,7 +76,8 @@ public class IndelGenotypeLikelihoodsCalculationModel extends GenotypeLikelihood private List alleleList = new ArrayList(); - protected IndelGenotypeLikelihoodsCalculationModel(UnifiedArgumentCollection UAC, Logger logger) { + protected IndelGenotypeLikelihoodsCalculationModel(final UnifiedArgumentCollection UAC, + final Logger logger) { super(UAC, logger); pairModel = new PairHMMIndelErrorModel(UAC.INDEL_GAP_OPEN_PENALTY, UAC.INDEL_GAP_CONTINUATION_PENALTY, UAC.OUTPUT_DEBUG_INDEL_INFO, UAC.pairHMM); @@ -85,10 +86,11 @@ public class IndelGenotypeLikelihoodsCalculationModel extends GenotypeLikelihood ignoreSNPAllelesWhenGenotypingIndels = UAC.IGNORE_SNP_ALLELES; } - protected static List computeConsensusAlleles(ReferenceContext ref, - Map contexts, - AlignmentContextUtils.ReadOrientation contextType, - GenomeLocParser locParser, UnifiedArgumentCollection UAC) { + protected static List computeConsensusAlleles(final ReferenceContext ref, + final Map contexts, + final AlignmentContextUtils.ReadOrientation contextType, + final GenomeLocParser locParser, + final UnifiedArgumentCollection UAC) { ConsensusAlleleCounter counter = new ConsensusAlleleCounter(locParser, true, UAC.MIN_INDEL_COUNT_FOR_GENOTYPING, UAC.MIN_INDEL_FRACTION_PER_SAMPLE); return counter.computeConsensusAlleles(ref, contexts, contextType); } diff --git 
a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/SNPGenotypeLikelihoodsCalculationModel.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/SNPGenotypeLikelihoodsCalculationModel.java index ce5f94478..360f88e51 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/SNPGenotypeLikelihoodsCalculationModel.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/SNPGenotypeLikelihoodsCalculationModel.java @@ -147,9 +147,17 @@ public class SNPGenotypeLikelihoodsCalculationModel extends GenotypeLikelihoodsC // if we only want variants, then we don't need to calculate genotype likelihoods if ( UAC.OutputMode == UnifiedGenotyperEngine.OUTPUT_MODE.EMIT_VARIANTS_ONLY ) return builder.make(); + // if user requires all PLs at all sites, add all possible alt alleles + else if (UAC.annotateAllSitesWithPLs) { + for ( final byte base : BaseUtils.BASES ) { + if ( base != refBase ) + alleles.add(Allele.create(base)); + } + } - // otherwise, choose any alternate allele (it doesn't really matter) - alleles.add(Allele.create(BaseUtils.baseIndexToSimpleBase(indexOfRefBase == 0 ? 1 : 0))); + else + // otherwise, choose any alternate allele (it doesn't really matter) + alleles.add(Allele.create(BaseUtils.baseIndexToSimpleBase(indexOfRefBase == 0 ? 
1 : 0))); } } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedArgumentCollection.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedArgumentCollection.java index e346b10b7..f156468cc 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedArgumentCollection.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedArgumentCollection.java @@ -52,6 +52,9 @@ import org.broadinstitute.sting.utils.pairhmm.PairHMM; import org.broadinstitute.sting.utils.variant.GATKVariantContextUtils; import org.broadinstitute.variant.variantcontext.VariantContext; +import java.util.Collections; +import java.util.List; + public class UnifiedArgumentCollection extends StandardCallerArgumentCollection { @Argument(fullName = "genotype_likelihoods_model", shortName = "glm", doc = "Genotype likelihoods calculation model to employ -- SNP is the default option, while INDEL is also available for calling indels and BOTH is available for calling both together", required = false) @@ -82,7 +85,7 @@ public class UnifiedArgumentCollection extends StandardCallerArgumentCollection * The PairHMM implementation to use for -glm INDEL genotype likelihood calculations. The various implementations balance a tradeoff of accuracy and runtime. */ @Argument(fullName = "pair_hmm_implementation", shortName = "pairHMM", doc = "The PairHMM implementation to use for -glm INDEL genotype likelihood calculations", required = false) - public PairHMM.HMM_IMPLEMENTATION pairHMM = PairHMM.HMM_IMPLEMENTATION.ORIGINAL; + public PairHMM.HMM_IMPLEMENTATION pairHMM = PairHMM.HMM_IMPLEMENTATION.LOGLESS_CACHING; /** * The minimum confidence needed in a given base for it to be used in variant calling. 
Note that the base quality of a base @@ -95,6 +98,18 @@ public class UnifiedArgumentCollection extends StandardCallerArgumentCollection @Argument(fullName = "max_deletion_fraction", shortName = "deletions", doc = "Maximum fraction of reads with deletions spanning this locus for it to be callable [to disable, set to < 0 or > 1; default:0.05]", required = false) public Double MAX_DELETION_FRACTION = 0.05; + /** + * Advanced, experimental argument: if SNP likelihood model is specified, and if EMIT_ALL_SITES output mode is set, when we set this argument then we will also emit PLs at all sites. + * This will give a measure of reference confidence and a measure of which alt alleles are more plausible (if any). + * WARNINGS: + * - This feature will inflate VCF file size considerably. + * - All SNP ALT alleles will be emitted with corresponding 10 PL values. + * - An error will be emitted if EMIT_ALL_SITES is not set, or if anything other than diploid SNP model is used + */ + @Advanced + @Argument(fullName = "allSitePLs", shortName = "allSitePLs", doc = "Annotate all sites with PLs", required = false) + public boolean annotateAllSitesWithPLs = false; + // indel-related arguments /** * A candidate indel is genotyped (and potentially called) if there are this number of reads with a consensus indel at a site. 
@@ -247,7 +262,7 @@ public class UnifiedArgumentCollection extends StandardCallerArgumentCollection this.EXCLUDE_FILTERED_REFERENCE_SITES = uac.EXCLUDE_FILTERED_REFERENCE_SITES; this.IGNORE_LANE_INFO = uac.IGNORE_LANE_INFO; this.pairHMM = uac.pairHMM; - + this.annotateAllSitesWithPLs = uac.annotateAllSitesWithPLs; // todo- arguments to remove this.IGNORE_SNP_ALLELES = uac.IGNORE_SNP_ALLELES; } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperEngine.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperEngine.java index 3380efcc9..9f3368cf8 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperEngine.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperEngine.java @@ -83,6 +83,9 @@ public class UnifiedGenotyperEngine { public static final double HUMAN_SNP_HETEROZYGOSITY = 1e-3; public static final double HUMAN_INDEL_HETEROZYGOSITY = 1e-4; + private static final int SNP_MODEL = 0; + private static final int INDEL_MODEL = 1; + public enum OUTPUT_MODE { /** produces calls only at variant sites */ EMIT_VARIANTS_ONLY, @@ -165,6 +168,13 @@ public class UnifiedGenotyperEngine { filter.add(LOW_QUAL_FILTER_NAME); determineGLModelsToUse(); + + // do argument checking + if (UAC.annotateAllSitesWithPLs) { + if (!modelsToUse.contains(GenotypeLikelihoodsCalculationModel.Model.SNP)) + throw new IllegalArgumentException("Invalid genotype likelihood model specification: Only diploid SNP model can be used in conjunction with option allSitePLs"); + + } } /** @@ -436,7 +446,8 @@ public class UnifiedGenotyperEngine { bestGuessIsRef = false; } // if in GENOTYPE_GIVEN_ALLELES mode, we still want to allow the use of a poor allele - else if ( UAC.GenotypingMode == GenotypeLikelihoodsCalculationModel.GENOTYPING_MODE.GENOTYPE_GIVEN_ALLELES ) { + else if ( UAC.GenotypingMode == 
GenotypeLikelihoodsCalculationModel.GENOTYPING_MODE.GENOTYPE_GIVEN_ALLELES || + UAC.annotateAllSitesWithPLs) { myAlleles.add(alternateAllele); alleleCountsofMLE.add(AFresult.getAlleleCountAtMLE(alternateAllele)); } @@ -446,7 +457,7 @@ public class UnifiedGenotyperEngine { // note the math.abs is necessary because -10 * 0.0 => -0.0 which isn't nice final double phredScaledConfidence = - Math.abs(! bestGuessIsRef || UAC.GenotypingMode == GenotypeLikelihoodsCalculationModel.GENOTYPING_MODE.GENOTYPE_GIVEN_ALLELES + Math.abs(! bestGuessIsRef || UAC.GenotypingMode == GenotypeLikelihoodsCalculationModel.GENOTYPING_MODE.GENOTYPE_GIVEN_ALLELES || UAC.annotateAllSitesWithPLs ? -10 * AFresult.getLog10PosteriorOfAFEq0() : -10 * AFresult.getLog10PosteriorOfAFGT0()); @@ -540,11 +551,6 @@ public class UnifiedGenotyperEngine { builder.attributes(attributes); VariantContext vcCall = builder.make(); - // if we are subsetting alleles (either because there were too many or because some were not polymorphic) - // then we may need to trim the alleles (because the original VariantContext may have had to pad at the end). 
- if ( myAlleles.size() != vc.getAlleles().size() && !limitedContext ) // limitedContext callers need to handle allele trimming on their own to keep their perReadAlleleLikelihoodMap alleles in sync - vcCall = GATKVariantContextUtils.reverseTrimAlleles(vcCall); - if ( annotationEngine != null && !limitedContext ) { // limitedContext callers need to handle annotations on their own by calling their own annotationEngine // Note: we want to use the *unfiltered* and *unBAQed* context for the annotations final ReadBackedPileup pileup = rawContext.getBasePileup(); @@ -553,6 +559,11 @@ public class UnifiedGenotyperEngine { vcCall = annotationEngine.annotateContext(tracker, refContext, stratifiedContexts, vcCall, perReadAlleleLikelihoodMap); } + // if we are subsetting alleles (either because there were too many or because some were not polymorphic) + // then we may need to trim the alleles (because the original VariantContext may have had to pad at the end). + if ( myAlleles.size() != vc.getAlleles().size() && !limitedContext ) // limitedContext callers need to handle allele trimming on their own to keep their perReadAlleleLikelihoodMap alleles in sync + vcCall = GATKVariantContextUtils.reverseTrimAlleles(vcCall); + return new VariantCallContext(vcCall, confidentlyCalled(phredScaledConfidence, PoFGT0)); } @@ -693,13 +704,13 @@ public class UnifiedGenotyperEngine { } private void determineGLModelsToUse() { - String modelPrefix = ""; if ( !UAC.GLmodel.name().contains(GPSTRING) && UAC.samplePloidy != GATKVariantContextUtils.DEFAULT_PLOIDY ) modelPrefix = GPSTRING; - if ( UAC.GLmodel.name().toUpperCase().contains("BOTH") ) { - modelPrefix += UAC.GLmodel.name().toUpperCase().replaceAll("BOTH",""); + // GGA mode => must initialize both the SNP and indel models + if ( UAC.GenotypingMode == GenotypeLikelihoodsCalculationModel.GENOTYPING_MODE.GENOTYPE_GIVEN_ALLELES || + UAC.GLmodel.name().toUpperCase().contains("BOTH") ) { 
modelsToUse.add(GenotypeLikelihoodsCalculationModel.Model.valueOf(modelPrefix+"SNP")); modelsToUse.add(GenotypeLikelihoodsCalculationModel.Model.valueOf(modelPrefix+"INDEL")); } @@ -712,31 +723,24 @@ public class UnifiedGenotyperEngine { private List getGLModelsToUse(final RefMetaDataTracker tracker, final ReferenceContext refContext, final AlignmentContext rawContext) { - if ( UAC.GenotypingMode != GenotypeLikelihoodsCalculationModel.GENOTYPING_MODE.GENOTYPE_GIVEN_ALLELES ) return modelsToUse; + if ( modelsToUse.size() != 2 ) + throw new IllegalStateException("GGA mode assumes that we have initialized both the SNP and indel models but found " + modelsToUse); + // if we're genotyping given alleles then we need to choose the model corresponding to the variant type requested - final List GGAmodel = new ArrayList(1); final VariantContext vcInput = getVCFromAllelesRod(tracker, refContext, rawContext.getLocation(), false, logger, UAC.alleles); - if ( vcInput == null ) - return GGAmodel; // no work to be done - if ( vcInput.isSNP() ) { - // use the SNP model unless the user chose INDEL mode only - if ( modelsToUse.size() == 2 || modelsToUse.get(0).name().endsWith("SNP") ) - GGAmodel.add(modelsToUse.get(0)); + if ( vcInput == null ) { + return Collections.emptyList(); // no work to be done + } else if ( vcInput.isSNP() ) { + return Collections.singletonList(modelsToUse.get(SNP_MODEL)); + } else if ( vcInput.isIndel() || vcInput.isMixed() ) { + return Collections.singletonList(modelsToUse.get(INDEL_MODEL)); + } else { + return Collections.emptyList(); // No support for other types yet } - else if ( vcInput.isIndel() || vcInput.isMixed() ) { - // use the INDEL model unless the user chose SNP mode only - if ( modelsToUse.size() == 2 ) - GGAmodel.add(modelsToUse.get(1)); - else if ( modelsToUse.get(0).name().endsWith("INDEL") ) - GGAmodel.add(modelsToUse.get(0)); - } - // No support for other types yet - - return GGAmodel; } /** diff --git 
a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/afcalc/DiploidExactAFCalc.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/afcalc/DiploidExactAFCalc.java index 170b6e250..2ece18002 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/afcalc/DiploidExactAFCalc.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/afcalc/DiploidExactAFCalc.java @@ -106,7 +106,7 @@ public abstract class DiploidExactAFCalc extends ExactAFCalc { alleles.add(vc.getReference()); alleles.addAll(chooseMostLikelyAlternateAlleles(vc, getMaxAltAlleles())); builder.alleles(alleles); - builder.genotypes(GATKVariantContextUtils.subsetDiploidAlleles(vc, alleles, false)); + builder.genotypes(GATKVariantContextUtils.subsetDiploidAlleles(vc, alleles, GATKVariantContextUtils.GenotypeAssignmentMethod.SET_TO_NO_CALL)); return builder.make(); } else { return vc; @@ -352,6 +352,9 @@ public abstract class DiploidExactAFCalc extends ExactAFCalc { final List allelesToUse, final boolean assignGenotypes, final int ploidy) { - return GATKVariantContextUtils.subsetDiploidAlleles(vc, allelesToUse, assignGenotypes); + return allelesToUse.size() == 1 + ? GATKVariantContextUtils.subsetToRefOnly(vc, ploidy) + : GATKVariantContextUtils.subsetDiploidAlleles(vc, allelesToUse, + assignGenotypes ? 
GATKVariantContextUtils.GenotypeAssignmentMethod.USE_PLS_TO_ASSIGN : GATKVariantContextUtils.GenotypeAssignmentMethod.SET_TO_NO_CALL); } } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/ActiveRegionTrimmer.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/ActiveRegionTrimmer.java new file mode 100644 index 000000000..063e3b218 --- /dev/null +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/ActiveRegionTrimmer.java @@ -0,0 +1,142 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). +* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. 
Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. 
LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. 
+* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. 
Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. 
+*/ + +package org.broadinstitute.sting.gatk.walkers.haplotypecaller; + +import org.apache.log4j.Logger; +import org.broadinstitute.sting.utils.GenomeLoc; +import org.broadinstitute.sting.utils.GenomeLocParser; +import org.broadinstitute.sting.utils.activeregion.ActiveRegion; +import org.broadinstitute.variant.variantcontext.VariantContext; + +import java.util.LinkedList; +import java.util.List; +import java.util.TreeSet; + +/** + * Trim down an active region based on a set of variants found across the haplotypes within the region + * + * User: depristo + * Date: 4/27/13 + * Time: 2:10 PM + */ +class ActiveRegionTrimmer { + private final static Logger logger = Logger.getLogger(ActiveRegionTrimmer.class); + private final boolean logTrimming; + private final int snpPadding, nonSnpPadding, maxDistanceInExtensionForGenotyping; + private final GenomeLocParser parser; + + /** + * Create a new ActiveRegionTrimmer + * + * @param logTrimming should we log our trimming events? + * @param snpPadding how much bp context should we ensure around snps? + * @param nonSnpPadding how much bp context should we ensure around anything not a snp? + * @param maxDistanceInExtensionForGenotyping the max extent we are will to go into the extended region of the + * origin active region in order to properly genotype events in the + * non-extended active region? 
+ * @param parser a genome loc parser so we can create genome locs + */ + ActiveRegionTrimmer(boolean logTrimming, int snpPadding, int nonSnpPadding, int maxDistanceInExtensionForGenotyping, GenomeLocParser parser) { + if ( snpPadding < 0 ) throw new IllegalArgumentException("snpPadding must be >= 0 but got " + snpPadding); + if ( nonSnpPadding < 0 ) throw new IllegalArgumentException("nonSnpPadding must be >= 0 but got " + nonSnpPadding); + if ( maxDistanceInExtensionForGenotyping < 0 ) throw new IllegalArgumentException("maxDistanceInExtensionForGenotyping must be >= 0 but got " + maxDistanceInExtensionForGenotyping); + if ( parser == null ) throw new IllegalArgumentException("parser cannot be null"); + + this.logTrimming = logTrimming; + this.snpPadding = snpPadding; + this.nonSnpPadding = nonSnpPadding; + this.maxDistanceInExtensionForGenotyping = maxDistanceInExtensionForGenotyping; + this.parser = parser; + } + + /** + * Trim down the active region to a region large enough to properly genotype the events found within the active + * region span, excluding all variants that only occur within its extended span. + * + * This function merely creates the region, but it doesn't populate the reads back into the region. 
+ * + * @param region our full active region + * @param allVariantsWithinExtendedRegion all of the variants found in the entire region, sorted by their start position + * @return a new ActiveRegion trimmed down to just what's needed for genotyping, or null if we couldn't do this successfully + */ + public ActiveRegion trimRegion(final ActiveRegion region, final TreeSet allVariantsWithinExtendedRegion) { + if ( allVariantsWithinExtendedRegion.isEmpty() ) // no variants, so just return the current region + return null; + + final List withinActiveRegion = new LinkedList(); + int pad = snpPadding; + GenomeLoc trimLoc = null; + for ( final VariantContext vc : allVariantsWithinExtendedRegion ) { + final GenomeLoc vcLoc = parser.createGenomeLoc(vc); + if ( region.getLocation().overlapsP(vcLoc) ) { + if ( ! vc.isSNP() ) // if anything isn't a SNP use the bigger padding + pad = nonSnpPadding; + trimLoc = trimLoc == null ? vcLoc : trimLoc.endpointSpan(vcLoc); + withinActiveRegion.add(vc); + } + } + + // we don't actually have anything in the region after removing variants that don't overlap the region's full location + if ( trimLoc == null ) return null; + + final GenomeLoc maxSpan = parser.createPaddedGenomeLoc(region.getLocation(), maxDistanceInExtensionForGenotyping); + final GenomeLoc idealSpan = parser.createPaddedGenomeLoc(trimLoc, pad); + final GenomeLoc finalSpan = maxSpan.intersect(idealSpan); + + final ActiveRegion trimmedRegion = region.trim(finalSpan); + if ( logTrimming ) { + logger.info("events : " + withinActiveRegion); + logger.info("trimLoc : " + trimLoc); + logger.info("pad : " + pad); + logger.info("idealSpan : " + idealSpan); + logger.info("maxSpan : " + maxSpan); + logger.info("finalSpan : " + finalSpan); + logger.info("regionSpan : " + trimmedRegion.getExtendedLoc() + " size is " + trimmedRegion.getExtendedLoc().size()); + } + return trimmedRegion; + } +} diff --git 
a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssembler.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssembler.java index 12a4841bf..d876a403b 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssembler.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssembler.java @@ -46,102 +46,55 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller; -import com.google.java.contract.Ensures; import com.google.java.contract.Requires; -import net.sf.samtools.Cigar; -import net.sf.samtools.CigarElement; -import net.sf.samtools.CigarOperator; -import org.apache.commons.lang.ArrayUtils; import org.apache.log4j.Logger; -import org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs.*; -import org.broadinstitute.sting.utils.GenomeLoc; +import org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs.DeBruijnGraph; +import org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs.SeqGraph; import org.broadinstitute.sting.utils.MathUtils; -import org.broadinstitute.sting.utils.smithwaterman.SWPairwiseAlignment; -import org.broadinstitute.sting.utils.activeregion.ActiveRegion; import org.broadinstitute.sting.utils.exceptions.UserException; import org.broadinstitute.sting.utils.haplotype.Haplotype; -import org.broadinstitute.sting.utils.sam.AlignmentUtils; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import org.broadinstitute.sting.utils.sam.ReadUtils; -import org.broadinstitute.sting.utils.smithwaterman.SWParameterSet; -import org.broadinstitute.variant.variantcontext.Allele; -import org.broadinstitute.variant.variantcontext.VariantContext; import java.io.File; -import java.util.*; +import java.util.Arrays; +import java.util.Collections; +import java.util.LinkedList; +import java.util.List; /** - * Created by IntelliJ IDEA. 
+ * DeBruijn assembler for the HaplotypeCaller + * * User: ebanks, rpoplin * Date: Mar 14, 2011 */ - public class DeBruijnAssembler extends LocalAssemblyEngine { private final static Logger logger = Logger.getLogger(DeBruijnAssembler.class); - private static final int KMER_OVERLAP = 5; // the additional size of a valid chunk of sequence, used to string together k-mers - // TODO -- this number is very low, and limits our ability to explore low-frequency variants. It should // TODO -- be increased to a large number of eliminated altogether when moving to the bubble caller where // TODO -- we are no longer considering a combinatorial number of haplotypes as the number of bubbles increases - private static final int NUM_BEST_PATHS_PER_KMER_GRAPH = 25; + private final static int NUM_PATHS_PER_GRAPH = 25; + private static final int KMER_OVERLAP = 5; // the additional size of a valid chunk of sequence, used to string together k-mers private static final int GRAPH_KMER_STEP = 6; + private static final int GGA_MODE_ARTIFICIAL_COUNTS = 1000; - private final boolean debug; - private final boolean debugGraphTransformations; private final int minKmer; - private final boolean allowCyclesInKmerGraphToGeneratePaths; - private final int onlyBuildKmersOfThisSizeWhenDebuggingGraphAlgorithms; - protected DeBruijnAssembler() { - this(false, -1, 11, false); + this(25, -1); } - public DeBruijnAssembler(final boolean debug, - final int debugGraphTransformations, - final int minKmer, - final boolean allowCyclesInKmerGraphToGeneratePaths) { - super(); - this.debug = debug; - this.debugGraphTransformations = debugGraphTransformations > 0; - this.onlyBuildKmersOfThisSizeWhenDebuggingGraphAlgorithms = debugGraphTransformations; + public DeBruijnAssembler(final int minKmer, final int onlyBuildKmersOfThisSizeWhenDebuggingGraphAlgorithms) { + super(NUM_PATHS_PER_GRAPH); this.minKmer = minKmer; - this.allowCyclesInKmerGraphToGeneratePaths = allowCyclesInKmerGraphToGeneratePaths; + 
this.onlyBuildKmersOfThisSizeWhenDebuggingGraphAlgorithms = onlyBuildKmersOfThisSizeWhenDebuggingGraphAlgorithms; } - /** - * Main entry point into the assembly engine. Build a set of deBruijn graphs out of the provided reference sequence and list of reads - * @param activeRegion ActiveRegion object holding the reads which are to be used during assembly - * @param refHaplotype reference haplotype object - * @param fullReferenceWithPadding byte array holding the reference sequence with padding - * @param refLoc GenomeLoc object corresponding to the reference sequence with padding - * @param activeAllelesToGenotype the alleles to inject into the haplotypes during GGA mode - * @return a non-empty list of all the haplotypes that are produced during assembly - */ - @Ensures({"result.contains(refHaplotype)"}) - public List runLocalAssembly( final ActiveRegion activeRegion, final Haplotype refHaplotype, final byte[] fullReferenceWithPadding, final GenomeLoc refLoc, final List activeAllelesToGenotype ) { - if( activeRegion == null ) { throw new IllegalArgumentException("Assembly engine cannot be used with a null ActiveRegion."); } - if( refHaplotype == null ) { throw new IllegalArgumentException("Reference haplotype cannot be null."); } - if( fullReferenceWithPadding.length != refLoc.size() ) { throw new IllegalArgumentException("Reference bases and reference loc must be the same size."); } - if( pruneFactor < 0 ) { throw new IllegalArgumentException("Pruning factor cannot be negative"); } - - // create the graphs - final List graphs = createDeBruijnGraphs( activeRegion.getReads(), refHaplotype ); - - // print the graphs if the appropriate debug option has been turned on - if( graphWriter != null ) { - printGraphs(graphs); - } - - // find the best paths in the graphs and return them as haplotypes - return findBestPaths( graphs, refHaplotype, fullReferenceWithPadding, refLoc, activeAllelesToGenotype, activeRegion.getExtendedLoc() ); - } - - @Requires({"reads != null", 
"refHaplotype != null"}) - protected List createDeBruijnGraphs( final List reads, final Haplotype refHaplotype ) { - final List graphs = new LinkedList(); + @Override + protected List assemble(final List reads, final Haplotype refHaplotype, final List activeAlleleHaplotypes ) { + final List graphs = new LinkedList<>(); final int maxKmer = ReadUtils.getMaxReadLength(reads) - KMER_OVERLAP - 1; if( maxKmer < minKmer) { @@ -154,7 +107,7 @@ public class DeBruijnAssembler extends LocalAssemblyEngine { continue; if ( debug ) logger.info("Creating de Bruijn graph for " + kmer + " kmer using " + reads.size() + " reads"); - DeBruijnGraph graph = createGraphFromSequences( reads, kmer, refHaplotype); + DeBruijnGraph graph = createGraphFromSequences(reads, kmer, refHaplotype, activeAlleleHaplotypes); if( graph != null ) { // graphs that fail during creation ( for example, because there are cycles in the reference graph ) will show up here as a null graph object // do a series of steps to clean up the raw assembly graph to make it analysis-ready if ( debugGraphTransformations ) graph.printGraph(new File("unpruned.dot"), pruneFactor); @@ -165,10 +118,9 @@ public class DeBruijnAssembler extends LocalAssemblyEngine { " future subsystem will actually go and error correct the reads"); } - final SeqGraph seqGraph = toSeqGraph(graph); + final SeqGraph seqGraph = cleanupSeqGraph(graph.convertToSequenceGraph()); if ( seqGraph != null ) { // if the graph contains interesting variation from the reference - sanityCheckReferenceGraph(seqGraph, refHaplotype); graphs.add(seqGraph); if ( debugGraphTransformations ) // we only want to use one graph size @@ -181,71 +133,8 @@ public class DeBruijnAssembler extends LocalAssemblyEngine { return graphs; } - private SeqGraph toSeqGraph(final DeBruijnGraph deBruijnGraph) { - final SeqGraph seqGraph = deBruijnGraph.convertToSequenceGraph(); - if ( debugGraphTransformations ) seqGraph.printGraph(new File("sequenceGraph.1.dot"), pruneFactor); - - // TODO 
-- we need to come up with a consistent pruning algorithm. The current pruning algorithm - // TODO -- works well but it doesn't differentiate between an isolated chain that doesn't connect - // TODO -- to anything from one that's actually has good support along the chain but just happens - // TODO -- to have a connection in the middle that has weight of < pruneFactor. Ultimately - // TODO -- the pruning algorithm really should be an error correction algorithm that knows more - // TODO -- about the structure of the data and can differentiate between an infrequent path but - // TODO -- without evidence against it (such as occurs when a region is hard to get any reads through) - // TODO -- from a error with lots of weight going along another similar path - // the very first thing we need to do is zip up the graph, or pruneGraph will be too aggressive - seqGraph.zipLinearChains(); - if ( debugGraphTransformations ) seqGraph.printGraph(new File("sequenceGraph.2.zipped.dot"), pruneFactor); - - // now go through and prune the graph, removing vertices no longer connected to the reference chain - // IMPORTANT: pruning must occur before we call simplifyGraph, as simplifyGraph adds 0 weight - // edges to maintain graph connectivity. - seqGraph.pruneGraph(pruneFactor); - seqGraph.removeVerticesNotConnectedToRefRegardlessOfEdgeDirection(); - - if ( debugGraphTransformations ) seqGraph.printGraph(new File("sequenceGraph.3.pruned.dot"), pruneFactor); - seqGraph.simplifyGraph(); - if ( debugGraphTransformations ) seqGraph.printGraph(new File("sequenceGraph.4.merged.dot"), pruneFactor); - - // The graph has degenerated in some way, so the reference source and/or sink cannot be id'd. Can - // happen in cases where for example the reference somehow manages to acquire a cycle, or - // where the entire assembly collapses back into the reference sequence. 
- if ( seqGraph.getReferenceSourceVertex() == null || seqGraph.getReferenceSinkVertex() == null ) - return null; - - seqGraph.removePathsNotConnectedToRef(); - seqGraph.simplifyGraph(); - if ( seqGraph.vertexSet().size() == 1 ) { - // we've perfectly assembled into a single reference haplotype, add a empty seq vertex to stop - // the code from blowing up. - // TODO -- ref properties should really be on the vertices, not the graph itself - final SeqVertex complete = seqGraph.vertexSet().iterator().next(); - final SeqVertex dummy = new SeqVertex(""); - seqGraph.addVertex(dummy); - seqGraph.addEdge(complete, dummy, new BaseEdge(true, 0)); - } - if ( debugGraphTransformations ) seqGraph.printGraph(new File("sequenceGraph.5.final.dot"), pruneFactor); - - return seqGraph; - } - - protected void sanityCheckReferenceGraph(final BaseGraph graph, final Haplotype refHaplotype) { - if( graph.getReferenceSourceVertex() == null ) { - throw new IllegalStateException("All reference graphs must have a reference source vertex."); - } - if( graph.getReferenceSinkVertex() == null ) { - throw new IllegalStateException("All reference graphs must have a reference sink vertex."); - } - if( !Arrays.equals(graph.getReferenceBytes(graph.getReferenceSourceVertex(), graph.getReferenceSinkVertex(), true, true), refHaplotype.getBases()) ) { - throw new IllegalStateException("Mismatch between the reference haplotype and the reference assembly graph path." 
+ - " graph = " + new String(graph.getReferenceBytes(graph.getReferenceSourceVertex(), graph.getReferenceSinkVertex(), true, true)) + - " haplotype = " + new String(refHaplotype.getBases()) - ); - } - } - @Requires({"reads != null", "kmerLength > 0", "refHaplotype != null"}) - protected DeBruijnGraph createGraphFromSequences( final List reads, final int kmerLength, final Haplotype refHaplotype ) { + protected DeBruijnGraph createGraphFromSequences( final List reads, final int kmerLength, final Haplotype refHaplotype, final List activeAlleleHaplotypes ) { final DeBruijnGraph graph = new DeBruijnGraph(kmerLength); final DeBruijnGraphBuilder builder = new DeBruijnGraphBuilder(graph); @@ -254,6 +143,11 @@ public class DeBruijnAssembler extends LocalAssemblyEngine { // something went wrong, so abort right now with a null graph return null; + // add the artificial GGA haplotypes to the graph + if ( ! addGGAKmersToGraph(builder, activeAlleleHaplotypes) ) + // something went wrong, so abort right now with a null graph + return null; + // now go through the graph already seeded with the reference sequence and add the read kmers to it if ( ! 
addReadKmersToGraph(builder, reads) ) // some problem was detected adding the reads to the graph, return null to indicate we failed @@ -263,6 +157,28 @@ public class DeBruijnAssembler extends LocalAssemblyEngine { return graph; } + /** + * Add the high-quality kmers from the artificial GGA haplotypes to the graph + * + * @param builder a debruijn graph builder to add the read kmers to + * @param activeAlleleHaplotypes a list of haplotypes to add to the graph for GGA mode + * @return true if we successfully added the read kmers to the graph without corrupting it in some way + */ + protected boolean addGGAKmersToGraph(final DeBruijnGraphBuilder builder, final List activeAlleleHaplotypes) { + + final int kmerLength = builder.getKmerSize(); + + for( final Haplotype haplotype : activeAlleleHaplotypes ) { + final int end = haplotype.length() - kmerLength; + for( int start = 0; start < end; start++ ) { + builder.addKmerPairFromSeqToGraph( haplotype.getBases(), start, GGA_MODE_ARTIFICIAL_COUNTS ); + } + } + + // always returns true now, but it's possible that we'd add kmers and decide we don't like the graph in some way + return true; + } + /** * Add the high-quality kmers from the reads to the graph * @@ -344,290 +260,10 @@ public class DeBruijnAssembler extends LocalAssemblyEngine { return true; } - protected void printGraphs(final List graphs) { - final int writeFirstGraphWithSizeSmallerThan = 50; - - graphWriter.println("digraph assemblyGraphs {"); - for( final SeqGraph graph : graphs ) { - if ( debugGraphTransformations && graph.getKmerSize() >= writeFirstGraphWithSizeSmallerThan ) { - logger.info("Skipping writing of graph with kmersize " + graph.getKmerSize()); - continue; - } - - graph.printGraph(graphWriter, false, pruneFactor); - - if ( debugGraphTransformations ) - break; - } - - graphWriter.println("}"); - } - - @Requires({"refWithPadding.length > refHaplotype.getBases().length", "refLoc.containsP(activeRegionWindow)"}) - 
@Ensures({"result.contains(refHaplotype)"}) - private List findBestPaths( final List graphs, final Haplotype refHaplotype, final byte[] refWithPadding, final GenomeLoc refLoc, final List activeAllelesToGenotype, final GenomeLoc activeRegionWindow ) { - - // add the reference haplotype separately from all the others to ensure that it is present in the list of haplotypes - // TODO -- this use of an array with contains lower may be a performance problem returning in an O(N^2) algorithm - final List returnHaplotypes = new ArrayList(); - refHaplotype.setAlignmentStartHapwrtRef(activeRegionWindow.getStart() - refLoc.getStart()); - final Cigar c = new Cigar(); - c.add(new CigarElement(refHaplotype.getBases().length, CigarOperator.M)); - refHaplotype.setCigar(c); - returnHaplotypes.add( refHaplotype ); - - final int activeRegionStart = refHaplotype.getAlignmentStartHapwrtRef(); - final int activeRegionStop = refHaplotype.getAlignmentStartHapwrtRef() + refHaplotype.getCigar().getReferenceLength(); - - // for GGA mode, add the desired allele into the haplotype - for( final VariantContext compVC : activeAllelesToGenotype ) { - for( final Allele compAltAllele : compVC.getAlternateAlleles() ) { - final Haplotype insertedRefHaplotype = refHaplotype.insertAllele(compVC.getReference(), compAltAllele, activeRegionStart + compVC.getStart() - activeRegionWindow.getStart(), compVC.getStart()); - addHaplotypeForGGA( insertedRefHaplotype, refWithPadding, returnHaplotypes, activeRegionStart, activeRegionStop, true ); - } - } - - for( final SeqGraph graph : graphs ) { - final SeqVertex source = graph.getReferenceSourceVertex(); - final SeqVertex sink = graph.getReferenceSinkVertex(); - if ( source == null || sink == null ) throw new IllegalArgumentException("Both source and sink cannot be null but got " + source + " and sink " + sink + " for graph "+ graph); - - final KBestPaths pathFinder = new KBestPaths(allowCyclesInKmerGraphToGeneratePaths); - for ( final Path path : 
pathFinder.getKBestPaths(graph, NUM_BEST_PATHS_PER_KMER_GRAPH, source, sink) ) { -// logger.info("Found path " + path); - Haplotype h = new Haplotype( path.getBases() ); - if( !returnHaplotypes.contains(h) ) { - final Cigar cigar = path.calculateCigar(); - if( cigar.isEmpty() ) { - throw new IllegalStateException("Smith-Waterman alignment failure. Cigar = " + cigar + " with reference length " + cigar.getReferenceLength() + " but expecting reference length of " + refHaplotype.getCigar().getReferenceLength()); - } else if ( pathIsTooDivergentFromReference(cigar) || cigar.getReferenceLength() < 60 ) { // N cigar elements means that a bubble was too divergent from the reference so skip over this path - continue; - } else if( cigar.getReferenceLength() != refHaplotype.getCigar().getReferenceLength() ) { // SW failure - throw new IllegalStateException("Smith-Waterman alignment failure. Cigar = " + cigar + " with reference length " + cigar.getReferenceLength() + " but expecting reference length of " + refHaplotype.getCigar().getReferenceLength()); - } - h.setCigar(cigar); - - // extend partial haplotypes which are anchored in the reference to include the full active region - h = extendPartialHaplotype(h, activeRegionStart, refWithPadding); - final Cigar leftAlignedCigar = leftAlignCigarSequentially(AlignmentUtils.consolidateCigar(h.getCigar()), refWithPadding, h.getBases(), activeRegionStart, 0); - if( leftAlignedCigar.getReferenceLength() != refHaplotype.getCigar().getReferenceLength() ) { // left alignment failure - continue; - } - if( !returnHaplotypes.contains(h) ) { - h.setAlignmentStartHapwrtRef(activeRegionStart); - h.setCigar(leftAlignedCigar); - h.setScore(path.getScore()); - returnHaplotypes.add(h); - - if ( debug ) - logger.info("Adding haplotype " + h.getCigar() + " from debruijn graph with kmer " + graph.getKmerSize()); - - // for GGA mode, add the desired allele into the haplotype if it isn't already present - if( !activeAllelesToGenotype.isEmpty() ) { - 
final Map eventMap = GenotypingEngine.generateVCsFromAlignment( h, refWithPadding, refLoc, "HCassembly" ); // BUGBUG: need to put this function in a shared place - for( final VariantContext compVC : activeAllelesToGenotype ) { // for GGA mode, add the desired allele into the haplotype if it isn't already present - final VariantContext vcOnHaplotype = eventMap.get(compVC.getStart()); - - // This if statement used to additionally have: - // "|| !vcOnHaplotype.hasSameAllelesAs(compVC)" - // but that can lead to problems downstream when e.g. you are injecting a 1bp deletion onto - // a haplotype that already contains a 1bp insertion (so practically it is reference but - // falls into the bin for the 1bp deletion because we keep track of the artificial alleles). - if( vcOnHaplotype == null ) { - for( final Allele compAltAllele : compVC.getAlternateAlleles() ) { - addHaplotypeForGGA( h.insertAllele(compVC.getReference(), compAltAllele, activeRegionStart + compVC.getStart() - activeRegionWindow.getStart(), compVC.getStart()), refWithPadding, returnHaplotypes, activeRegionStart, activeRegionStop, false ); - } - } - } - } - } - } - } - } - - // add genome locs to the haplotypes - for ( final Haplotype h : returnHaplotypes ) h.setGenomeLocation(activeRegionWindow); - - if ( returnHaplotypes.size() < returnHaplotypes.size() ) - logger.info("Found " + returnHaplotypes.size() + " candidate haplotypes of " + returnHaplotypes.size() + " possible combinations to evaluate every read against at " + refLoc); - - if( debug ) { - if( returnHaplotypes.size() > 1 ) { - logger.info("Found " + returnHaplotypes.size() + " candidate haplotypes of " + returnHaplotypes.size() + " possible combinations to evaluate every read against."); - } else { - logger.info("Found only the reference haplotype in the assembly graph."); - } - for( final Haplotype h : returnHaplotypes ) { - logger.info( h.toString() ); - logger.info( "> Cigar = " + h.getCigar() + " : " + h.getCigar().getReferenceLength() + " 
score " + h.getScore() ); - } - } - - return returnHaplotypes; - } - - /** - * Extend partial haplotypes which are anchored in the reference to include the full active region - * @param haplotype the haplotype to extend - * @param activeRegionStart the place where the active region starts in the ref byte array - * @param refWithPadding the full reference byte array with padding which encompasses the active region - * @return a haplotype fully extended to encompass the active region - */ - @Requires({"haplotype != null", "activeRegionStart >= 0", "refWithPadding != null", "refWithPadding.length > 0"}) - @Ensures({"result != null", "result.getCigar() != null"}) - private Haplotype extendPartialHaplotype( final Haplotype haplotype, final int activeRegionStart, final byte[] refWithPadding ) { - final Cigar cigar = haplotype.getCigar(); - final Cigar newCigar = new Cigar(); - byte[] newHaplotypeBases = haplotype.getBases(); - int refPos = activeRegionStart; - int hapPos = 0; - for( int iii = 0; iii < cigar.getCigarElements().size(); iii++ ) { - final CigarElement ce = cigar.getCigarElement(iii); - switch (ce.getOperator()) { - case M: - refPos += ce.getLength(); - hapPos += ce.getLength(); - newCigar.add(ce); - break; - case I: - hapPos += ce.getLength(); - newCigar.add(ce); - break; - case D: - if( iii == 0 || iii == cigar.getCigarElements().size() - 1 ) { - newHaplotypeBases = ArrayUtils.addAll( Arrays.copyOfRange(newHaplotypeBases, 0, hapPos), - ArrayUtils.addAll(Arrays.copyOfRange(refWithPadding, refPos, refPos + ce.getLength()), - Arrays.copyOfRange(newHaplotypeBases, hapPos, newHaplotypeBases.length))); - hapPos += ce.getLength(); - refPos += ce.getLength(); - newCigar.add(new CigarElement(ce.getLength(), CigarOperator.M)); - } else { - refPos += ce.getLength(); - newCigar.add(ce); - } - break; - default: - throw new IllegalStateException("Unsupported cigar operator detected: " + ce.getOperator()); - } - } - final Haplotype returnHaplotype = new 
Haplotype(newHaplotypeBases, haplotype.isReference()); - returnHaplotype.setCigar( newCigar ); - return returnHaplotype; - } - - /** - * We use CigarOperator.N as the signal that an incomplete or too divergent bubble was found during bubble traversal - * @param c the cigar to test - * @return true if we should skip over this path - */ - @Requires("c != null") - private boolean pathIsTooDivergentFromReference( final Cigar c ) { - for( final CigarElement ce : c.getCigarElements() ) { - if( ce.getOperator().equals(CigarOperator.N) ) { - return true; - } - } - return false; - } - - /** - * Left align the given cigar sequentially. This is needed because AlignmentUtils doesn't accept cigars with more than one indel in them. - * This is a target of future work to incorporate and generalize into AlignmentUtils for use by others. - * @param cigar the cigar to left align - * @param refSeq the reference byte array - * @param readSeq the read byte array - * @param refIndex 0-based alignment start position on ref - * @param readIndex 0-based alignment start position on read - * @return the left-aligned cigar - */ - @Ensures({"cigar != null", "refSeq != null", "readSeq != null", "refIndex >= 0", "readIndex >= 0"}) - protected Cigar leftAlignCigarSequentially(final Cigar cigar, final byte[] refSeq, final byte[] readSeq, int refIndex, int readIndex) { - final Cigar cigarToReturn = new Cigar(); - Cigar cigarToAlign = new Cigar(); - for (int i = 0; i < cigar.numCigarElements(); i++) { - final CigarElement ce = cigar.getCigarElement(i); - if (ce.getOperator() == CigarOperator.D || ce.getOperator() == CigarOperator.I) { - cigarToAlign.add(ce); - for( final CigarElement toAdd : AlignmentUtils.leftAlignIndel(cigarToAlign, refSeq, readSeq, refIndex, readIndex, false).getCigarElements() ) { - cigarToReturn.add(toAdd); - } - refIndex += cigarToAlign.getReferenceLength(); - readIndex += cigarToAlign.getReadLength(); - cigarToAlign = new Cigar(); - } else { - cigarToAlign.add(ce); - } - } - 
if( !cigarToAlign.isEmpty() ) { - for( final CigarElement toAdd : cigarToAlign.getCigarElements() ) { - cigarToReturn.add(toAdd); - } - } - return cigarToReturn; - } - - /** - * Take a haplotype which was generated by injecting an allele into a string of bases and run SW against the reference to determine the variants on the haplotype. - * Unfortunately since this haplotype didn't come from the assembly graph you can't straightforwardly use the bubble traversal algorithm to get this information. - * This is a target for future work as we rewrite the HaplotypeCaller to be more bubble-caller based. - * @param haplotype the candidate haplotype - * @param ref the reference bases to align against - * @param haplotypeList the current list of haplotypes - * @param activeRegionStart the start of the active region in the reference byte array - * @param activeRegionStop the stop of the active region in the reference byte array - * @param FORCE_INCLUSION_FOR_GGA_MODE if true will include in the list even if it already exists - * @return true if the candidate haplotype was successfully incorporated into the haplotype list - */ - @Requires({"ref != null", "ref.length >= activeRegionStop - activeRegionStart"}) - private boolean addHaplotypeForGGA( final Haplotype haplotype, final byte[] ref, final List haplotypeList, final int activeRegionStart, final int activeRegionStop, final boolean FORCE_INCLUSION_FOR_GGA_MODE ) { - if( haplotype == null ) { return false; } - - final SWPairwiseAlignment swConsensus = new SWPairwiseAlignment( ref, haplotype.getBases(), SWParameterSet.STANDARD_NGS ); - haplotype.setAlignmentStartHapwrtRef( swConsensus.getAlignmentStart2wrt1() ); - - if( swConsensus.getCigar().toString().contains("S") || swConsensus.getCigar().getReferenceLength() < 60 || swConsensus.getAlignmentStart2wrt1() < 0 ) { // protect against unhelpful haplotype alignments - return false; - } - - haplotype.setCigar( AlignmentUtils.leftAlignIndel(swConsensus.getCigar(), ref, 
haplotype.getBases(), swConsensus.getAlignmentStart2wrt1(), 0, true) ); - - final int hapStart = ReadUtils.getReadCoordinateForReferenceCoordinate(haplotype.getAlignmentStartHapwrtRef(), haplotype.getCigar(), activeRegionStart, ReadUtils.ClippingTail.LEFT_TAIL, true); - int hapStop = ReadUtils.getReadCoordinateForReferenceCoordinate( haplotype.getAlignmentStartHapwrtRef(), haplotype.getCigar(), activeRegionStop, ReadUtils.ClippingTail.RIGHT_TAIL, true ); - if( hapStop == ReadUtils.CLIPPING_GOAL_NOT_REACHED && activeRegionStop == haplotype.getAlignmentStartHapwrtRef() + haplotype.getCigar().getReferenceLength() ) { - hapStop = activeRegionStop; // contract for getReadCoordinateForReferenceCoordinate function says that if read ends at boundary then it is outside of the clipping goal - } - byte[] newHaplotypeBases; - // extend partial haplotypes to contain the full active region sequence - if( hapStart == ReadUtils.CLIPPING_GOAL_NOT_REACHED && hapStop == ReadUtils.CLIPPING_GOAL_NOT_REACHED ) { - newHaplotypeBases = ArrayUtils.addAll( ArrayUtils.addAll( ArrayUtils.subarray(ref, activeRegionStart, swConsensus.getAlignmentStart2wrt1()), - haplotype.getBases()), - ArrayUtils.subarray(ref, swConsensus.getAlignmentStart2wrt1() + swConsensus.getCigar().getReferenceLength(), activeRegionStop) ); - } else if( hapStart == ReadUtils.CLIPPING_GOAL_NOT_REACHED ) { - newHaplotypeBases = ArrayUtils.addAll( ArrayUtils.subarray(ref, activeRegionStart, swConsensus.getAlignmentStart2wrt1()), ArrayUtils.subarray(haplotype.getBases(), 0, hapStop) ); - } else if( hapStop == ReadUtils.CLIPPING_GOAL_NOT_REACHED ) { - newHaplotypeBases = ArrayUtils.addAll( ArrayUtils.subarray(haplotype.getBases(), hapStart, haplotype.getBases().length), ArrayUtils.subarray(ref, swConsensus.getAlignmentStart2wrt1() + swConsensus.getCigar().getReferenceLength(), activeRegionStop) ); - } else { - newHaplotypeBases = ArrayUtils.subarray(haplotype.getBases(), hapStart, hapStop); - } - - final Haplotype h = new 
Haplotype( newHaplotypeBases ); - final SWPairwiseAlignment swConsensus2 = new SWPairwiseAlignment( ref, h.getBases(), SWParameterSet.STANDARD_NGS ); - - h.setAlignmentStartHapwrtRef( swConsensus2.getAlignmentStart2wrt1() ); - if ( haplotype.isArtificialHaplotype() ) { - h.setArtificialEvent(haplotype.getArtificialEvent()); - } - if( swConsensus2.getCigar().toString().contains("S") || swConsensus2.getCigar().getReferenceLength() != activeRegionStop - activeRegionStart || swConsensus2.getAlignmentStart2wrt1() < 0 ) { // protect against unhelpful haplotype alignments - return false; - } - - h.setCigar( AlignmentUtils.leftAlignIndel(swConsensus2.getCigar(), ref, h.getBases(), swConsensus2.getAlignmentStart2wrt1(), 0, true) ); - - if( FORCE_INCLUSION_FOR_GGA_MODE || !haplotypeList.contains(h) ) { - haplotypeList.add(h); - return true; - } else { - return false; - } + @Override + public String toString() { + return "DeBruijnAssembler{" + + "minKmer=" + minKmer + + '}'; } } \ No newline at end of file diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngine.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngine.java index 419ea378f..04173b64f 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngine.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngine.java @@ -49,6 +49,7 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller; import com.google.java.contract.Ensures; import com.google.java.contract.Requires; import org.apache.log4j.Logger; +import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; import org.broadinstitute.sting.gatk.walkers.annotator.VariantAnnotatorEngine; import org.broadinstitute.sting.gatk.walkers.genotyper.GenotypeLikelihoodsCalculationModel; import org.broadinstitute.sting.gatk.walkers.genotyper.UnifiedGenotyperEngine; @@ -71,7 +72,7 @@ public class 
GenotypingEngine { private final boolean DEBUG; private final boolean USE_FILTERED_READ_MAP_FOR_ANNOTATIONS; - private final static List noCall = new ArrayList(); // used to noCall all genotypes until the exact model is applied + private final static List noCall = new ArrayList<>(); // used to noCall all genotypes until the exact model is applied private final VariantAnnotatorEngine annotationEngine; private final MergeVariantsAcrossHaplotypes crossHaplotypeEventMerger; @@ -146,6 +147,7 @@ public class GenotypingEngine { final GenomeLoc refLoc, final GenomeLoc activeRegionWindow, final GenomeLocParser genomeLocParser, + final RefMetaDataTracker tracker, final List activeAllelesToGenotype ) { // sanity check input arguments if (UG_engine == null) throw new IllegalArgumentException("UG_Engine input can't be null, got "+UG_engine); @@ -162,8 +164,8 @@ public class GenotypingEngine { final TreeSet startPosKeySet = decomposeHaplotypesIntoVariantContexts(haplotypes, haplotypeReadMap, ref, refLoc, activeAllelesToGenotype); // Walk along each position in the key set and create each event to be outputted - final Set calledHaplotypes = new HashSet(); - final List returnCalls = new ArrayList(); + final Set calledHaplotypes = new HashSet<>(); + final List returnCalls = new ArrayList<>(); for( final int loc : startPosKeySet ) { if( loc >= activeRegionWindow.getStart() && loc <= activeRegionWindow.getStop() ) { // genotyping an event inside this active region final List eventsAtThisLoc = getVCsAtThisLocation(haplotypes, loc, activeAllelesToGenotype); @@ -183,7 +185,7 @@ public class GenotypingEngine { if( eventsAtThisLoc.size() != mergedVC.getAlternateAlleles().size() ) { throw new ReviewedStingException("Record size mismatch! 
Something went wrong in the merging of alleles."); } - final Map mergeMap = new LinkedHashMap(); + final Map mergeMap = new LinkedHashMap<>(); mergeMap.put(null, mergedVC.getReference()); // the reference event (null) --> the reference allele for(int iii = 0; iii < mergedVC.getAlternateAlleles().size(); iii++) { mergeMap.put(eventsAtThisLoc.get(iii), mergedVC.getAlternateAllele(iii)); // BUGBUG: This is assuming that the order of alleles is the same as the priority list given to simpleMerge function @@ -204,13 +206,12 @@ public class GenotypingEngine { convertHaplotypeReadMapToAlleleReadMap( haplotypeReadMap, alleleMapper, 0.0 ) ); final Map stratifiedReadMap = filterToOnlyOverlappingReads( genomeLocParser, alleleReadMap_annotations, perSampleFilteredReadList, call ); - VariantContext annotatedCall = call; - if( annotatedCall.getAlleles().size() != mergedVC.getAlleles().size() ) { // some alleles were removed so reverseTrimming might be necessary! + VariantContext annotatedCall = annotationEngine.annotateContextForActiveRegion(tracker, stratifiedReadMap, call); + + if( call.getAlleles().size() != mergedVC.getAlleles().size() ) { // some alleles were removed so reverseTrimming might be necessary! 
annotatedCall = GATKVariantContextUtils.reverseTrimAlleles(annotatedCall); } - annotatedCall = annotationEngine.annotateContext(stratifiedReadMap, annotatedCall); - // maintain the set of all called haplotypes for ( final Allele calledAllele : call.getAlleles() ) calledHaplotypes.addAll(alleleMapper.get(calledAllele)); @@ -244,7 +245,7 @@ public class GenotypingEngine { if ( in_GGA_mode ) startPosKeySet.clear(); - cleanUpSymbolicUnassembledEvents( haplotypes ); + //cleanUpSymbolicUnassembledEvents( haplotypes ); // We don't make symbolic alleles so this isn't needed currently if ( !in_GGA_mode ) { // run the event merger if we're not in GGA mode final boolean mergedAnything = crossHaplotypeEventMerger.merge(haplotypes, haplotypeReadMap, startPosKeySet, ref, refLoc); @@ -267,7 +268,7 @@ public class GenotypingEngine { * @return the list of the sources of vcs in the same order */ private List makePriorityList(final List vcs) { - final List priorityList = new LinkedList(); + final List priorityList = new LinkedList<>(); for ( final VariantContext vc : vcs ) priorityList.add(vc.getSource()); return priorityList; } @@ -276,7 +277,7 @@ public class GenotypingEngine { final int loc, final List activeAllelesToGenotype) { // the overlapping events to merge into a common reference view - final List eventsAtThisLoc = new ArrayList(); + final List eventsAtThisLoc = new ArrayList<>(); if( activeAllelesToGenotype.isEmpty() ) { for( final Haplotype h : haplotypes ) { @@ -292,7 +293,7 @@ public class GenotypingEngine { if( compVC.getStart() == loc ) { int alleleCount = 0; for( final Allele compAltAllele : compVC.getAlternateAlleles() ) { - List alleleSet = new ArrayList(2); + List alleleSet = new ArrayList<>(2); alleleSet.add(compVC.getReference()); alleleSet.add(compAltAllele); final String vcSourceName = "Comp" + compCount + "Allele" + alleleCount; @@ -348,7 +349,7 @@ public class GenotypingEngine { final Map> perSampleFilteredReadList, final VariantContext call ) { - final Map 
returnMap = new LinkedHashMap(); + final Map returnMap = new LinkedHashMap<>(); final GenomeLoc callLoc = parser.createGenomeLoc(call); for( final Map.Entry sample : perSampleReadMap.entrySet() ) { final PerReadAlleleLikelihoodMap likelihoodMap = new PerReadAlleleLikelihoodMap(); @@ -384,7 +385,7 @@ public class GenotypingEngine { // TODO - split into input haplotypes and output haplotypes as not to share I/O arguments @Requires("haplotypes != null") protected static void cleanUpSymbolicUnassembledEvents( final List haplotypes ) { - final List haplotypesToRemove = new ArrayList(); + final List haplotypesToRemove = new ArrayList<>(); for( final Haplotype h : haplotypes ) { for( final VariantContext vc : h.getEventMap().getVariantContexts() ) { if( vc.isSymbolic() ) { @@ -407,7 +408,7 @@ public class GenotypingEngine { final Map> alleleMapper, final double downsamplingFraction ) { - final Map alleleReadMap = new LinkedHashMap(); + final Map alleleReadMap = new LinkedHashMap<>(); for( final Map.Entry haplotypeReadMapEntry : haplotypeReadMap.entrySet() ) { // for each sample final PerReadAlleleLikelihoodMap perReadAlleleLikelihoodMap = new PerReadAlleleLikelihoodMap(); for( final Map.Entry> alleleMapperEntry : alleleMapper.entrySet() ) { // for each output allele @@ -430,7 +431,7 @@ public class GenotypingEngine { } protected static Map> createAlleleMapper( final Map mergeMap, final Map> eventMap ) { - final Map> alleleMapper = new LinkedHashMap>(); + final Map> alleleMapper = new LinkedHashMap<>(); for( final Map.Entry entry : mergeMap.entrySet() ) { alleleMapper.put(entry.getValue(), eventMap.get(new Event(entry.getKey()))); } @@ -441,100 +442,33 @@ public class GenotypingEngine { @Ensures({"result.size() == eventsAtThisLoc.size() + 1"}) protected static Map> createEventMapper( final int loc, final List eventsAtThisLoc, final List haplotypes ) { - final Map> eventMapper = new LinkedHashMap>(eventsAtThisLoc.size()+1); - VariantContext refVC = eventsAtThisLoc.get(0); 
// the genome loc is the only safe thing to pull out of this VC because ref/alt pairs might change reference basis - eventMapper.put(new Event(null), new ArrayList()); + final Map> eventMapper = new LinkedHashMap<>(eventsAtThisLoc.size()+1); + final Event refEvent = new Event(null); + eventMapper.put(refEvent, new ArrayList()); for( final VariantContext vc : eventsAtThisLoc ) { eventMapper.put(new Event(vc), new ArrayList()); } - final List undeterminedHaplotypes = new ArrayList(haplotypes.size()); for( final Haplotype h : haplotypes ) { - if( h.isArtificialHaplotype() && loc == h.getArtificialAllelePosition() ) { - final List alleles = new ArrayList(2); - alleles.add(h.getArtificialRefAllele()); - alleles.add(h.getArtificialAltAllele()); - final Event artificialVC = new Event( (new VariantContextBuilder()).source("artificialHaplotype") - .alleles(alleles) - .loc(refVC.getChr(), refVC.getStart(), refVC.getStart() + h.getArtificialRefAllele().length() - 1).make() ); - if( eventMapper.containsKey(artificialVC) ) { - eventMapper.get(artificialVC).add(h); - } - } else if( h.getEventMap().get(loc) == null ) { // no event at this location so let's investigate later - undeterminedHaplotypes.add(h); + if( h.getEventMap().get(loc) == null ) { + eventMapper.get(refEvent).add(h); } else { - boolean haplotypeIsDetermined = false; for( final VariantContext vcAtThisLoc : eventsAtThisLoc ) { if( h.getEventMap().get(loc).hasSameAllelesAs(vcAtThisLoc) ) { eventMapper.get(new Event(vcAtThisLoc)).add(h); - haplotypeIsDetermined = true; break; } } - - if( !haplotypeIsDetermined ) - undeterminedHaplotypes.add(h); } } - for( final Haplotype h : undeterminedHaplotypes ) { - Event matchingEvent = new Event(null); - for( final Map.Entry> eventToTest : eventMapper.entrySet() ) { - // don't test against the reference allele - if( eventToTest.getKey().equals(new Event(null)) ) - continue; - - // only try to disambiguate for alleles that have had haplotypes previously assigned above - if( 
eventToTest.getValue().isEmpty() ) - continue; - - final Haplotype artificialHaplotype = eventToTest.getValue().get(0); - if( isSubSetOf(artificialHaplotype.getEventMap(), h.getEventMap(), true) ) { - matchingEvent = eventToTest.getKey(); - break; - } - } - - eventMapper.get(matchingEvent).add(h); - } - return eventMapper; } - protected static boolean isSubSetOf(final Map subset, final Map superset, final boolean resolveSupersetToSubset) { - - for ( final Map.Entry fromSubset : subset.entrySet() ) { - final VariantContext fromSuperset = superset.get(fromSubset.getKey()); - if ( fromSuperset == null ) - return false; - - List supersetAlleles = fromSuperset.getAlternateAlleles(); - if ( resolveSupersetToSubset ) - supersetAlleles = resolveAlternateAlleles(fromSubset.getValue().getReference(), fromSuperset.getReference(), supersetAlleles); - - if ( !supersetAlleles.contains(fromSubset.getValue().getAlternateAllele(0)) ) - return false; - } - - return true; - } - - private static List resolveAlternateAlleles(final Allele targetReference, final Allele actualReference, final List currentAlleles) { - if ( targetReference.length() <= actualReference.length() ) - return currentAlleles; - - final List newAlleles = new ArrayList(currentAlleles.size()); - final byte[] extraBases = Arrays.copyOfRange(targetReference.getBases(), actualReference.length(), targetReference.length()); - for ( final Allele a : currentAlleles ) { - newAlleles.add(Allele.extend(a, extraBases)); - } - return newAlleles; - } - @Ensures({"result.size() == haplotypeAllelesForSample.size()"}) protected static List findEventAllelesInSample( final List eventAlleles, final List haplotypeAlleles, final List haplotypeAllelesForSample, final List> alleleMapper, final List haplotypes ) { if( haplotypeAllelesForSample.contains(Allele.NO_CALL) ) { return noCall; } - final List eventAllelesForSample = new ArrayList(); + final List eventAllelesForSample = new ArrayList<>(); for( final Allele a : 
haplotypeAllelesForSample ) { final Haplotype haplotype = haplotypes.get(haplotypeAlleles.indexOf(a)); for( int iii = 0; iii < alleleMapper.size(); iii++ ) { diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java index 6ea543f25..9b9c3924b 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java @@ -47,6 +47,10 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller; import com.google.java.contract.Ensures; +import net.sf.samtools.Cigar; +import net.sf.samtools.CigarElement; +import net.sf.samtools.CigarOperator; +import net.sf.samtools.SAMFileWriter; import org.broadinstitute.sting.commandline.*; import org.broadinstitute.sting.gatk.CommandLineGATK; import org.broadinstitute.sting.gatk.arguments.DbsnpArgumentCollection; @@ -68,6 +72,7 @@ import org.broadinstitute.sting.gatk.walkers.genotyper.UnifiedArgumentCollection import org.broadinstitute.sting.gatk.walkers.genotyper.UnifiedGenotyperEngine; import org.broadinstitute.sting.gatk.walkers.genotyper.VariantCallContext; import org.broadinstitute.sting.gatk.walkers.genotyper.afcalc.AFCalcFactory; +import org.broadinstitute.sting.gatk.walkers.haplotypecaller.readthreading.ReadThreadingAssembler; import org.broadinstitute.sting.utils.*; import org.broadinstitute.sting.utils.activeregion.ActiveRegion; import org.broadinstitute.sting.utils.activeregion.ActiveRegionReadState; @@ -75,8 +80,6 @@ import org.broadinstitute.sting.utils.activeregion.ActivityProfileState; import org.broadinstitute.sting.utils.clipping.ReadClipper; import org.broadinstitute.sting.utils.exceptions.UserException; import org.broadinstitute.sting.utils.fasta.CachingIndexedFastaSequenceFile; -import 
org.broadinstitute.sting.utils.fragments.FragmentCollection; -import org.broadinstitute.sting.utils.fragments.FragmentUtils; import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; import org.broadinstitute.sting.utils.haplotype.*; import org.broadinstitute.sting.utils.haplotypeBAMWriter.HaplotypeBAMWriter; @@ -135,10 +138,14 @@ import java.util.*; @DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_VARDISC, extraDocs = {CommandLineGATK.class} ) @PartitionBy(PartitionType.LOCUS) @BAQMode(ApplicationTime = ReadTransformer.ApplicationTime.FORBIDDEN) -@ActiveRegionTraversalParameters(extension=200, maxRegion=300) +@ActiveRegionTraversalParameters(extension=100, maxRegion=300) @ReadFilters({HCMappingQualityFilter.class}) @Downsample(by= DownsampleType.BY_SAMPLE, toCoverage=250) -public class HaplotypeCaller extends ActiveRegionWalker implements AnnotatorCompatible { +public class HaplotypeCaller extends ActiveRegionWalker, Integer> implements AnnotatorCompatible, NanoSchedulable { + // ----------------------------------------------------------------------------------------------- + // general haplotype caller arguments + // ----------------------------------------------------------------------------------------------- + /** * A raw, unfiltered, highly sensitive callset in VCF format. */ @@ -185,64 +192,6 @@ public class HaplotypeCaller extends ActiveRegionWalker implem @Argument(fullName="bamWriterType", shortName="bamWriterType", doc="How should haplotypes be written to the BAM?", required = false) public HaplotypeBAMWriter.Type bamWriterType = HaplotypeBAMWriter.Type.CALLED_HAPLOTYPES; - /** - * The PairHMM implementation to use for genotype likelihood calculations. The various implementations balance a tradeoff of accuracy and runtime. 
- */ - @Advanced - @Argument(fullName = "pair_hmm_implementation", shortName = "pairHMM", doc = "The PairHMM implementation to use for genotype likelihood calculations", required = false) - public PairHMM.HMM_IMPLEMENTATION pairHMM = PairHMM.HMM_IMPLEMENTATION.LOGLESS_CACHING; - - @Hidden - @Argument(fullName="keepRG", shortName="keepRG", doc="Only use read from this read group when making calls (but use all reads to build the assembly)", required = false) - protected String keepRG = null; - - @Advanced - @Argument(fullName="minPruning", shortName="minPruning", doc = "The minimum allowed pruning factor in assembly graph. Paths with <= X supporting kmers are pruned from the graph", required = false) - protected int MIN_PRUNE_FACTOR = 0; - - @Advanced - @Argument(fullName="gcpHMM", shortName="gcpHMM", doc="Flat gap continuation penalty for use in the Pair HMM", required = false) - protected int gcpHMM = 10; - - @Advanced - @Argument(fullName="maxNumHaplotypesInPopulation", shortName="maxNumHaplotypesInPopulation", doc="Maximum number of haplotypes to consider for your population. This number will probably need to be increased when calling organisms with high heterozygosity.", required = false) - protected int maxNumHaplotypesInPopulation = 25; - - @Advanced - @Argument(fullName="minKmer", shortName="minKmer", doc="Minimum kmer length to use in the assembly graph", required = false) - protected int minKmer = 11; - - /** - * If this flag is provided, the haplotype caller will include unmapped reads in the assembly and calling - * when these reads occur in the region being analyzed. Typically, for paired end analyses, one pair of the - * read can map, but if its pair is too divergent then it may be unmapped and placed next to its mate, taking - * the mates contig and alignment start. If this flag is provided the haplotype caller will see such reads, - * and may make use of them in assembly and calling, where possible. 
- */ - @Hidden - @Argument(fullName="includeUmappedReads", shortName="unmapped", doc="If provided, unmapped reads with chromosomal coordinates (i.e., those placed to their maps) will be included in the assembly and calling", required = false) - protected boolean includeUnmappedReads = false; - - @Advanced - @Argument(fullName="useAllelesTrigger", shortName="allelesTrigger", doc = "If specified, use additional trigger on variants found in an external alleles file", required=false) - protected boolean USE_ALLELES_TRIGGER = false; - - @Advanced - @Argument(fullName="useFilteredReadsForAnnotations", shortName="useFilteredReadsForAnnotations", doc = "If specified, use the contamination-filtered read maps for the purposes of annotating variants", required=false) - protected boolean USE_FILTERED_READ_MAP_FOR_ANNOTATIONS = false; - - @Hidden - @Argument(fullName="justDetermineActiveRegions", shortName="justDetermineActiveRegions", doc = "If specified, the HC won't actually do any assembly or calling, it'll just run the upfront active region determination code. Useful for benchmarking and scalability testing", required=false) - protected boolean justDetermineActiveRegions = false; - - @Hidden - @Argument(fullName="dontGenotype", shortName="dontGenotype", doc = "If specified, the HC will do any assembly but won't do calling. Useful for benchmarking and scalability testing", required=false) - protected boolean dontGenotype = false; - - @Hidden - @Argument(fullName="errorCorrectKmers", shortName="errorCorrectKmers", doc = "Use an exploratory algorithm to error correct the kmers used during assembly. May cause fundamental problems with the assembly graph itself", required=false) - protected boolean errorCorrectKmers = false; - /** * rsIDs from this file are used to populate the ID column of the output. Also, the DB INFO flag will be set when appropriate. * dbSNP is not used in any way for the calculations themselves. 
@@ -272,7 +221,7 @@ public class HaplotypeCaller extends ActiveRegionWalker implem */ @Advanced @Argument(fullName="annotation", shortName="A", doc="One or more specific annotations to apply to variant calls", required=false) - protected List annotationsToUse = new ArrayList(Arrays.asList(new String[]{"ClippingRankSumTest"})); + protected List annotationsToUse = new ArrayList<>(Arrays.asList(new String[]{"ClippingRankSumTest", "DepthPerSampleHC"})); /** * Which annotations to exclude from output in the VCF file. Note that this argument has higher priority than the -A or -G arguments, @@ -282,10 +231,6 @@ public class HaplotypeCaller extends ActiveRegionWalker implem @Argument(fullName="excludeAnnotation", shortName="XA", doc="One or more specific annotations to exclude", required=false) protected List annotationsToExclude = new ArrayList(Arrays.asList(new String[]{"SpanningDeletions", "TandemRepeatAnnotator"})); - @Advanced - @Argument(fullName="mergeVariantsViaLD", shortName="mergeVariantsViaLD", doc="If specified, we will merge variants together into block substitutions that are in strong local LD", required = false) - protected boolean mergeVariantsViaLD = false; - /** * Which groups of annotations to add to the output VCF file. See the VariantAnnotator -list argument to view available groups. */ @@ -295,13 +240,147 @@ public class HaplotypeCaller extends ActiveRegionWalker implem @ArgumentCollection private StandardCallerArgumentCollection SCAC = new StandardCallerArgumentCollection(); + // ----------------------------------------------------------------------------------------------- + // arguments to control internal behavior of the debruijn assembler + // ----------------------------------------------------------------------------------------------- + + @Advanced + @Argument(fullName="useDebruijnAssembler", shortName="useDebruijnAssembler", doc="If specified, we will use the old DeBruijn assembler. 
Depreciated as of 2.6", required = false) + protected boolean useDebruijnAssembler = false; + + @Advanced + @Argument(fullName="minKmerForDebruijnAssembler", shortName="minKmerForDebruijnAssembler", doc="Minimum kmer length to use in the debruijn assembly graph", required = false) + protected int minKmerForDebruijnAssembler = 11; + + @Advanced + @Argument(fullName="onlyUseKmerSizeForDebruijnAssembler", shortName="onlyUseKmerSizeForDebruijnAssembler", doc="If specified, we will only build kmer graphs with this kmer size in the debruijn", required = false) + protected int onlyUseKmerSizeForDebruijnAssembler = -1; + + // ----------------------------------------------------------------------------------------------- + // arguments to control internal behavior of the read threading assembler + // ----------------------------------------------------------------------------------------------- + + @Advanced + @Argument(fullName="kmerSize", shortName="kmerSize", doc="Kmer size to use in the read threading assembler", required = false) + protected List kmerSizes = Arrays.asList(10, 25); + + @Advanced + @Argument(fullName="dontIncreaseKmerSizesForCycles", shortName="dontIncreaseKmerSizesForCycles", doc="Should we disable the iterating over kmer sizes when graph cycles are detected?", required = false) + protected boolean dontIncreaseKmerSizesForCycles = false; + + @Advanced + @Argument(fullName="numPruningSamples", shortName="numPruningSamples", doc="The number of samples that must pass the minPuning factor in order for the path to be kept", required = false) + protected int numPruningSamples = 1; + + /** + * Assembly graph can be quite complex, and could imply a very large number of possible haplotypes. Each haplotype + * considered requires N PairHMM evaluations if there are N reads across all samples. 
In order to control the + * run of the haplotype caller we only take maxPathsPerSample * nSample paths from the graph, in order of their + * weights, no matter how many paths are possible to generate from the graph. Putting this number too low + * will result in dropping true variation because paths that include the real variant are not even considered. + */ + @Advanced + @Argument(fullName="maxPathsPerSample", shortName="maxPathsPerSample", doc="Max number of paths to consider for the read threading assembler per sample.", required = false) + protected int maxPathsPerSample = 10; + + /** + * The minimum number of paths to advance forward for genotyping, regardless of the + * number of samples + */ + private final static int MIN_PATHS_PER_GRAPH = 128; + + @Hidden + @Argument(fullName="dontRecoverDanglingTails", shortName="dontRecoverDanglingTails", doc="Should we disable dangling tail recovery in the read threading assembler?", required = false) + protected boolean dontRecoverDanglingTails = false; + + // ----------------------------------------------------------------------------------------------- + // general advanced arguments to control haplotype caller behavior + // ----------------------------------------------------------------------------------------------- + + @Advanced + @Argument(fullName="minPruning", shortName="minPruning", doc = "The minimum allowed pruning factor in assembly graph. Paths with <= X supporting kmers are pruned from the graph", required = false) + protected int MIN_PRUNE_FACTOR = 2; + + @Advanced + @Argument(fullName="gcpHMM", shortName="gcpHMM", doc="Flat gap continuation penalty for use in the Pair HMM", required = false) + protected int gcpHMM = 10; + + /** + * If this flag is provided, the haplotype caller will include unmapped reads in the assembly and calling + * when these reads occur in the region being analyzed. 
Typically, for paired end analyses, one pair of the + * read can map, but if its pair is too divergent then it may be unmapped and placed next to its mate, taking + * the mates contig and alignment start. If this flag is provided the haplotype caller will see such reads, + * and may make use of them in assembly and calling, where possible. + */ + @Hidden + @Argument(fullName="includeUmappedReads", shortName="unmapped", doc="If provided, unmapped reads with chromosomal coordinates (i.e., those placed to their maps) will be included in the assembly and calling", required = false) + protected boolean includeUnmappedReads = false; + + @Advanced + @Argument(fullName="useAllelesTrigger", shortName="allelesTrigger", doc = "If specified, use additional trigger on variants found in an external alleles file", required=false) + protected boolean USE_ALLELES_TRIGGER = false; + + @Advanced + @Argument(fullName="useFilteredReadsForAnnotations", shortName="useFilteredReadsForAnnotations", doc = "If specified, use the contamination-filtered read maps for the purposes of annotating variants", required=false) + protected boolean USE_FILTERED_READ_MAP_FOR_ANNOTATIONS = false; + + /** + * The phredScaledGlobalReadMismappingRate reflects the average global mismapping rate of all reads, regardless of their + * mapping quality. This term effects the probability that a read originated from the reference haplotype, regardless of + * its edit distance from the reference, in that the read could have originated from the reference haplotype but + * from another location in the genome. Suppose a read has many mismatches from the reference, say like 5, but + * has a very high mapping quality of 60. Without this parameter, the read would contribute 5 * Q30 evidence + * in favor of its 5 mismatch haplotype compared to reference, potentially enough to make a call off that single + * read for all of these events. 
With this parameter set to Q30, though, the maximum evidence against the reference + * that this (and any) read could contribute against reference is Q30. + * + * Set this term to any negative number to turn off the global mapping rate + */ + @Advanced + @Argument(fullName="phredScaledGlobalReadMismappingRate", shortName="globalMAPQ", doc="The global assumed mismapping rate for reads", required = false) + protected int phredScaledGlobalReadMismappingRate = 45; + + @Advanced + @Argument(fullName="maxNumHaplotypesInPopulation", shortName="maxNumHaplotypesInPopulation", doc="Maximum number of haplotypes to consider for your population. This number will probably need to be increased when calling organisms with high heterozygosity.", required = false) + protected int maxNumHaplotypesInPopulation = 25; + + @Advanced + @Argument(fullName="mergeVariantsViaLD", shortName="mergeVariantsViaLD", doc="If specified, we will merge variants together into block substitutions that are in strong local LD", required = false) + protected boolean mergeVariantsViaLD = false; + + // ----------------------------------------------------------------------------------------------- + // arguments for debugging / developing the haplotype caller + // ----------------------------------------------------------------------------------------------- + /** + * The PairHMM implementation to use for genotype likelihood calculations. The various implementations balance a tradeoff of accuracy and runtime. 
+ */ + @Hidden + @Argument(fullName = "pair_hmm_implementation", shortName = "pairHMM", doc = "The PairHMM implementation to use for genotype likelihood calculations", required = false) + public PairHMM.HMM_IMPLEMENTATION pairHMM = PairHMM.HMM_IMPLEMENTATION.LOGLESS_CACHING; + + @Hidden + @Argument(fullName="keepRG", shortName="keepRG", doc="Only use read from this read group when making calls (but use all reads to build the assembly)", required = false) + protected String keepRG = null; + + @Hidden + @Argument(fullName="justDetermineActiveRegions", shortName="justDetermineActiveRegions", doc = "If specified, the HC won't actually do any assembly or calling, it'll just run the upfront active region determination code. Useful for benchmarking and scalability testing", required=false) + protected boolean justDetermineActiveRegions = false; + + @Hidden + @Argument(fullName="dontGenotype", shortName="dontGenotype", doc = "If specified, the HC will do any assembly but won't do calling. Useful for benchmarking and scalability testing", required=false) + protected boolean dontGenotype = false; + + @Hidden + @Argument(fullName="errorCorrectKmers", shortName="errorCorrectKmers", doc = "Use an exploratory algorithm to error correct the kmers used during assembly. 
May cause fundamental problems with the assembly graph itself", required=false) + protected boolean errorCorrectKmers = false; + @Advanced @Argument(fullName="debug", shortName="debug", doc="If specified, print out very verbose debug information about each triggering active region", required = false) protected boolean DEBUG; - @Advanced + @Hidden @Argument(fullName="debugGraphTransformations", shortName="debugGraphTransformations", doc="If specified, we will write DOT formatted graph files out of the assembler for only this graph size", required = false) - protected int debugGraphTransformations = -1; + protected boolean debugGraphTransformations = false; @Hidden // TODO -- not currently useful @Argument(fullName="useLowQualityBasesForAssembly", shortName="useLowQualityBasesForAssembly", doc="If specified, we will include low quality bases when doing the assembly", required = false) @@ -311,10 +390,35 @@ public class HaplotypeCaller extends ActiveRegionWalker implem @Argument(fullName="dontTrimActiveRegions", shortName="dontTrimActiveRegions", doc="If specified, we will not trim down the active region from the full region (active + extension) to just the active interval for genotyping", required = false) protected boolean dontTrimActiveRegions = false; + @Hidden + @Argument(fullName="dontUseSoftClippedBases", shortName="dontUseSoftClippedBases", doc="If specified, we will not analyze soft clipped bases in the reads", required = false) + protected boolean dontUseSoftClippedBases = false; + + @Hidden + @Argument(fullName="captureAssemblyFailureBAM", shortName="captureAssemblyFailureBAM", doc="If specified, we will write a BAM called assemblyFailure.bam capturing all of the reads that were in the active region when the assembler failed for any reason", required = false) + protected boolean captureAssemblyFailureBAM = false; + @Hidden @Argument(fullName="allowCyclesInKmerGraphToGeneratePaths", shortName="allowCyclesInKmerGraphToGeneratePaths", doc="If specified, we 
will allow cycles in the kmer graphs to generate paths with multiple copies of the path sequenece rather than just the shortest paths", required = false) protected boolean allowCyclesInKmerGraphToGeneratePaths = false; + // Parameters to control read error correction + @Hidden + @Argument(fullName="errorCorrectReads", shortName="errorCorrectReads", doc = "Use an exploratory algorithm to error correct the kmers used during assembly. May cause fundamental problems with the assembly graph itself", required=false) + protected boolean errorCorrectReads = false; + + @Hidden + @Argument(fullName="kmerLengthForReadErrorCorrection", shortName="kmerLengthForReadErrorCorrection", doc = "Use an exploratory algorithm to error correct the kmers used during assembly. May cause fundamental problems with the assembly graph itself", required=false) + protected int kmerLengthForReadErrorCorrection = 25; + + @Hidden + @Argument(fullName="minObservationsForKmerToBeSolid", shortName="minObservationsForKmerToBeSolid", doc = "A k-mer must be seen at least these times for it considered to be solid", required=false) + protected int minObservationsForKmerToBeSolid = 20; + + + // ----------------------------------------------------------------------------------------------- + // done with Haplotype caller parameters + // ----------------------------------------------------------------------------------------------- // the UG engines private UnifiedGenotyperEngine UG_engine = null; @@ -342,7 +446,10 @@ public class HaplotypeCaller extends ActiveRegionWalker implem private final static int PADDING_AROUND_OTHERS_FOR_CALLING = 150; // the maximum extent into the full active region extension that we're willing to go in genotyping our events - private final static int MAX_GENOTYPING_ACTIVE_REGION_EXTENSION = 25; + private final static int MAX_DISCOVERY_ACTIVE_REGION_EXTENSION = 25; + private final static int MAX_GGA_ACTIVE_REGION_EXTENSION = 100; + + private ActiveRegionTrimmer trimmer = null; 
private final static int maxReadsInRegionPerSample = 1000; // TODO -- should be an argument private final static int minReadsPerAlignmentStart = 5; // TODO -- should be an argument @@ -350,10 +457,11 @@ public class HaplotypeCaller extends ActiveRegionWalker implem // bases with quality less than or equal to this value are trimmed off the tails of the reads private static final byte MIN_TAIL_QUALITY = 20; - private List samplesList = new ArrayList(); - private final static double LOG_ONE_HALF = -Math.log10(2.0); - private final static double LOG_ONE_THIRD = -Math.log10(3.0); - private final List allelesToGenotype = new ArrayList(); + private static final byte MIN_TAIL_QUALITY_WITH_ERROR_CORRECTION = 6; + // the minimum length of a read we'd consider using for genotyping + private final static int MIN_READ_LENGTH = 10; + + private List samplesList = new ArrayList<>(); private final static Allele FAKE_REF_ALLELE = Allele.create("N", true); // used in isActive function to call into UG Engine. Should never appear anywhere in a VCF file private final static Allele FAKE_ALT_ALLELE = Allele.create("", false); // used in isActive function to call into UG Engine. 
Should never appear anywhere in a VCF file @@ -373,6 +481,7 @@ public class HaplotypeCaller extends ActiveRegionWalker implem // get all of the unique sample names Set samples = SampleUtils.getSAMFileSamples(getToolkit().getSAMFileHeader()); samplesList.addAll( samples ); + final int nSamples = samples.size(); // initialize the UnifiedGenotyper Engine which is used to call into the exact model final UnifiedArgumentCollection UAC = new UnifiedArgumentCollection( SCAC ); // this adapter is used so that the full set of unused UG arguments aren't exposed to the HC user UG_engine = new UnifiedGenotyperEngine(getToolkit(), UAC, logger, null, null, samples, GATKVariantContextUtils.DEFAULT_PLOIDY); @@ -428,14 +537,36 @@ public class HaplotypeCaller extends ActiveRegionWalker implem throw new UserException.CouldNotReadInputFile(getToolkit().getArguments().referenceFile, e); } - // setup the assembler - assemblyEngine = new DeBruijnAssembler(DEBUG, debugGraphTransformations, minKmer, allowCyclesInKmerGraphToGeneratePaths); + // create and setup the assembler + final int maxAllowedPathsForReadThreadingAssembler = Math.max(maxPathsPerSample * nSamples, MIN_PATHS_PER_GRAPH); + assemblyEngine = useDebruijnAssembler + ? 
new DeBruijnAssembler(minKmerForDebruijnAssembler, onlyUseKmerSizeForDebruijnAssembler) + : new ReadThreadingAssembler(maxAllowedPathsForReadThreadingAssembler, kmerSizes, dontIncreaseKmerSizesForCycles, numPruningSamples); + assemblyEngine.setErrorCorrectKmers(errorCorrectKmers); assemblyEngine.setPruneFactor(MIN_PRUNE_FACTOR); + assemblyEngine.setDebug(DEBUG); + assemblyEngine.setDebugGraphTransformations(debugGraphTransformations); + assemblyEngine.setAllowCyclesInKmerGraphToGeneratePaths(allowCyclesInKmerGraphToGeneratePaths); + assemblyEngine.setRecoverDanglingTails(!dontRecoverDanglingTails); + if ( graphWriter != null ) assemblyEngine.setGraphWriter(graphWriter); if ( useLowQualityBasesForAssembly ) assemblyEngine.setMinBaseQualityToUseInAssembly((byte)1); - likelihoodCalculationEngine = new LikelihoodCalculationEngine( (byte)gcpHMM, DEBUG, pairHMM ); + // setup the likelihood calculation engine + if ( phredScaledGlobalReadMismappingRate < 0 ) phredScaledGlobalReadMismappingRate = -1; + + // configure the global mismapping rate + final double log10GlobalReadMismappingRate; + if ( phredScaledGlobalReadMismappingRate < 0 ) { + log10GlobalReadMismappingRate = - Double.MAX_VALUE; + } else { + log10GlobalReadMismappingRate = QualityUtils.qualToErrorProbLog10(phredScaledGlobalReadMismappingRate); + logger.info("Using global mismapping rate of " + phredScaledGlobalReadMismappingRate + " => " + log10GlobalReadMismappingRate + " in log10 likelihood units"); + } + + // create our likelihood calculation engine + likelihoodCalculationEngine = new LikelihoodCalculationEngine( (byte)gcpHMM, DEBUG, pairHMM, log10GlobalReadMismappingRate ); final MergeVariantsAcrossHaplotypes variantMerger = mergeVariantsViaLD ? 
new LDMerger(DEBUG, 10, 1) : new MergeVariantsAcrossHaplotypes(); @@ -443,6 +574,10 @@ public class HaplotypeCaller extends ActiveRegionWalker implem if ( bamWriter != null ) haplotypeBAMWriter = HaplotypeBAMWriter.create(bamWriterType, bamWriter, getToolkit().getSAMFileHeader()); + + trimmer = new ActiveRegionTrimmer(DEBUG, PADDING_AROUND_SNPS_FOR_CALLING, PADDING_AROUND_OTHERS_FOR_CALLING, + UAC.GenotypingMode.equals(GenotypeLikelihoodsCalculationModel.GENOTYPING_MODE.GENOTYPE_GIVEN_ALLELES) ? MAX_GGA_ACTIVE_REGION_EXTENSION : MAX_DISCOVERY_ACTIVE_REGION_EXTENSION, + getToolkit().getGenomeLocParser()); } //--------------------------------------------------------------------------------------------------------------- @@ -481,7 +616,6 @@ public class HaplotypeCaller extends ActiveRegionWalker implem if( UG_engine.getUAC().GenotypingMode == GenotypeLikelihoodsCalculationModel.GENOTYPING_MODE.GENOTYPE_GIVEN_ALLELES ) { final VariantContext vcFromAllelesRod = UnifiedGenotyperEngine.getVCFromAllelesRod(tracker, ref, ref.getLocus(), false, logger, UG_engine.getUAC().alleles); if( vcFromAllelesRod != null ) { - allelesToGenotype.add(vcFromAllelesRod); // save for later for processing during the ActiveRegion's map call. 
Should be folded into a RefMetaDataTracker object return new ActivityProfileState(ref.getLocus(), 1.0); } } @@ -494,7 +628,7 @@ public class HaplotypeCaller extends ActiveRegionWalker implem // if we don't have any data, just abort early return new ActivityProfileState(ref.getLocus(), 0.0); - final List noCall = new ArrayList(); // used to noCall all genotypes until the exact model is applied + final List noCall = new ArrayList<>(); // used to noCall all genotypes until the exact model is applied noCall.add(Allele.NO_CALL); final Map splitContexts = AlignmentContextUtils.splitContextBySampleName(context); @@ -516,14 +650,14 @@ public class HaplotypeCaller extends ActiveRegionWalker implem } } genotypeLikelihoods[AA] += p.getRepresentativeCount() * QualityUtils.qualToProbLog10(qual); - genotypeLikelihoods[AB] += p.getRepresentativeCount() * MathUtils.approximateLog10SumLog10( QualityUtils.qualToProbLog10(qual) + LOG_ONE_HALF, QualityUtils.qualToErrorProbLog10(qual) + LOG_ONE_THIRD + LOG_ONE_HALF ); - genotypeLikelihoods[BB] += p.getRepresentativeCount() * QualityUtils.qualToErrorProbLog10(qual) + LOG_ONE_THIRD; + genotypeLikelihoods[AB] += p.getRepresentativeCount() * MathUtils.approximateLog10SumLog10( QualityUtils.qualToProbLog10(qual) + MathUtils.LOG_ONE_HALF, QualityUtils.qualToErrorProbLog10(qual) + MathUtils.LOG_ONE_THIRD + MathUtils.LOG_ONE_HALF ); + genotypeLikelihoods[BB] += p.getRepresentativeCount() * QualityUtils.qualToErrorProbLog10(qual) + MathUtils.LOG_ONE_THIRD; } } genotypes.add( new GenotypeBuilder(sample.getKey()).alleles(noCall).PL(genotypeLikelihoods).make() ); } - final List alleles = new ArrayList(); + final List alleles = new ArrayList<>(); alleles.add( FAKE_REF_ALLELE ); alleles.add( FAKE_ALT_ALLELE ); final VariantCallContext vcOut = UG_engine_simple_genotyper.calculateGenotypes(new VariantContextBuilder("HCisActive!", context.getContig(), context.getLocation().getStart(), context.getLocation().getStop(), 
alleles).genotypes(genotypes).make(), GenotypeLikelihoodsCalculationModel.Model.INDEL); @@ -538,74 +672,73 @@ public class HaplotypeCaller extends ActiveRegionWalker implem // //--------------------------------------------------------------------------------------------------------------- + private final static List NO_CALLS = Collections.emptyList(); @Override - public Integer map( final ActiveRegion originalActiveRegion, final RefMetaDataTracker metaDataTracker ) { + public List map( final ActiveRegion originalActiveRegion, final RefMetaDataTracker metaDataTracker ) { if ( justDetermineActiveRegions ) // we're benchmarking ART and/or the active region determination code in the HC, just leave without doing any work - return 1; + return NO_CALLS; - if( !originalActiveRegion.isActive() ) { return 0; } // Not active so nothing to do! + if( !originalActiveRegion.isActive() ) { return NO_CALLS; } // Not active so nothing to do! - final List activeAllelesToGenotype = new ArrayList(); + final List activeAllelesToGenotype = new ArrayList<>(); if( UG_engine.getUAC().GenotypingMode == GenotypeLikelihoodsCalculationModel.GENOTYPING_MODE.GENOTYPE_GIVEN_ALLELES ) { - for( final VariantContext vc : allelesToGenotype ) { - if( originalActiveRegion.getLocation().overlapsP( getToolkit().getGenomeLocParser().createGenomeLoc(vc) ) ) { + for ( final VariantContext vc : metaDataTracker.getValues(UG_engine.getUAC().alleles) ) { + if ( vc.isNotFiltered() ) { activeAllelesToGenotype.add(vc); // do something with these VCs during GGA mode } } - allelesToGenotype.removeAll( activeAllelesToGenotype ); // No alleles found in this region so nothing to do! - if ( activeAllelesToGenotype.isEmpty() ) { return 0; } + if ( activeAllelesToGenotype.isEmpty() ) { return NO_CALLS; } } else { - if( originalActiveRegion.size() == 0 ) { return 0; } // No reads here so nothing to do! + if( originalActiveRegion.size() == 0 ) { return NO_CALLS; } // No reads here so nothing to do! 
} // run the local assembler, getting back a collection of information on how we should proceed final AssemblyResult assemblyResult = assembleReads(originalActiveRegion, activeAllelesToGenotype); // abort early if something is out of the acceptable range - if( assemblyResult.haplotypes.size() == 1 ) { return 1; } // only the reference haplotype remains so nothing else to do! - if (dontGenotype) return 1; // user requested we not proceed + if( ! assemblyResult.isVariationPresent() ) { return NO_CALLS; } // only the reference haplotype remains so nothing else to do! + if (dontGenotype) return NO_CALLS; // user requested we not proceed // filter out reads from genotyping which fail mapping quality based criteria - final List filteredReads = filterNonPassingReads( assemblyResult.regionForGenotyping ); + final Collection filteredReads = filterNonPassingReads( assemblyResult.regionForGenotyping ); final Map> perSampleFilteredReadList = splitReadsBySample( filteredReads ); - if( assemblyResult.regionForGenotyping.size() == 0 ) { return 1; } // no reads remain after filtering so nothing else to do! + if( assemblyResult.regionForGenotyping.size() == 0 ) { return NO_CALLS; } // no reads remain after filtering so nothing else to do! 
// evaluate each sample's reads against all haplotypes //logger.info("Computing read likelihoods with " + assemblyResult.regionForGenotyping.size() + " reads"); final Map stratifiedReadMap = likelihoodCalculationEngine.computeReadLikelihoods( assemblyResult.haplotypes, splitReadsBySample( assemblyResult.regionForGenotyping.getReads() ) ); - // subset down to only the best haplotypes to be genotyped in all samples ( in GGA mode use all discovered haplotypes ) - final List bestHaplotypes = selectBestHaplotypesForGenotyping(assemblyResult.haplotypes, stratifiedReadMap); + // Note: we used to subset down at this point to only the "best" haplotypes in all samples for genotyping, but there + // was a bad interaction between that selection and the marginalization that happens over each event when computing + // GLs. In particular, for samples that are heterozygous non-reference (B/C) the marginalization for B treats the + // haplotype containing C as reference (and vice versa). Now this is fine if all possible haplotypes are included + // in the genotyping, but we lose information if we select down to a few haplotypes. [EB] final GenotypingEngine.CalledHaplotypes calledHaplotypes = genotypingEngine.assignGenotypeLikelihoods( UG_engine, - bestHaplotypes, + assemblyResult.haplotypes, stratifiedReadMap, perSampleFilteredReadList, assemblyResult.fullReferenceWithPadding, assemblyResult.paddedReferenceLoc, assemblyResult.regionForGenotyping.getLocation(), getToolkit().getGenomeLocParser(), + metaDataTracker, activeAllelesToGenotype ); - for( final VariantContext call : calledHaplotypes.getCalls() ) { - // TODO -- uncomment this line once ART-based walkers have a proper RefMetaDataTracker. - // annotationEngine.annotateDBs(metaDataTracker, getToolkit().getGenomeLocParser().createGenomeLoc(call), call); - vcfWriter.add( call ); - } - + // TODO -- must disable if we are doing NCT, or set the output type of ! 
presorted if ( bamWriter != null ) { haplotypeBAMWriter.writeReadsAlignedToHaplotypes(assemblyResult.haplotypes, assemblyResult.paddedReferenceLoc, - bestHaplotypes, + assemblyResult.haplotypes, calledHaplotypes.getCalledHaplotypes(), stratifiedReadMap); } if( DEBUG ) { logger.info("----------------------------------------------------------------------------------"); } - return 1; // One active region was processed during this map call + return calledHaplotypes.getCalls(); } private final static class AssemblyResult { @@ -613,12 +746,18 @@ public class HaplotypeCaller extends ActiveRegionWalker implem final ActiveRegion regionForGenotyping; final byte[] fullReferenceWithPadding; final GenomeLoc paddedReferenceLoc; + final boolean variationPresent; - private AssemblyResult(List haplotypes, ActiveRegion regionForGenotyping, byte[] fullReferenceWithPadding, GenomeLoc paddedReferenceLoc) { + private AssemblyResult(List haplotypes, ActiveRegion regionForGenotyping, byte[] fullReferenceWithPadding, GenomeLoc paddedReferenceLoc, boolean variationPresent) { this.haplotypes = haplotypes; this.regionForGenotyping = regionForGenotyping; this.fullReferenceWithPadding = fullReferenceWithPadding; this.paddedReferenceLoc = paddedReferenceLoc; + this.variationPresent = variationPresent; + } + + public boolean isVariationPresent() { + return variationPresent && haplotypes.size() > 1; } } @@ -635,70 +774,49 @@ public class HaplotypeCaller extends ActiveRegionWalker implem // Create the reference haplotype which is the bases from the reference that make up the active region finalizeActiveRegion(activeRegion); // merge overlapping fragments, clip adapter and low qual tails - final Haplotype referenceHaplotype = new Haplotype(activeRegion.getActiveRegionReference(referenceReader), true); final byte[] fullReferenceWithPadding = activeRegion.getActiveRegionReference(referenceReader, REFERENCE_PADDING); final GenomeLoc paddedReferenceLoc = getPaddedLoc(activeRegion); + final Haplotype 
referenceHaplotype = createReferenceHaplotype(activeRegion, paddedReferenceLoc); - final List haplotypes = assemblyEngine.runLocalAssembly( activeRegion, referenceHaplotype, fullReferenceWithPadding, paddedReferenceLoc, activeAllelesToGenotype ); + // Create ReadErrorCorrector object if requested - will be used within assembly engine. + ReadErrorCorrector readErrorCorrector = null; + if (errorCorrectReads) + readErrorCorrector = new ReadErrorCorrector(kmerLengthForReadErrorCorrection, MIN_TAIL_QUALITY_WITH_ERROR_CORRECTION, minObservationsForKmerToBeSolid, DEBUG,fullReferenceWithPadding); - if ( ! dontTrimActiveRegions ) { - return trimActiveRegion(activeRegion, haplotypes, fullReferenceWithPadding, paddedReferenceLoc); - } else { - // we don't want to or cannot create a trimmed active region, so go ahead and use the old one - return new AssemblyResult(haplotypes, activeRegion, fullReferenceWithPadding, paddedReferenceLoc); + try { + final List haplotypes = assemblyEngine.runLocalAssembly( activeRegion, referenceHaplotype, fullReferenceWithPadding, paddedReferenceLoc, activeAllelesToGenotype,readErrorCorrector ); + if ( ! 
dontTrimActiveRegions ) { + return trimActiveRegion(activeRegion, haplotypes, activeAllelesToGenotype, fullReferenceWithPadding, paddedReferenceLoc); + } else { + // we don't want to trim active regions, so go ahead and use the old one + return new AssemblyResult(haplotypes, activeRegion, fullReferenceWithPadding, paddedReferenceLoc, true); + } + } catch ( Exception e ) { + // Capture any exception that might be thrown, and write out the assembly failure BAM if requested + if ( captureAssemblyFailureBAM ) { + final SAMFileWriter writer = ReadUtils.createSAMFileWriterWithCompression(getToolkit().getSAMFileHeader(), true, "assemblyFailure.bam", 5); + for ( final GATKSAMRecord read : activeRegion.getReads() ) { + writer.addAlignment(read); + } + writer.close(); + } + throw e; } } /** - * Trim down the active region to just enough to properly genotype the events among the haplotypes - * - * This function merely creates the region, but it doesn't populate the reads back into the region - * - * @param region our full active region - * @param haplotypes the list of haplotypes we've created from assembly - * @param ref the reference bases over the full padded location - * @param refLoc the span of the reference bases - * @return a new ActiveRegion trimmed down to just what's needed for genotyping, or null if we couldn't do this successfully + * Helper function to create the reference haplotype out of the active region and a padded loc + * @param activeRegion the active region from which to generate the reference haplotype + * @param paddedReferenceLoc the GenomeLoc which includes padding and shows how big the reference haplotype should be + * @return a non-null haplotype */ - private ActiveRegion createTrimmedRegion(final ActiveRegion region, final List haplotypes, final byte[] ref, final GenomeLoc refLoc) { - EventMap.buildEventMapsForHaplotypes(haplotypes, ref, refLoc, DEBUG); - final TreeSet allContexts = EventMap.getAllVariantContexts(haplotypes); - final 
GenomeLocParser parser = getToolkit().getGenomeLocParser(); - - if ( allContexts.isEmpty() ) // no variants, so just return the current region - return null; - - final List withinActiveRegion = new LinkedList(); - int pad = PADDING_AROUND_SNPS_FOR_CALLING; - GenomeLoc trimLoc = null; - for ( final VariantContext vc : allContexts ) { - final GenomeLoc vcLoc = parser.createGenomeLoc(vc); - if ( region.getLocation().overlapsP(vcLoc) ) { - if ( ! vc.isSNP() ) // if anything isn't a SNP use the bigger padding - pad = PADDING_AROUND_OTHERS_FOR_CALLING; - trimLoc = trimLoc == null ? vcLoc : trimLoc.endpointSpan(vcLoc); - withinActiveRegion.add(vc); - } - } - - // we don't actually have anything in the region after removing variants that don't overlap the region's full location - if ( trimLoc == null ) return null; - - final GenomeLoc maxSpan = getToolkit().getGenomeLocParser().createPaddedGenomeLoc(region.getLocation(), MAX_GENOTYPING_ACTIVE_REGION_EXTENSION); - final GenomeLoc idealSpan = getToolkit().getGenomeLocParser().createPaddedGenomeLoc(trimLoc, pad); - final GenomeLoc finalSpan = maxSpan.intersect(idealSpan); - - final ActiveRegion trimmedRegion = region.trim(finalSpan); - if ( DEBUG ) { - logger.info("events : " + withinActiveRegion); - logger.info("trimLoc : " + trimLoc); - logger.info("pad : " + pad); - logger.info("idealSpan : " + idealSpan); - logger.info("maxSpan : " + maxSpan); - logger.info("finalSpan : " + finalSpan); - logger.info("regionSpan : " + trimmedRegion.getExtendedLoc() + " size is " + trimmedRegion.getExtendedLoc().size()); - } - return trimmedRegion; + private Haplotype createReferenceHaplotype(final ActiveRegion activeRegion, final GenomeLoc paddedReferenceLoc) { + final Haplotype refHaplotype = new Haplotype(activeRegion.getActiveRegionReference(referenceReader), true); + refHaplotype.setAlignmentStartHapwrtRef(activeRegion.getExtendedLoc().getStart() - paddedReferenceLoc.getStart()); + final Cigar c = new Cigar(); + c.add(new 
CigarElement(refHaplotype.getBases().length, CigarOperator.M)); + refHaplotype.setCigar(c); + return refHaplotype; } /** @@ -706,23 +824,33 @@ public class HaplotypeCaller extends ActiveRegionWalker implem * * @param originalActiveRegion our full active region * @param haplotypes the list of haplotypes we've created from assembly + * @param activeAllelesToGenotype additional alleles we might need to genotype (can be empty) * @param fullReferenceWithPadding the reference bases over the full padded location * @param paddedReferenceLoc the span of the reference bases * @return an AssemblyResult containing the trimmed active region with all of the reads we should use - * trimmed down as well, and a revised set of haplotypes. If trimming failed this function - * may choose to use the originalActiveRegion without modification + * trimmed down as well, and a revised set of haplotypes. If trimming down the active region results + * in only the reference haplotype over the non-extended active region, returns null. 
*/ private AssemblyResult trimActiveRegion(final ActiveRegion originalActiveRegion, final List haplotypes, + final List activeAllelesToGenotype, final byte[] fullReferenceWithPadding, final GenomeLoc paddedReferenceLoc) { - final ActiveRegion trimmedActiveRegion = createTrimmedRegion(originalActiveRegion, haplotypes, fullReferenceWithPadding, paddedReferenceLoc); + if ( DEBUG ) logger.info("Trimming active region " + originalActiveRegion + " with " + haplotypes.size() + " haplotypes"); - if ( trimmedActiveRegion == null ) - return new AssemblyResult(haplotypes, originalActiveRegion, fullReferenceWithPadding, paddedReferenceLoc); + EventMap.buildEventMapsForHaplotypes(haplotypes, fullReferenceWithPadding, paddedReferenceLoc, DEBUG); + final TreeSet allVariantsWithinFullActiveRegion = EventMap.getAllVariantContexts(haplotypes); + allVariantsWithinFullActiveRegion.addAll(activeAllelesToGenotype); + final ActiveRegion trimmedActiveRegion = trimmer.trimRegion(originalActiveRegion, allVariantsWithinFullActiveRegion); + + if ( trimmedActiveRegion == null ) { + // there were no variants found within the active region itself, so just return null + if ( DEBUG ) logger.info("No variation found within the active region, skipping the region :-)"); + return new AssemblyResult(haplotypes, originalActiveRegion, fullReferenceWithPadding, paddedReferenceLoc, false); + } // trim down the haplotypes - final Set haplotypeSet = new HashSet(haplotypes.size()); + final Set haplotypeSet = new HashSet<>(haplotypes.size()); for ( final Haplotype h : haplotypes ) { final Haplotype trimmed = h.trim(trimmedActiveRegion.getExtendedLoc()); if ( trimmed != null ) { @@ -733,13 +861,13 @@ public class HaplotypeCaller extends ActiveRegionWalker implem } // create the final list of trimmed haplotypes - final List trimmedHaplotypes = new ArrayList(haplotypeSet); + final List trimmedHaplotypes = new ArrayList<>(haplotypeSet); // sort haplotypes to take full advantage of haplotype start offset 
optimizations in PairHMM Collections.sort( trimmedHaplotypes, new HaplotypeBaseComparator() ); + if ( DEBUG ) logger.info("Trimmed region to " + trimmedActiveRegion.getLocation() + " size " + trimmedActiveRegion.getLocation().size() + " reduced number of haplotypes from " + haplotypes.size() + " to only " + trimmedHaplotypes.size()); if ( DEBUG ) { - logger.info("Trimming haplotypes reduced number of haplotypes from " + haplotypes.size() + " to only " + trimmedHaplotypes.size()); for ( final Haplotype remaining: trimmedHaplotypes ) { logger.info(" Remains: " + remaining + " cigar " + remaining.getCigar()); } @@ -747,7 +875,7 @@ public class HaplotypeCaller extends ActiveRegionWalker implem // trim down the reads and add them to the trimmed active region - final List trimmedReads = new ArrayList(originalActiveRegion.getReads().size()); + final List trimmedReads = new ArrayList<>(originalActiveRegion.getReads().size()); for( final GATKSAMRecord read : originalActiveRegion.getReads() ) { final GATKSAMRecord clippedRead = ReadClipper.hardClipToRegion( read, trimmedActiveRegion.getExtendedLoc().getStart(), trimmedActiveRegion.getExtendedLoc().getStop() ); if( trimmedActiveRegion.readOverlapsRegion(clippedRead) && clippedRead.getReadLength() > 0 ) { @@ -757,22 +885,7 @@ public class HaplotypeCaller extends ActiveRegionWalker implem trimmedActiveRegion.clearReads(); trimmedActiveRegion.addAll(ReadUtils.sortReadsByCoordinate(trimmedReads)); - return new AssemblyResult(trimmedHaplotypes, trimmedActiveRegion, fullReferenceWithPadding, paddedReferenceLoc); - } - - /** - * Select the best N haplotypes according to their likelihoods, if appropriate - * - * @param haplotypes a list of haplotypes to consider - * @param stratifiedReadMap a map from samples -> read likelihoods - * @return the list of haplotypes to genotype - */ - protected List selectBestHaplotypesForGenotyping(final List haplotypes, final Map stratifiedReadMap) { - if ( UG_engine.getUAC().GenotypingMode == 
GenotypeLikelihoodsCalculationModel.GENOTYPING_MODE.GENOTYPE_GIVEN_ALLELES ) { - return haplotypes; - } else { - return likelihoodCalculationEngine.selectBestHaplotypesFromEachSample(haplotypes, stratifiedReadMap, maxNumHaplotypesInPopulation); - } + return new AssemblyResult(trimmedHaplotypes, trimmedActiveRegion, fullReferenceWithPadding, paddedReferenceLoc, true); } //--------------------------------------------------------------------------------------------------------------- @@ -787,12 +900,16 @@ public class HaplotypeCaller extends ActiveRegionWalker implem } @Override - public Integer reduce(Integer cur, Integer sum) { - return cur + sum; + public Integer reduce(List callsInRegion, Integer numCalledRegions) { + for( final VariantContext call : callsInRegion ) { + vcfWriter.add( call ); + } + return (callsInRegion.isEmpty() ? 0 : 1) + numCalledRegions; } @Override public void onTraversalDone(Integer result) { + likelihoodCalculationEngine.close(); logger.info("Ran local assembly on " + result + " active regions"); } @@ -804,32 +921,31 @@ public class HaplotypeCaller extends ActiveRegionWalker implem private void finalizeActiveRegion( final ActiveRegion activeRegion ) { if( DEBUG ) { logger.info("Assembling " + activeRegion.getLocation() + " with " + activeRegion.size() + " reads: (with overlap region = " + activeRegion.getExtendedLoc() + ")"); } - final List finalizedReadList = new ArrayList(); - final FragmentCollection fragmentCollection = FragmentUtils.create( activeRegion.getReads() ); - activeRegion.clearReads(); - - // Join overlapping paired reads to create a single longer read - finalizedReadList.addAll( fragmentCollection.getSingletonReads() ); - for( final List overlappingPair : fragmentCollection.getOverlappingPairs() ) { - finalizedReadList.addAll( FragmentUtils.mergeOverlappingPairedFragments(overlappingPair) ); - } // Loop through the reads hard clipping the adaptor and low quality tails - final List readsToUse = new 
ArrayList(finalizedReadList.size()); - for( final GATKSAMRecord myRead : finalizedReadList ) { + final List readsToUse = new ArrayList<>(activeRegion.getReads().size()); + for( final GATKSAMRecord myRead : activeRegion.getReads() ) { final GATKSAMRecord postAdapterRead = ( myRead.getReadUnmappedFlag() ? myRead : ReadClipper.hardClipAdaptorSequence( myRead ) ); if( postAdapterRead != null && !postAdapterRead.isEmpty() && postAdapterRead.getCigar().getReadLength() > 0 ) { - GATKSAMRecord clippedRead = useLowQualityBasesForAssembly ? postAdapterRead : ReadClipper.hardClipLowQualEnds( postAdapterRead, MIN_TAIL_QUALITY ); + GATKSAMRecord clippedRead; + if (errorCorrectReads) + clippedRead = ReadClipper.hardClipLowQualEnds( postAdapterRead, MIN_TAIL_QUALITY_WITH_ERROR_CORRECTION ); + else if (useLowQualityBasesForAssembly) + clippedRead = postAdapterRead; + else // default case: clip low qual ends of reads + clippedRead= ReadClipper.hardClipLowQualEnds( postAdapterRead, MIN_TAIL_QUALITY ); - // revert soft clips so that we see the alignment start and end assuming the soft clips are all matches - // TODO -- WARNING -- still possibility that unclipping the soft clips will introduce bases that aren't - // TODO -- truly in the extended region, as the unclipped bases might actually include a deletion - // TODO -- w.r.t. the reference. 
What really needs to happen is that kmers that occur before the - // TODO -- reference haplotype start must be removed - clippedRead = ReadClipper.revertSoftClippedBases(clippedRead); - - // uncomment to remove hard clips from consideration at all - //clippedRead = ReadClipper.hardClipSoftClippedBases(clippedRead); + if ( dontUseSoftClippedBases ) { + // uncomment to remove hard clips from consideration at all + clippedRead = ReadClipper.hardClipSoftClippedBases(clippedRead); + } else { + // revert soft clips so that we see the alignment start and end assuming the soft clips are all matches + // TODO -- WARNING -- still possibility that unclipping the soft clips will introduce bases that aren't + // TODO -- truly in the extended region, as the unclipped bases might actually include a deletion + // TODO -- w.r.t. the reference. What really needs to happen is that kmers that occur before the + // TODO -- reference haplotype start must be removed + clippedRead = ReadClipper.revertSoftClippedBases(clippedRead); + } clippedRead = ReadClipper.hardClipToRegion( clippedRead, activeRegion.getExtendedLoc().getStart(), activeRegion.getExtendedLoc().getStop() ); if( activeRegion.readOverlapsRegion(clippedRead) && clippedRead.getReadLength() > 0 ) { @@ -839,13 +955,14 @@ public class HaplotypeCaller extends ActiveRegionWalker implem } } + activeRegion.clearReads(); activeRegion.addAll(DownsamplingUtils.levelCoverageByPosition(ReadUtils.sortReadsByCoordinate(readsToUse), maxReadsInRegionPerSample, minReadsPerAlignmentStart)); } - private List filterNonPassingReads( final org.broadinstitute.sting.utils.activeregion.ActiveRegion activeRegion ) { - final List readsToRemove = new ArrayList(); + private Set filterNonPassingReads( final org.broadinstitute.sting.utils.activeregion.ActiveRegion activeRegion ) { + final Set readsToRemove = new LinkedHashSet<>(); for( final GATKSAMRecord rec : activeRegion.getReads() ) { - if( rec.getReadLength() < 10 || rec.getMappingQuality() < 20 || 
BadMateFilter.hasBadMate(rec) || (keepRG != null && !rec.getReadGroup().getId().equals(keepRG)) ) { + if( rec.getReadLength() < MIN_READ_LENGTH || rec.getMappingQuality() < 20 || BadMateFilter.hasBadMate(rec) || (keepRG != null && !rec.getReadGroup().getId().equals(keepRG)) ) { readsToRemove.add(rec); } } @@ -859,12 +976,12 @@ public class HaplotypeCaller extends ActiveRegionWalker implem return getToolkit().getGenomeLocParser().createGenomeLoc(activeRegion.getExtendedLoc().getContig(), padLeft, padRight); } - private Map> splitReadsBySample( final List reads ) { - final Map> returnMap = new HashMap>(); + private Map> splitReadsBySample( final Collection reads ) { + final Map> returnMap = new HashMap<>(); for( final String sample : samplesList) { List readList = returnMap.get( sample ); if( readList == null ) { - readList = new ArrayList(); + readList = new ArrayList<>(); returnMap.put(sample, readList); } } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/KMerCounter.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/KMerCounter.java index a7194f85f..aad8407dd 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/KMerCounter.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/KMerCounter.java @@ -46,9 +46,7 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller; -import java.util.Collection; -import java.util.HashMap; -import java.util.Map; +import java.util.*; /** * generic utility class that counts kmers @@ -97,6 +95,20 @@ public class KMerCounter { return countsByKMer.values(); } + /** + * Get kmers that have minCount or greater in this counter + * @param minCount only return kmers with count >= this value + * @return a non-null collection of kmers + */ + public Collection getKmersWithCountsAtLeast(final int minCount) { + final List result = new LinkedList(); + for ( final CountedKmer countedKmer : getCountedKmers() ) { + 
if ( countedKmer.count >= minCount ) + result.add(countedKmer.kmer); + } + return result; + } + /** * Remove all current counts, resetting the counter to an empty state */ diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/Kmer.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/Kmer.java index 9b0e1ac0a..2e757722b 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/Kmer.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/Kmer.java @@ -46,7 +46,11 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller; +import com.google.java.contract.Requires; + import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; /** * Fast wrapper for byte[] kmers @@ -149,6 +153,23 @@ public class Kmer { return bases; } + /** + * Backdoor method for fast base peeking: avoids copying like bases() and doesn't modify internal state. + * Intended to be used for fast computation of neighboring kmers + * @return Reference to complete bases stores in this kmer + * WARNING: UNSAFE, caller should NEVER modify bases. Speed/safety tradeoff!! + */ + private byte[] unsafePeekAtBases() { + return bases; + } + /** + * Get a string representation of the bases of this kmer + * @return a non-null string + */ + public String baseString() { + return new String(bases()); + } + /** * The length of this kmer * @return an integer >= 0 @@ -157,6 +178,45 @@ public class Kmer { return length; } + /** + * Gets a set of differing positions and bases from another k-mer, limiting up to a max distance. + * For example, if this = "ACATT" and other = "ACGGT": + * - if maxDistance < 2 then -1 will be returned, since distance between kmers is 2. 
+ * - If maxDistance >=2, then 2 will be returned, and arrays will be filled as follows: + * differingIndeces = {2,3} + * differingBases = {'G','G'} + * @param other Other k-mer to test + * @param maxDistance Maximum distance to search. If this and other k-mers are beyond this Hamming distance, + * search is aborted and a null is returned + * @param differingIndeces Array with indices of differing bytes in array + * @param differingBases Actual differing bases + * @return Set of mappings of form (int->byte), where each elements represents index + * of k-mer array where bases mismatch, and the byte is the base from other kmer. + * If both k-mers differ by more than maxDistance, returns null + */ + @Requires({"other != null","differingIndeces != null","differingBases != null", + "differingIndeces.size>=maxDistance","differingBases.size>=maxDistance"}) + public int getDifferingPositions(final Kmer other, + final int maxDistance, + final int[] differingIndeces, + final byte[] differingBases) { + + + int dist = 0; + if (length == other.length()) { + final byte[] f2 = other.unsafePeekAtBases(); + for (int i=0; i < length; i++) + if(bases[start+i] != f2[i]) { + differingIndeces[dist] = i; + differingBases[dist++] = f2[i]; + if (dist > maxDistance) + return -1; + } + + } + return dist; + } + @Override public String toString() { return "Kmer{" + new String(bases()) + "}"; diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LikelihoodCalculationEngine.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LikelihoodCalculationEngine.java index 8697833a6..4a1a5993a 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LikelihoodCalculationEngine.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LikelihoodCalculationEngine.java @@ -48,58 +48,104 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller; import com.google.java.contract.Ensures; 
import com.google.java.contract.Requires; +import net.sf.samtools.SAMUtils; import org.apache.log4j.Logger; -import org.broadinstitute.sting.utils.genotyper.MostLikelyAllele; -import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; -import org.broadinstitute.sting.utils.haplotype.Haplotype; import org.broadinstitute.sting.utils.MathUtils; import org.broadinstitute.sting.utils.QualityUtils; import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; import org.broadinstitute.sting.utils.exceptions.UserException; +import org.broadinstitute.sting.utils.genotyper.MostLikelyAllele; +import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; +import org.broadinstitute.sting.utils.haplotype.Haplotype; import org.broadinstitute.sting.utils.haplotype.HaplotypeScoreComparator; -import org.broadinstitute.sting.utils.pairhmm.*; +import org.broadinstitute.sting.utils.pairhmm.Log10PairHMM; +import org.broadinstitute.sting.utils.pairhmm.LoglessPairHMM; +import org.broadinstitute.sting.utils.pairhmm.PairHMM; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import org.broadinstitute.sting.utils.sam.ReadUtils; import org.broadinstitute.variant.variantcontext.Allele; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.PrintStream; import java.util.*; public class LikelihoodCalculationEngine { private final static Logger logger = Logger.getLogger(LikelihoodCalculationEngine.class); - private static final double LOG_ONE_HALF = -Math.log10(2.0); private final byte constantGCP; + private final double log10globalReadMismappingRate; private final boolean DEBUG; - private final PairHMM pairHMM; - private final int minReadLength = 20; + + private final PairHMM.HMM_IMPLEMENTATION hmmType; + + private final ThreadLocal pairHMM = new ThreadLocal() { + @Override + protected PairHMM initialValue() { + switch (hmmType) { + case EXACT: return new Log10PairHMM(true); + case ORIGINAL: 
return new Log10PairHMM(false); + case LOGLESS_CACHING: return new LoglessPairHMM(); + default: + throw new UserException.BadArgumentValue("pairHMM", "Specified pairHMM implementation is unrecognized or incompatible with the HaplotypeCaller. Acceptable options are ORIGINAL, EXACT, CACHING, and LOGLESS_CACHING."); + } + } + }; + + private final static boolean WRITE_LIKELIHOODS_TO_FILE = false; + private final static String LIKELIHOODS_FILENAME = "likelihoods.txt"; + private final PrintStream likelihoodsStream; /** * The expected rate of random sequencing errors for a read originating from its true haplotype. * * For example, if this is 0.01, then we'd expect 1 error per 100 bp. */ - private final double EXPECTED_ERROR_RATE_PER_BASE = 0.02; - - public LikelihoodCalculationEngine( final byte constantGCP, final boolean debug, final PairHMM.HMM_IMPLEMENTATION hmmType ) { - - switch (hmmType) { - case EXACT: - pairHMM = new Log10PairHMM(true); - break; - case ORIGINAL: - pairHMM = new Log10PairHMM(false); - break; - case LOGLESS_CACHING: - pairHMM = new LoglessPairHMM(); - break; - default: - throw new UserException.BadArgumentValue("pairHMM", "Specified pairHMM implementation is unrecognized or incompatible with the HaplotypeCaller. Acceptable options are ORIGINAL, EXACT, CACHING, and LOGLESS_CACHING."); - } + private final static double EXPECTED_ERROR_RATE_PER_BASE = 0.02; + /** + * Create a new LikelihoodCalculationEngine using provided parameters and hmm to do its calculations + * + * @param constantGCP the gap continuation penalty to use with the PairHMM + * @param debug should we emit debugging information during the calculation? + * @param hmmType the type of the HMM to use + * @param log10globalReadMismappingRate the global mismapping probability, in log10(prob) units. A value of + * -3 means that the chance that a read doesn't actually belong at this + * location in the genome is 1 in 1000. 
The effect of this parameter is + * to cap the maximum likelihood difference between the reference haplotype + * and the best alternative haplotype by -3 log units. So if the best + * haplotype is at -10 and this parameter has a value of -3 then even if the + * reference haplotype gets a score of -100 from the pairhmm it will be + * assigned a likelihood of -13. + */ + public LikelihoodCalculationEngine( final byte constantGCP, final boolean debug, final PairHMM.HMM_IMPLEMENTATION hmmType, final double log10globalReadMismappingRate ) { + this.hmmType = hmmType; this.constantGCP = constantGCP; - DEBUG = debug; + this.DEBUG = debug; + this.log10globalReadMismappingRate = log10globalReadMismappingRate; + + if ( WRITE_LIKELIHOODS_TO_FILE ) { + try { + likelihoodsStream = new PrintStream(new FileOutputStream(new File(LIKELIHOODS_FILENAME))); + } catch ( FileNotFoundException e ) { + throw new RuntimeException(e); + } + } else { + likelihoodsStream = null; + } } + public LikelihoodCalculationEngine() { + this((byte)10, false, PairHMM.HMM_IMPLEMENTATION.LOGLESS_CACHING, -3); + } + + public void close() { + if ( likelihoodsStream != null ) likelihoodsStream.close(); + } + + + /** * Initialize our pairHMM with parameters appropriate to the haplotypes and reads we're going to evaluate * @@ -124,7 +170,7 @@ public class LikelihoodCalculationEngine { } // initialize arrays to hold the probabilities of being in the match, insertion and deletion cases - pairHMM.initialize(X_METRIC_LENGTH, Y_METRIC_LENGTH); + pairHMM.get().initialize(X_METRIC_LENGTH, Y_METRIC_LENGTH); } public Map computeReadLikelihoods( final List haplotypes, final Map> perSampleReadList ) { @@ -132,9 +178,8 @@ public class LikelihoodCalculationEngine { initializePairHMM(haplotypes, perSampleReadList); // Add likelihoods for each sample's reads to our stratifiedReadMap - final Map stratifiedReadMap = new HashMap(); + final Map stratifiedReadMap = new LinkedHashMap<>(); for( final Map.Entry> sampleEntry : 
perSampleReadList.entrySet() ) { - //if( DEBUG ) { System.out.println("Evaluating sample " + sample + " with " + perSampleReadList.get( sample ).size() + " passing reads"); } // evaluate the likelihood of the reads given those haplotypes final PerReadAlleleLikelihoodMap map = computeReadLikelihoods(haplotypes, sampleEntry.getValue()); @@ -152,17 +197,16 @@ public class LikelihoodCalculationEngine { private PerReadAlleleLikelihoodMap computeReadLikelihoods( final List haplotypes, final List reads) { // first, a little set up to get copies of the Haplotypes that are Alleles (more efficient than creating them each time) final int numHaplotypes = haplotypes.size(); - final Map alleleVersions = new HashMap(numHaplotypes); + final Map alleleVersions = new LinkedHashMap<>(numHaplotypes); + Allele refAllele = null; for ( final Haplotype haplotype : haplotypes ) { - alleleVersions.put(haplotype, Allele.create(haplotype, true)); + final Allele allele = Allele.create(haplotype, true); + alleleVersions.put(haplotype, allele); + if ( haplotype.isReference() ) refAllele = allele; } final PerReadAlleleLikelihoodMap perReadAlleleLikelihoodMap = new PerReadAlleleLikelihoodMap(); for( final GATKSAMRecord read : reads ) { - if ( read.getReadLength() < minReadLength ) - // don't consider any reads that have a read length < the minimum - continue; - final byte[] overallGCP = new byte[read.getReadLength()]; Arrays.fill( overallGCP, constantGCP ); // Is there a way to derive empirical estimates for this from the data? // NOTE -- must clone anything that gets modified here so we don't screw up future uses of the read @@ -177,14 +221,45 @@ public class LikelihoodCalculationEngine { readQuals[kkk] = ( readQuals[kkk] < (byte) 18 ? 
QualityUtils.MIN_USABLE_Q_SCORE : readQuals[kkk] ); } + // keep track of the reference likelihood and the best non-ref likelihood + double refLog10l = Double.NEGATIVE_INFINITY; + double bestNonReflog10L = Double.NEGATIVE_INFINITY; + + // iterate over all haplotypes, calculating the likelihood of the read for each haplotype for( int jjj = 0; jjj < numHaplotypes; jjj++ ) { final Haplotype haplotype = haplotypes.get(jjj); final boolean isFirstHaplotype = jjj == 0; - final double log10l = pairHMM.computeReadLikelihoodGivenHaplotypeLog10(haplotype.getBases(), + final double log10l = pairHMM.get().computeReadLikelihoodGivenHaplotypeLog10(haplotype.getBases(), read.getReadBases(), readQuals, readInsQuals, readDelQuals, overallGCP, isFirstHaplotype); + if ( WRITE_LIKELIHOODS_TO_FILE ) { + likelihoodsStream.printf("%s %s %s %s %s %s %f%n", + haplotype.getBaseString(), + new String(read.getReadBases()), + SAMUtils.phredToFastq(readQuals), + SAMUtils.phredToFastq(readInsQuals), + SAMUtils.phredToFastq(readDelQuals), + SAMUtils.phredToFastq(overallGCP), + log10l); + } + + if ( haplotype.isNonReference() ) + bestNonReflog10L = Math.max(bestNonReflog10L, log10l); + else + refLog10l = log10l; + perReadAlleleLikelihoodMap.add(read, alleleVersions.get(haplotype), log10l); } + + // ensure that the reference haplotype is no worse than the best non-ref haplotype minus the global + // mismapping rate. This protects us from the case where the assembly has produced haplotypes + // that are very divergent from reference, but are supported by only one read. 
In effect + // we capping how badly scoring the reference can be for any read by the chance that the read + // itself just doesn't belong here + final double worstRefLog10Allowed = bestNonReflog10L + log10globalReadMismappingRate; + if ( refLog10l < (worstRefLog10Allowed) ) { + perReadAlleleLikelihoodMap.add(read, refAllele, worstRefLog10Allowed); + } } return perReadAlleleLikelihoodMap; @@ -223,7 +298,7 @@ public class LikelihoodCalculationEngine { // Compute log10(10^x1/2 + 10^x2/2) = log10(10^x1+10^x2)-log10(2) // First term is approximated by Jacobian log with table lookup. haplotypeLikelihood += ReadUtils.getMeanRepresentativeReadCount( entry.getKey() ) * - ( MathUtils.approximateLog10SumLog10(entry.getValue().get(iii_allele), entry.getValue().get(jjj_allele)) + LOG_ONE_HALF ); + ( MathUtils.approximateLog10SumLog10(entry.getValue().get(iii_allele), entry.getValue().get(jjj_allele)) + MathUtils.LOG_ONE_HALF ); } } haplotypeLikelihoodMatrix[iii][jjj] = haplotypeLikelihood; @@ -321,11 +396,11 @@ public class LikelihoodCalculationEngine { if ( haplotypes.size() == 2 ) return haplotypes; // fast path -- we'll always want to use 2 haplotypes // all of the haplotypes that at least one sample called as one of the most likely - final Set selectedHaplotypes = new HashSet(); + final Set selectedHaplotypes = new HashSet<>(); selectedHaplotypes.add(findReferenceHaplotype(haplotypes)); // ref is always one of the selected // our annoying map from allele -> haplotype - final Map allele2Haplotype = new HashMap(); + final Map allele2Haplotype = new HashMap<>(); for ( final Haplotype h : haplotypes ) { h.setScore(h.isReference() ? 
Double.MAX_VALUE : 0.0); // set all of the scores to 0 (lowest value) for all non-ref haplotypes allele2Haplotype.put(Allele.create(h, h.isReference()), h); diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngine.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngine.java index 4c0483ad6..c889d7995 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngine.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngine.java @@ -46,28 +46,345 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller; +import com.google.java.contract.Ensures; +import com.google.java.contract.Requires; +import net.sf.samtools.Cigar; +import net.sf.samtools.CigarElement; +import net.sf.samtools.CigarOperator; +import org.apache.commons.lang.ArrayUtils; +import org.apache.log4j.Logger; +import org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs.*; import org.broadinstitute.sting.utils.GenomeLoc; import org.broadinstitute.sting.utils.haplotype.Haplotype; import org.broadinstitute.sting.utils.activeregion.ActiveRegion; +import org.broadinstitute.sting.utils.sam.AlignmentUtils; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; +import org.broadinstitute.sting.utils.sam.ReadUtils; +import org.broadinstitute.sting.utils.smithwaterman.SWPairwiseAlignment; +import org.broadinstitute.sting.utils.smithwaterman.SWParameterSet; +import org.broadinstitute.variant.variantcontext.Allele; import org.broadinstitute.variant.variantcontext.VariantContext; +import java.io.File; import java.io.PrintStream; -import java.util.List; +import java.util.*; /** - * Created by IntelliJ IDEA. 
+ * Abstract base class for all HaplotypeCaller assemblers + * * User: ebanks * Date: Mar 14, 2011 */ public abstract class LocalAssemblyEngine { - public static final byte DEFAULT_MIN_BASE_QUALITY_TO_USE = (byte) 8; + private final static Logger logger = Logger.getLogger(LocalAssemblyEngine.class); + + /** + * If false, we will only write out a region around the reference source + */ + private final static boolean PRINT_FULL_GRAPH_FOR_DEBUGGING = true; + public static final byte DEFAULT_MIN_BASE_QUALITY_TO_USE = (byte) 8; + private static final int MIN_HAPLOTYPE_REFERENCE_LENGTH = 30; + + protected final int numBestHaplotypesPerGraph; + + protected boolean debug = false; + protected boolean allowCyclesInKmerGraphToGeneratePaths = false; + protected boolean debugGraphTransformations = false; + protected boolean recoverDanglingTails = true; - protected PrintStream graphWriter = null; protected byte minBaseQualityToUseInAssembly = DEFAULT_MIN_BASE_QUALITY_TO_USE; protected int pruneFactor = 2; protected boolean errorCorrectKmers = false; - protected LocalAssemblyEngine() { } + private PrintStream graphWriter = null; + + /** + * Create a new LocalAssemblyEngine with all default parameters, ready for use + * @param numBestHaplotypesPerGraph the number of haplotypes to generate for each assembled graph + */ + protected LocalAssemblyEngine(final int numBestHaplotypesPerGraph) { + if ( numBestHaplotypesPerGraph < 1 ) throw new IllegalArgumentException("numBestHaplotypesPerGraph should be >= 1 but got " + numBestHaplotypesPerGraph); + this.numBestHaplotypesPerGraph = numBestHaplotypesPerGraph; + } + + /** + * Main subclass function: given reads and a reference haplotype give us graphs to use for constructing + * non-reference haplotypes. 
+ * + * @param reads the reads we're going to assemble + * @param refHaplotype the reference haplotype + * @return a non-null list of assembly graphs + */ + protected abstract List assemble(List reads, Haplotype refHaplotype, List activeAlleleHaplotypes); + + protected List assemble(List reads, Haplotype refHaplotype) { + return assemble(reads, refHaplotype, Collections.emptyList()); + } + + /** + * Main entry point into the assembly engine. Build a set of deBruijn graphs out of the provided reference sequence and list of reads + * @param activeRegion ActiveRegion object holding the reads which are to be used during assembly + * @param refHaplotype reference haplotype object + * @param fullReferenceWithPadding byte array holding the reference sequence with padding + * @param refLoc GenomeLoc object corresponding to the reference sequence with padding + * @param activeAllelesToGenotype the alleles to inject into the haplotypes during GGA mode + * @param readErrorCorrector a ReadErrorCorrector object, if reads are to be corrected before assembly. Can be null if no error corrector is to be used.
+ * @return a non-empty list of all the haplotypes that are produced during assembly + */ + public List runLocalAssembly(final ActiveRegion activeRegion, + final Haplotype refHaplotype, + final byte[] fullReferenceWithPadding, + final GenomeLoc refLoc, + final List activeAllelesToGenotype, + final ReadErrorCorrector readErrorCorrector) { + if( activeRegion == null ) { throw new IllegalArgumentException("Assembly engine cannot be used with a null ActiveRegion."); } + if( refHaplotype == null ) { throw new IllegalArgumentException("Reference haplotype cannot be null."); } + if( fullReferenceWithPadding.length != refLoc.size() ) { throw new IllegalArgumentException("Reference bases and reference loc must be the same size."); } + if( pruneFactor < 0 ) { throw new IllegalArgumentException("Pruning factor cannot be negative"); } + + // create the list of artificial haplotypes that should be added to the graph for GGA mode + final List activeAlleleHaplotypes = createActiveAlleleHaplotypes(refHaplotype, activeAllelesToGenotype, activeRegion.getExtendedLoc()); + + + // error-correct reads before clipping low-quality tails: some low quality bases might be good and we want to recover them + final List correctedReads; + if (readErrorCorrector != null) { + // now correct all reads in active region after filtering/downsampling + // Note that original reads in active region are NOT modified by default, since they will be used later for GL computation, + // and we only want the read-error corrected reads for graph building. 
+ readErrorCorrector.addReadsToKmers(activeRegion.getReads()); + correctedReads = new ArrayList<>(readErrorCorrector.correctReads(activeRegion.getReads())); + } + else correctedReads = activeRegion.getReads(); + + // create the graphs by calling our subclass assemble method + final List graphs = assemble(correctedReads, refHaplotype, activeAlleleHaplotypes); + + // do some QC on the graphs + for ( final SeqGraph graph : graphs ) { sanityCheckGraph(graph, refHaplotype); } + + // print the graphs if the appropriate debug option has been turned on + if ( graphWriter != null ) { printGraphs(graphs); } + + // find the best paths in the graphs and return them as haplotypes + return findBestPaths( graphs, refHaplotype, refLoc, activeRegion.getExtendedLoc() ); + } + + /** + * Create the list of artificial GGA-mode haplotypes by injecting each of the provided alternate alleles into the reference haplotype + * @param refHaplotype the reference haplotype + * @param activeAllelesToGenotype the list of alternate alleles in VariantContexts + * @param activeRegionWindow the window containing the reference haplotype + * @return a non-null list of haplotypes + */ + private List createActiveAlleleHaplotypes(final Haplotype refHaplotype, final List activeAllelesToGenotype, final GenomeLoc activeRegionWindow) { + final Set returnHaplotypes = new LinkedHashSet<>(); + final int activeRegionStart = refHaplotype.getAlignmentStartHapwrtRef(); + + for( final VariantContext compVC : activeAllelesToGenotype ) { + for( final Allele compAltAllele : compVC.getAlternateAlleles() ) { + final Haplotype insertedRefHaplotype = refHaplotype.insertAllele(compVC.getReference(), compAltAllele, activeRegionStart + compVC.getStart() - activeRegionWindow.getStart(), compVC.getStart()); + if( insertedRefHaplotype != null ) { // can be null if the requested allele can't be inserted into the haplotype + returnHaplotypes.add(insertedRefHaplotype); + } + } + } + + return new ArrayList<>(returnHaplotypes); + } + 
+ @Ensures({"result.contains(refHaplotype)"}) + protected List findBestPaths(final List graphs, final Haplotype refHaplotype, final GenomeLoc refLoc, final GenomeLoc activeRegionWindow) { + // add the reference haplotype separately from all the others to ensure that it is present in the list of haplotypes + final Set returnHaplotypes = new LinkedHashSet<>(); + returnHaplotypes.add( refHaplotype ); + + final int activeRegionStart = refHaplotype.getAlignmentStartHapwrtRef(); + + for( final SeqGraph graph : graphs ) { + final SeqVertex source = graph.getReferenceSourceVertex(); + final SeqVertex sink = graph.getReferenceSinkVertex(); + if ( source == null || sink == null ) throw new IllegalArgumentException("Both source and sink cannot be null but got " + source + " and sink " + sink + " for graph "+ graph); + + final KBestPaths pathFinder = new KBestPaths<>(allowCyclesInKmerGraphToGeneratePaths); + for ( final Path path : pathFinder.getKBestPaths(graph, numBestHaplotypesPerGraph, source, sink) ) { + Haplotype h = new Haplotype( path.getBases() ); + if( !returnHaplotypes.contains(h) ) { + final Cigar cigar = path.calculateCigar(refHaplotype.getBases()); + + if ( cigar == null ) { + // couldn't produce a meaningful alignment of haplotype to reference, fail quietly + continue; + } else if( cigar.isEmpty() ) { + throw new IllegalStateException("Smith-Waterman alignment failure. Cigar = " + cigar + " with reference length " + cigar.getReferenceLength() + + " but expecting reference length of " + refHaplotype.getCigar().getReferenceLength()); + } else if ( pathIsTooDivergentFromReference(cigar) || cigar.getReferenceLength() < MIN_HAPLOTYPE_REFERENCE_LENGTH ) { + // N cigar elements means that a bubble was too divergent from the reference so skip over this path + continue; + } else if( cigar.getReferenceLength() != refHaplotype.getCigar().getReferenceLength() ) { // SW failure + throw new IllegalStateException("Smith-Waterman alignment failure. 
Cigar = " + cigar + " with reference length " + + cigar.getReferenceLength() + " but expecting reference length of " + refHaplotype.getCigar().getReferenceLength() + + " ref = " + refHaplotype + " path " + new String(path.getBases())); + } + + h.setCigar(cigar); + h.setAlignmentStartHapwrtRef(activeRegionStart); + h.setScore(path.getScore()); + returnHaplotypes.add(h); + + if ( debug ) + logger.info("Adding haplotype " + h.getCigar() + " from graph with kmer " + graph.getKmerSize()); + } + } + } + + // add genome locs to the haplotypes + for ( final Haplotype h : returnHaplotypes ) h.setGenomeLocation(activeRegionWindow); + + if ( returnHaplotypes.size() < returnHaplotypes.size() ) + logger.info("Found " + returnHaplotypes.size() + " candidate haplotypes of " + returnHaplotypes.size() + " possible combinations to evaluate every read against at " + refLoc); + + if( debug ) { + if( returnHaplotypes.size() > 1 ) { + logger.info("Found " + returnHaplotypes.size() + " candidate haplotypes of " + returnHaplotypes.size() + " possible combinations to evaluate every read against."); + } else { + logger.info("Found only the reference haplotype in the assembly graph."); + } + for( final Haplotype h : returnHaplotypes ) { + logger.info( h.toString() ); + logger.info( "> Cigar = " + h.getCigar() + " : " + h.getCigar().getReferenceLength() + " score " + h.getScore() + " ref " + h.isReference()); + } + } + + return new ArrayList<>(returnHaplotypes); + } + + /** + * We use CigarOperator.N as the signal that an incomplete or too divergent bubble was found during bubble traversal + * @param c the cigar to test + * @return true if we should skip over this path + */ + @Requires("c != null") + private boolean pathIsTooDivergentFromReference( final Cigar c ) { + for( final CigarElement ce : c.getCigarElements() ) { + if( ce.getOperator().equals(CigarOperator.N) ) { + return true; + } + } + return false; + } + + /** + * Print graph to file if debugGraphTransformations is enabled + * 
@param graph the graph to print + * @param file the destination file + */ + protected void printDebugGraphTransform(final BaseGraph graph, final File file) { + if ( debugGraphTransformations ) { + if ( PRINT_FULL_GRAPH_FOR_DEBUGGING ) + graph.printGraph(file, pruneFactor); + else + graph.subsetToRefSource().printGraph(file, pruneFactor); + } + } + + protected SeqGraph cleanupSeqGraph(final SeqGraph seqGraph) { + printDebugGraphTransform(seqGraph, new File("sequenceGraph.1.dot")); + + // the very first thing we need to do is zip up the graph, or pruneGraph will be too aggressive + seqGraph.zipLinearChains(); + printDebugGraphTransform(seqGraph, new File("sequenceGraph.2.zipped.dot")); + + // now go through and prune the graph, removing vertices no longer connected to the reference chain + // IMPORTANT: pruning must occur before we call simplifyGraph, as simplifyGraph adds 0 weight + // edges to maintain graph connectivity. + seqGraph.pruneGraph(pruneFactor); + seqGraph.removeVerticesNotConnectedToRefRegardlessOfEdgeDirection(); + + printDebugGraphTransform(seqGraph, new File("sequenceGraph.3.pruned.dot")); + seqGraph.simplifyGraph(); + printDebugGraphTransform(seqGraph, new File("sequenceGraph.4.merged.dot")); + + // The graph has degenerated in some way, so the reference source and/or sink cannot be id'd. Can + // happen in cases where for example the reference somehow manages to acquire a cycle, or + // where the entire assembly collapses back into the reference sequence. + if ( seqGraph.getReferenceSourceVertex() == null || seqGraph.getReferenceSinkVertex() == null ) + return null; + + seqGraph.removePathsNotConnectedToRef(); + seqGraph.simplifyGraph(); + if ( seqGraph.vertexSet().size() == 1 ) { + // we've perfectly assembled into a single reference haplotype, add a empty seq vertex to stop + // the code from blowing up. 
+ // TODO -- ref properties should really be on the vertices, not the graph itself + final SeqVertex complete = seqGraph.vertexSet().iterator().next(); + final SeqVertex dummy = new SeqVertex(""); + seqGraph.addVertex(dummy); + seqGraph.addEdge(complete, dummy, new BaseEdge(true, 0)); + } + printDebugGraphTransform(seqGraph, new File("sequenceGraph.5.final.dot")); + + return seqGraph; + } + + /** + * Perform general QC on the graph to make sure something hasn't gone wrong during assembly + * @param graph the graph to check + * @param refHaplotype the reference haplotype + */ + private void sanityCheckGraph(final BaseGraph graph, final Haplotype refHaplotype) { + sanityCheckReferenceGraph(graph, refHaplotype); + } + + /** + * Make sure the reference sequence is properly represented in the provided graph + * + * @param graph the graph to check + * @param refHaplotype the reference haplotype + */ + private void sanityCheckReferenceGraph(final BaseGraph graph, final Haplotype refHaplotype) { + if( graph.getReferenceSourceVertex() == null ) { + throw new IllegalStateException("All reference graphs must have a reference source vertex."); + } + if( graph.getReferenceSinkVertex() == null ) { + throw new IllegalStateException("All reference graphs must have a reference sink vertex."); + } + if( !Arrays.equals(graph.getReferenceBytes(graph.getReferenceSourceVertex(), graph.getReferenceSinkVertex(), true, true), refHaplotype.getBases()) ) { + throw new IllegalStateException("Mismatch between the reference haplotype and the reference assembly graph path. 
for graph " + graph + + " graph = " + new String(graph.getReferenceBytes(graph.getReferenceSourceVertex(), graph.getReferenceSinkVertex(), true, true)) + + " haplotype = " + new String(refHaplotype.getBases()) + ); + } + } + + /** + * Print the generated graphs to the graphWriter + * @param graphs a non-null list of graphs to print out + */ + private void printGraphs(final List graphs) { + final int writeFirstGraphWithSizeSmallerThan = 50; + + graphWriter.println("digraph assemblyGraphs {"); + for( final SeqGraph graph : graphs ) { + if ( debugGraphTransformations && graph.getKmerSize() >= writeFirstGraphWithSizeSmallerThan ) { + logger.info("Skipping writing of graph with kmersize " + graph.getKmerSize()); + continue; + } + + graph.printGraph(graphWriter, false, pruneFactor); + + if ( debugGraphTransformations ) + break; + } + + graphWriter.println("}"); + } + + // ----------------------------------------------------------------------------------------------- + // + // getter / setter routines for generic assembler properties + // + // ----------------------------------------------------------------------------------------------- public int getPruneFactor() { return pruneFactor; @@ -85,10 +402,6 @@ public abstract class LocalAssemblyEngine { this.errorCorrectKmers = errorCorrectKmers; } - public PrintStream getGraphWriter() { - return graphWriter; - } - public void setGraphWriter(PrintStream graphWriter) { this.graphWriter = graphWriter; } @@ -101,5 +414,35 @@ public abstract class LocalAssemblyEngine { this.minBaseQualityToUseInAssembly = minBaseQualityToUseInAssembly; } - public abstract List runLocalAssembly(ActiveRegion activeRegion, Haplotype refHaplotype, byte[] fullReferenceWithPadding, GenomeLoc refLoc, List activeAllelesToGenotype); + public boolean isDebug() { + return debug; + } + + public void setDebug(boolean debug) { + this.debug = debug; + } + + public boolean isAllowCyclesInKmerGraphToGeneratePaths() { + return 
allowCyclesInKmerGraphToGeneratePaths; + } + + public void setAllowCyclesInKmerGraphToGeneratePaths(boolean allowCyclesInKmerGraphToGeneratePaths) { + this.allowCyclesInKmerGraphToGeneratePaths = allowCyclesInKmerGraphToGeneratePaths; + } + + public boolean isDebugGraphTransformations() { + return debugGraphTransformations; + } + + public void setDebugGraphTransformations(boolean debugGraphTransformations) { + this.debugGraphTransformations = debugGraphTransformations; + } + + public boolean isRecoverDanglingTails() { + return recoverDanglingTails; + } + + public void setRecoverDanglingTails(boolean recoverDanglingTails) { + this.recoverDanglingTails = recoverDanglingTails; + } } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/ReadErrorCorrector.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/ReadErrorCorrector.java new file mode 100644 index 000000000..e1471ab33 --- /dev/null +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/ReadErrorCorrector.java @@ -0,0 +1,526 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). +* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. 
+* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. 
LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. 
NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. +* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. 
Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. 
+*/ + +package org.broadinstitute.sting.gatk.walkers.haplotypecaller; + +import com.google.java.contract.Requires; +import org.apache.log4j.Logger; +import org.broadinstitute.sting.utils.BaseUtils; +import org.broadinstitute.sting.utils.QualityUtils; +import org.broadinstitute.sting.utils.clipping.ReadClipper; +import org.broadinstitute.sting.utils.collections.Pair; +import org.broadinstitute.sting.utils.exceptions.UserException; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; + +import java.util.*; + +/** + * Utility class that error-corrects reads. + * Main idea: An error in a read will appear as a bubble in a k-mer (de Bruijn) graph and such bubble will have very low multiplicity. + * Hence, read errors will appear as "sparse" kmers with very little support. + * Historically, the most common approach to error-correct reads before assembly has been to first compute the kmer spectrum of the reads, + * defined as the kmer composition of a set of reads along with the multiplicity of each kmer. + * First-generation correctors like the Euler corrector (Pevzner 2001) mapped low frequency kmers (kmers appearing say below N times) + * into high frequency ones that lied within a certain Hamming or edit distance. + * This is doable, but has some drawbacks: + * - Kmers used for error correction become tied to kmers used for graph building. + * - Hence, large kmers (desireable for graph building because they can resolve repeats better) are a hindrance for error correction, + * because they are seen less often. + * - After error correction, there is no guarantee that a sequence of kmers corresponds to an "actual" read. + * + * An error-corrected set of reads also makes a much smoother graph without the need to resolving so many bubbles. + * + * Idea hence is to correct reads based on their kmer content, but in a context independent from graph building. + * In order to do this, the following steps are taken: + * - The k-mer spectrum of a set of reads in computed. 
However, we are at freedom to choose the most convenient k-mer size (typically around + * read length /2). + * - We partition the set of observed k-mers into "solid" kmers which have multiplicity > M, and "unsolid" ones otherwise (Pevzner 2001). + * + * - Main idea of the algorithm is to try to substitute a sequence of bases in a read by a sequence better supported by kmers. + * - For each "unsolid" kmer observed in reads, we try to find a "solid" kmer within a maximum Hamming distance. + * - If such solid kmer exists, then this unsolid kmer is "correctable", otherwise, uncorrectable. + * - For each read, then: + * -- Walk through read and visit all kmers. + * -- If kmer is solid, continue to next kmer. + * -- If not, and if it's correctable (i.e. there exists a mapping from an unsolid kmer to a solid kmer within a given Hamming distance), + * add the bases and offsets corresponding to differing positions between unsolid and solid kmer to correction list. + * -- At the end, each base in read will have a list of corrections associated with it. We can then choose to correct or not. + * If read has only consistent corrections, then we can correct base to common base in corrections. + * + * TODO: + * todo Q: WHAT QUALITY TO USE?? + * todo how do we deal with mate pairs? 
+ * + * + + + */ +public class ReadErrorCorrector { + private final static Logger logger = Logger.getLogger(ReadErrorCorrector.class); + /** + * A map of for each kmer to its num occurrences in addKmers + */ + KMerCounter countsByKMer; + + Map kmerCorrectionMap = new HashMap<>(); + Map> kmerDifferingBases = new HashMap<>(); + private final int kmerLength; + private final boolean debug; + private final boolean trimLowQualityBases; + private final byte minTailQuality; + private final int maxMismatchesToCorrect; + private final byte qualityOfCorrectedBases; + private final int maxObservationsForKmerToBeCorrectable; + private final int maxHomopolymerLengthInRegion; + private final int minObservationsForKmerToBeSolid; + + // default values, for debugging + private final static boolean doInplaceErrorCorrection = false; // currently not used, since we want corrected reads to be used only for assembly + private final static int MAX_MISMATCHES_TO_CORRECT = 2; + private final static byte QUALITY_OF_CORRECTED_BASES = 30; // what's a reasonable value here? 
+ private final static int MAX_OBSERVATIONS_FOR_KMER_TO_BE_CORRECTABLE = 1; + private final static boolean TRIM_LOW_QUAL_TAILS = false; + private final static boolean DONT_CORRECT_IN_LONG_HOMOPOLYMERS = false; + private final static int MAX_HOMOPOLYMER_THRESHOLD = 12; + + // debug counter structure + private final ReadErrorCorrectionStats readErrorCorrectionStats = new ReadErrorCorrectionStats(); + + /** + * Create a new kmer corrector + * + * @param kmerLength the length of kmers we'll be counting to error correct, must be >= 1 + * @param maxMismatchesToCorrect e >= 0 + * @param qualityOfCorrectedBases Bases to be corrected will be assigned this quality + */ + public ReadErrorCorrector(final int kmerLength, + final int maxMismatchesToCorrect, + final int maxObservationsForKmerToBeCorrectable, + final byte qualityOfCorrectedBases, + final int minObservationsForKmerToBeSolid, + final boolean trimLowQualityBases, + final byte minTailQuality, + final boolean debug, + final byte[] fullReferenceWithPadding) { + if ( kmerLength < 1 ) throw new IllegalArgumentException("kmerLength must be > 0 but got " + kmerLength); + if ( maxMismatchesToCorrect < 1 ) + throw new IllegalArgumentException("maxMismatchesToCorrect must be >= 1 but got " + maxMismatchesToCorrect); + if ( qualityOfCorrectedBases < 2 || qualityOfCorrectedBases > QualityUtils.MAX_REASONABLE_Q_SCORE) + throw new IllegalArgumentException("qualityOfCorrectedBases must be >= 2 and <= MAX_REASONABLE_Q_SCORE but got " + qualityOfCorrectedBases); + + countsByKMer = new KMerCounter(kmerLength); + this.kmerLength = kmerLength; + this.maxMismatchesToCorrect = maxMismatchesToCorrect; + this.qualityOfCorrectedBases = qualityOfCorrectedBases; + this.minObservationsForKmerToBeSolid = minObservationsForKmerToBeSolid; + this.trimLowQualityBases = trimLowQualityBases; + this.minTailQuality = minTailQuality; + this.debug = debug; + this.maxObservationsForKmerToBeCorrectable = maxObservationsForKmerToBeCorrectable; + + // when 
region has long homopolymers, we may want not to correct reads, since assessment is complicated, + // so we may decide to skip error correction in these regions + maxHomopolymerLengthInRegion = computeMaxHLen(fullReferenceWithPadding); + } + + /** + * Simple constructor with sensible defaults + * @param kmerLength K-mer length for error correction (not necessarily the same as for assembly graph) + * @param minTailQuality Minimum tail quality: remaining bases with Q's below this value are hard-clipped after correction + * @param debug Output debug information + */ + public ReadErrorCorrector(final int kmerLength, final byte minTailQuality, final int minObservationsForKmerToBeSolid, final boolean debug,final byte[] fullReferenceWithPadding) { + this(kmerLength, MAX_MISMATCHES_TO_CORRECT, MAX_OBSERVATIONS_FOR_KMER_TO_BE_CORRECTABLE, QUALITY_OF_CORRECTED_BASES, minObservationsForKmerToBeSolid, TRIM_LOW_QUAL_TAILS, minTailQuality, debug,fullReferenceWithPadding); + } + + /** + * Main entry routine to add all kmers in a read to the read map counter + * @param read Read to add bases + */ + @Requires("read != null") + protected void addReadKmers(final GATKSAMRecord read) { + if (DONT_CORRECT_IN_LONG_HOMOPOLYMERS && maxHomopolymerLengthInRegion > MAX_HOMOPOLYMER_THRESHOLD) + return; + + final byte[] readBases = read.getReadBases(); + for (int offset = 0; offset <= readBases.length-kmerLength; offset++ ) { + countsByKMer.addKmer(new Kmer(readBases,offset,kmerLength),1); + + } + } + + /** + * Correct a collection of reads based on stored k-mer counts + * @param reads + */ + public final List correctReads(final Collection reads) { + + final List correctedReads = new ArrayList<>(reads.size()); + if (DONT_CORRECT_IN_LONG_HOMOPOLYMERS && maxHomopolymerLengthInRegion > MAX_HOMOPOLYMER_THRESHOLD) { + // just copy reads into output and exit + correctedReads.addAll(reads); + } + else { + computeKmerCorrectionMap(); + for (final GATKSAMRecord read: reads) { + final GATKSAMRecord 
correctedRead = correctRead(read); + if (trimLowQualityBases) + correctedReads.add(ReadClipper.hardClipLowQualEnds(correctedRead, minTailQuality)); + else + correctedReads.add(correctedRead); + } + if (debug) { + logger.info("Number of corrected bases:"+readErrorCorrectionStats.numBasesCorrected); + logger.info("Number of corrected reads:"+readErrorCorrectionStats.numReadsCorrected); + logger.info("Number of skipped reads:"+readErrorCorrectionStats.numReadsUncorrected); + logger.info("Number of solid kmers:"+readErrorCorrectionStats.numSolidKmers); + logger.info("Number of corrected kmers:"+readErrorCorrectionStats.numCorrectedKmers); + logger.info("Number of uncorrectable kmers:"+readErrorCorrectionStats.numUncorrectableKmers); + } + } + return correctedReads; + } + + + /** + * Do actual read correction based on k-mer map. First, loop through stored k-mers to get a list of possible corrections + * for each position in the read. Then correct read based on all possible consistent corrections. + * @param inputRead Read to correct + * @return Corrected read (can be same reference as input if doInplaceErrorCorrection is set) + */ + @Requires("inputRead != null") + private GATKSAMRecord correctRead(final GATKSAMRecord inputRead) { + // no support for reduced reads (which shouldn't need to be error-corrected anyway!) 
+ if (inputRead.isReducedRead()) + return inputRead; + + // do actual correction + boolean corrected = false; + final byte[] correctedBases = inputRead.getReadBases(); + final byte[] correctedQuals = inputRead.getBaseQualities(); + + // array to store list of possible corrections for read + final CorrectionSet correctionSet = buildCorrectionMap(correctedBases); + + for (int offset = 0; offset < correctedBases.length; offset++) { + final Byte b = correctionSet.getConsensusCorrection(offset); + if (b != null && b != correctedBases[offset]) { + correctedBases[offset] = b; + correctedQuals[offset] = qualityOfCorrectedBases; + corrected = true; + } + readErrorCorrectionStats.numBasesCorrected++; + } + + if (corrected) { + readErrorCorrectionStats.numReadsCorrected++; + if (doInplaceErrorCorrection) { + inputRead.setReadBases(correctedBases); + inputRead.setBaseQualities(correctedQuals); + return inputRead; + } + else { + GATKSAMRecord correctedRead = new GATKSAMRecord(inputRead); + + // do the actual correction + // todo - do we need to clone anything else from read? + correctedRead.setBaseQualities(inputRead.getBaseQualities()); + correctedRead.setIsStrandless(inputRead.isStrandless()); + correctedRead.setReadBases(inputRead.getReadBases()); + correctedRead.setReadString(inputRead.getReadString()); + correctedRead.setReadGroup(inputRead.getReadGroup()); + return correctedRead; + } + } + else { + readErrorCorrectionStats.numReadsUncorrected++; + return inputRead; + } + } + + /** + * Build correction map for each of the bases in read. + * For each of the constituent kmers in read: + * a) See whether the kmer has been mapped to a corrected kmer. + * b) If so, get list of differing positions and corresponding bases. + * c) Add then list of new bases to index in correction list. + * Correction list is of read size, and holds a list of bases to correct. + * @param correctedBases Bases to attempt to correct + * @return CorrectionSet object. 
+ */ + @Requires("correctedBases != null") + private CorrectionSet buildCorrectionMap(final byte[] correctedBases) { + // array to store list of possible corrections for read + final CorrectionSet correctionSet = new CorrectionSet(correctedBases.length); + + for (int offset = 0; offset <= correctedBases.length-kmerLength; offset++ ) { + final Kmer kmer = new Kmer(correctedBases,offset,kmerLength); + final Kmer newKmer = kmerCorrectionMap.get(kmer); + if (newKmer != null && !newKmer.equals(kmer)){ + final Pair differingPositions = kmerDifferingBases.get(kmer); + final int[] differingIndeces = differingPositions.first; + final byte[] differingBases = differingPositions.second; + + for (int k=0; k < differingIndeces.length; k++) { + // get list of differing positions for corrected kmer + // for each of these, add correction candidate to correction set + correctionSet.add(offset + differingIndeces[k],differingBases[k]); + } + } + } + return correctionSet; + } + + + /** + * Top-level entry point that adds a collection of reads to our kmer list. + * For each read in list, its constituent kmers will be logged in our kmer table. + * @param reads + */ + @Requires("reads != null") + public void addReadsToKmers(final Collection reads) { + for (final GATKSAMRecord read: reads) + addReadKmers(read); + + if (debug) + for ( final KMerCounter.CountedKmer countedKmer: countsByKMer.getCountedKmers() ) + logger.info(String.format("%s\t%d\n", countedKmer.kmer, countedKmer.count)); + } + + + /** + * For each kmer we've seen, do the following: + * a) If kmer count > threshold1, this kmer is good, so correction map will be to itself. + * b) If kmer count <= threshold2, this kmer is bad. + * In that case, loop through all other kmers. If kmer is good, compute distance, and get minimal distance. + * If such distance is < some threshold, map to this kmer, and record differing positions and bases. 
+ * + */ + private void computeKmerCorrectionMap() { + for (final KMerCounter.CountedKmer storedKmer : countsByKMer.getCountedKmers()) { + if (storedKmer.getCount() >= minObservationsForKmerToBeSolid) { + // this kmer is good: map to itself + kmerCorrectionMap.put(storedKmer.getKmer(),storedKmer.getKmer()); + kmerDifferingBases.put(storedKmer.getKmer(),new Pair<>(new int[0],new byte[0])); // dummy empty array + readErrorCorrectionStats.numSolidKmers++; + } + else if (storedKmer.getCount() <= maxObservationsForKmerToBeCorrectable) { + // loop now thru all other kmers to find nearest neighbor + final Pair> nearestNeighbor = findNearestNeighbor(storedKmer.getKmer(),countsByKMer,maxMismatchesToCorrect); + + // check if nearest neighbor lies in a close vicinity. If so, log the new bases and the correction map + if (nearestNeighbor != null) { // ok, found close neighbor + kmerCorrectionMap.put(storedKmer.getKmer(), nearestNeighbor.first); + kmerDifferingBases.put(storedKmer.getKmer(), nearestNeighbor.second); + readErrorCorrectionStats.numCorrectedKmers++; +// if (debug) +// logger.info("Original kmer:"+storedKmer + "\tCorrected kmer:"+nearestNeighbor.first+"\tDistance:"+dist); + } + else + readErrorCorrectionStats.numUncorrectableKmers++; + + } + } + } + + /** + * Finds nearest neighbor of a given k-mer, among a list of counted K-mers, up to a given distance. + * If many k-mers share same closest distance, an arbitrary k-mer is picked + * @param kmer K-mer of interest + * @param countsByKMer KMerCounter storing set of counted k-mers (may include kmer of interest) + * @param maxDistance Maximum distance to search + * @return Pair of values: closest K-mer in Hamming distance and list of differing bases. 
+ * If no neighbor can be found up to given distance, returns null + */ + @Requires({"kmer != null", "countsByKMer != null","maxDistance >= 1"}) + private Pair> findNearestNeighbor(final Kmer kmer, + final KMerCounter countsByKMer, + final int maxDistance) { + int minimumDistance = Integer.MAX_VALUE; + Kmer closestKmer = null; + + final int[] differingIndeces = new int[maxDistance+1]; + final byte[] differingBases = new byte[maxDistance+1]; + + final int[] closestDifferingIndices = new int[maxDistance+1]; + final byte[] closestDifferingBases = new byte[maxDistance+1]; + + for (final KMerCounter.CountedKmer candidateKmer : countsByKMer.getCountedKmers()) { + // skip if candidate set includes test kmer + if (candidateKmer.getKmer().equals(kmer)) + continue; + + final int hammingDistance = kmer.getDifferingPositions(candidateKmer.getKmer(), maxDistance, differingIndeces, differingBases); + if (hammingDistance < 0) // can't compare kmer? skip + continue; + + if (hammingDistance < minimumDistance) { + minimumDistance = hammingDistance; + closestKmer = candidateKmer.getKmer(); + System.arraycopy(differingBases,0,closestDifferingBases,0,differingBases.length); + System.arraycopy(differingIndeces,0,closestDifferingIndices,0,differingIndeces.length); + } + } + return new Pair<>(closestKmer, new Pair<>(closestDifferingIndices,closestDifferingBases)); + } + + + /** + * experimental function to compute max homopolymer length in a given reference context + * @param fullReferenceWithPadding Reference context of interest + * @return Max homopolymer length in region + */ + @Requires("fullReferenceWithPadding != null") + private static int computeMaxHLen(final byte[] fullReferenceWithPadding) { + + int leftRun = 1; + int maxRun = 1; + for ( int i = 1; i < fullReferenceWithPadding.length; i++) { + if ( fullReferenceWithPadding[i] == fullReferenceWithPadding[i-1] ) + leftRun++; + else + leftRun = 1; + } + if (leftRun > maxRun) + maxRun = leftRun; + + + return maxRun; + } + + private 
static final class ReadErrorCorrectionStats { + public int numReadsCorrected; + public int numReadsUncorrected; + public int numBasesCorrected; + public int numSolidKmers; + public int numUncorrectableKmers; + public int numCorrectedKmers; + } + + /** + * Wrapper utility class that holds, for each position in read, a list of bytes representing candidate corrections. + * So, a read ACAGT where the middle A has found to be errorful might look like: + * 0: {} + * 1: {} + * 2: {'C','C','C'} + * 3: {} + * 4: {} + * + * It's up to the method getConsensusCorrection() to decide how to use the correction sets for each position. + * By default, only strict consensus is allowed right now. + * + */ + protected static class CorrectionSet { + private final int size; + private ArrayList> corrections; + + /** + * Main class constructor. + * @param size Size of correction set, needs to be set equal to the read being corrected + */ + public CorrectionSet(final int size) { + this.size = size; + corrections = new ArrayList<>(size); + for (int k=0; k < size; k++) + corrections.add(k,new ArrayList()); + } + + /** + * Add a base to this correction set at a particular offset, measured from the start of the read + * @param offset Offset from start of read + * @param base base to be added to list of corrections at this offset + */ + public void add(final int offset, final byte base) { + if (offset >= size || offset < 0) + throw new IllegalStateException("Bad entry into CorrectionSet: offset > size"); + if (!BaseUtils.isRegularBase(base)) + return; // no irregular base correction + + final List storedBytes = corrections.get(offset); + storedBytes.add(base); + } + + /** + * Get list of corrections for a particular offset + * @param offset Offset of interest + * @return List of bases representing possible corrections at this offset + */ + public List get(final int offset) { + if (offset >= size || offset < 0) + throw new IllegalArgumentException("Illegal call of CorrectionSet.get(): offset 
must be < size"); + return corrections.get(offset); + } + + /** + * Get consensus correction for a particular offset. In this implementation, it just boils down to seeing if + * byte list associated with offset has identical values. If so, return this base, otherwise return null. + * @param offset + * @return Consensus base, or null if no consensus possible. + */ + public Byte getConsensusCorrection(final int offset) { + if (offset >= size || offset < 0) + throw new IllegalArgumentException("Illegal call of CorrectionSet.getConsensusCorrection(): offset must be < size"); + final List storedBytes = corrections.get(offset); + if (storedBytes.isEmpty()) + return null; + + // todo - is there a cheaper/nicer way to compare if all elements in list are identical?? + final byte lastBase = storedBytes.remove(storedBytes.size()-1); + for (final Byte b: storedBytes) { + // strict correction rule: all bases must match + if (b != lastBase) + return null; + } + + // all bytes then are equal: + return lastBase; + + } + + + + } +} diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseEdge.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseEdge.java index be5a431c4..a6ef0d1c2 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseEdge.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseEdge.java @@ -76,12 +76,10 @@ public class BaseEdge { } /** - * Copy constructor - * - * @param toCopy + * Create a new copy of this BaseEdge */ - public BaseEdge(final BaseEdge toCopy) { - this(toCopy.isRef(), toCopy.getMultiplicity()); + public BaseEdge copy() { + return new BaseEdge(isRef(), getMultiplicity()); } /** @@ -92,6 +90,34 @@ public class BaseEdge { return multiplicity; } + /** + * Get the DOT format label for this edge, to be displayed when printing this edge to a DOT file + * @return a non-null string + */ + public 
String getDotLabel() { + return Integer.toString(getMultiplicity()); + } + + /** + * Increase the multiplicity of this edge by incr + * @param incr the change in this multiplicity, must be >= 0 + */ + public void incMultiplicity(final int incr) { + if ( incr < 0 ) throw new IllegalArgumentException("incr must be >= 0 but got " + incr); + multiplicity += incr; + } + + /** + * A special assessor that returns the multiplicity that should be used by pruning algorithm + * + * Can be overloaded by subclasses + * + * @return the multiplicity value that should be used for pruning + */ + public int getPruningMultiplicity() { + return getMultiplicity(); + } + /** * Set the multiplicity of this edge to value * @param value an integer >= 0 @@ -117,23 +143,6 @@ public class BaseEdge { this.isRef = isRef; } - /** - * Does this and edge have the same source and target vertices in graph? - * - * @param graph the graph containing both this and edge - * @param edge our comparator edge - * @param - * @return true if we have the same source and target vertices - */ - public boolean hasSameSourceAndTarget(final BaseGraph graph, final BaseEdge edge) { - return (graph.getEdgeSource(this).equals(graph.getEdgeSource(edge))) && (graph.getEdgeTarget(this).equals(graph.getEdgeTarget(edge))); - } - - // For use when comparing edges across graphs! 
- public boolean seqEquals( final BaseGraph graph, final BaseEdge edge, final BaseGraph graph2 ) { - return (graph.getEdgeSource(this).seqEquals(graph2.getEdgeSource(edge))) && (graph.getEdgeTarget(this).seqEquals(graph2.getEdgeTarget(edge))); - } - /** * Sorts a collection of BaseEdges in decreasing order of weight, so that the most * heavily weighted is at the start of the list @@ -187,4 +196,12 @@ public class BaseEdge { if ( edge == null ) throw new IllegalArgumentException("edge cannot be null"); return new BaseEdge(isRef() || edge.isRef(), Math.max(getMultiplicity(), edge.getMultiplicity())); } + + @Override + public String toString() { + return "BaseEdge{" + + "multiplicity=" + multiplicity + + ", isRef=" + isRef + + '}'; + } } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseGraph.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseGraph.java index 7ce57e2e7..2b37d90c2 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseGraph.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseGraph.java @@ -66,34 +66,16 @@ import java.util.*; * Date: 2/6/13 */ @Invariant("!this.isAllowingMultipleEdges()") -public class BaseGraph extends DefaultDirectedGraph { +public class BaseGraph extends DefaultDirectedGraph { protected final static Logger logger = Logger.getLogger(BaseGraph.class); private final int kmerSize; - /** - * Construct an empty BaseGraph - */ - public BaseGraph() { - this(11); - } - - /** - * Edge factory that creates non-reference multiplicity 1 edges - * @param the new of our vertices - */ - private static class MyEdgeFactory implements EdgeFactory { - @Override - public BaseEdge createEdge(T sourceVertex, T targetVertex) { - return new BaseEdge(false, 1); - } - } - /** * Construct a DeBruijnGraph with kmerSize * @param kmerSize */ - public BaseGraph(final int kmerSize) { - super(new 
MyEdgeFactory()); + public BaseGraph(final int kmerSize, final EdgeFactory edgeFactory) { + super(edgeFactory); if ( kmerSize < 1 ) throw new IllegalArgumentException("kmerSize must be >= 1 but got " + kmerSize); this.kmerSize = kmerSize; @@ -111,7 +93,7 @@ public class BaseGraph extends DefaultDirectedGraph extends DefaultDirectedGraph extends DefaultDirectedGraph extends DefaultDirectedGraph getSources() { - final Set set = new LinkedHashSet(); - for ( final T v : vertexSet() ) + public Set getSources() { + final Set set = new LinkedHashSet(); + for ( final V v : vertexSet() ) if ( isSource(v) ) set.add(v); return set; @@ -153,9 +135,9 @@ public class BaseGraph extends DefaultDirectedGraph getSinks() { - final Set set = new LinkedHashSet(); - for ( final T v : vertexSet() ) + public Set getSinks() { + final Set set = new LinkedHashSet(); + for ( final V v : vertexSet() ) if ( isSink(v) ) set.add(v); return set; @@ -167,7 +149,7 @@ public class BaseGraph extends DefaultDirectedGraph extends DefaultDirectedGraph extends DefaultDirectedGraph extends DefaultDirectedGraph extends DefaultDirectedGraph extends DefaultDirectedGraph extends DefaultDirectedGraph extends DefaultDirectedGraph extends DefaultDirectedGraph extends DefaultDirectedGraph extends DefaultDirectedGraph vertices) { - for ( final T v : vertices ) + public void addVertices(final Collection vertices) { + for ( final V v : vertices ) addVertex(v); } @@ -349,8 +341,12 @@ public class BaseGraph extends DefaultDirectedGraph extends DefaultDirectedGraph extends DefaultDirectedGraph outgoingVerticesOf(final T v) { - final Set s = new LinkedHashSet(); - for ( final BaseEdge e : outgoingEdgesOf(v) ) { + public Set outgoingVerticesOf(final V v) { + final Set s = new LinkedHashSet(); + for ( final E e : outgoingEdgesOf(v) ) { s.add(getEdgeTarget(e)); } return s; @@ -384,14 +380,25 @@ public class BaseGraph extends DefaultDirectedGraph v */ - public Set incomingVerticesOf(final T v) { - final Set s = new 
LinkedHashSet(); - for ( final BaseEdge e : incomingEdgesOf(v) ) { + public Set incomingVerticesOf(final V v) { + final Set s = new LinkedHashSet(); + for ( final E e : incomingEdgesOf(v) ) { s.add(getEdgeSource(e)); } return s; } + /** + * Get the set of vertices connected to v by incoming or outgoing edges + * @param v a non-null vertex + * @return a set of vertices {X} connected X -> v or v -> Y + */ + public Set neighboringVerticesOf(final V v) { + final Set s = incomingVerticesOf(v); + s.addAll(outgoingVerticesOf(v)); + return s; + } + /** * Print out the graph in the dot language for visualization * @param destination File to write to @@ -413,15 +420,16 @@ public class BaseGraph extends DefaultDirectedGraph " + getEdgeTarget(edge).toString() + " [" + (edge.getMultiplicity() > 0 && edge.getMultiplicity() <= pruneFactor ? "style=dotted,color=grey," : "") + "label=\"" + edge.getMultiplicity() + "\"];"); + for( final E edge : edgeSet() ) { + graphWriter.println("\t" + getEdgeSource(edge).toString() + " -> " + getEdgeTarget(edge).toString() + " [" + (edge.getMultiplicity() > 0 && edge.getMultiplicity() <= pruneFactor ? 
"style=dotted,color=grey," : "") + "label=\"" + edge.getDotLabel() + "\"];"); if( edge.isRef() ) { graphWriter.println("\t" + getEdgeSource(edge).toString() + " -> " + getEdgeTarget(edge).toString() + " [color=red];"); } } - for( final T v : vertexSet() ) { - graphWriter.println("\t" + v.toString() + " [label=\"" + new String(getAdditionalSequence(v)) + "\",shape=box]"); + for( final V v : vertexSet() ) { +// graphWriter.println("\t" + v.toString() + " [label=\"" + v + "\",shape=box]"); + graphWriter.println("\t" + v.toString() + " [label=\"" + new String(getAdditionalSequence(v)) + v.additionalInfo() + "\",shape=box]"); } if ( writeHeader ) @@ -439,10 +447,10 @@ public class BaseGraph extends DefaultDirectedGraph edgesToCheck = new HashSet(); + final Set edgesToCheck = new HashSet(); edgesToCheck.addAll(incomingEdgesOf(getReferenceSourceVertex())); while( !edgesToCheck.isEmpty() ) { - final BaseEdge e = edgesToCheck.iterator().next(); + final E e = edgesToCheck.iterator().next(); if( !e.isRef() ) { edgesToCheck.addAll( incomingEdgesOf(getEdgeSource(e)) ); removeEdge(e); @@ -452,7 +460,7 @@ public class BaseGraph extends DefaultDirectedGraph extends DefaultDirectedGraph edgesToRemove = new ArrayList(); - for( final BaseEdge e : edgeSet() ) { - if( e.getMultiplicity() <= pruneFactor && !e.isRef() ) { // remove non-reference edges with weight less than or equal to the pruning factor + final List edgesToRemove = new ArrayList<>(); + for( final E e : edgeSet() ) { + if( e.getPruningMultiplicity() <= pruneFactor && !e.isRef() ) { // remove non-reference edges with weight less than or equal to the pruning factor edgesToRemove.add(e); } } @@ -480,13 +488,25 @@ public class BaseGraph extends DefaultDirectedGraph pruner = new LowWeightChainPruner<>(pruneFactor); + pruner.pruneLowWeightChains(this); + } + /** * Remove all vertices in the graph that have in and out degree of 0 */ protected void removeSingletonOrphanVertices() { // Run through the graph and clean up singular 
orphaned nodes - final List verticesToRemove = new LinkedList(); - for( final T v : vertexSet() ) { + final List verticesToRemove = new LinkedList<>(); + for( final V v : vertexSet() ) { if( inDegreeOf(v) == 0 && outDegreeOf(v) == 0 ) { verticesToRemove.add(v); } @@ -499,11 +519,11 @@ public class BaseGraph extends DefaultDirectedGraph toRemove = new HashSet(vertexSet()); + final HashSet toRemove = new HashSet<>(vertexSet()); - final T refV = getReferenceSourceVertex(); + final V refV = getReferenceSourceVertex(); if ( refV != null ) { - for ( final T v : new BaseGraphIterator(this, refV, true, true) ) { + for ( final V v : new BaseGraphIterator<>(this, refV, true, true) ) { toRemove.remove(v); } } @@ -524,22 +544,31 @@ public class BaseGraph extends DefaultDirectedGraph onPathFromRefSource = new HashSet(vertexSet().size()); - for ( final T v : new BaseGraphIterator(this, getReferenceSourceVertex(), false, true) ) { + final Set onPathFromRefSource = new HashSet<>(vertexSet().size()); + for ( final V v : new BaseGraphIterator<>(this, getReferenceSourceVertex(), false, true) ) { onPathFromRefSource.add(v); } // get the set of vertices we can reach by going backward from the ref sink - final Set onPathFromRefSink = new HashSet(vertexSet().size()); - for ( final T v : new BaseGraphIterator(this, getReferenceSinkVertex(), true, false) ) { + final Set onPathFromRefSink = new HashSet<>(vertexSet().size()); + for ( final V v : new BaseGraphIterator<>(this, getReferenceSinkVertex(), true, false) ) { onPathFromRefSink.add(v); } // we want to remove anything that's not in both the sink and source sets - final Set verticesToRemove = new HashSet(vertexSet()); + final Set verticesToRemove = new HashSet<>(vertexSet()); onPathFromRefSource.retainAll(onPathFromRefSink); verticesToRemove.removeAll(onPathFromRefSource); removeAllVertices(verticesToRemove); + + // simple sanity checks that this algorithm is working. 
+ if ( getSinks().size() > 1 ) { + throw new IllegalStateException("Should have eliminated all but the reference sink, but found " + getSinks()); + } + + if ( getSources().size() > 1 ) { + throw new IllegalStateException("Should have eliminated all but the reference source, but found " + getSources()); + } } /** @@ -555,11 +584,11 @@ public class BaseGraph extends DefaultDirectedGraph the type of the nodes in those graphs * @return true if g1 and g2 are equals */ - public static boolean graphEquals(final BaseGraph g1, BaseGraph g2) { + public static boolean graphEquals(final BaseGraph g1, BaseGraph g2) { final Set vertices1 = g1.vertexSet(); final Set vertices2 = g2.vertexSet(); - final Set edges1 = g1.edgeSet(); - final Set edges2 = g2.edgeSet(); + final Set edges1 = g1.edgeSet(); + final Set edges2 = g2.edgeSet(); if ( vertices1.size() != vertices2.size() || edges1.size() != edges2.size() ) return false; @@ -571,29 +600,35 @@ public class BaseGraph extends DefaultDirectedGraph graph2 ) { + return (this.getEdgeSource(edge1).seqEquals(graph2.getEdgeSource(edge2))) && (this.getEdgeTarget(edge1).seqEquals(graph2.getEdgeTarget(edge2))); + } + + /** * Get the incoming edge of v. Requires that there be only one such edge or throws an error * @param v our vertex * @return the single incoming edge to v, or null if none exists */ - public BaseEdge incomingEdgeOf(final T v) { + public E incomingEdgeOf(final V v) { return getSingletonEdge(incomingEdgesOf(v)); } @@ -602,7 +637,7 @@ public class BaseGraph extends DefaultDirectedGraph extends DefaultDirectedGraph edges) { + private E getSingletonEdge(final Collection edges) { if ( edges.size() > 1 ) throw new IllegalArgumentException("Cannot get a single incoming edge for a vertex with multiple incoming edges " + edges); return edges.isEmpty() ? 
null : edges.iterator().next(); } @@ -625,12 +660,87 @@ public class BaseGraph extends DefaultDirectedGraph path) { + if ( path == null ) throw new IllegalArgumentException("Path cannot be null"); + + final StringBuffer sb = new StringBuffer(); + for ( final DeBruijnVertex v : path ) + sb.append((char)v.getSuffix()); + + return sb.toString().getBytes(); + } + + /** + * Get the set of vertices within distance edges of source, regardless of edge direction + * + * @param source the source vertex to consider + * @param distance the distance + * @return a set of vertices within distance of source + */ + protected Set verticesWithinDistance(final V source, final int distance) { + if ( distance == 0 ) + return Collections.singleton(source); + + final Set found = new HashSet<>(); + found.add(source); + for ( final V v : neighboringVerticesOf(source) ) { + found.addAll(verticesWithinDistance(v, distance - 1)); + } + + return found; + } + + /** + * Get a graph containing only the vertices within distance edges of target + * @param target a vertex in graph + * @param distance the max distance + * @return a non-null graph + */ + public BaseGraph subsetToNeighbors(final V target, final int distance) { + if ( target == null ) throw new IllegalArgumentException("Target cannot be null"); + if ( ! 
containsVertex(target) ) throw new IllegalArgumentException("Graph doesn't contain vertex " + target); + if ( distance < 0 ) throw new IllegalArgumentException("Distance must be >= 0 but got " + distance); + + + final Set toKeep = verticesWithinDistance(target, distance); + final Set toRemove = new HashSet<>(vertexSet()); + toRemove.removeAll(toKeep); + + final BaseGraph result = (BaseGraph)clone(); + result.removeAllVertices(toRemove); + + return result; + } + + /** + * Get a subgraph of graph that contains only vertices within 10 edges of the ref source vertex + * @return a non-null subgraph of this graph + */ + public BaseGraph subsetToRefSource() { + return subsetToNeighbors(getReferenceSourceVertex(), 10); + } } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseGraphIterator.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseGraphIterator.java index 7c33e060d..ee198185d 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseGraphIterator.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseGraphIterator.java @@ -60,10 +60,10 @@ import java.util.LinkedList; * Date: 3/24/13 * Time: 4:41 PM */ -public class BaseGraphIterator implements Iterator, Iterable { +public class BaseGraphIterator implements Iterator, Iterable { final HashSet visited = new HashSet(); final LinkedList toVisit = new LinkedList(); - final BaseGraph graph; + final BaseGraph graph; final boolean followIncomingEdges, followOutgoingEdges; /** @@ -78,7 +78,7 @@ public class BaseGraphIterator implements Iterator, Ite * traversal? (goes backward through the graph) * @param followOutgoingEdges should we follow outgoing edges during out traversal? 
*/ - public BaseGraphIterator(final BaseGraph graph, final T start, + public BaseGraphIterator(final BaseGraph graph, final T start, final boolean followIncomingEdges, final boolean followOutgoingEdges) { if ( graph == null ) throw new IllegalArgumentException("graph cannot be null"); if ( start == null ) throw new IllegalArgumentException("start cannot be null"); diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseVertex.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseVertex.java index b075a69a6..18a3ce1eb 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseVertex.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseVertex.java @@ -57,6 +57,8 @@ import java.util.Arrays; * @since 03/2013 */ public class BaseVertex { + /** placeholder to store additional information for debugging purposes */ + String additionalInfo = ""; final byte[] sequence; private final static int UNASSIGNED_HASHCODE = -1; int cachedHashCode = UNASSIGNED_HASHCODE; @@ -176,4 +178,18 @@ public class BaseVertex { public byte[] getAdditionalSequence(final boolean source) { return getSequence(); } + + /** + * Set additional debugging information for this vertex + * @param info + */ + public void setAdditionalInfo(final String info) { + if ( info == null ) throw new IllegalArgumentException("info cannot be null"); + additionalInfo = info; + } + + /** + * @return the additional information for display about this vertex + */ + public String additionalInfo() { return additionalInfo; } } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/CommonSuffixSplitter.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/CommonSuffixSplitter.java index 0665186c6..69b42cee6 100644 --- 
a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/CommonSuffixSplitter.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/CommonSuffixSplitter.java @@ -126,10 +126,10 @@ public class CommonSuffixSplitter { edgesToRemove.add(out); } - graph.addEdge(suffixV, graph.getEdgeTarget(out), new BaseEdge(out)); + graph.addEdge(suffixV, graph.getEdgeTarget(out), out.copy()); for ( final BaseEdge in : graph.incomingEdgesOf(mid) ) { - graph.addEdge(graph.getEdgeSource(in), incomingTarget, new BaseEdge(in)); + graph.addEdge(graph.getEdgeSource(in), incomingTarget, in.copy()); edgesToRemove.add(in); } } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/DeBruijnGraph.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/DeBruijnGraph.java index 13135ddce..0200ce4a2 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/DeBruijnGraph.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/DeBruijnGraph.java @@ -47,6 +47,7 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs; import com.google.java.contract.Ensures; +import org.jgrapht.EdgeFactory; import java.util.Arrays; import java.util.HashMap; @@ -58,12 +59,22 @@ import java.util.Map; * User: rpoplin * Date: 2/6/13 */ -public final class DeBruijnGraph extends BaseGraph { +public final class DeBruijnGraph extends BaseGraph { + /** + * Edge factory that creates non-reference multiplicity 1 edges + */ + private static class MyEdgeFactory implements EdgeFactory { + @Override + public BaseEdge createEdge(DeBruijnVertex sourceVertex, DeBruijnVertex targetVertex) { + return new BaseEdge(false, 1); + } + } + /** * Create an empty DeBruijnGraph with default kmer size */ public DeBruijnGraph() { - super(); + this(11); } /** @@ -71,7 +82,7 @@ public final class DeBruijnGraph extends BaseGraph { * @param 
kmerSize kmer size, must be >= 1 */ public DeBruijnGraph(int kmerSize) { - super(kmerSize); + super(kmerSize, new MyEdgeFactory()); } /** diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/DeBruijnVertex.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/DeBruijnVertex.java index c240949d9..4d9441efe 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/DeBruijnVertex.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/DeBruijnVertex.java @@ -54,7 +54,7 @@ import com.google.java.contract.Ensures; * User: ebanks, mdepristo * Date: Mar 23, 2011 */ -public final class DeBruijnVertex extends BaseVertex { +public class DeBruijnVertex extends BaseVertex { private final static byte[][] sufficesAsByteArray = new byte[256][]; static { for ( int i = 0; i < sufficesAsByteArray.length; i++ ) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/GraphUtils.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/GraphUtils.java index 30c5be190..73a1daa3e 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/GraphUtils.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/GraphUtils.java @@ -48,6 +48,7 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs; import com.google.java.contract.Ensures; import com.google.java.contract.Requires; +import org.broadinstitute.sting.utils.collections.PrimitivePair; import java.util.ArrayList; import java.util.Collection; @@ -60,7 +61,7 @@ import java.util.List; * Date: 3/25/13 * Time: 9:42 PM */ -final class GraphUtils { +final public class GraphUtils { private GraphUtils() {} /** @@ -135,4 +136,57 @@ final class GraphUtils { return min; } + /** + * Find the ending position of the longest uniquely matching + * run of bases of 
kmer in seq. + * + * for example, if seq = ACGT and kmer is NAC, this function returns 1,2 as we have the following + * match: + * + * 0123 + * .ACGT + * NAC.. + * + * @param seq a non-null sequence of bytes + * @param kmer a non-null kmer + * @return the ending position and length where kmer matches uniquely in sequence, or null if no + * unique longest match can be found + */ + public static PrimitivePair.Int findLongestUniqueSuffixMatch(final byte[] seq, final byte[] kmer) { + int longestPos = -1; + int length = 0; + boolean foundDup = false; + + for ( int i = 0; i < seq.length; i++ ) { + final int matchSize = longestSuffixMatch(seq, kmer, i); + if ( matchSize > length ) { + longestPos = i; + length = matchSize; + foundDup = false; + } else if ( matchSize == length ) { + foundDup = true; + } + } + + return foundDup ? null : new PrimitivePair.Int(longestPos, length); + } + + /** + * calculates the longest suffix match between a sequence and a smaller kmer + * + * @param seq the (reference) sequence + * @param kmer the smaller kmer sequence + * @param seqStart the index (inclusive) on seq to start looking backwards from + * @return the longest matching suffix + */ + public static int longestSuffixMatch(final byte[] seq, final byte[] kmer, final int seqStart) { + for ( int len = 1; len <= kmer.length; len++ ) { + final int seqI = seqStart - len + 1; + final int kmerI = kmer.length - len; + if ( seqI < 0 || seq[seqI] != kmer[kmerI] ) { + return len - 1; + } + } + return kmer.length; + } } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/KBestPaths.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/KBestPaths.java index 466148588..3ba85dd92 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/KBestPaths.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/KBestPaths.java @@ -59,7 +59,7 @@ import java.util.*; * 
User: ebanks, rpoplin, mdepristo * Date: Mar 23, 2011 */ -public class KBestPaths { +public class KBestPaths { private final boolean allowCycles; /** @@ -93,7 +93,7 @@ public class KBestPaths { /** * @see #getKBestPaths(BaseGraph, int) retriving the best 1000 paths */ - public List> getKBestPaths( final BaseGraph graph ) { + public List> getKBestPaths( final BaseGraph graph ) { return getKBestPaths(graph, 1000); } @@ -101,28 +101,28 @@ public class KBestPaths { * @see #getKBestPaths(BaseGraph, int, java.util.Set, java.util.Set) retriving the first 1000 paths * starting from all source vertices and ending with all sink vertices */ - public List> getKBestPaths( final BaseGraph graph, final int k ) { + public List> getKBestPaths( final BaseGraph graph, final int k ) { return getKBestPaths(graph, k, graph.getSources(), graph.getSinks()); } /** * @see #getKBestPaths(BaseGraph, int, java.util.Set, java.util.Set) with k=1000 */ - public List> getKBestPaths( final BaseGraph graph, final Set sources, final Set sinks ) { + public List> getKBestPaths( final BaseGraph graph, final Set sources, final Set sinks ) { return getKBestPaths(graph, 1000, sources, sinks); } /** * @see #getKBestPaths(BaseGraph, int, java.util.Set, java.util.Set) with k=1000 */ - public List> getKBestPaths( final BaseGraph graph, final T source, final T sink ) { + public List> getKBestPaths( final BaseGraph graph, final T source, final T sink ) { return getKBestPaths(graph, 1000, source, sink); } /** * @see #getKBestPaths(BaseGraph, int, java.util.Set, java.util.Set) with singleton source and sink sets */ - public List> getKBestPaths( final BaseGraph graph, final int k, final T source, final T sink ) { + public List> getKBestPaths( final BaseGraph graph, final int k, final T source, final T sink ) { return getKBestPaths(graph, k, Collections.singleton(source), Collections.singleton(sink)); } @@ -136,20 +136,20 @@ public class KBestPaths { * @return a list with at most k top-scoring paths from the graph 
*/ @Ensures({"result != null", "result.size() <= k"}) - public List> getKBestPaths( final BaseGraph graph, final int k, final Set sources, final Set sinks ) { + public List> getKBestPaths( final BaseGraph graph, final int k, final Set sources, final Set sinks ) { if( graph == null ) { throw new IllegalArgumentException("Attempting to traverse a null graph."); } // a min max queue that will collect the best k paths - final MinMaxPriorityQueue> bestPaths = MinMaxPriorityQueue.orderedBy(new PathComparatorTotalScore()).maximumSize(k).create(); + final MinMaxPriorityQueue> bestPaths = MinMaxPriorityQueue.orderedBy(new PathComparatorTotalScore()).maximumSize(k).create(); // run a DFS for best paths for ( final T source : sources ) { - final Path startingPath = new Path(source, graph); + final Path startingPath = new Path(source, graph); findBestPaths(startingPath, sinks, bestPaths, new MyInt()); } // the MinMaxPriorityQueue iterator returns items in an arbitrary order, so we need to sort the final result - final List> toReturn = new ArrayList>(bestPaths); + final List> toReturn = new ArrayList>(bestPaths); Collections.sort(toReturn, new PathComparatorTotalScore()); return toReturn; } @@ -161,21 +161,21 @@ public class KBestPaths { * @param bestPaths a path to collect completed paths. 
* @param n used to limit the search by tracking the number of vertices visited across all paths */ - private void findBestPaths( final Path path, final Set sinks, final Collection> bestPaths, final MyInt n ) { + private void findBestPaths( final Path path, final Set sinks, final Collection> bestPaths, final MyInt n ) { if ( sinks.contains(path.getLastVertex())) { bestPaths.add(path); } else if( n.val > 10000 ) { // do nothing, just return, as we've done too much work already } else { // recursively run DFS - final ArrayList edgeArrayList = new ArrayList(path.getOutgoingEdgesOfLastVertex()); + final ArrayList edgeArrayList = new ArrayList(path.getOutgoingEdgesOfLastVertex()); Collections.sort(edgeArrayList, new BaseEdge.EdgeWeightComparator()); - for ( final BaseEdge edge : edgeArrayList ) { + for ( final E edge : edgeArrayList ) { final T target = path.getGraph().getEdgeTarget(edge); // make sure the edge is not already in the path final boolean alreadyVisited = allowCycles ? path.containsEdge(edge) : path.containsVertex(target); if ( ! alreadyVisited ) { - final Path newPath = new Path(path, edge); + final Path newPath = new Path(path, edge); n.val++; findBestPaths(newPath, sinks, bestPaths, n); } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/LowWeightChainPruner.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/LowWeightChainPruner.java new file mode 100644 index 000000000..27b6bd902 --- /dev/null +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/LowWeightChainPruner.java @@ -0,0 +1,174 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. 
with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). +* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. 
LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. 
INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. +* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. 
LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. 
This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. +*/ + +package org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs; + +import java.util.*; + +/** + /** + * Prune all chains from this graph where all edges in the path have multiplicity <= pruneFactor + * + * Unlike pruneGraph, this function will remove only linear chains in the graph where all edges have weight <= pruneFactor. 
+ * + * For A -[1]> B -[1]> C -[1]> D would be removed with pruneFactor 1 + * but A -[1]> B -[2]> C -[1]> D would not be because the linear chain includes an edge with weight >= 2 + * + * User: depristo + * Date: 5/2/13 + * Time: 10:38 AM + */ +public class LowWeightChainPruner { + private final int pruneFactor; + + public LowWeightChainPruner(int pruneFactor) { + if ( pruneFactor < 0 ) throw new IllegalArgumentException("pruneFactor must be >= 0 but got " + pruneFactor); + this.pruneFactor = pruneFactor; + } + + /** + * Prune graph + * @param graph the graph to prune + */ + public void pruneLowWeightChains(final BaseGraph graph) { + if ( graph == null ) throw new IllegalArgumentException("Graph cannot be null"); + + if ( pruneFactor > 0 ) { + final Set edgesToKeep = new LinkedHashSet<>(); + + for ( final Path linearChain : getLinearChains(graph) ) { + if( mustBeKept(linearChain, pruneFactor) ) { + // we must keep edges in any path that contains a reference edge or an edge with weight > pruneFactor + edgesToKeep.addAll(linearChain.getEdges()); + } + } + + // we want to remove all edges not in the keep set + final Set edgesToRemove = new HashSet<>(graph.edgeSet()); + edgesToRemove.removeAll(edgesToKeep); + graph.removeAllEdges(edgesToRemove); + + graph.removeSingletonOrphanVertices(); + } + } + + /** + * Traverse the edges in the path and determine if any are either ref edges or have weight above + * the pruning factor and should therefore not be pruned away. 
+ * + * @param path the path in question + * @param pruneFactor the integer pruning factor + * @return true if any edge in the path must be kept + */ + private boolean mustBeKept(final Path path, final int pruneFactor) { + for ( final E edge : path.getEdges() ) { + if ( edge.getPruningMultiplicity() >= pruneFactor || edge.isRef() ) + return true; + } + return false; + } + + /** + * Get all of the linear chains in graph + * + * A linear chain is a series of vertices that start from either a source of a vertex with + * out-degree > 1 and extend through all vertices accessible via an outgoing edge from this + * vertex that have in == 1 and out degree of 0 or 1. + * + * @param graph the graph + * @return a non-null collection of paths in graph + */ + protected final Collection> getLinearChains(final BaseGraph graph) { + final Set chainStarts = new LinkedHashSet<>(); + + for ( final V v : graph.vertexSet() ) { + // we want a list of all chain start vertices. These are all vertices with out + // degree > 1, or all source vertices. 
+ final int outDegree = graph.outDegreeOf(v); + final int inDegree = graph.inDegreeOf(v); + if ( outDegree > 1 || inDegree > 1 || (inDegree == 0 && outDegree > 0)) // don't add isolated vertices + chainStarts.add(v); + } + + // must be after since we can add duplicate starts in the above finding algorithm + final List> linearChains = new LinkedList<>(); + for ( final V chainStart : chainStarts ) { + for ( final E outEdge : graph.outgoingEdgesOf(chainStart) ) { + // these chains are composed of the starts + their next vertices + linearChains.add(extendLinearChain(new Path<>(new Path<>(chainStart, graph), outEdge))); + } + } + + return linearChains; + } + + /** + * Extend path while the last vertex has in and out degrees of 1 or 0 + * @param path the path to extend + * @return a fully extended linear path + */ + protected final Path extendLinearChain(final Path path) { + final V last = path.getLastVertex(); + final Set outEdges = path.getGraph().outgoingEdgesOf(last); + + final int outDegree = outEdges.size(); + final int inDegree = path.getGraph().inDegreeOf(last); + + if ( outDegree != 1 || inDegree > 1 ) { + // out next vertex has multiple outgoing edges, so we are done with the linear path + return path; + } else { + final V next = path.getGraph().getEdgeTarget(outEdges.iterator().next()); + if ( path.containsVertex(next) ) { + // we are done if the path contains a cycle + return path; + } else { + // we now know that last has outdegree == 1, so we keep extending the chain + return extendLinearChain(new Path<>(path, outEdges.iterator().next())); + } + } + } +} diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/MultiSampleEdge.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/MultiSampleEdge.java new file mode 100644 index 000000000..978d83eb4 --- /dev/null +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/MultiSampleEdge.java @@ -0,0 +1,128 @@ +/* +* 
By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). +* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. 
LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. 
INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. +* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. 
LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. 
This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. +*/ + +package org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs; + +import java.util.PriorityQueue; + +/** + * edge class for connecting nodes in the graph that tracks some per-sample information + * + * This class extends BaseEdge with the additional functionality of tracking the maximum + * multiplicity seen within any single sample. 
The workflow for using this class is: + * + * MultiSampleEdge e = new MultiSampleEdge(ref, 1) + * e.incMultiplicity(1) // total is 2, per sample is 2, max per sample is 1 + * e.getPruningMultiplicity() // = 1 + * e.flushSingleSampleMultiplicity() // total is 2, per sample is 0, max per sample is 2 + * e.getPruningMultiplicity() // = 2 + * e.incMultiplicity(3) // total is 5, per sample is 3, max per sample is 2 + * e.getPruningMultiplicity() // = 2 + * e.flushSingleSampleMultiplicity() // total is 5, per sample is 0, max per sample is 3 + * e.getPruningMultiplicity() // = 3 + */ +public class MultiSampleEdge extends BaseEdge { + private int currentSingleSampleMultiplicity; + private final int singleSampleCapacity; + private final PriorityQueue singleSampleMultiplicities; + + /** + * Create a new MultiSampleEdge with weight multiplicity and, if isRef == true, indicates a path through the reference + * + * @param isRef indicates whether this edge is a path through the reference + * @param multiplicity the number of observations of this edge in this sample + * @param singleSampleCapacity the max number of samples to track edge multiplicities + */ + public MultiSampleEdge(final boolean isRef, final int multiplicity, final int singleSampleCapacity) { + super(isRef, multiplicity); + + if( singleSampleCapacity <= 0 ) { throw new IllegalArgumentException("singleSampleCapacity must be > 0 but found: " + singleSampleCapacity); } + singleSampleMultiplicities = new PriorityQueue<>(singleSampleCapacity); + singleSampleMultiplicities.add(multiplicity); + currentSingleSampleMultiplicity = multiplicity; + this.singleSampleCapacity = singleSampleCapacity; + } + + @Override + public MultiSampleEdge copy() { + return new MultiSampleEdge(isRef(), getMultiplicity(), singleSampleCapacity); // TODO -- should I copy values for other features? 
+ } + + /** + * update the single sample multiplicities by adding the current single sample multiplicity to the priority queue, and + * reset the current single sample multiplicity to 0. + */ + public void flushSingleSampleMultiplicity() { + singleSampleMultiplicities.add(currentSingleSampleMultiplicity); + if( singleSampleMultiplicities.size() == singleSampleCapacity + 1 ) { + singleSampleMultiplicities.poll(); // remove the lowest multiplicity from the list + } else if( singleSampleMultiplicities.size() > singleSampleCapacity + 1 ) { + throw new IllegalStateException("Somehow the per sample multiplicity list has grown too big: " + singleSampleMultiplicities); + } + currentSingleSampleMultiplicity = 0; + } + + @Override + public void incMultiplicity(final int incr) { + super.incMultiplicity(incr); + currentSingleSampleMultiplicity += incr; + } + + @Override + public int getPruningMultiplicity() { + return singleSampleMultiplicities.peek(); + } + + @Override + public String getDotLabel() { + return super.getDotLabel() + "/" + getPruningMultiplicity(); + } + + /** only provided for testing purposes */ + protected int getCurrentSingleSampleMultiplicity() { + return currentSingleSampleMultiplicity; + } +} diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/Path.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/Path.java index 47676a498..2e84e1d22 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/Path.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/Path.java @@ -47,13 +47,12 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs; import com.google.java.contract.Ensures; -import com.google.java.contract.Requires; import net.sf.samtools.Cigar; import net.sf.samtools.CigarElement; import net.sf.samtools.CigarOperator; import org.apache.commons.lang.ArrayUtils; -import 
org.broadinstitute.sting.utils.smithwaterman.Parameters; -import org.broadinstitute.sting.utils.smithwaterman.SWPairwiseAlignment; +import org.apache.log4j.Logger; +import org.broadinstitute.sting.utils.smithwaterman.*; import org.broadinstitute.sting.utils.sam.AlignmentUtils; import java.util.*; @@ -68,40 +67,39 @@ import java.util.*; * Time: 2:34 PM * */ -public class Path { - private final static int MAX_CIGAR_ELEMENTS_BEFORE_FAILING_SW = 20; +public class Path { + private final static String SW_PAD = "NNNNNNNNNN"; + private final static Logger logger = Logger.getLogger(Path.class); // the last vertex seen in the path private final T lastVertex; // the list of edges comprising the path - private Set edgesAsSet = null; - private final LinkedList edgesInOrder; + private Set edgesAsSet = null; + private final LinkedList edgesInOrder; // the scores for the path private final int totalScore; // the graph from which this path originated - private final BaseGraph graph; + private final BaseGraph graph; // used in the bubble state machine to apply Smith-Waterman to the bubble sequence // these values were chosen via optimization against the NA12878 knowledge base public static final Parameters NEW_SW_PARAMETERS = new Parameters(20.0, -15.0, -26.0, -1.1); - private static final byte[] STARTING_SW_ANCHOR_BYTES = "XXXXXXXXX".getBytes(); - /** * Create a new Path containing no edges and starting at initialVertex * @param initialVertex the starting vertex of the path - * @param graph the graph this path with follow through + * @param graph the graph this path will follow through */ - public Path(final T initialVertex, final BaseGraph graph) { + public Path(final T initialVertex, final BaseGraph graph) { if ( initialVertex == null ) throw new IllegalArgumentException("initialVertex cannot be null"); if ( graph == null ) throw new IllegalArgumentException("graph cannot be null"); if ( ! 
graph.containsVertex(initialVertex) ) throw new IllegalArgumentException("Vertex " + initialVertex + " must be part of graph " + graph); lastVertex = initialVertex; - edgesInOrder = new LinkedList(); + edgesInOrder = new LinkedList(); totalScore = 0; this.graph = graph; } @@ -109,10 +107,10 @@ public class Path { /** * Convenience constructor for testing that creates a path through vertices in graph */ - protected static Path makePath(final List vertices, final BaseGraph graph) { - Path path = new Path(vertices.get(0), graph); + protected static Path makePath(final List vertices, final BaseGraph graph) { + Path path = new Path(vertices.get(0), graph); for ( int i = 1; i < vertices.size(); i++ ) - path = new Path(path, graph.getEdge(path.lastVertex, vertices.get(i))); + path = new Path(path, graph.getEdge(path.lastVertex, vertices.get(i))); return path; } @@ -122,7 +120,7 @@ public class Path { * @param p the path to extend * @param edge the edge to extend path by */ - public Path(final Path p, final BaseEdge edge) { + public Path(final Path p, final E edge) { if ( p == null ) throw new IllegalArgumentException("Path cannot be null"); if ( edge == null ) throw new IllegalArgumentException("Edge cannot be null"); if ( ! 
p.graph.containsEdge(edge) ) throw new IllegalArgumentException("Graph must contain edge " + edge + " but it doesn't"); @@ -130,7 +128,7 @@ public class Path { graph = p.graph; lastVertex = p.graph.getEdgeTarget(edge); - edgesInOrder = new LinkedList(p.getEdges()); + edgesInOrder = new LinkedList(p.getEdges()); edgesInOrder.add(edge); totalScore = p.totalScore + edge.getMultiplicity(); } @@ -139,7 +137,7 @@ public class Path { * Get the collection of edges leaving the last vertex of this path * @return a non-null collection */ - public Collection getOutgoingEdgesOfLastVertex() { + public Collection getOutgoingEdgesOfLastVertex() { return getGraph().outgoingEdgesOf(getLastVertex()); } @@ -148,12 +146,12 @@ public class Path { * @param edge the given edge to test * @return true if the edge is found in this path */ - public boolean containsEdge( final BaseEdge edge ) { + public boolean containsEdge( final E edge ) { if( edge == null ) { throw new IllegalArgumentException("Attempting to test null edge."); } if ( edgesInOrder.isEmpty() ) return false; // initialize contains cache if necessary - if ( edgesAsSet == null ) edgesAsSet = new HashSet(edgesInOrder); + if ( edgesAsSet == null ) edgesAsSet = new HashSet(edgesInOrder); return edgesAsSet.contains(edge); } @@ -175,7 +173,7 @@ public class Path { * @param path the other path we might be the same as * @return true if this and path are the same */ - protected boolean pathsAreTheSame(Path path) { + protected boolean pathsAreTheSame(Path path) { return totalScore == path.totalScore && edgesInOrder.equals(path.edgesInOrder); } @@ -199,7 +197,7 @@ public class Path { * @return a non-null graph */ @Ensures("result != null") - public BaseGraph getGraph() { + public BaseGraph getGraph() { return graph; } @@ -208,7 +206,7 @@ public class Path { * @return a non-null list of edges */ @Ensures("result != null") - public List getEdges() { return edgesInOrder; } + public List getEdges() { return edgesInOrder; } /** * Get the list 
of vertices in this path in order defined by the edges of the path @@ -221,7 +219,7 @@ public class Path { else { final LinkedList vertices = new LinkedList(); boolean first = true; - for ( final BaseEdge e : getEdges() ) { + for ( final E e : getEdges() ) { if ( first ) { vertices.add(graph.getEdgeSource(e)); first = false; @@ -246,6 +244,14 @@ public class Path { @Ensures("result != null") public T getLastVertex() { return lastVertex; } + /** + * Get the first vertex in this path + * @return a non-null vertex + */ + public T getFirstVertex() { + return getGraph().getEdgeSource(edgesInOrder.pollFirst()); + } + /** * The base sequence for this path. Pull the full sequence for source nodes and then the suffix for all subsequent nodes * @return non-null sequence of bases corresponding to this path @@ -255,174 +261,114 @@ public class Path { if( getEdges().isEmpty() ) { return graph.getAdditionalSequence(lastVertex); } byte[] bases = graph.getAdditionalSequence(graph.getEdgeSource(edgesInOrder.getFirst())); - for( final BaseEdge e : edgesInOrder ) { + for( final E e : edgesInOrder ) { bases = ArrayUtils.addAll(bases, graph.getAdditionalSequence(graph.getEdgeTarget(e))); } return bases; } /** - * Calculate the cigar string for this path using a bubble traversal of the assembly graph and running a Smith-Waterman alignment on each bubble - * @return non-null Cigar string with reference length equal to the refHaplotype's reference length + * Calculate the cigar elements for this path against the reference sequence + * + * @param refSeq the reference sequence that all of the bases in this path should align to + * @return a Cigar mapping this path to refSeq, or null if no reasonable alignment could be found */ - @Ensures("result != null") - public Cigar calculateCigar() { - final Cigar cigar = new Cigar(); - // special case for paths that start on reference but not at the reference source node - if( edgesInOrder.getFirst().isRef() && 
!graph.isRefSource(edgesInOrder.getFirst()) ) { - for( final CigarElement ce : calculateCigarForCompleteBubble(null, null, graph.getEdgeSource(edgesInOrder.getFirst())).getCigarElements() ) { - cigar.add(ce); - } + public Cigar calculateCigar(final byte[] refSeq) { + if ( getBases().length == 0 ) { + // horrible edge case from the unit tests, where this path has no bases + return new Cigar(Arrays.asList(new CigarElement(refSeq.length, CigarOperator.D))); } - // reset the bubble state machine - final BubbleStateMachine bsm = new BubbleStateMachine(cigar); + final byte[] bases = getBases(); + final Cigar nonStandard; - for( final BaseEdge e : getEdges() ) { - if ( e.hasSameSourceAndTarget(graph, edgesInOrder.getFirst()) ) { - advanceBubbleStateMachine( bsm, graph.getEdgeSource(e), null ); - } - advanceBubbleStateMachine( bsm, graph.getEdgeTarget(e), e ); + final String paddedRef = SW_PAD + new String(refSeq) + SW_PAD; + final String paddedPath = SW_PAD + new String(bases) + SW_PAD; + final SmithWaterman alignment = new SWPairwiseAlignment( paddedRef.getBytes(), paddedPath.getBytes(), NEW_SW_PARAMETERS ); + + if ( isSWFailure(alignment) ) + return null; + + // cut off the padding bases + final int baseStart = SW_PAD.length(); + final int baseEnd = paddedPath.length() - SW_PAD.length() - 1; // -1 because it's inclusive + nonStandard = AlignmentUtils.trimCigarByBases(alignment.getCigar(), baseStart, baseEnd); + + if ( nonStandard.getReferenceLength() != refSeq.length ) { + nonStandard.add(new CigarElement(refSeq.length - nonStandard.getReferenceLength(), CigarOperator.D)); } - // special case for paths that don't end on reference - if( bsm.inBubble ) { - for( final CigarElement ce : calculateCigarForCompleteBubble(bsm.bubbleBytes, bsm.lastSeenReferenceNode, null).getCigarElements() ) { - bsm.cigar.add(ce); - } - } else if( edgesInOrder.getLast().isRef() && !graph.isRefSink(edgesInOrder.getLast()) ) { // special case for paths that end of the reference but haven't 
completed the entire reference circuit - for( final CigarElement ce : calculateCigarForCompleteBubble(bsm.bubbleBytes, graph.getEdgeTarget(edgesInOrder.getLast()), null).getCigarElements() ) { - bsm.cigar.add(ce); - } - } - - return AlignmentUtils.consolidateCigar(bsm.cigar); + // finally, return the cigar with all indels left aligned + return leftAlignCigarSequentially(nonStandard, refSeq, getBases(), 0, 0); } /** - * Advance the bubble state machine by incorporating the next node in the path. - * @param bsm the current bubble state machine - * @param node the node to be incorporated - * @param e the edge which generated this node in the path + * Make sure that the SW didn't fail in some terrible way, and throw exception if it did */ - @Requires({"bsm != null", "graph != null", "node != null"}) - private void advanceBubbleStateMachine( final BubbleStateMachine bsm, final T node, final BaseEdge e ) { - if( graph.isReferenceNode( node ) ) { - if( !bsm.inBubble ) { // just add the ref bases as M's in the Cigar string, and don't do anything else - if( e !=null && !e.isRef() ) { - if( graph.referencePathExists( graph.getEdgeSource(e), node) ) { - for( final CigarElement ce : calculateCigarForCompleteBubble(null, graph.getEdgeSource(e), node).getCigarElements() ) { - bsm.cigar.add(ce); - } - bsm.cigar.add( new CigarElement( graph.getAdditionalSequence(node).length, CigarOperator.M) ); - } else if ( graph.getEdgeSource(e).equals(graph.getEdgeTarget(e)) ) { // alt edge at ref node points to itself - bsm.cigar.add( new CigarElement( graph.getAdditionalSequence(node).length, CigarOperator.I) ); - } else { - bsm.inBubble = true; - bsm.bubbleBytes = null; - bsm.lastSeenReferenceNode = graph.getEdgeSource(e); - bsm.bubbleBytes = ArrayUtils.addAll( bsm.bubbleBytes, graph.getAdditionalSequence(node) ); - } - } else { - bsm.cigar.add( new CigarElement( graph.getAdditionalSequence(node).length, CigarOperator.M) ); - } - } else if( bsm.lastSeenReferenceNode != null && 
!graph.referencePathExists( bsm.lastSeenReferenceNode, node ) ) { // add bases to the bubble string until we get back to the reference path - bsm.bubbleBytes = ArrayUtils.addAll( bsm.bubbleBytes, graph.getAdditionalSequence(node) ); - } else { // close the bubble and use a local SW to determine the Cigar string - for( final CigarElement ce : calculateCigarForCompleteBubble(bsm.bubbleBytes, bsm.lastSeenReferenceNode, node).getCigarElements() ) { - bsm.cigar.add(ce); - } - bsm.inBubble = false; - bsm.bubbleBytes = null; - bsm.lastSeenReferenceNode = null; - bsm.cigar.add( new CigarElement( graph.getAdditionalSequence(node).length, CigarOperator.M) ); - } - } else { // non-ref vertex - if( bsm.inBubble ) { // just keep accumulating until we get back to the reference path - bsm.bubbleBytes = ArrayUtils.addAll( bsm.bubbleBytes, graph.getAdditionalSequence(node) ); - } else { // open up a bubble - bsm.inBubble = true; - bsm.bubbleBytes = null; - bsm.lastSeenReferenceNode = (e != null ? graph.getEdgeSource(e) : null ); - bsm.bubbleBytes = ArrayUtils.addAll( bsm.bubbleBytes, graph.getAdditionalSequence(node) ); - } + private boolean isSWFailure(final SmithWaterman alignment) { + // check that the alignment starts at the first base, which it should given the padding + if ( alignment.getAlignmentStart2wrt1() > 0 ) { + return true; +// throw new IllegalStateException("SW failure ref " + paddedRef + " vs. " + paddedPath + " should always start at 0, but got " + alignment.getAlignmentStart2wrt1() + " with cigar " + alignment.getCigar()); } + + // check that we aren't getting any S operators (which would be very bad downstream) + for ( final CigarElement ce : alignment.getCigar().getCigarElements() ) { + if ( ce.getOperator() == CigarOperator.S ) + return true; + // soft clips at the end of the alignment are really insertions +// throw new IllegalStateException("SW failure ref " + paddedRef + " vs. 
" + paddedPath + " should never contain S operators but got cigar " + alignment.getCigar()); + } + + return false; } /** - * Now that we have a completed bubble run a Smith-Waterman alignment to determine the cigar string for this bubble - * @param bubbleBytes the bytes that comprise the alternate allele path in this bubble - * @param fromVertex the vertex that marks the beginning of the reference path in this bubble (null indicates ref source vertex) - * @param toVertex the vertex that marks the end of the reference path in this bubble (null indicates ref sink vertex) - * @return the cigar string generated by running a SW alignment between the reference and alternate paths in this bubble + * Left align the given cigar sequentially. This is needed because AlignmentUtils doesn't accept cigars with more than one indel in them. + * This is a target of future work to incorporate and generalize into AlignmentUtils for use by others. + * @param cigar the cigar to left align + * @param refSeq the reference byte array + * @param readSeq the read byte array + * @param refIndex 0-based alignment start position on ref + * @param readIndex 0-based alignment start position on read + * @return the left-aligned cigar */ - @Requires({"graph != null"}) - @Ensures({"result != null"}) - private Cigar calculateCigarForCompleteBubble( final byte[] bubbleBytes, final T fromVertex, final T toVertex ) { - final byte[] refBytes = graph.getReferenceBytes(fromVertex == null ? graph.getReferenceSourceVertex() : fromVertex, toVertex == null ? 
graph.getReferenceSinkVertex() : toVertex, fromVertex == null, toVertex == null); - - final Cigar returnCigar = new Cigar(); - - // add padding to anchor ref/alt bases in the SW matrix - byte[] padding = STARTING_SW_ANCHOR_BYTES; - boolean goodAlignment = false; - SWPairwiseAlignment swConsensus = null; - while( !goodAlignment && padding.length < 1000 ) { - padding = ArrayUtils.addAll(padding, padding); // double the size of the padding each time - final byte[] reference = ArrayUtils.addAll( ArrayUtils.addAll(padding, refBytes), padding ); - final byte[] alternate = ArrayUtils.addAll( ArrayUtils.addAll(padding, bubbleBytes), padding ); - swConsensus = new SWPairwiseAlignment( reference, alternate, NEW_SW_PARAMETERS ); - if( swConsensus.getAlignmentStart2wrt1() == 0 && !swConsensus.getCigar().toString().contains("S") && swConsensus.getCigar().getReferenceLength() == reference.length ) { - goodAlignment = true; + @Ensures({"cigar != null", "refSeq != null", "readSeq != null", "refIndex >= 0", "readIndex >= 0"}) + protected static Cigar leftAlignCigarSequentially(final Cigar cigar, final byte[] refSeq, final byte[] readSeq, int refIndex, int readIndex) { + final Cigar cigarToReturn = new Cigar(); + Cigar cigarToAlign = new Cigar(); + for (int i = 0; i < cigar.numCigarElements(); i++) { + final CigarElement ce = cigar.getCigarElement(i); + if (ce.getOperator() == CigarOperator.D || ce.getOperator() == CigarOperator.I) { + cigarToAlign.add(ce); + final Cigar leftAligned = AlignmentUtils.leftAlignSingleIndel(cigarToAlign, refSeq, readSeq, refIndex, readIndex, false); + for ( final CigarElement toAdd : leftAligned.getCigarElements() ) { cigarToReturn.add(toAdd); } + refIndex += cigarToAlign.getReferenceLength(); + readIndex += cigarToAlign.getReadLength(); + cigarToAlign = new Cigar(); + } else { + cigarToAlign.add(ce); } } - if( !goodAlignment ) { - returnCigar.add(new CigarElement(1, CigarOperator.N)); - return returnCigar; - } - - final Cigar swCigar = 
swConsensus.getCigar(); - if( swCigar.numCigarElements() > MAX_CIGAR_ELEMENTS_BEFORE_FAILING_SW ) { // this bubble is too divergent from the reference - returnCigar.add(new CigarElement(1, CigarOperator.N)); - } else { - for( int iii = 0; iii < swCigar.numCigarElements(); iii++ ) { - // now we need to remove the padding from the cigar string - int length = swCigar.getCigarElement(iii).getLength(); - if( iii == 0 ) { length -= padding.length; } - if( iii == swCigar.numCigarElements() - 1 ) { length -= padding.length; } - if( length > 0 ) { - returnCigar.add(new CigarElement(length, swCigar.getCigarElement(iii).getOperator())); - } - } - if( (refBytes == null && returnCigar.getReferenceLength() != 0) || ( refBytes != null && returnCigar.getReferenceLength() != refBytes.length ) ) { - throw new IllegalStateException("SmithWaterman cigar failure: " + (refBytes == null ? "-" : new String(refBytes)) + " against " + new String(bubbleBytes) + " = " + swConsensus.getCigar()); + if( !cigarToAlign.isEmpty() ) { + for( final CigarElement toAdd : cigarToAlign.getCigarElements() ) { + cigarToReturn.add(toAdd); } } - return returnCigar; + final Cigar result = AlignmentUtils.consolidateCigar(cigarToReturn); + if( result.getReferenceLength() != cigar.getReferenceLength() ) + throw new IllegalStateException("leftAlignCigarSequentially failed to produce a valid CIGAR. Reference lengths differ. 
Initial cigar " + cigar + " left aligned into " + result); + return result; } - // class to keep track of the bubble state machine - private static class BubbleStateMachine { - public boolean inBubble = false; - public byte[] bubbleBytes = null; - public T lastSeenReferenceNode = null; - public Cigar cigar = null; - - public BubbleStateMachine( final Cigar initialCigar ) { - inBubble = false; - bubbleBytes = null; - lastSeenReferenceNode = null; - cigar = initialCigar; - } - } /** * Tests that this and other have the same score and vertices in the same order with the same seq * @param other the other path to consider. Cannot be null * @return true if this and path are equal, false otherwise */ - public boolean equalScoreAndSequence(final Path other) { + public boolean equalScoreAndSequence(final Path other) { if ( other == null ) throw new IllegalArgumentException("other cannot be null"); return getScore() == other.getScore() && equalSequence(other); } @@ -432,7 +378,7 @@ public class Path { * @param other the other path to consider. 
Cannot be null * @return true if this and path are equal, false otherwise */ - public boolean equalSequence(final Path other) { + public boolean equalSequence(final Path other) { final List mine = getVertices(); final List yours = other.getVertices(); if ( mine.size() == yours.size() ) { // hehehe diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraph.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraph.java index bb4b26257..36c515073 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraph.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraph.java @@ -48,6 +48,7 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs; import com.google.java.contract.Ensures; import com.google.java.contract.Requires; +import org.jgrapht.EdgeFactory; import java.io.File; import java.util.HashSet; @@ -61,7 +62,17 @@ import java.util.Set; * @author: depristo * @since 03/2013 */ -public final class SeqGraph extends BaseGraph { +public final class SeqGraph extends BaseGraph { + /** + * Edge factory that creates non-reference multiplicity 1 edges + */ + private static class MyEdgeFactory implements EdgeFactory { + @Override + public BaseEdge createEdge(SeqVertex sourceVertex, SeqVertex targetVertex) { + return new BaseEdge(false, 1); + } + } + private final static boolean PRINT_SIMPLIFY_GRAPHS = false; /** @@ -82,7 +93,7 @@ public final class SeqGraph extends BaseGraph { * Construct an empty SeqGraph */ public SeqGraph() { - super(); + this(11); } /** @@ -94,7 +105,7 @@ public final class SeqGraph extends BaseGraph { * @param kmer kmer */ public SeqGraph(final int kmer) { - super(kmer); + super(kmer, new MyEdgeFactory()); } /** @@ -144,21 +155,29 @@ public final class SeqGraph extends BaseGraph { //logger.info("simplifyGraph iteration " + i); // iterate until we haven't don't anything useful 
boolean didSomeWork = false; - if ( PRINT_SIMPLIFY_GRAPHS ) printGraph(new File("simplifyGraph." + iteration + ".1.dot"), 0); + printGraphSimplification(new File("simplifyGraph." + iteration + ".1.dot")); didSomeWork |= new MergeDiamonds().transformUntilComplete(); didSomeWork |= new MergeTails().transformUntilComplete(); - if ( PRINT_SIMPLIFY_GRAPHS ) printGraph(new File("simplifyGraph." + iteration + ".2.diamonds_and_tails.dot"), 0); + printGraphSimplification(new File("simplifyGraph." + iteration + ".2.diamonds_and_tails.dot")); didSomeWork |= new SplitCommonSuffices().transformUntilComplete(); - if ( PRINT_SIMPLIFY_GRAPHS ) printGraph(new File("simplifyGraph." + iteration + ".3.split_suffix.dot"), 0); + printGraphSimplification(new File("simplifyGraph." + iteration + ".3.split_suffix.dot")); didSomeWork |= new MergeCommonSuffices().transformUntilComplete(); - if ( PRINT_SIMPLIFY_GRAPHS ) printGraph(new File("simplifyGraph." + iteration + ".4.merge_suffix.dot"), 0); + printGraphSimplification(new File("simplifyGraph." + iteration + ".4.merge_suffix.dot")); - didSomeWork |= new MergeHeadlessIncomingSources().transformUntilComplete(); didSomeWork |= zipLinearChains(); return didSomeWork; } + /** + * Print simplication step of this graph, if PRINT_SIMPLIFY_GRAPHS is enabled + * @param file the destination for the graph DOT file + */ + private void printGraphSimplification(final File file) { + if ( PRINT_SIMPLIFY_GRAPHS ) + subsetToNeighbors(getReferenceSourceVertex(), 5).printGraph(file, 0); + } + /** * Zip up all of the simple linear chains present in this graph. 
* @@ -289,8 +308,8 @@ public final class SeqGraph extends BaseGraph { final BaseEdge inc = new BaseEdge(false, sharedWeightAmongEdges); // template to make .add function call easy // update the incoming and outgoing edges to point to the new vertex - for( final BaseEdge edge : outEdges ) { addEdge(addedVertex, getEdgeTarget(edge), new BaseEdge(edge).add(inc)); } - for( final BaseEdge edge : inEdges ) { addEdge(getEdgeSource(edge), addedVertex, new BaseEdge(edge).add(inc)); } + for( final BaseEdge edge : outEdges ) { addEdge(addedVertex, getEdgeTarget(edge), edge.copy().add(inc)); } + for( final BaseEdge edge : inEdges ) { addEdge(getEdgeSource(edge), addedVertex, edge.copy().add(inc)); } removeAllVertices(linearChain); return true; @@ -342,7 +361,7 @@ public final class SeqGraph extends BaseGraph { * Merge until the graph has no vertices that are candidates for merging */ public boolean transformUntilComplete() { - boolean didAtLeastOneTranform = false; + boolean didAtLeastOneTransform = false; boolean foundNodesToMerge = true; while( foundNodesToMerge ) { foundNodesToMerge = false; @@ -350,13 +369,13 @@ public final class SeqGraph extends BaseGraph { for( final SeqVertex v : vertexSet() ) { foundNodesToMerge = tryToTransform(v); if ( foundNodesToMerge ) { - didAtLeastOneTranform = true; + didAtLeastOneTransform = true; break; } } } - return didAtLeastOneTranform; + return didAtLeastOneTransform; } /** @@ -505,40 +524,4 @@ public final class SeqGraph extends BaseGraph { } } } - - /** - * Merge headless configurations: - * - * Performs the transformation: - * - * { x + S_i + y -> Z } - * - * goes to: - * - * { x -> S_i -> y -> Z } - * - * for all nodes that match this configuration. 
- * - * Differs from the diamond transform in that no top node is required - */ - protected class MergeHeadlessIncomingSources extends VertexBasedTransformer { - @Override - boolean tryToTransform(final SeqVertex bottom) { - final Set incoming = incomingVerticesOf(bottom); - if ( incoming.size() <= 1 ) - return false; - - for ( final SeqVertex inc : incoming ) - if ( ! isSource(inc) || outDegreeOf(inc) > 1 ) - return false; - - if ( dontModifyGraphEvenIfPossible() ) return true; - - final SharedVertexSequenceSplitter splitter = new SharedVertexSequenceSplitter(SeqGraph.this, incoming); - if (splitter.meetsMinMergableSequenceForPrefix(MIN_COMMON_SEQUENCE_TO_MERGE_SOURCE_SINK_VERTICES)) - return splitter.splitAndUpdate(null, bottom); - else - return false; - } - } } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqVertex.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqVertex.java index f192b54aa..083747db4 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqVertex.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqVertex.java @@ -49,6 +49,7 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs; import com.google.java.contract.Requires; import org.broadinstitute.sting.utils.Utils; import java.util.Arrays; +import java.util.concurrent.atomic.AtomicInteger; /** * A graph vertex containing a sequence of bases and a unique ID that @@ -71,8 +72,9 @@ import java.util.Arrays; * @since 03/2013 */ public final class SeqVertex extends BaseVertex { - private static int idCounter = 0; - public final int id; + // Note that using an AtomicInteger is critical to allow multi-threaded HaplotypeCaller + private static final AtomicInteger idCounter = new AtomicInteger(0); + private int id = idCounter.getAndIncrement(); /** * Create a new SeqVertex with sequence and the next available id @@ -80,7 
+82,6 @@ public final class SeqVertex extends BaseVertex { */ public SeqVertex(final byte[] sequence) { super(sequence); - this.id = idCounter++; } /** @@ -89,7 +90,6 @@ public final class SeqVertex extends BaseVertex { */ public SeqVertex(final String sequence) { super(sequence); - this.id = idCounter++; } /** diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SharedSequenceMerger.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SharedSequenceMerger.java index 1c53f2332..5d725b1dd 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SharedSequenceMerger.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SharedSequenceMerger.java @@ -81,20 +81,20 @@ public class SharedSequenceMerger { else { // graph.printGraph(new File("csm." + counter + "." + v.getSequenceString() + "_pre.dot"), 0); - final List edgesToRemove = new LinkedList(); + final List edgesToRemove = new LinkedList<>(); final byte[] prevSeq = prevs.iterator().next().getSequence(); final SeqVertex newV = new SeqVertex(ArrayUtils.addAll(prevSeq, v.getSequence())); graph.addVertex(newV); for ( final SeqVertex prev : prevs ) { for ( final BaseEdge prevIn : graph.incomingEdgesOf(prev) ) { - graph.addEdge(graph.getEdgeSource(prevIn), newV, new BaseEdge(prevIn)); + graph.addEdge(graph.getEdgeSource(prevIn), newV, prevIn.copy()); edgesToRemove.add(prevIn); } } for ( final BaseEdge e : graph.outgoingEdgesOf(v) ) { - graph.addEdge(newV, graph.getEdgeTarget(e), new BaseEdge(e)); + graph.addEdge(newV, graph.getEdgeTarget(e), e.copy()); } graph.removeAllVertices(prevs); @@ -124,11 +124,17 @@ public class SharedSequenceMerger { final SeqVertex first = incomingVertices.iterator().next(); for ( final SeqVertex prev : incomingVertices) { if ( ! 
prev.seqEquals(first) ) + // cannot merge if our sequence isn't the same as the first sequence return false; final Collection prevOuts = graph.outgoingVerticesOf(prev); if ( prevOuts.size() != 1 ) + // prev -> v must be the only edge from prev return false; if ( prevOuts.iterator().next() != v ) + // don't allow cyles + return false; + if ( graph.inDegreeOf(prev) == 0 ) + // cannot merge when any of the incoming nodes are sources return false; } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SharedVertexSequenceSplitter.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SharedVertexSequenceSplitter.java index f6ee4c3c3..205d0027a 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SharedVertexSequenceSplitter.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SharedVertexSequenceSplitter.java @@ -209,7 +209,7 @@ public class SharedVertexSequenceSplitter { splitGraph.addEdge(remaining, suffixV, fromMid); } else { // prefix + suffix completely explain this node - splitGraph.addOrUpdateEdge(prefixV, suffixV, new BaseEdge(toMid).add(fromMid)); + splitGraph.addOrUpdateEdge(prefixV, suffixV, toMid.copy().add(fromMid)); } } } @@ -323,7 +323,7 @@ public class SharedVertexSequenceSplitter { } else { // schedule edge for removal, and return a freshly allocated one for our graph to use edgesToRemove.add(e); - return new BaseEdge(e); + return e.copy(); } } } \ No newline at end of file diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/MultiDeBruijnVertex.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/MultiDeBruijnVertex.java new file mode 100644 index 000000000..5752583c7 --- /dev/null +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/MultiDeBruijnVertex.java @@ -0,0 +1,121 @@ 
+/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). +* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. 
LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. 
INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. +* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. 
LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. 
This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. +*/ + +package org.broadinstitute.sting.gatk.walkers.haplotypecaller.readthreading; + +import org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs.DeBruijnVertex; +import org.broadinstitute.sting.utils.Utils; + +import java.util.LinkedList; +import java.util.List; +import java.util.concurrent.atomic.AtomicInteger; + +/** + * A DeBruijnVertex that supports multiple copies of the same kmer + * + * This is implemented through the same mechanism as SeqVertex, where each + * created MultiDeBruijnVertex has a unique id assigned upon creation. 
Two + * MultiDeBruijnVertex are equal iff they have the same ID + * + * User: depristo + * Date: 4/17/13 + * Time: 3:20 PM + */ +final class MultiDeBruijnVertex extends DeBruijnVertex { + private final static boolean KEEP_TRACK_OF_READS = false; + + // Note that using an AtomicInteger is critical to allow multi-threaded HaplotypeCaller + private static final AtomicInteger idCounter = new AtomicInteger(0); + private int id = idCounter.getAndIncrement(); + + private final List reads = new LinkedList(); + + /** + * Create a new MultiDeBruijnVertex with kmer sequence + * @param sequence the kmer sequence + */ + MultiDeBruijnVertex(byte[] sequence) { + super(sequence); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + MultiDeBruijnVertex that = (MultiDeBruijnVertex) o; + + return id == that.id; + } + + @Override + public String toString() { + return "MultiDeBruijnVertex_id_" + id + "_seq_" + getSequenceString(); + } + + /** + * Add name information to this vertex for debugging + * + * This information will be captured as a list of strings, and displayed in DOT if this + * graph is written out to disk + * + * This functionality is only enabled when KEEP_TRACK_OF_READS is true + * + * @param name a non-null string + */ + protected void addRead(final String name) { + if ( name == null ) throw new IllegalArgumentException("name cannot be null"); + if ( KEEP_TRACK_OF_READS ) reads.add(name); + } + + @Override + public int hashCode() { return id; } + + @Override + public String additionalInfo() { + return KEEP_TRACK_OF_READS ? (! reads.contains("ref") ? 
"__" + Utils.join(",", reads) : "") : ""; + } +} diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssembler.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssembler.java new file mode 100644 index 000000000..672c61c0f --- /dev/null +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssembler.java @@ -0,0 +1,224 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). +* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. 
Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. 
LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. 
+* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. 
Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. 
+*/ + +package org.broadinstitute.sting.gatk.walkers.haplotypecaller.readthreading; + +import org.apache.log4j.Logger; +import org.broadinstitute.sting.gatk.walkers.haplotypecaller.LocalAssemblyEngine; +import org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs.*; +import org.broadinstitute.sting.utils.MathUtils; +import org.broadinstitute.sting.utils.haplotype.Haplotype; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; + +import java.io.File; +import java.util.Arrays; +import java.util.LinkedList; +import java.util.List; + +public class ReadThreadingAssembler extends LocalAssemblyEngine { + private final static Logger logger = Logger.getLogger(ReadThreadingAssembler.class); + + private final static int DEFAULT_NUM_PATHS_PER_GRAPH = 128; + private final static int GGA_MODE_ARTIFICIAL_COUNTS = 1000; + private final static int KMER_SIZE_ITERATION_INCREASE = 10; + private final static int MAX_KMER_ITERATIONS_TO_ATTEMPT = 6; + + /** The min and max kmer sizes to try when building the graph. 
*/ + private final List kmerSizes; + private final int maxAllowedPathsForReadThreadingAssembler; + + private final boolean dontIncreaseKmerSizesForCycles; + private final int numPruningSamples; + private boolean requireReasonableNumberOfPaths = false; + protected boolean removePathsNotConnectedToRef = true; + private boolean justReturnRawGraph = false; + + /** for testing only */ + public ReadThreadingAssembler() { + this(DEFAULT_NUM_PATHS_PER_GRAPH, Arrays.asList(25)); + } + + public ReadThreadingAssembler(final int maxAllowedPathsForReadThreadingAssembler, final List kmerSizes, final boolean dontIncreaseKmerSizesForCycles, final int numPruningSamples) { + super(maxAllowedPathsForReadThreadingAssembler); + this.kmerSizes = kmerSizes; + this.maxAllowedPathsForReadThreadingAssembler = maxAllowedPathsForReadThreadingAssembler; + this.dontIncreaseKmerSizesForCycles = dontIncreaseKmerSizesForCycles; + this.numPruningSamples = numPruningSamples; + } + + public ReadThreadingAssembler(final int maxAllowedPathsForReadThreadingAssembler, final List kmerSizes) { + this(maxAllowedPathsForReadThreadingAssembler, kmerSizes, true, 1); + } + + /** for testing purposes */ + protected void setJustReturnRawGraph(boolean justReturnRawGraph) { + this.justReturnRawGraph = justReturnRawGraph; + } + + @Override + public List assemble(final List reads, final Haplotype refHaplotype, final List activeAlleleHaplotypes) { + final List graphs = new LinkedList<>(); + + // first, try using the requested kmer sizes + for ( final int kmerSize : kmerSizes ) { + final SeqGraph graph = createGraph(reads, refHaplotype, kmerSize, activeAlleleHaplotypes, dontIncreaseKmerSizesForCycles); + if ( graph != null ) + graphs.add(graph); + } + + // if none of those worked, iterate over larger sizes if allowed to do so + if ( graphs.isEmpty() && !dontIncreaseKmerSizesForCycles ) { + int kmerSize = MathUtils.arrayMaxInt(kmerSizes) + KMER_SIZE_ITERATION_INCREASE; + int numIterations = 1; + while ( graphs.isEmpty() 
&& numIterations <= MAX_KMER_ITERATIONS_TO_ATTEMPT ) { + // on the last attempt we will allow low complexity graphs + final SeqGraph graph = createGraph(reads, refHaplotype, kmerSize, activeAlleleHaplotypes, numIterations == MAX_KMER_ITERATIONS_TO_ATTEMPT); + if ( graph != null ) + graphs.add(graph); + kmerSize += KMER_SIZE_ITERATION_INCREASE; + numIterations++; + } + } + + return graphs; + } + + /** + * Creates the sequence graph for the given kmerSize + * + * @param reads reads to use + * @param refHaplotype reference haplotype + * @param kmerSize kmer size + * @param activeAlleleHaplotypes the GGA haplotypes to inject into the graph + * @param allowLowComplexityGraphs if true, do not check for low-complexity graphs + * @return sequence graph or null if one could not be created (e.g. because it contains cycles or too many paths or is low complexity) + */ + protected SeqGraph createGraph(final List reads, + final Haplotype refHaplotype, + final int kmerSize, + final List activeAlleleHaplotypes, + final boolean allowLowComplexityGraphs) { + final ReadThreadingGraph rtgraph = new ReadThreadingGraph(kmerSize, debugGraphTransformations, minBaseQualityToUseInAssembly, numPruningSamples); + + // add the reference sequence to the graph + rtgraph.addSequence("ref", refHaplotype.getBases(), null, true); + + // add the artificial GGA haplotypes to the graph + int hapCount = 0; + for ( final Haplotype h : activeAlleleHaplotypes ) { + final int[] counts = new int[h.length()]; + Arrays.fill(counts, GGA_MODE_ARTIFICIAL_COUNTS); + rtgraph.addSequence("activeAllele" + hapCount++, h.getBases(), counts, false); + } + + // Next pull kmers out of every read and throw them on the graph + for( final GATKSAMRecord read : reads ) { + rtgraph.addRead(read); + } + + // actually build the read threading graph + rtgraph.buildGraphIfNecessary(); + + // sanity check: make sure there are no cycles in the graph + if ( rtgraph.hasCycles() ) { + if ( debug ) logger.info("Not using kmer size of " + 
kmerSize + " in read threading assembler because it contains a cycle"); + return null; + } + + // sanity check: make sure the graph had enough complexity with the given kmer + if ( ! allowLowComplexityGraphs && rtgraph.isLowComplexity() ) { + if ( debug ) logger.info("Not using kmer size of " + kmerSize + " in read threading assembler because it does not produce a graph with enough complexity"); + return null; + } + + printDebugGraphTransform(rtgraph, new File("sequenceGraph.0.0.raw_readthreading_graph.dot")); + + // go through and prune all of the chains where all edges have <= pruneFactor. This must occur + // before recoverDanglingTails in the graph, so that we don't spend a ton of time recovering + // tails that we'll ultimately just trim away anyway, as the dangling tail edges have weight of 1 + rtgraph.pruneLowWeightChains(pruneFactor); + + // look at all chains in the graph that terminate in a non-ref node (dangling sinks) and see if + // we can recover them by merging some N bases from the chain back into the reference + if ( recoverDanglingTails ) rtgraph.recoverDanglingTails(); + + // remove all heading and trailing paths + if ( removePathsNotConnectedToRef ) rtgraph.removePathsNotConnectedToRef(); + + printDebugGraphTransform(rtgraph, new File("sequenceGraph.0.1.cleaned_readthreading_graph.dot")); + + final SeqGraph initialSeqGraph = rtgraph.convertToSequenceGraph(); + + // if the unit tests don't want us to cleanup the graph, just return the raw sequence graph + if ( justReturnRawGraph ) return initialSeqGraph; + + if ( debug ) logger.info("Using kmer size of " + rtgraph.getKmerSize() + " in read threading assembler"); + printDebugGraphTransform(initialSeqGraph, new File("sequenceGraph.0.2.initial_seqgraph.dot")); + initialSeqGraph.cleanNonRefPaths(); // TODO -- I don't this is possible by construction + + final SeqGraph seqGraph = cleanupSeqGraph(initialSeqGraph); + return ( seqGraph != null && requireReasonableNumberOfPaths && 
!reasonableNumberOfPaths(seqGraph) ) ? null : seqGraph; + } + + /** + * Did we find a reasonable number of paths in this graph? + * @param graph + * @return + */ + private boolean reasonableNumberOfPaths(final SeqGraph graph) { + final KBestPaths pathFinder = new KBestPaths<>(false); + final List> allPaths = pathFinder.getKBestPaths(graph, 100000); + logger.info("Found " + allPaths.size() + " paths through " + graph + " with maximum " + maxAllowedPathsForReadThreadingAssembler); + return allPaths.size() <= maxAllowedPathsForReadThreadingAssembler; + } + + @Override + public String toString() { + return "ReadThreadingAssembler{" + + "kmerSizes=" + kmerSizes + + '}'; + } +} \ No newline at end of file diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingGraph.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingGraph.java new file mode 100644 index 000000000..7d7df2c06 --- /dev/null +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingGraph.java @@ -0,0 +1,785 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). +* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. 
+* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. 
LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. 
NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. +* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. 
Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. 
+*/ + +package org.broadinstitute.sting.gatk.walkers.haplotypecaller.readthreading; + +import net.sf.samtools.Cigar; +import net.sf.samtools.CigarElement; +import net.sf.samtools.CigarOperator; +import org.apache.log4j.Logger; +import org.broadinstitute.sting.gatk.walkers.haplotypecaller.KMerCounter; +import org.broadinstitute.sting.gatk.walkers.haplotypecaller.Kmer; +import org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs.*; +import org.broadinstitute.sting.utils.BaseUtils; +import org.broadinstitute.sting.utils.collections.Pair; +import org.broadinstitute.sting.utils.sam.AlignmentUtils; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; +import org.broadinstitute.sting.utils.smithwaterman.SWPairwiseAlignment; +import org.broadinstitute.sting.utils.smithwaterman.SmithWaterman; +import org.jgrapht.EdgeFactory; +import org.jgrapht.alg.CycleDetector; + +import java.io.File; +import java.util.*; + +public class ReadThreadingGraph extends BaseGraph { + /** + * Edge factory that encapsulates the numPruningSamples assembly parameter + */ + private static class MyEdgeFactory implements EdgeFactory { + final int numPruningSamples; + + public MyEdgeFactory(int numPruningSamples) { + this.numPruningSamples = numPruningSamples; + } + + @Override + public MultiSampleEdge createEdge(final MultiDeBruijnVertex sourceVertex, final MultiDeBruijnVertex targetVertex) { + return new MultiSampleEdge(false, 1, numPruningSamples); + } + + public MultiSampleEdge createEdge(final boolean isRef, final int multiplicity) { + return new MultiSampleEdge(isRef, multiplicity, numPruningSamples); + } + + } + + private final static Logger logger = Logger.getLogger(ReadThreadingGraph.class); + + private final static String ANONYMOUS_SAMPLE = "XXX_UNNAMED_XXX"; + private final static boolean WRITE_GRAPH = false; + private final static boolean DEBUG_NON_UNIQUE_CALC = false; + + /** for debugging info printing */ + private static int counter = 0; + + /** + * Sequences added for read 
threading before we've actually built the graph + */ + private final Map> pending = new LinkedHashMap<>(); + + /** + * A set of non-unique kmers that cannot be used as merge points in the graph + */ + private Set nonUniqueKmers; + + /** + * A map from kmers -> their corresponding vertex in the graph + */ + private Map uniqueKmers = new LinkedHashMap<>(); + + /** + * + */ + final int kmerSize; + final boolean debugGraphTransformations; + final byte minBaseQualityToUseInAssembly; + + protected boolean increaseCountsBackwards = true; + protected boolean increaseCountsThroughBranches = false; // this may increase the branches without bounds + + // -------------------------------------------------------------------------------- + // state variables, initialized in resetToInitialState() + // -------------------------------------------------------------------------------- + private Kmer refSource; + private boolean alreadyBuilt; + + public ReadThreadingGraph() { + this(25, false, (byte)6, 1); + } + + public ReadThreadingGraph(final int kmerSize) { + this(kmerSize, false, (byte)6, 1); + } + + /** + * Create a new ReadThreadingAssembler using kmerSize for matching + * @param kmerSize must be >= 1 + */ + protected ReadThreadingGraph(final int kmerSize, final boolean debugGraphTransformations, final byte minBaseQualityToUseInAssembly, final int numPruningSamples) { + super(kmerSize, new MyEdgeFactory(numPruningSamples)); + + if ( kmerSize < 1 ) throw new IllegalArgumentException("bad minkKmerSize " + kmerSize); + this.kmerSize = kmerSize; + this.debugGraphTransformations = debugGraphTransformations; + this.minBaseQualityToUseInAssembly = minBaseQualityToUseInAssembly; + + resetToInitialState(); + } + + /** + * Reset this assembler to its initial state, so we can create another assembly with a different set of reads + */ + private void resetToInitialState() { + pending.clear(); + nonUniqueKmers = null; + uniqueKmers.clear(); + refSource = null; + alreadyBuilt = false; + } + + 
/** + * Add the all bases in sequence to the graph + * @param sequence a non-null sequence + * @param isRef is this the reference sequence? + */ + protected void addSequence(final byte[] sequence, final boolean isRef) { + addSequence("anonymous", sequence, null, isRef); + } + + /** + * Add all bases in sequence to this graph + * + * @see #addSequence(String, String, byte[], int, int, int[], boolean) for full information + */ + public void addSequence(final String seqName, final byte[] sequence, final int[] counts, final boolean isRef) { + addSequence(seqName, ANONYMOUS_SAMPLE, sequence, 0, sequence.length, counts, isRef); + } + + /** + * Add bases in sequence to this graph + * + * @param seqName a useful seqName for this read, for debugging purposes + * @param sequence non-null sequence of bases + * @param counts a vector of counts for each bases, indicating how many times that base was observed in the sequence. + * This allows us to support reduced reads in the ReadThreadingAssembler. Can be null, meaning that + * each base is only observed once. If not null, must have length == sequence.length. + * @param start the first base offset in sequence that we should use for constructing the graph using this sequence, inclusive + * @param stop the last base offset in sequence that we should use for constructing the graph using this sequence, exclusive + * @param isRef is this the reference sequence. 
+ */ + public void addSequence(final String seqName, final String sampleName, final byte[] sequence, final int start, final int stop, final int[] counts, final boolean isRef) { + // note that argument testing is taken care of in SequenceForKmers + if ( alreadyBuilt ) throw new IllegalStateException("Graph already built"); + + // get the list of sequences for this sample + List sampleSequences = pending.get(sampleName); + if ( sampleSequences == null ) { // need to create + sampleSequences = new LinkedList<>(); + pending.put(sampleName, sampleSequences); + } + + // add the new sequence to the list of sequences for sample + sampleSequences.add(new SequenceForKmers(seqName, sequence, start, stop, counts, isRef)); + } + + /** + * Return a count appropriate for a kmer starting at kmerStart in sequence for kmers + * + * @param seqForKmers a non-null sequence for kmers object + * @param kmerStart the position where the kmer starts in sequence + * @return a count for a kmer from start -> start + kmerSize in seqForKmers + */ + private int getCountGivenKmerStart(final SequenceForKmers seqForKmers, final int kmerStart) { + return seqForKmers.getCount(kmerStart + kmerSize - 1); + } + + /** + * Thread sequence seqForKmers through the current graph, updating the graph as appropriate + * @param seqForKmers a non-null sequence + */ + private void threadSequence(final SequenceForKmers seqForKmers) { + final Pair startingInfo = findStart(seqForKmers); + if ( startingInfo == null ) + return; + + final MultiDeBruijnVertex startingVertex = startingInfo.getFirst(); + final int uniqueStartPos = startingInfo.getSecond(); + + // increase the counts of all edges incoming into the starting vertex supported by going back in sequence + if ( increaseCountsBackwards ) + increaseCountsInMatchedKmers(seqForKmers, startingVertex, startingVertex.getSequence(), kmerSize - 2); + + if ( debugGraphTransformations ) startingVertex.addRead(seqForKmers.name); + + // keep track of information about the 
reference source + if ( seqForKmers.isRef ) { + if ( refSource != null ) throw new IllegalStateException("Found two refSources! prev: " + refSource + ", new: " + startingVertex); + refSource = new Kmer(seqForKmers.sequence, seqForKmers.start, kmerSize); + } + + // loop over all of the bases in sequence, extending the graph by one base at each point, as appropriate + MultiDeBruijnVertex vertex = startingVertex; + for ( int i = uniqueStartPos + 1; i <= seqForKmers.stop - kmerSize; i++ ) { + final int count = getCountGivenKmerStart(seqForKmers, i); + + vertex = extendChainByOne(vertex, seqForKmers.sequence, i, count, seqForKmers.isRef); + if ( debugGraphTransformations ) vertex.addRead(seqForKmers.name); + } + } + + /** + * Class to keep track of the important dangling tail merging data + */ + protected final class DanglingTailMergeResult { + final List danglingPath, referencePath; + final byte[] danglingPathString, referencePathString; + final Cigar cigar; + + public DanglingTailMergeResult(final List danglingPath, + final List referencePath, + final byte[] danglingPathString, + final byte[] referencePathString, + final Cigar cigar) { + this.danglingPath = danglingPath; + this.referencePath = referencePath; + this.danglingPathString = danglingPathString; + this.referencePathString = referencePathString; + this.cigar = cigar; + } + } + + /** + * Attempt to attach vertex with out-degree == 0 to the graph + * + * @param vertex the vertex to recover + * @return 1 if we successfully recovered the vertex and 0 otherwise + */ + protected int recoverDanglingChain(final MultiDeBruijnVertex vertex) { + if ( outDegreeOf(vertex) != 0 ) throw new IllegalStateException("Attempting to recover a dangling tail for " + vertex + " but it has out-degree > 0"); + + // generate the CIGAR string from Smith-Waterman between the dangling tail and reference paths + final DanglingTailMergeResult danglingTailMergeResult = generateCigarAgainstReferencePath(vertex); + + // if the CIGAR is too 
complex (or couldn't be computed) then we do not allow the merge into the reference path + if ( danglingTailMergeResult == null || ! cigarIsOkayToMerge(danglingTailMergeResult.cigar) ) + return 0; + + // merge + return mergeDanglingTail(danglingTailMergeResult); + } + + /** + * Determine whether the provided cigar is okay to merge into the reference path + * + * @param cigar the cigar to analyze + * @return true if it's okay to merge, false otherwise + */ + protected boolean cigarIsOkayToMerge(final Cigar cigar) { + + final List elements = cigar.getCigarElements(); + + // don't allow more than a couple of different ops + if ( elements.size() > 3 ) + return false; + + // the last element must be an M + if ( elements.get(elements.size() - 1).getOperator() != CigarOperator.M ) + return false; + + // TODO -- do we want to check whether the Ms mismatch too much also? + + return true; + } + + /** + * Actually merge the dangling tail if possible + * + * @param danglingTailMergeResult the result from generating a Cigar for the dangling tail against the reference + * @return 1 if merge was successful, 0 otherwise + */ + protected int mergeDanglingTail(final DanglingTailMergeResult danglingTailMergeResult) { + + final List elements = danglingTailMergeResult.cigar.getCigarElements(); + final CigarElement lastElement = elements.get(elements.size() - 1); + if ( lastElement.getOperator() != CigarOperator.M ) + throw new IllegalArgumentException("The last Cigar element must be an M"); + + final int lastRefIndex = danglingTailMergeResult.cigar.getReferenceLength() - 1; + final int matchingSuffix = Math.min(GraphUtils.longestSuffixMatch(danglingTailMergeResult.referencePathString, danglingTailMergeResult.danglingPathString, lastRefIndex), lastElement.getLength()); + if ( matchingSuffix == 0 ) + return 0; + + final int altIndexToMerge = Math.max(danglingTailMergeResult.cigar.getReadLength() - matchingSuffix - 1, 0); + final int refIndexToMerge = lastRefIndex - matchingSuffix + 1; + 
addEdge(danglingTailMergeResult.danglingPath.get(altIndexToMerge), danglingTailMergeResult.referencePath.get(refIndexToMerge), ((MyEdgeFactory)getEdgeFactory()).createEdge(false, 1)); + return 1; + } + + /** + * Generates the CIGAR string from the Smith-Waterman alignment of the dangling path (where the + * provided vertex is the sink) and the reference path. + * + * @param vertex the sink of the dangling tail + * @return a SmithWaterman object which can be null if no proper alignment could be generated + */ + protected DanglingTailMergeResult generateCigarAgainstReferencePath(final MultiDeBruijnVertex vertex) { + + // find the lowest common ancestor path between vertex and the reference sink if available + final List altPath = findPathToLowestCommonAncestorOfReference(vertex); + if ( altPath == null || isRefSource(altPath.get(0)) ) + return null; + + // now get the reference path from the LCA + final List refPath = getReferencePath(altPath.get(0)); + + // create the Smith-Waterman strings to use + final byte[] refBases = getBasesForPath(refPath); + final byte[] altBases = getBasesForPath(altPath); + + // run Smith-Waterman to determine the best alignment (and remove trailing deletions since they aren't interesting) + final SmithWaterman alignment = new SWPairwiseAlignment(refBases, altBases, SWPairwiseAlignment.OVERHANG_STRATEGY.INDEL); + return new DanglingTailMergeResult(altPath, refPath, altBases, refBases, AlignmentUtils.removeTrailingDeletions(alignment.getCigar())); + } + + /** + * Finds the path upwards in the graph from this vertex to the reference sequence, including the lowest common ancestor vertex + * + * @param vertex the original vertex + * @return the path if it can be determined or null if this vertex either doesn't merge onto the reference path or + * has an ancestor with multiple incoming edges before hitting the reference path + */ + protected List findPathToLowestCommonAncestorOfReference(final MultiDeBruijnVertex vertex) { + final LinkedList 
path = new LinkedList<>(); + + MultiDeBruijnVertex v = vertex; + while ( ! isReferenceNode(v) && inDegreeOf(v) == 1 ) { + path.addFirst(v); + v = getEdgeSource(incomingEdgeOf(v)); + } + path.addFirst(v); + + return isReferenceNode(v) ? path : null; + } + + /** + * Finds the path downwards in the graph from this vertex to the reference sink, including this vertex + * + * @param start the reference vertex to start from + * @return the path (non-null, non-empty) + */ + protected List getReferencePath(final MultiDeBruijnVertex start) { + if ( ! isReferenceNode(start) ) throw new IllegalArgumentException("Cannot construct the reference path from a vertex that is not on that path"); + + final List path = new ArrayList<>(); + + MultiDeBruijnVertex v = start; + while ( v != null ) { + path.add(v); + v = getNextReferenceVertex(v); + } + + return path; + } + + /** + * Build the read threaded assembly graph if it hasn't already been constructed from the sequences that have + * been added to the graph. + */ + public void buildGraphIfNecessary() { + if ( alreadyBuilt ) return; + + // determine the kmer size we'll use, and capture the set of nonUniques for that kmer size + final NonUniqueResult result = determineKmerSizeAndNonUniques(kmerSize, kmerSize); + nonUniqueKmers = result.nonUniques; + + if ( DEBUG_NON_UNIQUE_CALC ) { + logger.info("using " + kmerSize + " kmer size for this assembly with the following non-uniques"); + } + + // go through the pending sequences, and add them to the graph + for ( final List sequencesForSample : pending.values() ) { + for ( final SequenceForKmers sequenceForKmers : sequencesForSample ) { + threadSequence(sequenceForKmers); + if ( WRITE_GRAPH ) printGraph(new File("threading." + counter++ + "." 
+ sequenceForKmers.name.replace(" ", "_") + ".dot"), 0); + } + + // flush the single sample edge values from the graph + for ( final MultiSampleEdge e : edgeSet() ) e.flushSingleSampleMultiplicity(); + } + + // clear + pending.clear(); + alreadyBuilt = true; + } + + /** + * @return true if the graph has cycles, false otherwise + */ + public boolean hasCycles() { + return new CycleDetector<>(this).detectCycles(); + } + + /** + * Does the graph not have enough complexity? We define low complexity as a situation where the number + * of non-unique kmers is more than 20% of the total number of kmers. + * + * @return true if the graph has low complexity, false otherwise + */ + public boolean isLowComplexity() { + return nonUniqueKmers.size() * 4 > uniqueKmers.size(); + } + + public void recoverDanglingTails() { + if ( ! alreadyBuilt ) throw new IllegalStateException("recoverDanglingTails requires the graph be already built"); + + int attempted = 0; + int nRecovered = 0; + for ( final MultiDeBruijnVertex v : vertexSet() ) { + if ( outDegreeOf(v) == 0 && ! isRefNodeAndRefSink(v) ) { + attempted++; + nRecovered += recoverDanglingChain(v); + } + } + + if ( debugGraphTransformations ) logger.info("Recovered " + nRecovered + " of " + attempted + " dangling tails"); + } + + /** structure that keeps track of the non-unique kmers for a given kmer size */ + private static class NonUniqueResult { + final Set nonUniques; + final int kmerSize; + + private NonUniqueResult(Set nonUniques, int kmerSize) { + this.nonUniques = nonUniques; + this.kmerSize = kmerSize; + } + } + + /** + * Compute the smallest kmer size >= minKmerSize and <= maxKmerSize that has no non-unique kmers + * among all sequences added to the current graph. Will always return a result for maxKmerSize if + * all smaller kmers had non-unique kmers. 
+ * + * @param minKmerSize the minimum kmer size to consider when constructing the graph + * @param maxKmerSize the maximum kmer size to consider + * @return a non-null NonUniqueResult + */ + protected NonUniqueResult determineKmerSizeAndNonUniques(final int minKmerSize, final int maxKmerSize) { + final Collection withNonUniques = getAllPendingSequences(); + final Set nonUniqueKmers = new HashSet(); + + // go through the sequences and determine which kmers aren't unique within each read + int kmerSize = minKmerSize; + for ( ; kmerSize <= maxKmerSize; kmerSize++) { + // clear out set of non-unique kmers + nonUniqueKmers.clear(); + + // loop over all sequences that have non-unique kmers in them from the previous iterator + final Iterator it = withNonUniques.iterator(); + while ( it.hasNext() ) { + final SequenceForKmers sequenceForKmers = it.next(); + + // determine the non-unique kmers for this sequence + final Collection nonUniquesFromSeq = determineNonUniqueKmers(sequenceForKmers, kmerSize); + if ( nonUniquesFromSeq.isEmpty() ) { + // remove this sequence from future consideration + it.remove(); + } else { + // keep track of the non-uniques for this kmerSize, and keep it in the list of sequences that have non-uniques + nonUniqueKmers.addAll(nonUniquesFromSeq); + } + } + + if ( nonUniqueKmers.isEmpty() ) + // this kmerSize produces no non-unique sequences, so go ahead and use it for our assembly + break; + } + + // necessary because the loop breaks with kmerSize = max + 1 + return new NonUniqueResult(nonUniqueKmers, Math.min(kmerSize, maxKmerSize)); + } + + /** + * Get the collection of all sequences for kmers across all samples in no particular order + * @return non-null Collection + */ + private Collection getAllPendingSequences() { + final LinkedList result = new LinkedList(); + for ( final List oneSampleWorth : pending.values() ) result.addAll(oneSampleWorth); + return result; + } + + /** + * Get the collection of non-unique kmers from sequence for kmer size 
kmerSize + * @param seqForKmers a sequence to get kmers from + * @param kmerSize the size of the kmers + * @return a non-null collection of non-unique kmers in sequence + */ + private Collection determineNonUniqueKmers(final SequenceForKmers seqForKmers, final int kmerSize) { + // count up occurrences of kmers within each read + final KMerCounter counter = new KMerCounter(kmerSize); + final int stopPosition = seqForKmers.stop - kmerSize; + for ( int i = 0; i <= stopPosition; i++ ) { + final Kmer kmer = new Kmer(seqForKmers.sequence, i, kmerSize); + counter.addKmer(kmer, 1); + } + + return counter.getKmersWithCountsAtLeast(2); + } + + /** + * Convert this kmer graph to a simple sequence graph. + * + * Each kmer suffix shows up as a distinct SeqVertex, attached in the same structure as in the kmer + * graph. Nodes that are sources are mapped to SeqVertex nodes that contain all of their sequence + * + * @return a newly allocated SequenceGraph + */ + // TODO -- should override base class method + public SeqGraph convertToSequenceGraph() { + buildGraphIfNecessary(); + + final SeqGraph seqGraph = new SeqGraph(kmerSize); + final Map vertexMap = new HashMap(); + + // create all of the equivalent seq graph vertices + for ( final MultiDeBruijnVertex dv : vertexSet() ) { + final SeqVertex sv = new SeqVertex(dv.getAdditionalSequence(isSource(dv))); + sv.setAdditionalInfo(dv.additionalInfo()); + vertexMap.put(dv, sv); + seqGraph.addVertex(sv); + } + + // walk through the nodes and connect them to their equivalent seq vertices + for( final MultiSampleEdge e : edgeSet() ) { + final SeqVertex seqInV = vertexMap.get(getEdgeSource(e)); + final SeqVertex seqOutV = vertexMap.get(getEdgeTarget(e)); + //logger.info("Adding edge " + seqInV + " -> " + seqOutV); + seqGraph.addEdge(seqInV, seqOutV, new BaseEdge(e.isRef(), e.getMultiplicity())); + } + + return seqGraph; + } + + private void increaseCountsInMatchedKmers(final SequenceForKmers seqForKmers, + final MultiDeBruijnVertex vertex, + 
final byte[] originalKmer, + final int offset) { + if ( offset == -1 ) return; + + for ( final MultiSampleEdge edge : incomingEdgesOf(vertex) ) { + final MultiDeBruijnVertex prev = getEdgeSource(edge); + final byte suffix = prev.getSuffix(); + final byte seqBase = originalKmer[offset]; +// logger.warn(String.format("Increasing counts for %s -> %s via %s at %d with suffix %s vs. %s", +// prev, vertex, edge, offset, (char)suffix, (char)seqBase)); + if ( suffix == seqBase && (increaseCountsThroughBranches || inDegreeOf(vertex) == 1) ) { + edge.incMultiplicity(seqForKmers.getCount(offset)); + increaseCountsInMatchedKmers(seqForKmers, prev, originalKmer, offset-1); + } + } + } + + /** + * Find vertex and its position in seqForKmers where we should start assembling seqForKmers + * + * @param seqForKmers the sequence we want to thread into the graph + * @return a pair of the starting vertex and its position in seqForKmer + */ + private Pair findStart(final SequenceForKmers seqForKmers) { + final int uniqueStartPos = seqForKmers.isRef ? 
0 : findUniqueStartPosition(seqForKmers.sequence, seqForKmers.start, seqForKmers.stop); + + if ( uniqueStartPos == -1 ) + return null; + + return getOrCreateKmerVertex(seqForKmers.sequence, uniqueStartPos, true); + } + + /** + * Find a starting point in sequence that begins a unique kmer among all kmers in the graph + * @param sequence the sequence of bases + * @param start the first base to use in sequence + * @param stop the last base to use in sequence + * @return the index into sequence that begins a unique kmer of size kmerSize, or -1 if none could be found + */ + private int findUniqueStartPosition(final byte[] sequence, final int start, final int stop) { + for ( int i = start; i < stop - kmerSize; i++ ) { + final Kmer kmer1 = new Kmer(sequence, i, kmerSize); + if ( uniqueKmers.containsKey(kmer1) ) + return i; + } + return -1; + } + + /** + * Get the vertex for the kmer in sequence starting at start + * @param sequence the sequence + * @param start the position of the kmer start + * @param allowRefSource if true, we will allow matches to the kmer that represents the reference starting kmer + * @return a non-null vertex + */ + private Pair getOrCreateKmerVertex(final byte[] sequence, final int start, final boolean allowRefSource) { + final Kmer kmer = new Kmer(sequence, start, kmerSize); + final MultiDeBruijnVertex vertex = getUniqueKmerVertex(kmer, allowRefSource); + if ( vertex != null ) { + return new Pair<>(vertex, start); + } else { + return new Pair<>(createVertex(kmer), start); + } + } + + /** + * Get the unique vertex for kmer, or null if not possible. + * + * @param allowRefSource if true, we will allow kmer to match the reference source vertex + * @return a vertex for kmer, or null if it's not unique + */ + private MultiDeBruijnVertex getUniqueKmerVertex(final Kmer kmer, final boolean allowRefSource) { + if ( ! allowRefSource && kmer.equals(refSource) ) return null; + return uniqueKmers.get(kmer); + } + + /** + * Create a new vertex for kmer. 
Add it to the uniqueKmers map if appropriate. + * + * kmer must not have a entry in unique kmers, or an error will be thrown + * + * @param kmer the kmer we want to create a vertex for + * @return the non-null created vertex + */ + private MultiDeBruijnVertex createVertex(final Kmer kmer) { + final MultiDeBruijnVertex newVertex = new MultiDeBruijnVertex(kmer.bases()); + final int prevSize = vertexSet().size(); + addVertex(newVertex); + + // make sure we aren't adding duplicates (would be a bug) + if ( vertexSet().size() != prevSize + 1) throw new IllegalStateException("Adding vertex " + newVertex + " to graph didn't increase the graph size"); + + // add the vertex to the unique kmer map, if it is in fact unique + if ( ! nonUniqueKmers.contains(kmer) && ! uniqueKmers.containsKey(kmer) ) // TODO -- not sure this last test is necessary + uniqueKmers.put(kmer, newVertex); + + return newVertex; + } + + /** + * Workhorse routine of the assembler. Given a sequence whose last vertex is anchored in the graph, extend + * the graph one bp according to the bases in sequence. + * + * @param prevVertex a non-null vertex where sequence was last anchored in the graph + * @param sequence the sequence we're threading through the graph + * @param kmerStart the start of the current kmer in graph we'd like to add + * @param count the number of observations of this kmer in graph (can be > 1 for reduced reads) + * @param isRef is this the reference sequence? 
+ * @return a non-null vertex connecting prevVertex to in the graph based on sequence + */ + private MultiDeBruijnVertex extendChainByOne(final MultiDeBruijnVertex prevVertex, final byte[] sequence, final int kmerStart, final int count, final boolean isRef) { + final Set outgoingEdges = outgoingEdgesOf(prevVertex); + + final int nextPos = kmerStart + kmerSize - 1; + for ( final MultiSampleEdge outgoingEdge : outgoingEdges ) { + final MultiDeBruijnVertex target = getEdgeTarget(outgoingEdge); + if ( target.getSuffix() == sequence[nextPos] ) { + // we've got a match in the chain, so simply increase the count of the edge by 1 and continue + outgoingEdge.incMultiplicity(count); + return target; + } + } + + // none of our outgoing edges had our unique suffix base, so we check for an opportunity to merge back in + final Kmer kmer = new Kmer(sequence, kmerStart, kmerSize); + final MultiDeBruijnVertex uniqueMergeVertex = getUniqueKmerVertex(kmer, false); + + if ( isRef && uniqueMergeVertex != null ) + throw new IllegalStateException("Found a unique vertex to merge into the reference graph " + prevVertex + " -> " + uniqueMergeVertex); + + // either use our unique merge vertex, or create a new one in the chain + final MultiDeBruijnVertex nextVertex = uniqueMergeVertex == null ? createVertex(kmer) : uniqueMergeVertex; + addEdge(prevVertex, nextVertex, ((MyEdgeFactory)getEdgeFactory()).createEdge(isRef, count)); + return nextVertex; + } + + /** + * Add the given read to the sequence graph. Ultimately the read will get sent through addSequence(), but first + * this method ensures we only use high quality bases and accounts for reduced reads, etc. 
+ * + * @param read a non-null read + */ + protected void addRead(final GATKSAMRecord read) { + final byte[] sequence = read.getReadBases(); + final byte[] qualities = read.getBaseQualities(); + final int[] reducedReadCounts = read.getReducedReadCounts(); // will be null if read is not reduced + + int lastGood = -1; // the index of the last good base we've seen + for( int end = 0; end <= sequence.length; end++ ) { + if ( end == sequence.length || ! baseIsUsableForAssembly(sequence[end], qualities[end]) ) { + // the first good base is at lastGood, can be -1 if last base was bad + final int start = lastGood; + // the stop base is end - 1 (if we're not at the end of the sequence) + final int stop = end == sequence.length ? sequence.length : end; + final int len = stop - start + 1; + + if ( start != -1 && len >= kmerSize ) { + // if the sequence is long enough to get some value out of, add it to the graph + final String name = read.getReadName() + "_" + start + "_" + end; + addSequence(name, read.getReadGroup().getSample(), read.getReadBases(), start, stop, reducedReadCounts, false); + } + + lastGood = -1; // reset the last good base + } else if ( lastGood == -1 ) { + lastGood = end; // we're at a good base, the last good one is us + } + } + } + + /** + * Determines whether a base can safely be used for assembly. + * Currently disallows Ns and/or those with low quality + * + * @param base the base under consideration + * @param qual the quality of that base + * @return true if the base can be used for assembly, false otherwise + */ + protected boolean baseIsUsableForAssembly(final byte base, final byte qual) { + return base != BaseUtils.Base.N.base && qual >= minBaseQualityToUseInAssembly; + } + + /** + * Get the set of non-unique kmers in this graph. 
For debugging purposes + * @return a non-null set of kmers + */ + protected Set getNonUniqueKmers() { + return nonUniqueKmers; + } + + @Override + public String toString() { + return "ReadThreadingAssembler{" + + "kmerSize=" + kmerSize + + '}'; + } +} \ No newline at end of file diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/SequenceForKmers.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/SequenceForKmers.java new file mode 100644 index 000000000..a4bc0c1c8 --- /dev/null +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/SequenceForKmers.java @@ -0,0 +1,93 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). +* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. 
Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. 
LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. 
+* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. 
Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. +*/ + +package org.broadinstitute.sting.gatk.walkers.haplotypecaller.readthreading; + +/** + * Keeps track of the information needed to add a sequence to the read threading assembly graph + * + * User: depristo + * Date: 4/18/13 + * Time: 8:59 AM + * To change this template use File | Settings | File Templates. 
+ */ +final class SequenceForKmers { + final String name; + final byte[] sequence; + final int start, stop; + final private int[] counts; + final boolean isRef; + + /** + * Create a new sequence for creating kmers + */ + SequenceForKmers(final String name, byte[] sequence, int start, int stop, int[] counts, boolean ref) { + if ( start < 0 ) throw new IllegalArgumentException("Invalid start " + start); + if ( stop < start ) throw new IllegalArgumentException("Invalid stop " + stop); + if ( sequence == null ) throw new IllegalArgumentException("Sequence is null "); + if ( counts != null && counts.length != sequence.length ) throw new IllegalArgumentException("Sequence and counts don't have the same length " + sequence.length + " vs " + counts.length); + + this.name = name; + this.sequence = sequence; + this.start = start; + this.stop = stop; + this.isRef = ref; + this.counts = counts; + } + + /** + * Get the number of observations of the kmer starting at i in this sequence + * + * Can be > 1 because sequence may be a reduced read and therefore count as N observations + * + * @param i the offset into sequence for the start of the kmer + * @return a count >= 1 that indicates the number of observations of kmer starting at i in this sequence. + */ + public int getCount(final int i) { + if ( i < 0 || i > sequence.length ) throw new ArrayIndexOutOfBoundsException("i must be >= 0 and <= " + sequence.length + " but got " + i); + return counts == null ? 
1 : counts[i]; + } +} diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/indels/ConstrainedMateFixingManager.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/indels/ConstrainedMateFixingManager.java index 5411c5d98..c98fe4d3c 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/indels/ConstrainedMateFixingManager.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/indels/ConstrainedMateFixingManager.java @@ -212,6 +212,15 @@ public class ConstrainedMateFixingManager { public int getNReadsInQueue() { return waitingReads.size(); } + /** + * For testing purposes only + * + * @return the list of reads currently in the queue + */ + protected List getReadsInQueueForTesting() { + return new ArrayList(waitingReads); + } + public boolean canMoveReads(GenomeLoc earliestPosition) { if ( DEBUG ) logger.info("Refusing to realign? " + earliestPosition + " vs. " + lastLocFlushed); @@ -233,7 +242,7 @@ public class ConstrainedMateFixingManager { addRead(newRead, modifiedReads.contains(newRead), false); } - private void addRead(SAMRecord newRead, boolean readWasModified, boolean canFlush) { + protected void addRead(SAMRecord newRead, boolean readWasModified, boolean canFlush) { if ( DEBUG ) logger.info("New read pos " + newRead.getAlignmentStart() + " OP = " + newRead.getAttribute("OP") + " " + readWasModified); //final long curTime = timer.currentTime(); @@ -265,7 +274,7 @@ public class ConstrainedMateFixingManager { // fix mates, as needed // Since setMateInfo can move reads, we potentially need to remove the mate, and requeue // it to ensure proper sorting - if ( newRead.getReadPairedFlag() ) { + if ( newRead.getReadPairedFlag() && !newRead.getNotPrimaryAlignmentFlag() ) { SAMRecordHashObject mate = forMateMatching.get(newRead.getReadName()); if ( mate != null ) { // 1. 
Frustratingly, Picard's setMateInfo() method unaligns (by setting the reference contig diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/indels/PairHMMIndelErrorModel.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/indels/PairHMMIndelErrorModel.java index 363f7a357..c77557da6 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/indels/PairHMMIndelErrorModel.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/indels/PairHMMIndelErrorModel.java @@ -54,6 +54,7 @@ import org.broadinstitute.sting.utils.clipping.ReadClipper; import org.broadinstitute.sting.utils.exceptions.UserException; import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; import org.broadinstitute.sting.utils.pairhmm.Log10PairHMM; +import org.broadinstitute.sting.utils.pairhmm.LoglessPairHMM; import org.broadinstitute.sting.utils.pairhmm.PairHMM; import org.broadinstitute.sting.utils.pileup.PileupElement; import org.broadinstitute.sting.utils.pileup.ReadBackedPileup; @@ -78,8 +79,6 @@ public class PairHMMIndelErrorModel { private static final double baseMatchArray[]; private static final double baseMismatchArray[]; - private final static double LOG_ONE_HALF; - private static final int START_HRUN_GAP_IDX = 4; private static final int MAX_HRUN_GAP_IDX = 20; @@ -97,8 +96,6 @@ public class PairHMMIndelErrorModel { ///////////////////////////// static { - LOG_ONE_HALF= -Math.log10(2.0); - baseMatchArray = new double[MAX_CACHED_QUAL+1]; baseMismatchArray = new double[MAX_CACHED_QUAL+1]; for (int k=1; k <= MAX_CACHED_QUAL; k++) { @@ -120,12 +117,11 @@ public class PairHMMIndelErrorModel { case ORIGINAL: pairHMM = new Log10PairHMM(false); break; - case LOGLESS_CACHING: //TODO: still not tested so please do not use yet - //pairHMM = new LoglessCachingPairHMM(); //TODO - add it back when the figure out how to use the protected LoglessCachingPairHMM class - throw new UserException.BadArgumentValue("pairHMM"," this option 
(LOGLESS_CACHING in UG) is still under development"); - //break; + case LOGLESS_CACHING: + pairHMM = new LoglessPairHMM(); + break; default: - throw new UserException.BadArgumentValue("pairHMM", "Specified pairHMM implementation is unrecognized or incompatible with the UnifiedGenotyper. Acceptable options are ORIGINAL, EXACT or LOGLESS_CACHING (the third option is still under development)."); + throw new UserException.BadArgumentValue("pairHMM", "Specified pairHMM implementation is unrecognized or incompatible with the UnifiedGenotyper. Acceptable options are ORIGINAL, EXACT or LOGLESS_CACHING."); } // fill gap penalty table, affine naive model: @@ -466,7 +462,7 @@ public class PairHMMIndelErrorModel { final double li = readLikelihoods[readIdx][i]; final double lj = readLikelihoods[readIdx][j]; final int readCount = readCounts[readIdx]; - haplotypeLikehoodMatrix[i][j] += readCount * (MathUtils.approximateLog10SumLog10(li, lj) + LOG_ONE_HALF); + haplotypeLikehoodMatrix[i][j] += readCount * (MathUtils.approximateLog10SumLog10(li, lj) + MathUtils.LOG_ONE_HALF); } } } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/variantrecalibration/GaussianMixtureModel.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/variantrecalibration/GaussianMixtureModel.java index eef9da84a..92b0d4df2 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/variantrecalibration/GaussianMixtureModel.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/variantrecalibration/GaussianMixtureModel.java @@ -47,7 +47,6 @@ package org.broadinstitute.sting.gatk.walkers.variantrecalibration; import Jama.Matrix; -import cern.jet.random.Normal; import org.apache.log4j.Logger; import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; import org.broadinstitute.sting.utils.MathUtils; @@ -243,12 +242,10 @@ public class GaussianMixtureModel { public Double evaluateDatumInOneDimension( final VariantDatum datum, final int iii ) { 
if(datum.isNull[iii]) { return null; } - final Normal normal = new Normal(0.0, 1.0, null); final double[] pVarInGaussianLog10 = new double[gaussians.size()]; int gaussianIndex = 0; for( final MultivariateGaussian gaussian : gaussians ) { - normal.setState( gaussian.mu[iii], gaussian.sigma.get(iii, iii) ); - pVarInGaussianLog10[gaussianIndex++] = gaussian.pMixtureLog10 + Math.log10( normal.pdf( datum.annotations[iii] ) ); + pVarInGaussianLog10[gaussianIndex++] = gaussian.pMixtureLog10 + MathUtils.normalDistributionLog10(gaussian.mu[iii], gaussian.sigma.get(iii, iii), datum.annotations[iii]); } return MathUtils.log10sumLog10(pVarInGaussianLog10); // Sum(pi_k * p(v|n,k)) } diff --git a/protected/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/AllHaplotypeBAMWriter.java b/protected/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/AllHaplotypeBAMWriter.java index 54061c781..e7e5cf0e1 100644 --- a/protected/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/AllHaplotypeBAMWriter.java +++ b/protected/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/AllHaplotypeBAMWriter.java @@ -80,18 +80,18 @@ class AllHaplotypeBAMWriter extends HaplotypeBAMWriter { final List bestHaplotypes, final Set calledHaplotypes, final Map stratifiedReadMap) { - writeHaplotypesAsReads(haplotypes, new HashSet(bestHaplotypes), paddedReferenceLoc); + writeHaplotypesAsReads(haplotypes, new HashSet<>(bestHaplotypes), paddedReferenceLoc); // we need to remap the Alleles back to the Haplotypes; inefficient but unfortunately this is a requirement currently - final Map alleleToHaplotypeMap = new HashMap(haplotypes.size()); + final Map alleleToHaplotypeMap = new HashMap<>(haplotypes.size()); for ( final Haplotype haplotype : haplotypes ) alleleToHaplotypeMap.put(Allele.create(haplotype.getBases()), haplotype); // next, output the interesting reads for each sample aligned against the appropriate haplotype for ( final PerReadAlleleLikelihoodMap readAlleleLikelihoodMap : 
stratifiedReadMap.values() ) { - for ( Map.Entry> entry : readAlleleLikelihoodMap.getLikelihoodReadMap().entrySet() ) { + for ( final Map.Entry> entry : readAlleleLikelihoodMap.getLikelihoodReadMap().entrySet() ) { final MostLikelyAllele bestAllele = PerReadAlleleLikelihoodMap.getMostLikelyAllele(entry.getValue()); - writeReadAgainstHaplotype(entry.getKey(), alleleToHaplotypeMap.get(bestAllele.getMostLikelyAllele()), paddedReferenceLoc.getStart()); + writeReadAgainstHaplotype(entry.getKey(), alleleToHaplotypeMap.get(bestAllele.getMostLikelyAllele()), paddedReferenceLoc.getStart(), bestAllele.isInformative()); } } } diff --git a/protected/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/CalledHaplotypeBAMWriter.java b/protected/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/CalledHaplotypeBAMWriter.java index d63cf65fc..7206dd674 100644 --- a/protected/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/CalledHaplotypeBAMWriter.java +++ b/protected/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/CalledHaplotypeBAMWriter.java @@ -87,7 +87,7 @@ class CalledHaplotypeBAMWriter extends HaplotypeBAMWriter { writeHaplotypesAsReads(calledHaplotypes, calledHaplotypes, paddedReferenceLoc); // we need to remap the Alleles back to the Haplotypes; inefficient but unfortunately this is a requirement currently - final Map alleleToHaplotypeMap = new HashMap(haplotypes.size()); + final Map alleleToHaplotypeMap = new HashMap<>(haplotypes.size()); for ( final Haplotype haplotype : calledHaplotypes ) { alleleToHaplotypeMap.put(Allele.create(haplotype.getBases()), haplotype); } @@ -97,10 +97,10 @@ class CalledHaplotypeBAMWriter extends HaplotypeBAMWriter { // next, output the interesting reads for each sample aligned against one of the called haplotypes for ( final PerReadAlleleLikelihoodMap readAlleleLikelihoodMap : stratifiedReadMap.values() ) { - for ( Map.Entry> entry : readAlleleLikelihoodMap.getLikelihoodReadMap().entrySet() ) { + for ( 
final Map.Entry> entry : readAlleleLikelihoodMap.getLikelihoodReadMap().entrySet() ) { if ( entry.getKey().getMappingQuality() > 0 ) { final MostLikelyAllele bestAllele = PerReadAlleleLikelihoodMap.getMostLikelyAllele(entry.getValue(), allelesOfCalledHaplotypes); - writeReadAgainstHaplotype(entry.getKey(), alleleToHaplotypeMap.get(bestAllele.getMostLikelyAllele()), paddedReferenceLoc.getStart()); + writeReadAgainstHaplotype(entry.getKey(), alleleToHaplotypeMap.get(bestAllele.getMostLikelyAllele()), paddedReferenceLoc.getStart(), bestAllele.isInformative()); } } } diff --git a/protected/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/HaplotypeBAMWriter.java b/protected/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/HaplotypeBAMWriter.java index 2eea664d9..1afbeed63 100644 --- a/protected/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/HaplotypeBAMWriter.java +++ b/protected/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/HaplotypeBAMWriter.java @@ -185,11 +185,13 @@ public abstract class HaplotypeBAMWriter { * @param originalRead the read we want to write aligned to the reference genome * @param haplotype the haplotype that the read should be aligned to, before aligning to the reference * @param referenceStart the start of the reference that haplotype is aligned to. Provides global coordinate frame. 
+ * @param isInformative true if the read is differentially informative for one of the haplotypes */ protected void writeReadAgainstHaplotype(final GATKSAMRecord originalRead, final Haplotype haplotype, - final int referenceStart) { - final GATKSAMRecord alignedToRef = createReadAlignedToRef(originalRead, haplotype, referenceStart); + final int referenceStart, + final boolean isInformative) { + final GATKSAMRecord alignedToRef = createReadAlignedToRef(originalRead, haplotype, referenceStart, isInformative); if ( alignedToRef != null ) bamWriter.addAlignment(alignedToRef); } @@ -201,11 +203,13 @@ public abstract class HaplotypeBAMWriter { * @param originalRead the read we want to write aligned to the reference genome * @param haplotype the haplotype that the read should be aligned to, before aligning to the reference * @param referenceStart the start of the reference that haplotype is aligned to. Provides global coordinate frame. + * @param isInformative true if the read is differentially informative for one of the haplotypes * @return a GATKSAMRecord aligned to reference, or null if no meaningful alignment is possible */ protected GATKSAMRecord createReadAlignedToRef(final GATKSAMRecord originalRead, final Haplotype haplotype, - final int referenceStart) { + final int referenceStart, + final boolean isInformative) { if ( originalRead == null ) throw new IllegalArgumentException("originalRead cannot be null"); if ( haplotype == null ) throw new IllegalArgumentException("haplotype cannot be null"); if ( haplotype.getCigar() == null ) throw new IllegalArgumentException("Haplotype cigar not set " + haplotype); @@ -225,6 +229,10 @@ public abstract class HaplotypeBAMWriter { addHaplotypeTag(read, haplotype); + // uninformative reads are set to zero mapping quality to enhance visualization + if ( !isInformative ) + read.setMappingQuality(0); + // compute here the read starts w.r.t. 
the reference from the SW result and the hap -> ref cigar final Cigar extendedHaplotypeCigar = haplotype.getConsolidatedPaddedCigar(1000); final int readStartOnHaplotype = AlignmentUtils.calcFirstBaseMatchingReferenceInCigar(extendedHaplotypeCigar, swPairwiseAlignment.getAlignmentStart2wrt1()); diff --git a/protected/java/src/org/broadinstitute/sting/utils/pairhmm/LoglessPairHMM.java b/protected/java/src/org/broadinstitute/sting/utils/pairhmm/LoglessPairHMM.java index ab2a5bb2a..184a2689d 100644 --- a/protected/java/src/org/broadinstitute/sting/utils/pairhmm/LoglessPairHMM.java +++ b/protected/java/src/org/broadinstitute/sting/utils/pairhmm/LoglessPairHMM.java @@ -55,7 +55,7 @@ import org.broadinstitute.sting.utils.QualityUtils; * User: rpoplin, carneiro * Date: 10/16/12 */ -public final class LoglessPairHMM extends PairHMM { +public final class LoglessPairHMM extends N2MemoryPairHMM { protected static final double INITIAL_CONDITION = Math.pow(2, 1020); protected static final double INITIAL_CONDITION_LOG10 = Math.log10(INITIAL_CONDITION); @@ -99,8 +99,13 @@ public final class LoglessPairHMM extends PairHMM { } } - if ( ! constantsAreInitialized || recacheReadValues ) - initializeProbabilities(insertionGOP, deletionGOP, overallGCP); + if ( ! 
constantsAreInitialized || recacheReadValues ) { + initializeProbabilities(transition, insertionGOP, deletionGOP, overallGCP); + + // note that we initialized the constants + constantsAreInitialized = true; + } + initializePriors(haplotypeBases, readBases, readQuals, hapStartIndex); for (int i = 1; i < paddedReadLength; i++) { @@ -159,7 +164,7 @@ public final class LoglessPairHMM extends PairHMM { "overallGCP != null" }) @Ensures("constantsAreInitialized") - private void initializeProbabilities(final byte[] insertionGOP, final byte[] deletionGOP, final byte[] overallGCP) { + protected static void initializeProbabilities(final double[][] transition, final byte[] insertionGOP, final byte[] deletionGOP, final byte[] overallGCP) { for (int i = 0; i < insertionGOP.length; i++) { final int qualIndexGOP = Math.min(insertionGOP[i] + deletionGOP[i], Byte.MAX_VALUE); transition[i+1][matchToMatch] = QualityUtils.qualToProb((byte) qualIndexGOP); @@ -169,9 +174,6 @@ public final class LoglessPairHMM extends PairHMM { transition[i+1][matchToDeletion] = QualityUtils.qualToErrorProb(deletionGOP[i]); transition[i+1][deletionToDeletion] = QualityUtils.qualToErrorProb(overallGCP[i]); } - - // note that we initialized the constants - constantsAreInitialized = true; } /** diff --git a/protected/java/src/org/broadinstitute/sting/utils/pairhmm/PairHMMTestData.java b/protected/java/src/org/broadinstitute/sting/utils/pairhmm/PairHMMTestData.java new file mode 100644 index 000000000..3d8137ecf --- /dev/null +++ b/protected/java/src/org/broadinstitute/sting/utils/pairhmm/PairHMMTestData.java @@ -0,0 +1,162 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. 
with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). +* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. 
LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. 
INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. +* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. 
LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. 
This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. +*/ + +package org.broadinstitute.sting.utils.pairhmm; + +import net.sf.samtools.SAMUtils; +import org.broadinstitute.sting.utils.Utils; +import org.broadinstitute.sting.utils.text.XReadLines; + +import java.io.*; +import java.util.LinkedHashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.zip.GZIPInputStream; + +/** + * Useful single class carrying test data for PairHMMs (for use in benchmarking and unit tests) + * + * User: depristo + * Date: 5/12/13 + * Time: 3:52 PM + * To change this template use File | Settings | File Templates. 
+ */ +public class PairHMMTestData { + public final String ref; + private final String read; + public final byte[] baseQuals, insQuals, delQuals, gcp; + public final double log10l; + + PairHMMTestData(String ref, String read, byte[] baseQuals, byte[] insQuals, byte[] delQuals, byte[] gcp, double log10l) { + this.ref = ref; + this.read = read; + this.baseQuals = baseQuals; + this.insQuals = insQuals; + this.delQuals = delQuals; + this.gcp = gcp; + this.log10l = log10l; + } + + PairHMMTestData(String ref, String read, final byte qual) { + this.ref = ref; + this.read = read; + this.baseQuals = this.insQuals = this.delQuals = Utils.dupBytes(qual, read.length()); + this.gcp = Utils.dupBytes((byte)10, read.length()); + this.log10l = -1; + } + + public double runHMM(final PairHMM hmm) { + hmm.initialize(getRead().length(), ref.length()); + return hmm.computeReadLikelihoodGivenHaplotypeLog10(ref.getBytes(), getRead().getBytes(), + baseQuals, insQuals, delQuals, gcp, true); + } + + @Override + public String toString() { + return "Info{" + + "ref='" + ref + '\'' + + ", read='" + getRead() + '\'' + + ", log10l=" + log10l + + '}'; + } + + public static void runHMMs(final PairHMM hmm, final List data, final boolean runSingly) { + if ( runSingly ) { + for ( final PairHMMTestData datum : data ) + datum.runHMM(hmm); + } else { + // running in batch mode + final PairHMMTestData first = data.get(0); + int maxHaplotypeLen = calcMaxHaplotypeLen(data); + hmm.initialize(first.getRead().length(), maxHaplotypeLen); + for ( final PairHMMTestData datum : data ) { + hmm.computeReadLikelihoodGivenHaplotypeLog10(datum.ref.getBytes(), datum.getRead().getBytes(), + datum.baseQuals, datum.insQuals, datum.delQuals, datum.gcp, false); + + } + } + } + + public static int calcMaxHaplotypeLen(final List data) { + int maxHaplotypeLen = 0; + for ( final PairHMMTestData datum : data ) + maxHaplotypeLen = Math.max(maxHaplotypeLen, datum.ref.length()); + return maxHaplotypeLen; + } + + public static Map> 
readLikelihoods(final File file) throws IOException { + final Map> results = new LinkedHashMap<>(); + + InputStream in = new FileInputStream(file); + if ( file.getName().endsWith(".gz") ) { + in = new GZIPInputStream(in); + } + + for ( final String line : new XReadLines(in) ) { + final String[] parts = line.split(" "); + final PairHMMTestData info = new PairHMMTestData( + parts[0], parts[1], + SAMUtils.fastqToPhred(parts[2]), + SAMUtils.fastqToPhred(parts[3]), + SAMUtils.fastqToPhred(parts[4]), + SAMUtils.fastqToPhred(parts[5]), + Double.parseDouble(parts[6])); + + if ( ! results.containsKey(info.read) ) { + results.put(info.read, new LinkedList()); + } + final List byHap = results.get(info.read); + byHap.add(info); + } + + return results; + } + + public String getRead() { + return read; + } +} diff --git a/protected/java/src/org/broadinstitute/sting/utils/recalibration/RecalUtils.java b/protected/java/src/org/broadinstitute/sting/utils/recalibration/RecalUtils.java index ae6b56e19..56f7e8257 100644 --- a/protected/java/src/org/broadinstitute/sting/utils/recalibration/RecalUtils.java +++ b/protected/java/src/org/broadinstitute/sting/utils/recalibration/RecalUtils.java @@ -70,9 +70,7 @@ import org.broadinstitute.sting.utils.sam.GATKSAMReadGroupRecord; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import org.broadinstitute.sting.utils.sam.ReadUtils; -import java.io.File; -import java.io.IOException; -import java.io.PrintStream; +import java.io.*; import java.util.*; /** @@ -223,6 +221,150 @@ public class RecalUtils { } } + /** + * Component used to print out csv representation of the reports that can be use to perform analysis in + * external tools. E.g. generate plots using R scripts. + *

+ * A header is always printed into the output stream (or file) when the printer is created. Then you only need + * to call {@link #print(RecalibrationReport,String) print} for each report you want to include in the csv file. + * Once finished, you close the printer calling {@link #close() close} + * + */ + private static class CsvPrinter { + + private final PrintStream ps; + private final Covariate[] covariates; + + /** + * Constructs a printer redirected to an output file. + * @param out the output file. + * @param c covariates to print out. + * @throws FileNotFoundException if the file could not be created anew. + */ + protected CsvPrinter(final File out, final Covariate ... c) + throws FileNotFoundException { + this(new FileOutputStream(out), c); + } + + /** + * Constructs a printer redirected to an output stream + * @param os the output. + * @param c covariates to print out. + */ + protected CsvPrinter(final OutputStream os, final Covariate ... c) { + covariates = c == null ? new Covariate[0] : c.clone(); + ps = new PrintStream(os); + printHeader(); + } + + /** + * Prints the header out. + *

+ * Should only be invoked at creation. + */ + protected void printHeader() { + RecalUtils.printHeader(ps); + } + + /** + * Prints out a report into the csv file. + * + * + * @param report the report to print out. + * @param mode the report associated mode. (typically ORIGINAL, RECALIBRATED + */ + public void print(final RecalibrationReport report, final String mode) { + RecalUtils.writeCSV(ps,report.getRecalibrationTables(),mode,covariates,false); + } + + /** + * Close the csv printer. + * + * No further output will be allowed or take place after calling this method. + */ + public void close() { + ps.close(); + } + + } + + /** + * Returns a csv output printer. + * + * @param out the output file. It will be overridden + * @param c list of covariates to print out. + * + * @throws FileNotFoundException if out could not be created anew. + * + * @return never null + */ + protected static CsvPrinter csvPrinter(final File out, final Covariate ... c) + throws FileNotFoundException + { + if (c == null) { + throw new IllegalArgumentException("the input covariate array cannot be null"); + } + return new CsvPrinter(out,c); + } + + /** + * Prints out a collection of reports into a file in Csv format in a way + * that can be used by R scripts (such as the plot generator script). + *

+ * The set of covariates is take as the minimum common set from all reports. + * + * @param out the output file. It will be overridden. + * @param reports map where keys are the unique 'mode' (ORIGINAL, RECALIBRATED, ...) + * of each report and the corresponding value the report itself. + * @throws FileNotFoundException if out could not be created anew. + */ + public static void generateCsv(final File out, final Map reports) + throws FileNotFoundException { + if (reports.size() == 0) { + writeCsv(out, reports, new Covariate[0]); + } else { + final Iterator rit = reports.values().iterator(); + final RecalibrationReport first = rit.next(); + final Covariate[] firstCovariates = first.getRequestedCovariates(); + final Set covariates = new LinkedHashSet<>(); + Utils.addAll(covariates,firstCovariates); + while (rit.hasNext() && covariates.size() > 0) { + final Covariate[] nextCovariates = rit.next().getRequestedCovariates(); + final Set nextCovariateNames = new LinkedHashSet(nextCovariates.length); + for (final Covariate nc : nextCovariates) { + nextCovariateNames.add(nc.getClass().getSimpleName()); + } + final Iterator cit = covariates.iterator(); + while (cit.hasNext()) { + if (!nextCovariateNames.contains(cit.next().getClass().getSimpleName())) { + cit.remove(); + } + } + } + writeCsv(out, reports, covariates.toArray(new Covariate[covariates.size()])); + } + } + + /** + * Print out a collection of reports into a file in Csv format in a way + * that can be used by R scripts (such as the plot generator script). + * + * @param out + * @param reports map where keys are the unique 'mode' (ORIGINAL, RECALIBRATED, ...) + * of each report and the corresponding value the report itself. + * @param c the covariates to print out. + * @throws FileNotFoundException if out could not be created anew. 
+ */ + private static void writeCsv(final File out, + final Map reports, final Covariate[] c) + throws FileNotFoundException { + final CsvPrinter p = csvPrinter(out,c); + for (Map.Entry e : reports.entrySet()) { + p.print(e.getValue(),e.getKey()); + } + p.close(); + } + public enum SOLID_RECAL_MODE { /** * Treat reference inserted bases as reference matching bases. Very unsafe! @@ -390,36 +532,66 @@ public class RecalUtils { report.print(outputFile); } - private static void outputRecalibrationPlot(final RecalibrationArgumentCollection RAC) { - + /** s + * Write recalibration plots into a file + * + * @param csvFile location of the intermediary file + * @param exampleReportFile where the report arguments are collected from. + * @param output result plot file name. + */ + public static void generatePlots(final File csvFile, final File exampleReportFile, final File output) { final RScriptExecutor executor = new RScriptExecutor(); + executor.setExceptOnError(true); executor.addScript(new Resource(SCRIPT_FILE, RecalUtils.class)); - executor.addArgs(RAC.RECAL_CSV_FILE.getAbsolutePath()); - executor.addArgs(RAC.RECAL_TABLE_FILE.getAbsolutePath()); - executor.addArgs(RAC.RECAL_PDF_FILE.getAbsolutePath()); + executor.addArgs(csvFile.getAbsolutePath()); + executor.addArgs(exampleReportFile.getAbsolutePath()); + executor.addArgs(output.getAbsolutePath()); + Logger.getLogger(RecalUtils.class).debug("R command line: " + executor.getApproximateCommandLine()); executor.exec(); } + private static void outputRecalibrationPlot(final File csvFile, final RecalibrationArgumentCollection RAC) { + + final RScriptExecutor executor = new RScriptExecutor(); + executor.addScript(new Resource(SCRIPT_FILE, RecalUtils.class)); + executor.addArgs(csvFile.getAbsolutePath()); + executor.addArgs(RAC.RECAL_TABLE_FILE.getAbsolutePath()); + executor.exec(); + } + + /** + * Please use {@link #generateCsv(java.io.File, java.util.Map)} and {@link #generatePlots(java.io.File, java.io.File, java.io.File)} 
instead. + * + * @deprecated + */ + @Deprecated public static void generateRecalibrationPlot(final RecalibrationArgumentCollection RAC, final RecalibrationTables original, final Covariate[] requestedCovariates) { generateRecalibrationPlot(RAC, original, null, requestedCovariates); } + /** + * Please use {@link #generateCsv(java.io.File, java.util.Map)} and {@link #generatePlots(java.io.File, java.io.File, java.io.File)} instead. + * + * @deprecated + */ + @Deprecated public static void generateRecalibrationPlot(final RecalibrationArgumentCollection RAC, final RecalibrationTables original, final RecalibrationTables recalibrated, final Covariate[] requestedCovariates) { - final PrintStream csvFile; + final PrintStream csvStream; + final File csvTempFile = null; try { - if ( RAC.RECAL_CSV_FILE == null ) { - RAC.RECAL_CSV_FILE = File.createTempFile("BQSR", ".csv"); - RAC.RECAL_CSV_FILE.deleteOnExit(); - } - csvFile = new PrintStream(RAC.RECAL_CSV_FILE); + File csvTmpFile = File.createTempFile("BQSR",".csv"); + csvTmpFile.deleteOnExit(); + csvStream = new PrintStream(csvTmpFile); } catch (IOException e) { - throw new UserException.CouldNotCreateOutputFile(RAC.RECAL_CSV_FILE, e); + throw new UserException("Could not create temporary csv file", e); } if ( recalibrated != null ) - writeCSV(csvFile, recalibrated, "RECALIBRATED", requestedCovariates, true); - writeCSV(csvFile, original, "ORIGINAL", requestedCovariates, recalibrated == null); - outputRecalibrationPlot(RAC); + writeCSV(csvStream, recalibrated, "RECALIBRATED", requestedCovariates, true); + writeCSV(csvStream, original, "ORIGINAL", requestedCovariates, recalibrated == null); + csvStream.close(); + outputRecalibrationPlot(csvTempFile, RAC); + csvTempFile.delete(); } private static void writeCSV(final PrintStream deltaTableFile, final RecalibrationTables recalibrationTables, final String recalibrationMode, final Covariate[] requestedCovariates, final boolean printHeader) { @@ -452,18 +624,7 @@ public class 
RecalUtils { // output the csv file if (printHeader) { - final List header = new LinkedList(); - header.add("ReadGroup"); - header.add("CovariateValue"); - header.add("CovariateName"); - header.add("EventType"); - header.add("Observations"); - header.add("Errors"); - header.add("EmpiricalQuality"); - header.add("AverageReportedQuality"); - header.add("Accuracy"); - header.add("Recalibration"); - deltaTableFile.println(Utils.join(",", header)); + printHeader(deltaTableFile); } final Map covariateNameMap = new HashMap(requestedCovariates.length); @@ -480,6 +641,21 @@ public class RecalUtils { } } + private static void printHeader(PrintStream out) { + final List header = new LinkedList(); + header.add("ReadGroup"); + header.add("CovariateValue"); + header.add("CovariateName"); + header.add("EventType"); + header.add("Observations"); + header.add("Errors"); + header.add("EmpiricalQuality"); + header.add("AverageReportedQuality"); + header.add("Accuracy"); + header.add("Recalibration"); + out.println(Utils.join(",", header)); + } + /* * Return an initialized nested integer array with appropriate dimensions for use with the delta tables * diff --git a/protected/java/src/org/broadinstitute/sting/utils/recalibration/RecalibrationReport.java b/protected/java/src/org/broadinstitute/sting/utils/recalibration/RecalibrationReport.java index ea45c2abf..091b5ecf0 100644 --- a/protected/java/src/org/broadinstitute/sting/utils/recalibration/RecalibrationReport.java +++ b/protected/java/src/org/broadinstitute/sting/utils/recalibration/RecalibrationReport.java @@ -340,9 +340,6 @@ public class RecalibrationReport { else if (argument.equals("recalibration_report")) RAC.existingRecalibrationReport = (value == null) ? null : new File((String) value); - else if (argument.equals("plot_pdf_file")) - RAC.RECAL_PDF_FILE = (value == null) ? null : new File((String) value); - else if (argument.equals("binary_tag_name")) RAC.BINARY_TAG_NAME = (value == null) ? 
null : (String) value; @@ -369,6 +366,11 @@ public class RecalibrationReport { return RAC; } + /** + * + * @deprecated use {@link #getRequestedCovariates()} instead. + */ + @Deprecated public Covariate[] getCovariates() { return requestedCovariates; } diff --git a/protected/java/src/org/broadinstitute/sting/utils/recalibration/covariates/ContextCovariate.java b/protected/java/src/org/broadinstitute/sting/utils/recalibration/covariates/ContextCovariate.java index 4fc9470f4..79ffa50a3 100644 --- a/protected/java/src/org/broadinstitute/sting/utils/recalibration/covariates/ContextCovariate.java +++ b/protected/java/src/org/broadinstitute/sting/utils/recalibration/covariates/ContextCovariate.java @@ -67,6 +67,8 @@ import java.util.ArrayList; public class ContextCovariate implements StandardCovariate { private final static Logger logger = Logger.getLogger(ContextCovariate.class); + + private int mismatchesContextSize; private int indelsContextSize; diff --git a/protected/java/src/org/broadinstitute/sting/utils/recalibration/covariates/ReadGroupCovariate.java b/protected/java/src/org/broadinstitute/sting/utils/recalibration/covariates/ReadGroupCovariate.java index 350cf5d33..664c1786e 100644 --- a/protected/java/src/org/broadinstitute/sting/utils/recalibration/covariates/ReadGroupCovariate.java +++ b/protected/java/src/org/broadinstitute/sting/utils/recalibration/covariates/ReadGroupCovariate.java @@ -93,10 +93,13 @@ public class ReadGroupCovariate implements RequiredCovariate { private final HashMap readGroupLookupTable = new HashMap(); private final HashMap readGroupReverseLookupTable = new HashMap(); private int nextId = 0; + private String forceReadGroup; // Initialize any member variables using the command-line arguments passed to the walkers @Override - public void initialize(final RecalibrationArgumentCollection RAC) {} + public void initialize(final RecalibrationArgumentCollection RAC) { + forceReadGroup = RAC.FORCE_READGROUP; + } @Override public void 
recordValues(final GATKSAMRecord read, final ReadCovariates values) { @@ -170,6 +173,9 @@ public class ReadGroupCovariate implements RequiredCovariate { * @return platform unit or readgroup id */ private String readGroupValueFromRG(final GATKSAMReadGroupRecord rg) { + if ( forceReadGroup != null ) + return forceReadGroup; + final String platformUnit = rg.getPlatformUnit(); return platformUnit == null ? rg.getId() : platformUnit; } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/annotator/RankSumUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/annotator/RankSumUnitTest.java new file mode 100644 index 000000000..fec83e1a8 --- /dev/null +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/annotator/RankSumUnitTest.java @@ -0,0 +1,151 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). +* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. 
DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. 
LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. 
BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. +* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. 
Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. 
+*/ + +package org.broadinstitute.sting.gatk.walkers.annotator; + +import org.broadinstitute.sting.gatk.walkers.compression.reducereads.*; +import org.broadinstitute.sting.gatk.walkers.compression.reducereads.BaseCounts; +import org.broadinstitute.sting.utils.MannWhitneyU; +import org.testng.Assert; +import org.testng.annotations.BeforeClass; +import org.testng.annotations.DataProvider; +import org.testng.annotations.Test; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +public class RankSumUnitTest { + + List distribution20, distribution30, distribution20_40; + static final int observations = 100; + + @BeforeClass + public void init() { + distribution20 = new ArrayList<>(observations); + distribution30 = new ArrayList<>(observations); + distribution20_40 = new ArrayList<>(observations); + + final int skew = 3; + makeDistribution(distribution20, 20, skew, observations); + makeDistribution(distribution30, 30, skew, observations); + makeDistribution(distribution20_40, 20, skew, observations/2); + makeDistribution(distribution20_40, 40, skew, observations/2); + + // shuffle the observations + Collections.shuffle(distribution20); + Collections.shuffle(distribution30); + Collections.shuffle(distribution20_40); + } + + private static void makeDistribution(final List result, final int target, final int skew, final int numObservations) { + final int rangeStart = target - skew; + final int rangeEnd = target + skew; + + int current = rangeStart; + for ( int i = 0; i < numObservations; i++ ) { + result.add(current++); + if ( current > rangeEnd ) + current = rangeStart; + } + } + + @DataProvider(name = "DistributionData") + public Object[][] makeDistributionData() { + List tests = new ArrayList(); + + for ( final int numToReduce : Arrays.asList(0, 10, 50, 100) ) { + tests.add(new Object[]{distribution20, distribution20, numToReduce, true, "20-20"}); + tests.add(new Object[]{distribution30, distribution30, 
numToReduce, true, "30-30"}); + tests.add(new Object[]{distribution20_40, distribution20_40, numToReduce, true, "20/40-20/40"}); + + tests.add(new Object[]{distribution20, distribution30, numToReduce, false, "20-30"}); + tests.add(new Object[]{distribution30, distribution20, numToReduce, false, "30-20"}); + + tests.add(new Object[]{distribution20, distribution20_40, numToReduce, false, "20-20/40"}); + tests.add(new Object[]{distribution30, distribution20_40, numToReduce, true, "30-20/40"}); + } + + return tests.toArray(new Object[][]{}); + } + + @Test(enabled = true, dataProvider = "DistributionData") + public void testDistribution(final List distribution1, final List distribution2, final int numToReduceIn2, final boolean distributionsShouldBeEqual, final String debugString) { + final MannWhitneyU mannWhitneyU = new MannWhitneyU(true); + + for ( final Integer num : distribution1 ) + mannWhitneyU.add(num, MannWhitneyU.USet.SET1); + + final List dist2 = new ArrayList<>(distribution2); + if ( numToReduceIn2 > 0 ) { + final org.broadinstitute.sting.gatk.walkers.compression.reducereads.BaseCounts counts = new BaseCounts(); + for ( int i = 0; i < numToReduceIn2; i++ ) { + final int value = dist2.remove(0); + counts.incr(BaseIndex.A, (byte)value, 0, false); + } + + final int qual = (int)counts.averageQualsOfBase(BaseIndex.A); + for ( int i = 0; i < numToReduceIn2; i++ ) + dist2.add(qual); + } + + for ( final Integer num : dist2 ) + mannWhitneyU.add(num, MannWhitneyU.USet.SET2); + + final Double result = mannWhitneyU.runTwoSidedTest().second; + Assert.assertFalse(Double.isNaN(result)); + + if ( distributionsShouldBeEqual ) { + // TODO -- THIS IS THE FAILURE POINT OF USING REDUCED READS WITH RANK SUM TESTS + if ( numToReduceIn2 >= observations / 2 ) + return; + Assert.assertTrue(result > 0.1, String.format("%f %d %d", result, numToReduceIn2, dist2.get(0))); + } else { + Assert.assertTrue(result < 0.01, String.format("%f %d %d", result, numToReduceIn2, dist2.get(0))); + } + 
} +} diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/annotator/VariantAnnotatorIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/annotator/VariantAnnotatorIntegrationTest.java index 5866075a7..e7d7300ae 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/annotator/VariantAnnotatorIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/annotator/VariantAnnotatorIntegrationTest.java @@ -78,7 +78,7 @@ public class VariantAnnotatorIntegrationTest extends WalkerTest { public void testHasAnnotsAsking1() { WalkerTestSpec spec = new WalkerTestSpec( baseTestString() + " -G Standard --variant " + privateTestDir + "vcfexample2.vcf -I " + validationDataLocation + "low_coverage_CEU.chr1.10k-11k.bam -L 1:10,020,000-10,021,000", 1, - Arrays.asList("fbfbd4d13b7ba3d76e8e186902e81378")); + Arrays.asList("823868a4b5b5ec2cdf080c059d04d31a")); executeTest("test file has annotations, asking for annotations, #1", spec); } @@ -86,7 +86,7 @@ public class VariantAnnotatorIntegrationTest extends WalkerTest { public void testHasAnnotsAsking2() { WalkerTestSpec spec = new WalkerTestSpec( baseTestString() + " -G Standard --variant " + privateTestDir + "vcfexample3.vcf -I " + validationDataLocation + "NA12878.1kg.p2.chr1_10mb_11_mb.SLX.bam -L 1:10,000,000-10,050,000", 1, - Arrays.asList("19aef8914efc497192f89a9038310ca5")); + Arrays.asList("213560f395280e6a066d0b0497ce8881")); executeTest("test file has annotations, asking for annotations, #2", spec); } @@ -112,7 +112,7 @@ public class VariantAnnotatorIntegrationTest extends WalkerTest { public void testNoAnnotsAsking1() { WalkerTestSpec spec = new WalkerTestSpec( baseTestString() + " -G Standard --variant " + privateTestDir + "vcfexample2empty.vcf -I " + validationDataLocation + "low_coverage_CEU.chr1.10k-11k.bam -L 1:10,020,000-10,021,000", 1, - Arrays.asList("4f0b8033da18e6cf6e9b8d5d36c21ba2")); + 
Arrays.asList("6f873b3152db291e18e3a04fbce2e117")); executeTest("test file doesn't have annotations, asking for annotations, #1", spec); } @@ -120,7 +120,7 @@ public class VariantAnnotatorIntegrationTest extends WalkerTest { public void testNoAnnotsAsking2() { WalkerTestSpec spec = new WalkerTestSpec( baseTestString() + " -G Standard --variant " + privateTestDir + "vcfexample3empty.vcf -I " + validationDataLocation + "NA12878.1kg.p2.chr1_10mb_11_mb.SLX.bam -L 1:10,000,000-10,050,000", 1, - Arrays.asList("64ca176d587dfa2b3b9dec9f7999305c")); + Arrays.asList("d8089c5874ff35a7fd7e35ebd7d3b137")); executeTest("test file doesn't have annotations, asking for annotations, #2", spec); } @@ -128,7 +128,7 @@ public class VariantAnnotatorIntegrationTest extends WalkerTest { public void testExcludeAnnotations() { WalkerTestSpec spec = new WalkerTestSpec( baseTestString() + " -G Standard -XA FisherStrand -XA ReadPosRankSumTest --variant " + privateTestDir + "vcfexample2empty.vcf -I " + validationDataLocation + "low_coverage_CEU.chr1.10k-11k.bam -L 1:10,020,000-10,021,000", 1, - Arrays.asList("f33f417fad98c05d9cd08ffa22943b0f")); + Arrays.asList("552c2ad9dbfaa85d51d2def93c8229c6")); executeTest("test exclude annotations", spec); } @@ -136,7 +136,7 @@ public class VariantAnnotatorIntegrationTest extends WalkerTest { public void testOverwritingHeader() { WalkerTestSpec spec = new WalkerTestSpec( baseTestString() + " -G Standard --variant " + privateTestDir + "vcfexample4.vcf -I " + validationDataLocation + "NA12878.1kg.p2.chr1_10mb_11_mb.SLX.bam -L 1:10,001,292", 1, - Arrays.asList("0c810f6c4abef9d9dc5513ca872d3d22")); + Arrays.asList("0ed4c7760f6e7a158b6d743d257300f3")); executeTest("test overwriting header", spec); } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/annotator/VariantOverlapAnnotatorUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/annotator/VariantOverlapAnnotatorUnitTest.java new file mode 100644 index 
000000000..6d6761f1c --- /dev/null +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/annotator/VariantOverlapAnnotatorUnitTest.java @@ -0,0 +1,164 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). +* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. 
For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. 
+* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. +* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. 
ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. 
Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. +*/ + +package org.broadinstitute.sting.gatk.walkers.annotator; + +import net.sf.picard.reference.IndexedFastaSequenceFile; +import org.broadinstitute.sting.BaseTest; +import org.broadinstitute.sting.commandline.RodBinding; +import org.broadinstitute.sting.utils.GenomeLocParser; +import org.broadinstitute.sting.utils.fasta.CachingIndexedFastaSequenceFile; +import org.broadinstitute.sting.utils.variant.GATKVariantContextUtils; +import org.broadinstitute.variant.variantcontext.VariantContext; +import org.broadinstitute.variant.variantcontext.VariantContextBuilder; +import org.broadinstitute.variant.vcf.VCFConstants; +import org.testng.Assert; +import org.testng.annotations.BeforeClass; +import org.testng.annotations.DataProvider; +import org.testng.annotations.Test; + +import java.io.File; +import java.io.FileNotFoundException; +import java.util.*; + +public class VariantOverlapAnnotatorUnitTest extends BaseTest { + private GenomeLocParser genomeLocParser; + private IndexedFastaSequenceFile seq; + + @BeforeClass + public void setup() throws FileNotFoundException 
{ + // sequence + seq = new CachingIndexedFastaSequenceFile(new File(b37KGReference)); + genomeLocParser = new GenomeLocParser(seq); + } + + private VariantContext makeVC(final String source, final String id, final List alleles) { + final VariantContext vc = GATKVariantContextUtils.makeFromAlleles(source, "20", 10, alleles); + return new VariantContextBuilder(vc).id(id).make(); + } + + private VariantOverlapAnnotator makeAnnotator(final String dbSNP, final String ... overlaps) { + final RodBinding dbSNPBinding = dbSNP == null ? null : new RodBinding<>(VariantContext.class, dbSNP); + final Map, String> overlapBinding = new LinkedHashMap<>(); + for ( final String overlap : overlaps ) overlapBinding.put(new RodBinding<>(VariantContext.class, overlap), overlap); + return new VariantOverlapAnnotator(dbSNPBinding, overlapBinding, genomeLocParser); + } + + @Test + public void testCreateWithSpecialNames() { + final List names = Arrays.asList("X", "Y", "Z"); + final Map, String> overlapBinding = new LinkedHashMap<>(); + for ( final String overlap : names ) overlapBinding.put(new RodBinding<>(VariantContext.class, overlap + "Binding"), overlap); + final VariantOverlapAnnotator annotator = new VariantOverlapAnnotator(null, overlapBinding, genomeLocParser); + Assert.assertEquals(annotator.getOverlapNames(), names); + } + + @DataProvider(name = "AnnotateRsIDData") + public Object[][] makeAnnotateRsIDData() { + List tests = new ArrayList<>(); + + // this functionality can be adapted to provide input data for whatever you might want in your data + final VariantContext callNoIDAC = makeVC("call", VCFConstants.EMPTY_ID_FIELD, Arrays.asList("A", "C")); + final VariantContext callNoIDAT = makeVC("call", VCFConstants.EMPTY_ID_FIELD, Arrays.asList("A", "T")); + final VariantContext callIDAC = makeVC("call", "foo", Arrays.asList("A", "C")); + final VariantContext callExistingIDAC = makeVC("call", "rsID1", Arrays.asList("A", "C")); + + final VariantContext dbSNP_AC = makeVC("DBSNP", 
"rsID1", Arrays.asList("A", "C")); + final VariantContext dbSNP_AT = makeVC("DBSNP", "rsID2", Arrays.asList("A", "T")); + final VariantContext dbSNP_AG = makeVC("DBSNP", "rsID3", Arrays.asList("A", "G")); + final VariantContext dbSNP_AC_AT = makeVC("DBSNP", "rsID1;rsID2", Arrays.asList("A", "C", "T")); + final VariantContext dbSNP_AC_AG = makeVC("DBSNP", "rsID1;rsID3", Arrays.asList("A", "C", "G")); + + tests.add(new Object[]{callNoIDAC, Arrays.asList(dbSNP_AC), dbSNP_AC.getID(), true}); + tests.add(new Object[]{callNoIDAC, Arrays.asList(dbSNP_AT), VCFConstants.EMPTY_ID_FIELD, false}); + tests.add(new Object[]{callIDAC, Arrays.asList(dbSNP_AC), "foo" + ";" + dbSNP_AC.getID(), true}); + tests.add(new Object[]{callIDAC, Arrays.asList(dbSNP_AT), "foo", false}); + tests.add(new Object[]{callExistingIDAC, Arrays.asList(dbSNP_AC), "rsID1", true}); + tests.add(new Object[]{callExistingIDAC, Arrays.asList(dbSNP_AT), "rsID1", false}); + + final VariantContext callNoIDACT = makeVC("call", VCFConstants.EMPTY_ID_FIELD, Arrays.asList("A", "C", "T")); + tests.add(new Object[]{callNoIDACT, Arrays.asList(dbSNP_AC), dbSNP_AC.getID(), true}); + tests.add(new Object[]{callNoIDACT, Arrays.asList(dbSNP_AT), dbSNP_AT.getID(), true}); + tests.add(new Object[]{callNoIDACT, Arrays.asList(dbSNP_AG), VCFConstants.EMPTY_ID_FIELD, false}); + tests.add(new Object[]{callNoIDACT, Arrays.asList(dbSNP_AC_AT), dbSNP_AC_AT.getID(), true}); + tests.add(new Object[]{callNoIDACT, Arrays.asList(dbSNP_AC_AG), dbSNP_AC_AG.getID(), true}); + + // multiple options + tests.add(new Object[]{callNoIDAC, Arrays.asList(dbSNP_AC, dbSNP_AT), "rsID1", true}); + tests.add(new Object[]{callNoIDAC, Arrays.asList(dbSNP_AT, dbSNP_AC), "rsID1", true}); + tests.add(new Object[]{callNoIDAC, Arrays.asList(dbSNP_AC_AT), "rsID1;rsID2", true}); + tests.add(new Object[]{callNoIDAT, Arrays.asList(dbSNP_AC_AT), "rsID1;rsID2", true}); + tests.add(new Object[]{callNoIDAC, Arrays.asList(dbSNP_AC_AG), "rsID1;rsID3", true}); + 
tests.add(new Object[]{callNoIDAT, Arrays.asList(dbSNP_AC_AG), VCFConstants.EMPTY_ID_FIELD, false}); + + final VariantContext dbSNP_AC_FAIL = new VariantContextBuilder(makeVC("DBSNP", "rsID1", Arrays.asList("A", "C"))).filter("FAIL").make(); + tests.add(new Object[]{callNoIDAC, Arrays.asList(dbSNP_AC_FAIL), VCFConstants.EMPTY_ID_FIELD, false}); + + + return tests.toArray(new Object[][]{}); + } + + @Test(dataProvider = "AnnotateRsIDData") + public void testAnnotateRsID(final VariantContext toAnnotate, final List dbSNPRecords, final String expectedID, final boolean expectOverlap) throws Exception { + final VariantOverlapAnnotator annotator = makeAnnotator("dbnsp"); + final VariantContext annotated = annotator.annotateRsID(dbSNPRecords, toAnnotate); + Assert.assertNotNull(annotated); + Assert.assertEquals(annotated.getID(), expectedID); + } + + @Test(dataProvider = "AnnotateRsIDData") + public void testAnnotateOverlaps(final VariantContext toAnnotate, final List records, final String expectedID, final boolean expectOverlap) throws Exception { + final String name = "binding"; + final VariantOverlapAnnotator annotator = makeAnnotator(null, name); + final VariantContext annotated = annotator.annotateOverlap(records, name, toAnnotate); + Assert.assertNotNull(annotated); + Assert.assertEquals(annotated.getID(), toAnnotate.getID(), "Shouldn't modify annotation"); + Assert.assertEquals(annotated.hasAttribute(name), expectOverlap); + if ( expectOverlap ) { + Assert.assertEquals(annotated.getAttribute(name), true); + } + } +} diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/beagle/BeagleIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/beagle/BeagleIntegrationTest.java index 69a5fc65f..5601d66fb 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/beagle/BeagleIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/beagle/BeagleIntegrationTest.java @@ -62,7 +62,7 @@ public class 
BeagleIntegrationTest extends WalkerTest { "--beagleR2:BEAGLE " + beagleValidationDataLocation + "inttestbgl.r2 " + "--beagleProbs:BEAGLE " + beagleValidationDataLocation + "inttestbgl.gprobs " + "--beaglePhased:BEAGLE " + beagleValidationDataLocation + "inttestbgl.phased " + - "-o %s --no_cmdline_in_header -U LENIENT_VCF_PROCESSING", 1, Arrays.asList("c5522304abf0633041c7772dd7dafcea")); + "-o %s --no_cmdline_in_header -U LENIENT_VCF_PROCESSING", 1, Arrays.asList("989449fa3e262b88ba126867fa3ad9fb")); spec.disableShadowBCF(); executeTest("test BeagleOutputToVCF", spec); } @@ -96,7 +96,7 @@ public class BeagleIntegrationTest extends WalkerTest { "--beagleR2:beagle /humgen/gsa-hpprojects/GATK/data/Validation_Data/EUR_beagle_in_test.r2 "+ "--beagleProbs:beagle /humgen/gsa-hpprojects/GATK/data/Validation_Data/EUR_beagle_in_test.gprobs.bgl "+ "--beaglePhased:beagle /humgen/gsa-hpprojects/GATK/data/Validation_Data/EUR_beagle_in_test.phased.bgl "+ - "-L 20:1-70000 -o %s --no_cmdline_in_header -U LENIENT_VCF_PROCESSING",1,Arrays.asList("d8906b67c7f9fdb5b37b8e9e050982d3")); + "-L 20:1-70000 -o %s --no_cmdline_in_header -U LENIENT_VCF_PROCESSING",1,Arrays.asList("e036636fcd6a748ede4a70ea47941d47")); spec.disableShadowBCF(); executeTest("testBeagleChangesSitesToRef",spec); } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/bqsr/AnalyzeCovariatesIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/bqsr/AnalyzeCovariatesIntegrationTest.java new file mode 100644 index 000000000..95ce80848 --- /dev/null +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/bqsr/AnalyzeCovariatesIntegrationTest.java @@ -0,0 +1,362 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. 
with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). +* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. 
LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. 
INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. +* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. 
LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. 
This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. +*/ +package org.broadinstitute.sting.gatk.walkers.bqsr; + +import org.broadinstitute.sting.WalkerTest; +import org.broadinstitute.sting.utils.Utils; +import org.broadinstitute.sting.utils.exceptions.UserException; +import org.testng.annotations.DataProvider; +import org.testng.annotations.Test; + +import java.io.File; +import java.io.IOException; +import java.lang.reflect.Method; +import java.util.*; + +import static org.testng.Assert.assertTrue; + +/** + * Tests Analyze Covariates. + *

+ * Notice that since PDF report generated by R are different every-time this program + * is executed their content won't be tested. It only will verify that file has a healthy size. + * + */ +public class AnalyzeCovariatesIntegrationTest extends WalkerTest { + + private static final String TOOL_NAME = AnalyzeCovariates.class.getSimpleName(); + + /** + * Directory where the testdata is located. + */ + private static final File TEST_DATA_DIR = new File(privateTestDir,"AnalyzeCovariates"); + + /** + * File containing the before report for normal testing. + */ + private static final File BEFORE_FILE = new File(TEST_DATA_DIR,"before.table"); + + /** + * File containing the after report for normal testing. + */ + private static final File AFTER_FILE = new File(TEST_DATA_DIR,"after.table"); + + + /** + * File containing the bqsr report for normal testing. + */ + private static final File BQSR_FILE = new File(TEST_DATA_DIR,"bqsr.table"); + + /** + * Test the content of the generated csv file. + * + * @throws IOException should never happen. It would be an indicator of a + * problem with the testing environment. + */ + @Test(enabled = true) + public void testCsvGeneration() + throws IOException { + + final WalkerTestSpec spec = new WalkerTestSpec( + buildCommandLine("%s",null,true,true,true), + Collections.singletonList("106709d32e6f0a0a9dd6a6340ec246ab")); + executeTest("testCsvGeneration",spec); + } + + + /** + * Test the size of the generated pdf. + *

+ * Unfortunately we cannot test the content as it changes slightly + * every time the tool is run. + * + * @throws IOException should never happen. It would be an + * indicator of a problem with the testing environment. + */ + @Test(enabled = true) + public void testPdfGeneration() + throws IOException { + final File pdfFile = File.createTempFile("ACTest",".pdf"); + pdfFile.delete(); + pdfFile.deleteOnExit(); + + final List md5 = Collections.emptyList(); + final WalkerTestSpec spec = new WalkerTestSpec( + buildCommandLine(null,pdfFile.toString(),true,true,true),md5); + executeTest("testPdfGeneration",spec); + assertTrue(pdfFile.exists(),"the pdf file was not created"); + assertTrue(pdfFile.length() > 260000,"the pdf file size does" + + " not reach the minimum of 260Kb"); + } + + /** + * Test the effect of changing some recalibration parameters. + * @param afterFileName name of the alternative after recalibration file. + * @param description describes what has been changed. + * @throws IOException should never happen. It would be an + * indicator of a problem with the testing environment. + */ + @Test(enabled = true, dataProvider="alternativeAfterFileProvider") + public void testParameterChangeException(final String afterFileName, + final String description) + throws IOException { + + final File pdfFile = File.createTempFile("ACTest",".pdf"); + pdfFile.deleteOnExit(); + final List md5 = Collections.emptyList(); + final File afterFile = new File(TEST_DATA_DIR,afterFileName); + final WalkerTestSpec spec = new WalkerTestSpec( + buildCommandLine(null,"%s",true,true,afterFile), + 1,UserException.IncompatibleRecalibrationTableParameters.class); + executeTest("testParameterChangeException - " + description, spec); + } + + + /** + * Test combinations of input and output inclusion exclusion of the command + * line that cause an exception to be thrown. + * + * @param useCsvFile whether to include the output csv file. 
+ * @param usePdfFile whether to include the output pdf file. + * @param useBQSRFile whether to include the -BQSR input file. + * @param useBeforeFile whether to include the -before input file. + * @param useAfterFile whether to include the -after input file. + * @throws IOException never thrown, unless there is a problem with the testing environment. + */ + @Test(enabled = true, dataProvider="alternativeInOutAbsenceCombinations") + public void testInOutAbsenceException(final boolean useCsvFile, final boolean usePdfFile, + final boolean useBQSRFile, final boolean useBeforeFile, final boolean useAfterFile) + throws IOException { + final WalkerTestSpec spec = new WalkerTestSpec(buildCommandLine(useCsvFile,usePdfFile, + useBQSRFile,useBeforeFile,useAfterFile),0,UserException.class); + executeTest("testInOutAbsencePresenceException", spec); + } + + /** + * Test combinations of input and output inclusion exclusion of the + * command line that won't cause an exception. + * + * @param useCsvFile whether to include the output csv file. + * @param usePdfFile whether to include the output pdf file. + * @param useBQSRFile whether to include the -BQSR input file. + * @param useBeforeFile whether to include the -before input file. + * @param useAfterFile whether to include the -after input file. + * @throws IOException never thrown, unless there is a problem with the testing environment. 
+ */ + @Test(enabled = true, dataProvider="alternativeInOutAbsenceCombinations") + public void testInOutAbsence(final boolean useCsvFile, final boolean usePdfFile, + final boolean useBQSRFile, final boolean useBeforeFile, final boolean useAfterFile) + throws IOException { + final List md5 = Collections.emptyList(); + final WalkerTestSpec spec = new WalkerTestSpec(buildCommandLine(useCsvFile,usePdfFile, + useBQSRFile,useBeforeFile,useAfterFile),md5); + executeTest("testInOutAbsencePresence", spec); + } + + + + @DataProvider + public Iterator alternativeInOutAbsenceCombinations(Method m) { + List result = new LinkedList(); + if (m.getName().endsWith("Exception")) { + result.add(new Object[] { false, false, true, true, true }); + result.add(new Object[] { true, true, false, false ,false}); + } + else { + result.add(new Object[] { true, true, true, false, false }); + result.add(new Object[] { true, true, false, true, false }); + result.add(new Object[] { true, true, false, false, true }); + result.add(new Object[] { true, false,false, true, false }); + result.add(new Object[] { false, true, true, false, false }); + + } + return result.iterator(); + } + + /** + * Provide recalibration parameter change data to relevant tests. + * @param m target test method. + * @return never null. 
+ */ + @DataProvider + public Iterator alternativeAfterFileProvider (Method m) { + final boolean expectsException = m.getName().endsWith("Exception"); + final List result = new LinkedList(); + for (final Object[] data : DIFFERENT_PARAMETERS_AFTER_FILES) { + if (data[1].equals(expectsException)) { + result.add(new Object[] { data[0], data[2] }); + } + } + return result.iterator(); + } + + /** + * Triplets < alfter-grp-file, whether it should fail, what is different > + */ + private final Object[][] DIFFERENT_PARAMETERS_AFTER_FILES = { + {"after-cov.table", true, "Adds additional covariate: repeat-length" }, + {"after-dpSOLID.table", true, "Change the default platform to SOLID" }, + {"after-noDp.table",true, "Unset the default platform" }, + {"after-mcs4.table", true, "Changed -mcs parameter from 2 to 4" } + }; + + /** + * Build the AC command line given what combinations of input and output files should be included. + * + * @param useCsvFile whether to include the output csv file. + * @param usePdfFile whether to include the output pdf file. + * @param useBQSRFile whether to include the -BQSR input file. + * @param useBeforeFile whether to include the -before input file. + * @param useAfterFile whether to include the -after input file. + * @return never null. + * @throws IOException never thrown, unless there is a problem with the testing environment. + */ + private String buildCommandLine(final boolean useCsvFile, final boolean usePdfFile, + final boolean useBQSRFile, final boolean useBeforeFile, final boolean useAfterFile) + throws IOException { + + final File csvFile = useCsvFile ? File.createTempFile("ACTest",".csv") : null; + final File pdfFile = usePdfFile ? File.createTempFile("ACTest",".pdf") : null; + + if (csvFile != null) { + csvFile.deleteOnExit(); + } + + if (pdfFile != null) { + pdfFile.deleteOnExit(); + } + + return buildCommandLine(csvFile == null ? null : csvFile.toString(), + pdfFile == null ? 
null : pdfFile.toString(), + useBQSRFile,useBeforeFile,useAfterFile); + } + + /** + * Build the AC command line given the output file names explicitly and what test input files to use. + *

+ * + * @param csvFileName the csv output file, null if none should be provided. + * @param pdfFileName the plots output file, null if none should be provided. + * @param useBQSRFile whether to include the -BQSR input file. + * @param useBeforeFile whether to include the -before input file. + * @param useAfterFile whether to include the -after input file. + * + * @return never null. + */ + private String buildCommandLine(final String csvFileName, final String pdfFileName, final boolean useBQSRFile, + final boolean useBeforeFile, final boolean useAfterFile) { + return buildCommandLine(csvFileName,pdfFileName,useBQSRFile ? BQSR_FILE : null, + useBeforeFile ? BEFORE_FILE : null, + useAfterFile ? AFTER_FILE : null); + } + + /** + * Build the AC command line given the output file names and the after file name explicitly and what other + * test input files to use. + *

+ * + * @param csvFileName the csv output file, null if none should be provided. + * @param pdfFileName the plots output file, null if none should be provided. + * @param useBQSRFile whether to include the -BQSR input file. + * @param useBeforeFile whether to include the -before input file. + * @param afterFile the after input report file, null if none should be provided. + * + * @return never null. + */ + private String buildCommandLine(final String csvFileName, final String pdfFileName, final boolean useBQSRFile, + final boolean useBeforeFile, final File afterFile) { + return buildCommandLine(csvFileName,pdfFileName,useBQSRFile ? BQSR_FILE : null, + useBeforeFile ? BEFORE_FILE : null, + afterFile); + } + + /** + * Build the AC command line given the output file names and the after file name explicitly and what other + * test input files to use. + *

+ * + * @param csvFileName the csv output file, null if none should be provided. + * @param pdfFileName the plots output file, null if none should be provided. + * @param bqsrFile the BQSR input report file, null if none should be provided. + * @param beforeFile the before input report file, null if non should be provided. + * @param afterFile the after input report file, null if none should be provided. + * + * @return never null. + */ + private String buildCommandLine(final String csvFileName, final String pdfFileName, final File bqsrFile, + final File beforeFile, final File afterFile) { + + final List args = new LinkedList(); + args.add("-T"); + args.add(TOOL_NAME); + args.add("-R"); + args.add(hg19Reference); + args.add("-ignoreLMT"); + + if (csvFileName != null) { + args.add("-" + AnalyzeCovariates.CSV_ARG_SHORT_NAME); + args.add("'" + csvFileName + "'"); + } + if (pdfFileName != null) { + args.add("-" + AnalyzeCovariates.PDF_ARG_SHORT_NAME); + args.add("'" + pdfFileName + "'"); + } + if (bqsrFile != null) { + args.add("-BQSR"); + args.add("'" + bqsrFile.getAbsoluteFile().toString() + "'"); + } + if (beforeFile != null) { + args.add("-" + AnalyzeCovariates.BEFORE_ARG_SHORT_NAME); + args.add("'" + beforeFile.getAbsolutePath().toString() + "'"); + } + if (afterFile != null) { + args.add("-" + AnalyzeCovariates.AFTER_ARG_SHORT_NAME); + args.add("'" + afterFile.getAbsolutePath().toString() + "'"); + } + return Utils.join(" ", args); + + } +} diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/bqsr/BQSRIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/bqsr/BQSRIntegrationTest.java index 907046704..05183a521 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/bqsr/BQSRIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/bqsr/BQSRIntegrationTest.java @@ -100,23 +100,23 @@ public class BQSRIntegrationTest extends WalkerTest { @DataProvider(name = "BQSRTest") public 
Object[][] createBQSRTestData() { return new Object[][]{ - {new BQSRTest(hg18Reference, HiSeqBam, HiSeqInterval, "", "61fd466b5e94d2d67e116f6f67c9f939")}, - {new BQSRTest(hg18Reference, HiSeqBam, HiSeqInterval, " --no_standard_covs -cov ContextCovariate", "e08b5bcdb64f4beea03730e5631a14ca")}, - {new BQSRTest(hg18Reference, HiSeqBam, HiSeqInterval, " --no_standard_covs -cov CycleCovariate", "448a45dc154c95d1387cb5cdddb67071")}, - {new BQSRTest(hg18Reference, HiSeqBam, HiSeqInterval, " --indels_context_size 4", "c1e7999e445d51bbe2e775dac5325643")}, - {new BQSRTest(hg18Reference, HiSeqBam, HiSeqInterval, " --low_quality_tail 5", "a57c16918cdfe12d55a89c21bf195279")}, - {new BQSRTest(hg18Reference, HiSeqBam, HiSeqInterval, " --quantizing_levels 6", "836dccacf48ccda6b2843d07e8f1ef4d")}, - {new BQSRTest(hg18Reference, HiSeqBam, HiSeqInterval, " --mismatches_context_size 4", "0fb2aedc2f8d66b5821cb570f15a8c4d")}, - {new BQSRTest(b36KGReference, validationDataLocation + "NA12892.SLX.SRP000031.2009_06.selected.1Mb.1RG.bam", "1:10,000,000-10,200,000", "", "c9953f020a65c1603a6d71aeeb1b95f3")}, - {new BQSRTest(b36KGReference, validationDataLocation + "NA19240.chr1.BFAST.SOLID.bam", "1:10,000,000-10,200,000", "", "85a120b7d86b61597b86b9e93decbdfc")}, - {new BQSRTest(b36KGReference, validationDataLocation + "NA12873.454.SRP000031.2009_06.chr1.10_20mb.1RG.bam", "1:10,000,000-10,200,000", "", "5248dc49aec0323c74b496bb4928c73c")}, - {new BQSRTest(b36KGReference, validationDataLocation + "originalQuals.1kg.chr1.1-1K.1RG.bam", "1:1-1,000", " -OQ", "cb52f267e0010f849f50b0bf1de474a1")}, - {new BQSRTest(b36KGReference, validationDataLocation + "NA19240.chr1.BFAST.SOLID.bam", "1:10,000,000-20,000,000", " --solid_recal_mode REMOVE_REF_BIAS", "1425a5063ee757dbfc013df24e65a67a")}, - {new BQSRTest(b36KGReference, privateTestDir + "NA19240.chr1.BFAST.SOLID.hasCSNoCall.bam", "1:50,000-80,000", " --solid_nocall_strategy LEAVE_READ_UNRECALIBRATED", "c1c3cda8caceed619d3d439c3990cd26")}, - {new 
BQSRTest(b36KGReference, validationDataLocation + "NA12892.SLX.SRP000031.2009_06.selected.1Mb.1RG.bam", "1:10,000,000-10,200,000", " -knownSites:anyNameABCD,VCF " + privateTestDir + "vcfexample3.vcf", "c9953f020a65c1603a6d71aeeb1b95f3")}, - {new BQSRTest(b36KGReference, validationDataLocation + "NA12892.SLX.SRP000031.2009_06.selected.1Mb.1RG.bam", "1:10,000,000-10,200,000", " -knownSites:bed " + validationDataLocation + "bqsrKnownTest.bed", "5bfff0c699345cca12a9b33acf95588f")}, + {new BQSRTest(hg18Reference, HiSeqBam, HiSeqInterval, "", "f805a0020eea987b79f314fa99913806")}, + {new BQSRTest(hg18Reference, HiSeqBam, HiSeqInterval, " --no_standard_covs -cov ContextCovariate", "86075d3856eb06816a0dd81af55e421f")}, + {new BQSRTest(hg18Reference, HiSeqBam, HiSeqInterval, " --no_standard_covs -cov CycleCovariate", "155802237e1fc7a001398b8f4bcf4b72")}, + {new BQSRTest(hg18Reference, HiSeqBam, HiSeqInterval, " --indels_context_size 4", "38c7916cc019fe8d134df67639422b42")}, + {new BQSRTest(hg18Reference, HiSeqBam, HiSeqInterval, " --low_quality_tail 5", "b74e75f3c5aa90bd21af1e20f2ac8c40")}, + {new BQSRTest(hg18Reference, HiSeqBam, HiSeqInterval, " --quantizing_levels 6", "e564505aea11464de8ed72890d9ea89a")}, + {new BQSRTest(hg18Reference, HiSeqBam, HiSeqInterval, " --mismatches_context_size 4", "380d8be121ffaddd3461ee0ac3d1a76f")}, + {new BQSRTest(b36KGReference, validationDataLocation + "NA12892.SLX.SRP000031.2009_06.selected.1Mb.1RG.bam", "1:10,000,000-10,200,000", "", "0b5a8e259e997e4c7b5836d4c28e6f4d")}, + {new BQSRTest(b36KGReference, validationDataLocation + "NA19240.chr1.BFAST.SOLID.bam", "1:10,000,000-10,200,000", "", "281682124584ab384f23359934df0c3b")}, + {new BQSRTest(b36KGReference, validationDataLocation + "NA12873.454.SRP000031.2009_06.chr1.10_20mb.1RG.bam", "1:10,000,000-10,200,000", "", "0a92fdff5fd26227c29d34eda5a32f49")}, + {new BQSRTest(b36KGReference, validationDataLocation + "originalQuals.1kg.chr1.1-1K.1RG.bam", "1:1-1,000", " -OQ", 
"90d8c24077e8ae9a0037a9aad5f09e31")}, + {new BQSRTest(b36KGReference, validationDataLocation + "NA19240.chr1.BFAST.SOLID.bam", "1:10,000,000-20,000,000", " --solid_recal_mode REMOVE_REF_BIAS", "c41ef02c640ef1fed4bfc03b9b33b616")}, + {new BQSRTest(b36KGReference, privateTestDir + "NA19240.chr1.BFAST.SOLID.hasCSNoCall.bam", "1:50,000-80,000", " --solid_nocall_strategy LEAVE_READ_UNRECALIBRATED", "b577cd1d529425f66db49620db09fdca")}, + {new BQSRTest(b36KGReference, validationDataLocation + "NA12892.SLX.SRP000031.2009_06.selected.1Mb.1RG.bam", "1:10,000,000-10,200,000", " -knownSites:anyNameABCD,VCF " + privateTestDir + "vcfexample3.vcf", "0b5a8e259e997e4c7b5836d4c28e6f4d")}, + {new BQSRTest(b36KGReference, validationDataLocation + "NA12892.SLX.SRP000031.2009_06.selected.1Mb.1RG.bam", "1:10,000,000-10,200,000", " -knownSites:bed " + validationDataLocation + "bqsrKnownTest.bed", "9ad49269c0156f8ab1173261bf23e600")}, // make sure we work with ION torrent bam - {new BQSRTest(b37KGReference, privateTestDir + "iontorrent.bam", "20:10,000,000-10,200,000", "", "7375c7b692e76b651c278a9fb478fa1c")}, + {new BQSRTest(b37KGReference, privateTestDir + "iontorrent.bam", "20:10,000,000-10,200,000", "", "04bfa4760767022e7f5252e6e4432cc1")}, }; } @@ -141,22 +141,6 @@ public class BQSRIntegrationTest extends WalkerTest { executeTest("testBQSRFailWithoutDBSNP", spec); } - @Test - public void testBQSRCSV() { - WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( - " -T BaseRecalibrator" + - " -R " + b36KGReference + - " -I " + validationDataLocation + "NA12892.SLX.SRP000031.2009_06.selected.bam" + - " -knownSites " + b36dbSNP129 + - " -L 1:10,000,000-10,200,000" + - " -o /dev/null" + - " -sortAllCols" + - " --plot_pdf_file /dev/null" + - " --intermediate_csv_file %s", - Arrays.asList("90ad19143024684e3c4410dc8fd2bd9d")); - executeTest("testBQSR-CSVfile", spec); - } - @Test public void testBQSRFailWithSolidNoCall() { WalkerTest.WalkerTestSpec spec = new 
WalkerTest.WalkerTestSpec( diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/compression/reducereads/ReduceReadsIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/compression/reducereads/ReduceReadsIntegrationTest.java index b5963498a..4fbbe1d0c 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/compression/reducereads/ReduceReadsIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/compression/reducereads/ReduceReadsIntegrationTest.java @@ -53,6 +53,7 @@ import org.testng.annotations.Test; import java.io.File; import java.util.Arrays; +import java.util.Collections; import java.util.List; public class ReduceReadsIntegrationTest extends WalkerTest { @@ -221,13 +222,13 @@ public class ReduceReadsIntegrationTest extends WalkerTest { @Test(enabled = true) public void testCoReduction() { - String base = String.format("-T ReduceReads %s -npt -R %s -I %s -I %s", COREDUCTION_L, REF, COREDUCTION_BAM_A, COREDUCTION_BAM_B) + " -o %s "; + String base = String.format("-T ReduceReads %s --cancer_mode -npt -R %s -I %s -I %s", COREDUCTION_L, REF, COREDUCTION_BAM_A, COREDUCTION_BAM_B) + " -o %s "; executeTest("testCoReduction", new WalkerTestSpec(base, Arrays.asList("bam"), Arrays.asList("5f4d2c1d9c010dfd6865aeba7d0336fe")), COREDUCTION_QUALS_TEST_MD5); } @Test(enabled = true) public void testCoReductionWithKnowns() { - String base = String.format("-T ReduceReads %s -npt -R %s -I %s -I %s -known %s", COREDUCTION_L, REF, COREDUCTION_BAM_A, COREDUCTION_BAM_B, DBSNP) + " -o %s "; + String base = String.format("-T ReduceReads %s --cancer_mode -npt -R %s -I %s -I %s -known %s", COREDUCTION_L, REF, COREDUCTION_BAM_A, COREDUCTION_BAM_B, DBSNP) + " -o %s "; executeTest("testCoReductionWithKnowns", new WalkerTestSpec(base, Arrays.asList("bam"), Arrays.asList("ca48dd972bf57595c691972c0f887cb4")), COREDUCTION_QUALS_TEST_MD5); } @@ -259,7 +260,7 @@ public class ReduceReadsIntegrationTest 
extends WalkerTest { public void testDivideByZero() { String base = String.format("-T ReduceReads %s -npt -R %s -I %s", DIVIDEBYZERO_L, REF, DIVIDEBYZERO_BAM) + " -o %s "; // we expect to lose coverage due to the downsampling so don't run the systematic tests - executeTestWithoutAdditionalRRTests("testDivideByZero", new WalkerTestSpec(base, Arrays.asList("bam"), Arrays.asList("1663f35802f82333c5e15653e437ce2d"))); + executeTestWithoutAdditionalRRTests("testDivideByZero", new WalkerTestSpec(base, Arrays.asList("bam"), Arrays.asList("4f0ef477c0417d1eb602b323474ef377"))); } /** @@ -281,5 +282,24 @@ public class ReduceReadsIntegrationTest extends WalkerTest { " -o %s --downsample_coverage 250 -dcov 50 "; executeTest("testPairedReadsInVariantRegion", new WalkerTestSpec(base, Arrays.asList("bam"), Arrays.asList("7e7b358443827ca239db3b98f299aec6")), "2af063d1bd3c322b03405dbb3ecf59a9"); } + + /** + * Confirm that this bam does not fail when multi-sample mode is enabled. The provided example is tricky and used to cause + * us to exception out in the code. 
+ */ + @Test(enabled = true) + public void testMultiSampleDoesNotFailWithFlag() { + String cmd = "-T ReduceReads --cancer_mode -npt -R " + b37KGReference + " -I " + privateTestDir + "rr_multisample.bam -o /dev/null"; + executeTestWithoutAdditionalRRTests("testMultiSampleDoesNotFailWithFlag", new WalkerTestSpec(cmd, 0, Collections.emptyList())); + } + + /** + * Confirm that this bam fails when multi-sample mode is not enabled + */ + @Test(enabled = true) + public void testMultiSampleFailsWithoutFlag() { + String cmd = "-T ReduceReads -npt -R " + b37KGReference + " -I " + privateTestDir + "rr_multisample.bam -o /dev/null"; + executeTestWithoutAdditionalRRTests("testMultiSampleDoesNotFailWithFlag", new WalkerTestSpec(cmd, 0, UserException.BadInput.class)); + } } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/compression/reducereads/SlidingWindowUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/compression/reducereads/SlidingWindowUnitTest.java index 56ad02084..c9bb2f084 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/compression/reducereads/SlidingWindowUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/compression/reducereads/SlidingWindowUnitTest.java @@ -89,6 +89,25 @@ public class SlidingWindowUnitTest extends BaseTest { return variantRegionBitset; } + ////////////////////////////////////////////////////////////////////////////////////// + //// Test for leading softclips immediately followed by an insertion in the CIGAR //// + ////////////////////////////////////////////////////////////////////////////////////// + + @Test(enabled = true) + public void testLeadingClipThenInsertion() { + + final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "foo", 0, 1, 10); + read.setReadBases(Utils.dupBytes((byte) 'A', 10)); + read.setBaseQualities(Utils.dupBytes((byte)30, 10)); + read.setMappingQuality(30); + read.setCigarString("2S2I6M"); + + final 
SlidingWindow slidingWindow = new SlidingWindow("1", 0, 1); + slidingWindow.addRead(read); + Pair, CompressionStash> result = slidingWindow.close(null); + + } + ////////////////////////////////////////////////////////////////////////////////////// //// This section tests the findVariantRegions() method and related functionality //// ////////////////////////////////////////////////////////////////////////////////////// diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/diagnostics/ErrorRatePerCycleIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/diagnostics/ErrorRatePerCycleIntegrationTest.java index b435fc2eb..84020e2d0 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/diagnostics/ErrorRatePerCycleIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/diagnostics/ErrorRatePerCycleIntegrationTest.java @@ -57,7 +57,7 @@ public class ErrorRatePerCycleIntegrationTest extends WalkerTest { WalkerTestSpec spec = new WalkerTestSpec( "-T ErrorRatePerCycle -R " + b37KGReference + " -I " + b37GoodBAM + " -L 20:10,000,000-10,100,000 -o %s", 1, - Arrays.asList("dccdf3cb3193d01a1a767097e4a5c35e")); + Arrays.asList("6191340f0b56ee81fb248c8f5c913a8e")); executeTest("ErrorRatePerCycle:", spec); } } \ No newline at end of file diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/DiagnoseTargetsIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/DiagnoseTargetsIntegrationTest.java index bac09f30d..52e385957 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/DiagnoseTargetsIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/DiagnoseTargetsIntegrationTest.java @@ -66,11 +66,11 @@ public class DiagnoseTargetsIntegrationTest extends WalkerTest { @Test(enabled = true) public void 
testSingleSample() { - DTTest("testSingleSample ", "-I " + singleSample + " -max 75", "850304909477afa8c2a8f128d6eedde9"); + DTTest("testSingleSample ", "-I " + singleSample + " -max 75", "1771e95aed2b3b240dc353f84e19847d"); } @Test(enabled = true) public void testMultiSample() { - DTTest("testMultiSample ", "-I " + multiSample, "bedd19bcf21d1a779f6706c0351c9d26"); + DTTest("testMultiSample ", "-I " + multiSample, "c7f1691dbe5f121c4a79be823d3057e5"); } } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperGeneralPloidySuite1IntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperGeneralPloidySuite1IntegrationTest.java index 34b19ed2d..aaa3b1284 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperGeneralPloidySuite1IntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperGeneralPloidySuite1IntegrationTest.java @@ -69,16 +69,16 @@ public class UnifiedGenotyperGeneralPloidySuite1IntegrationTest extends WalkerTe @Test(enabled = true) public void testBOTH_GGA_Pools() { - executor.PC_LSV_Test(String.format(" -maxAltAlleles 2 -ploidy 24 -gt_mode GENOTYPE_GIVEN_ALLELES -out_mode EMIT_ALL_SITES -alleles %s", LSV_ALLELES), "LSV_BOTH_GGA", "BOTH", "71f16e19b7d52e8edee46f4121e59f54"); + executor.PC_LSV_Test(String.format(" -maxAltAlleles 2 -ploidy 24 -gt_mode GENOTYPE_GIVEN_ALLELES -out_mode EMIT_ALL_SITES -alleles %s", LSV_ALLELES), "LSV_BOTH_GGA", "BOTH", "dac2d7969e109aee9ad2dad573759f58"); } @Test(enabled = true) public void testINDEL_GGA_Pools() { - executor.PC_LSV_Test(String.format(" -maxAltAlleles 1 -ploidy 24 -gt_mode GENOTYPE_GIVEN_ALLELES -out_mode EMIT_ALL_SITES -alleles %s", LSV_ALLELES), "LSV_INDEL_GGA", "INDEL", "3f7d763c654f1d708323f369ea4a099b"); + executor.PC_LSV_Test(String.format(" -maxAltAlleles 1 -ploidy 24 -gt_mode GENOTYPE_GIVEN_ALLELES -out_mode EMIT_ALL_SITES 
-alleles %s", LSV_ALLELES), "LSV_INDEL_GGA", "INDEL", "ceb105e3db0f2b993e3d725b0d60b6a3"); } @Test(enabled = true) public void testINDEL_maxAltAlleles2_ploidy1_Pools_noRef() { - executor.PC_LSV_Test_NoRef(" -maxAltAlleles 2 -ploidy 1", "LSV_INDEL_DISC_NOREF_p1", "INDEL", "603416111f34e2a735163fa97e1a8272"); + executor.PC_LSV_Test_NoRef(" -maxAltAlleles 2 -ploidy 1", "LSV_INDEL_DISC_NOREF_p1", "INDEL", "98f4d78aad745c6e853b81b2e4e207b4"); } } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperGeneralPloidySuite2IntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperGeneralPloidySuite2IntegrationTest.java index 8a165cbeb..0eb89adc7 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperGeneralPloidySuite2IntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperGeneralPloidySuite2IntegrationTest.java @@ -58,16 +58,16 @@ public class UnifiedGenotyperGeneralPloidySuite2IntegrationTest extends WalkerTe @Test(enabled = true) public void testINDEL_maxAltAlleles2_ploidy3_Pools_noRef() { - executor.PC_LSV_Test_NoRef(" -maxAltAlleles 2 -ploidy 3","LSV_INDEL_DISC_NOREF_p3","INDEL","13de8558acaa0b9082f2df477b45de9b"); + executor.PC_LSV_Test_NoRef(" -maxAltAlleles 2 -ploidy 3","LSV_INDEL_DISC_NOREF_p3","INDEL","25902d7a6a0c00c60c2d5845dfaa1a4c"); } @Test(enabled = true) public void testMT_SNP_DISCOVERY_sp4() { - executor.PC_MT_Test(CEUTRIO_BAM, " -maxAltAlleles 1 -ploidy 8", "MT_SNP_DISCOVERY_sp4","3fc6f4d458313616727c60e49c0e852b"); + executor.PC_MT_Test(CEUTRIO_BAM, " -maxAltAlleles 1 -ploidy 8", "MT_SNP_DISCOVERY_sp4","5d55b71688a0777a7c0247c376401368"); } @Test(enabled = true) public void testMT_SNP_GGA_sp10() { - executor.PC_MT_Test(CEUTRIO_BAM, String.format(" -maxAltAlleles 1 -ploidy 20 -gt_mode GENOTYPE_GIVEN_ALLELES -out_mode EMIT_ALL_SITES -alleles %s",NA12891_CALLS), 
"MT_SNP_GGA_sp10", "1bebbc0f28bff6fd64736ccca8839df8"); + executor.PC_MT_Test(CEUTRIO_BAM, String.format(" -maxAltAlleles 1 -ploidy 20 -gt_mode GENOTYPE_GIVEN_ALLELES -out_mode EMIT_ALL_SITES -alleles %s",NA12891_CALLS), "MT_SNP_GGA_sp10", "cf336d66a109c55f90e9ed2b3bc196c8"); } } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIndelCallingIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIndelCallingIntegrationTest.java index 52970d70d..65a569cdc 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIndelCallingIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIndelCallingIntegrationTest.java @@ -73,7 +73,7 @@ public class UnifiedGenotyperIndelCallingIntegrationTest extends WalkerTest { " -o %s" + " -L 1:10,000,000-10,500,000", 1, - Arrays.asList("d8b0c5be39ec6b239641c2f2646d2bc3")); + Arrays.asList("ef8151aa699da3272c1ae0986d16ca21")); executeTest(String.format("test indel caller in SLX"), spec); } @@ -88,7 +88,7 @@ public class UnifiedGenotyperIndelCallingIntegrationTest extends WalkerTest { " -minIndelCnt 1" + " -L 1:10,000,000-10,100,000", 1, - Arrays.asList("d9572a227ccb13a6baa6dc4fb65bc1e5")); + Arrays.asList("7f88229ccefb74513efb199b61183cb8")); executeTest(String.format("test indel caller in SLX with low min allele count"), spec); } @@ -101,7 +101,7 @@ public class UnifiedGenotyperIndelCallingIntegrationTest extends WalkerTest { " -o %s" + " -L 1:10,000,000-10,500,000", 1, - Arrays.asList("8d9b8f8a1479322961c840e461b6dba8")); + Arrays.asList("1928ad48bcd0ca180e046bc235cfb3f4")); executeTest(String.format("test indel calling, multiple technologies"), spec); } @@ -111,7 +111,7 @@ public class UnifiedGenotyperIndelCallingIntegrationTest extends WalkerTest { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( baseCommandIndels + " 
--genotyping_mode GENOTYPE_GIVEN_ALLELES -alleles " + privateTestDir + "indelAllelesForUG.vcf -I " + validationDataLocation + "pilot2_daughters.chr20.10k-11k.bam -o %s -L 20:10,000,000-10,100,000", 1, - Arrays.asList("16d975480ff1e689113171805b916b62")); + Arrays.asList("6663e434a7b549f23bfd52db90e53a1a")); executeTest("test MultiSample Pilot2 indels with alleles passed in", spec); } @@ -121,7 +121,7 @@ public class UnifiedGenotyperIndelCallingIntegrationTest extends WalkerTest { baseCommandIndels + " --output_mode EMIT_ALL_SITES --genotyping_mode GENOTYPE_GIVEN_ALLELES -alleles " + privateTestDir + "indelAllelesForUG.vcf -I " + validationDataLocation + "pilot2_daughters.chr20.10k-11k.bam -o %s -L 20:10,000,000-10,100,000", 1, - Arrays.asList("60ed3f8d5bc3f765e6ce3fa698b68bb7")); + Arrays.asList("581c552664e536df6d0f102fb0d10e5a")); executeTest("test MultiSample Pilot2 indels with alleles passed in and emitting all sites", spec); } @@ -136,7 +136,7 @@ public class UnifiedGenotyperIndelCallingIntegrationTest extends WalkerTest { WalkerTest.WalkerTestSpec spec2 = new WalkerTest.WalkerTestSpec( baseCommandIndels + " --genotyping_mode GENOTYPE_GIVEN_ALLELES -alleles " + result.get(0).getAbsolutePath() + " -I " + validationDataLocation + "low_coverage_CEU.chr1.10k-11k.bam -o %s -L " + result.get(0).getAbsolutePath(), 1, - Arrays.asList("3d4d66cc253eac55f16e5b0a36f17d8d")); + Arrays.asList("5596851d19582dd1af3901b7d703ae0a")); executeTest("test MultiSample Pilot1 CEU indels using GENOTYPE_GIVEN_ALLELES", spec2); } @@ -176,7 +176,7 @@ public class UnifiedGenotyperIndelCallingIntegrationTest extends WalkerTest { public void testMinIndelFraction0() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( assessMinIndelFraction + " -minIndelFrac 0.0", 1, - Arrays.asList("264325878b988acc11d8e5d9d2ba0b7f")); + Arrays.asList("862d82c8aa35f1da4f9e67b5b48dfe52")); executeTest("test minIndelFraction 0.0", spec); } @@ -184,7 +184,7 @@ public class 
UnifiedGenotyperIndelCallingIntegrationTest extends WalkerTest { public void testMinIndelFraction25() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( assessMinIndelFraction + " -minIndelFrac 0.25", 1, - Arrays.asList("98abcfb0a008050eba8b9c285a25b2a0")); + Arrays.asList("8d9fc96be07db791737ac18135de4d63")); executeTest("test minIndelFraction 0.25", spec); } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIntegrationTest.java index d55a923dc..532982853 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIntegrationTest.java @@ -46,11 +46,15 @@ package org.broadinstitute.sting.gatk.walkers.genotyper; +import net.sf.samtools.util.BlockCompressedInputStream; +import org.broad.tribble.readers.AsciiLineReader; import org.broadinstitute.sting.WalkerTest; import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; import org.broadinstitute.sting.utils.exceptions.UserException; +import org.testng.Assert; import org.testng.annotations.Test; +import java.io.File; import java.util.Arrays; import java.util.Collections; @@ -156,6 +160,14 @@ public class UnifiedGenotyperIntegrationTest extends WalkerTest { } + @Test + public void emitPLsAtAllSites() { + WalkerTest.WalkerTestSpec spec1 = new WalkerTest.WalkerTestSpec( + baseCommand + " -I " + validationDataLocation + "NA12878.1kg.p2.chr1_10mb_11_mb.SLX.bam -o %s -L 1:10,000,000-10,010,000 --output_mode EMIT_ALL_SITES -allSitePLs", 1, + Arrays.asList("7cc55db8693759e059a05bc4398f6f69")); + executeTest("test all site PLs 1", spec1); + + } // -------------------------------------------------------------------------------------------------------------- // // testing heterozygosity @@ -288,9 +300,24 @@ public 
class UnifiedGenotyperIntegrationTest extends WalkerTest { @Test public void testNsInCigar() { - WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( + final WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( "-T UnifiedGenotyper --disableDithering -R " + b37KGReference + " --no_cmdline_in_header -I " + privateTestDir + "testWithNs.bam -o %s -L 8:141813600-141813700 -out_mode EMIT_ALL_SITES", 1, - Arrays.asList("2ae3fd39c53a6954d32faed8703adfe8")); + UserException.UnsupportedCigarOperatorException.class); + executeTest("test calling on reads with Ns in CIGAR", spec); } + + @Test(enabled = true) + public void testCompressedVCFOutputWithNT() throws Exception { + WalkerTestSpec spec = new WalkerTestSpec("-T UnifiedGenotyper -R " + b37KGReference + " -I " + + privateTestDir + "PCRFree.2x250.Illumina.20_10_11.bam" + + " -o %s -L 20:10,000,000-10,100,000 -nt 4", + 1, Arrays.asList("vcf.gz"), Arrays.asList("")); + final File vcf = executeTest("testCompressedVCFOutputWithNT", spec).first.get(0); + final AsciiLineReader reader = new AsciiLineReader(new BlockCompressedInputStream(vcf)); + int nLines = 0; + while ( reader.readLine() != null ) + nLines++; + Assert.assertTrue(nLines > 0); + } } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperNormalCallingIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperNormalCallingIntegrationTest.java index 8256a8496..1bfbbac17 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperNormalCallingIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperNormalCallingIntegrationTest.java @@ -64,7 +64,7 @@ public class UnifiedGenotyperNormalCallingIntegrationTest extends WalkerTest{ public void testMultiSamplePilot1() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( baseCommand + " -I " + 
validationDataLocation + "low_coverage_CEU.chr1.10k-11k.bam -o %s -L 1:10,022,000-10,025,000", 1, - Arrays.asList("a6c224235c21b4af816b1512eb0624df")); + Arrays.asList("a9466c1e3ce1fc4bac83086b25a6df54")); executeTest("test MultiSample Pilot1", spec); } @@ -80,7 +80,7 @@ public class UnifiedGenotyperNormalCallingIntegrationTest extends WalkerTest{ public void testWithAllelesPassedIn2() { WalkerTest.WalkerTestSpec spec2 = new WalkerTest.WalkerTestSpec( baseCommand + " --output_mode EMIT_ALL_SITES --genotyping_mode GENOTYPE_GIVEN_ALLELES -alleles " + privateTestDir + "allelesForUG.vcf -I " + validationDataLocation + "pilot2_daughters.chr20.10k-11k.bam -o %s -L 20:10,000,000-10,025,000", 1, - Arrays.asList("698e54aeae3130779d246b9480a4052c")); + Arrays.asList("3e646003c5b93da80c7d8e5d0ff2ee4e")); executeTest("test MultiSample Pilot2 with alleles passed in and emitting all sites", spec2); } @@ -96,7 +96,7 @@ public class UnifiedGenotyperNormalCallingIntegrationTest extends WalkerTest{ public void testMultipleSNPAlleles() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( "-T UnifiedGenotyper --disableDithering -R " + b37KGReference + " --no_cmdline_in_header -glm BOTH --dbsnp " + b37dbSNP129 + " -I " + privateTestDir + "multiallelic.snps.bam -o %s -L " + privateTestDir + "multiallelic.snps.intervals", 1, - Arrays.asList("09a1a4d4bf0289bcc5e8a958f783a989")); + Arrays.asList("06c85e8eab08b67244cf38fc785aca22")); executeTest("test Multiple SNP alleles", spec); } @@ -112,7 +112,7 @@ public class UnifiedGenotyperNormalCallingIntegrationTest extends WalkerTest{ public void testReverseTrim() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( "-T UnifiedGenotyper --disableDithering -R " + b37KGReference + " --no_cmdline_in_header -glm INDEL -I " + validationDataLocation + "CEUTrio.HiSeq.b37.chr20.10_11mb.bam -o %s -L 20:10289124 -L 20:10090289", 1, - Arrays.asList("57a1bb44967988f2b7ae7779127990ae")); + 
Arrays.asList("f3da1ff1e49a831af055ca52d6d07dd7")); executeTest("test reverse trim", spec); } @@ -120,7 +120,7 @@ public class UnifiedGenotyperNormalCallingIntegrationTest extends WalkerTest{ public void testMismatchedPLs() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( "-T UnifiedGenotyper --disableDithering -R " + b37KGReference + " --no_cmdline_in_header -glm INDEL -I " + privateTestDir + "mismatchedPLs.bam -o %s -L 1:24020341", 1, - Arrays.asList("3011c20165951ca43c8a4e86a5835dbd")); + Arrays.asList("20ff311f363c51b7385a76f6f296759c")); executeTest("test mismatched PLs", spec); } } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperReducedReadsIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperReducedReadsIntegrationTest.java index f7ac87cda..33810e255 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperReducedReadsIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperReducedReadsIntegrationTest.java @@ -63,18 +63,18 @@ public class UnifiedGenotyperReducedReadsIntegrationTest extends WalkerTest { public void testReducedBam() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( "-T UnifiedGenotyper --disableDithering -R " + b37KGReference + " --no_cmdline_in_header -I " + privateTestDir + "bamExample.ReducedRead.ADAnnotation.bam -o %s -L 1:67,225,396-67,288,518", 1, - Arrays.asList("e6565060b44a7804935973efcd56e596")); + Arrays.asList("ffde0d5e23523e4bd9e7e18f62d37d0f")); executeTest("test calling on a ReducedRead BAM", spec); } @Test public void testReducedBamSNPs() { - testReducedCalling("SNP", "ab776d74c41ce2b859e2b2466a76204a"); + testReducedCalling("SNP", "e8de8c523751ad2fa2ee20185ba5dea7"); } @Test public void testReducedBamINDELs() { - testReducedCalling("INDEL", "9a986b98ed014576ce923e07452447f4"); + 
testReducedCalling("INDEL", "4b4902327fb132f9aaab3dd5ace934e1"); } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssemblerUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssemblerUnitTest.java index e6dea4d11..95592241d 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssemblerUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssemblerUnitTest.java @@ -52,10 +52,7 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller; * Date: 3/27/12 */ -import net.sf.samtools.Cigar; -import net.sf.samtools.CigarElement; -import net.sf.samtools.CigarOperator; -import net.sf.samtools.SAMFileHeader; +import net.sf.samtools.*; import org.broadinstitute.sting.BaseTest; import org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs.DeBruijnGraph; import org.broadinstitute.sting.utils.haplotype.Haplotype; @@ -76,55 +73,13 @@ public class DeBruijnAssemblerUnitTest extends BaseTest { public void testReferenceCycleGraph() { String refCycle = "ATCGAGGAGAGCGCCCCGAGATATATATATATATATTTGCGAGCGCGAGCGTTTTAAAAATTTTAGACGGAGAGATATATATATATGGGAGAGGGGATATATATATATCCCCCC"; String noCycle = "ATCGAGGAGAGCGCCCCGAGATATTATTTGCGAGCGCGAGCGTTTTAAAAATTTTAGACGGAGAGATGGGAGAGGGGATATATAATATCCCCCC"; - final DeBruijnGraph g1 = new DeBruijnAssembler().createGraphFromSequences(new ArrayList(), 10, new Haplotype(refCycle.getBytes(), true)); - final DeBruijnGraph g2 = new DeBruijnAssembler().createGraphFromSequences(new ArrayList(), 10, new Haplotype(noCycle.getBytes(), true)); + final DeBruijnGraph g1 = new DeBruijnAssembler().createGraphFromSequences(new ArrayList(), 10, new Haplotype(refCycle.getBytes(), true), Collections.emptyList()); + final DeBruijnGraph g2 = new DeBruijnAssembler().createGraphFromSequences(new ArrayList(), 10, new Haplotype(noCycle.getBytes(), true), Collections.emptyList()); 
Assert.assertTrue(g1 == null, "Reference cycle graph should return null during creation."); Assert.assertTrue(g2 != null, "Reference non-cycle graph should not return null during creation."); } - @Test(enabled = !DEBUG) - public void testLeftAlignCigarSequentially() { - String preRefString = "GATCGATCGATC"; - String postRefString = "TTT"; - String refString = "ATCGAGGAGAGCGCCCCG"; - String indelString1 = "X"; - String indelString2 = "YZ"; - int refIndel1 = 10; - int refIndel2 = 12; - - for ( final int indelSize1 : Arrays.asList(1, 2, 3, 4) ) { - for ( final int indelOp1 : Arrays.asList(1, -1) ) { - for ( final int indelSize2 : Arrays.asList(1, 2, 3, 4) ) { - for ( final int indelOp2 : Arrays.asList(1, -1) ) { - - Cigar expectedCigar = new Cigar(); - expectedCigar.add(new CigarElement(refString.length(), CigarOperator.M)); - expectedCigar.add(new CigarElement(indelSize1, (indelOp1 > 0 ? CigarOperator.I : CigarOperator.D))); - expectedCigar.add(new CigarElement((indelOp1 < 0 ? refIndel1 - indelSize1 : refIndel1), CigarOperator.M)); - expectedCigar.add(new CigarElement(refString.length(), CigarOperator.M)); - expectedCigar.add(new CigarElement(indelSize2 * 2, (indelOp2 > 0 ? CigarOperator.I : CigarOperator.D))); - expectedCigar.add(new CigarElement((indelOp2 < 0 ? (refIndel2 - indelSize2) * 2 : refIndel2 * 2), CigarOperator.M)); - expectedCigar.add(new CigarElement(refString.length(), CigarOperator.M)); - - Cigar givenCigar = new Cigar(); - givenCigar.add(new CigarElement(refString.length() + refIndel1/2, CigarOperator.M)); - givenCigar.add(new CigarElement(indelSize1, (indelOp1 > 0 ? CigarOperator.I : CigarOperator.D))); - givenCigar.add(new CigarElement((indelOp1 < 0 ? (refIndel1/2 - indelSize1) : refIndel1/2) + refString.length() + refIndel2/2 * 2, CigarOperator.M)); - givenCigar.add(new CigarElement(indelSize2 * 2, (indelOp2 > 0 ? CigarOperator.I : CigarOperator.D))); - givenCigar.add(new CigarElement((indelOp2 < 0 ? 
(refIndel2/2 - indelSize2) * 2 : refIndel2/2 * 2) + refString.length(), CigarOperator.M)); - - String theRef = preRefString + refString + Utils.dupString(indelString1, refIndel1) + refString + Utils.dupString(indelString2, refIndel2) + refString + postRefString; - String theRead = refString + Utils.dupString(indelString1, refIndel1 + indelOp1 * indelSize1) + refString + Utils.dupString(indelString2, refIndel2 + indelOp2 * indelSize2) + refString; - - Cigar calculatedCigar = new DeBruijnAssembler().leftAlignCigarSequentially(AlignmentUtils.consolidateCigar(givenCigar), theRef.getBytes(), theRead.getBytes(), preRefString.length(), 0); - Assert.assertEquals(AlignmentUtils.consolidateCigar(calculatedCigar).toString(), AlignmentUtils.consolidateCigar(expectedCigar).toString(), "Cigar strings do not match!"); - } - } - } - } - } - private static class MockBuilder extends DeBruijnGraphBuilder { public final List addedPairs = new LinkedList(); @@ -165,7 +120,7 @@ public class DeBruijnAssemblerUnitTest extends BaseTest { return tests.toArray(new Object[][]{}); } - @Test(dataProvider = "AddReadKmersToGraph") + @Test(dataProvider = "AddReadKmersToGraph", enabled = ! 
DEBUG) public void testAddReadKmersToGraph(final String bases, final int kmerSize, final List badQualsSites) { final int readLen = bases.length(); final DeBruijnAssembler assembler = new DeBruijnAssembler(); @@ -198,4 +153,47 @@ public class DeBruijnAssemblerUnitTest extends BaseTest { Assert.assertTrue(expectedBases.contains(new String(addedKmer.bases())), "Couldn't find kmer " + addedKmer + " among all expected kmers " + expectedBases); } } + + @DataProvider(name = "AddGGAKmersToGraph") + public Object[][] makeAddGGAKmersToGraphData() { + List tests = new ArrayList(); + + // this functionality can be adapted to provide input data for whatever you might want in your data + final String bases = "ACGTAACCGGTTAAACCCGGGTTT"; + final int readLen = bases.length(); + final List allBadStarts = new ArrayList(readLen); + for ( int i = 0; i < readLen; i++ ) allBadStarts.add(i); + + for ( final int kmerSize : Arrays.asList(3, 4, 5) ) { + tests.add(new Object[]{bases, kmerSize}); + } + + return tests.toArray(new Object[][]{}); + } + + @Test(dataProvider = "AddGGAKmersToGraph", enabled = ! 
DEBUG) + public void testAddGGAKmersToGraph(final String bases, final int kmerSize) { + final int readLen = bases.length(); + final DeBruijnAssembler assembler = new DeBruijnAssembler(); + final MockBuilder builder = new MockBuilder(kmerSize); + + final Set expectedBases = new HashSet(); + final Set expectedStarts = new LinkedHashSet(); + for ( int i = 0; i < readLen; i++) { + boolean good = true; + for ( int j = 0; j < kmerSize + 1; j++ ) { // +1 is for pairing + good &= i + j < readLen; + } + if ( good ) { + expectedStarts.add(i); + expectedBases.add(bases.substring(i, i + kmerSize + 1)); + } + } + + assembler.addGGAKmersToGraph(builder, Arrays.asList(new Haplotype(bases.getBytes()))); + Assert.assertEquals(builder.addedPairs.size(), expectedStarts.size()); + for ( final Kmer addedKmer : builder.addedPairs ) { + Assert.assertTrue(expectedBases.contains(new String(addedKmer.bases())), "Couldn't find kmer " + addedKmer + " among all expected kmers " + expectedBases); + } + } } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest.java index d3f3a9936..0636d7c1b 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest.java @@ -64,7 +64,7 @@ public class HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest extends Wa @Test public void testHaplotypeCallerMultiSampleComplex1() { - HCTestComplexVariants(privateTestDir + "AFR.complex.variants.bam", "", "27db36467d40c3cde201f5826e959d78"); + HCTestComplexVariants(privateTestDir + "AFR.complex.variants.bam", "", "4a3479fc4ad387d381593b328f737a1b"); } private void 
HCTestSymbolicVariants(String bam, String args, String md5) { @@ -88,12 +88,12 @@ public class HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest extends Wa @Test public void testHaplotypeCallerMultiSampleGGAComplex() { HCTestComplexGGA(NA12878_CHR20_BAM, "-L 20:119673-119823 -L 20:121408-121538", - "ed3b577e6f7d68bba6774a62d9df9cd9"); + "b7a01525c00d02b3373513a668a43c6a"); } @Test public void testHaplotypeCallerMultiSampleGGAMultiAllelic() { HCTestComplexGGA(NA12878_CHR20_BAM, "-L 20:133041-133161 -L 20:300207-300337", - "a594a28d8053c3e969c39de81a9d03d6"); + "a2a42055b068334f415efb07d6bb9acd"); } } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java index 50165bd01..aca1172d4 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java @@ -47,15 +47,12 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller; import net.sf.picard.reference.IndexedFastaSequenceFile; -import org.broad.tribble.TribbleIndexedFeatureReader; import org.broadinstitute.sting.WalkerTest; -import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; import org.broadinstitute.sting.utils.GenomeLoc; import org.broadinstitute.sting.utils.GenomeLocParser; import org.broadinstitute.sting.utils.collections.Pair; import org.broadinstitute.sting.utils.variant.GATKVCFUtils; import org.broadinstitute.variant.variantcontext.VariantContext; -import org.broadinstitute.variant.vcf.VCFCodec; import org.testng.annotations.Test; import java.io.File; @@ -69,6 +66,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { final static String NA12878_CHR20_BAM = validationDataLocation + 
"NA12878.HiSeq.WGS.bwa.cleaned.recal.hg19.20.bam"; final static String CEUTRIO_BAM = validationDataLocation + "CEUTrio.HiSeq.b37.chr20.10_11mb.bam"; final static String NA12878_RECALIBRATED_BAM = privateTestDir + "NA12878.100kb.BQSRv2.example.bam"; + final static String NA12878_PCRFREE = privateTestDir + "PCRFree.2x250.Illumina.20_10_11.bam"; final static String CEUTRIO_MT_TEST_BAM = privateTestDir + "CEUTrio.HiSeq.b37.MT.1_50.bam"; final static String INTERVALS_FILE = validationDataLocation + "NA12878.HiSeq.b37.chr20.10_11mb.test.intervals"; @@ -80,12 +78,12 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { @Test public void testHaplotypeCallerMultiSample() { - HCTest(CEUTRIO_BAM, "", "aeab5f0d40852e6332b96481981a0e46"); + HCTest(CEUTRIO_BAM, "", "baa5a2eedc8f06ce9f8f98411ee09f8a"); } @Test public void testHaplotypeCallerSingleSample() { - HCTest(NA12878_BAM, "", "18d5671d8454e8a0c05ee5f6e9fabfe3"); + HCTest(NA12878_BAM, "", "f09e03d41238697b23f95716a12667cb"); } @Test(enabled = false) // can't annotate the rsID's yet @@ -96,12 +94,12 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { @Test public void testHaplotypeCallerMultiSampleGGA() { HCTest(CEUTRIO_BAM, "--max_alternate_alleles 3 -gt_mode GENOTYPE_GIVEN_ALLELES -out_mode EMIT_ALL_SITES -alleles " + validationDataLocation + "combined.phase1.chr20.raw.indels.sites.vcf", - "28c3b1f276ec8198801aafe880e40fb6"); + "130d36448faeb7b8d4bce4be12dacd3a"); } @Test public void testHaplotypeCallerInsertionOnEdgeOfContig() { - HCTest(CEUTRIO_MT_TEST_BAM, "-dcov 90 -L MT:1-10", "7f1fb8f9587f64643f6612ef1dd6d4ae"); + HCTest(CEUTRIO_MT_TEST_BAM, "-L MT:1-10", "7f1fb8f9587f64643f6612ef1dd6d4ae"); } private void HCTestIndelQualityScores(String bam, String args, String md5) { @@ -112,7 +110,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { @Test public void testHaplotypeCallerSingleSampleIndelQualityScores() { - HCTestIndelQualityScores(NA12878_RECALIBRATED_BAM, "", 
"bac6f98e910290722df28da44b41f06f"); + HCTestIndelQualityScores(NA12878_RECALIBRATED_BAM, "", "7c20aa62633f4ce8ebf12950fbf05ec0"); } private void HCTestNearbySmallIntervals(String bam, String args, String md5) { @@ -149,7 +147,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { @Test public void testHaplotypeCallerNearbySmallIntervals() { - HCTestNearbySmallIntervals(NA12878_BAM, "", "65e7b1b72a2411d6360138049914aa3a"); + HCTestNearbySmallIntervals(NA12878_BAM, "", "0ddc56f0a0fbcfefda79aa20b2ecf603"); } // This problem bam came from a user on the forum and it spotted a problem where the ReadClipper @@ -166,7 +164,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { @Test public void HCTestStructuralIndels() { final String base = String.format("-T HaplotypeCaller --disableDithering -R %s -I %s", REF, privateTestDir + "AFR.structural.indels.bam") + " --no_cmdline_in_header -o %s -minPruning 6 -L 20:8187565-8187800 -L 20:18670537-18670730"; - final WalkerTestSpec spec = new WalkerTestSpec(base, Arrays.asList("cb190c935541ebb9f660f713a882b922")); + final WalkerTestSpec spec = new WalkerTestSpec(base, Arrays.asList("91717e5e271742c2c9b67223e58f1320")); executeTest("HCTestStructuralIndels: ", spec); } @@ -188,7 +186,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { public void HCTestReducedBam() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( "-T HaplotypeCaller --disableDithering -R " + b37KGReference + " --no_cmdline_in_header -I " + privateTestDir + "bamExample.ReducedRead.ADAnnotation.bam -o %s -L 1:67,225,396-67,288,518", 1, - Arrays.asList("0df626cd0d76aca8a05a545d0b36bf23")); + Arrays.asList("5fe9310addf881bed4fde2354e59ce34")); executeTest("HC calling on a ReducedRead BAM", spec); } @@ -196,7 +194,30 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { public void testReducedBamWithReadsNotFullySpanningDeletion() { WalkerTest.WalkerTestSpec spec = new 
WalkerTest.WalkerTestSpec( "-T HaplotypeCaller --disableDithering -R " + b37KGReference + " --no_cmdline_in_header -I " + privateTestDir + "reduced.readNotFullySpanningDeletion.bam -o %s -L 1:167871297", 1, - Arrays.asList("8adfa8a27a312760dab50787da595c57")); + Arrays.asList("26a9917f6707536636451266de0116c3")); executeTest("test calling on a ReducedRead BAM where the reads do not fully span a deletion", spec); } + + // -------------------------------------------------------------------------------------------------------------- + // + // test dbSNP annotation + // + // -------------------------------------------------------------------------------------------------------------- + + @Test + public void HCTestDBSNPAnnotationWGS() { + WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( + "-T HaplotypeCaller --disableDithering -R " + b37KGReference + " --no_cmdline_in_header -I " + NA12878_PCRFREE + " -o %s -L 20:10,000,000-10,100,000 -D " + b37dbSNP132, 1, + Arrays.asList("cc6f2a76ee97ecc14a5f956ffbb21d88")); + executeTest("HC calling with dbSNP ID annotation on WGS intervals", spec); + } + + @Test + public void HCTestDBSNPAnnotationWEx() { + WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( + "-T HaplotypeCaller --disableDithering -R " + b37KGReference + " --no_cmdline_in_header -I " + NA12878_PCRFREE + " -o %s -L 20:10,000,000-11,000,000 -D " + b37dbSNP132 + + " -L " + hg19Intervals + " -isr INTERSECTION", 1, + Arrays.asList("51e91c8af61a6b47807165906baefb00")); + executeTest("HC calling with dbSNP ID annotation on WEx intervals", spec); + } } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerParallelIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerParallelIntegrationTest.java new file mode 100644 index 000000000..d009550f4 --- /dev/null +++ 
b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerParallelIntegrationTest.java @@ -0,0 +1,79 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). +* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/gatk on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
+* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. 
+* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. +* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. 
ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. 
Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. +*/ + +package org.broadinstitute.sting.gatk.walkers.haplotypecaller; + +import org.broadinstitute.sting.WalkerTest; +import org.broadinstitute.sting.utils.haplotypeBAMWriter.HaplotypeBAMWriter; +import org.testng.annotations.DataProvider; +import org.testng.annotations.Test; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +public class HaplotypeCallerParallelIntegrationTest extends WalkerTest { + @DataProvider(name = "NCTDataProvider") + public Object[][] makeNCTDataProvider() { + List tests = new ArrayList(); + + for ( final int nct : Arrays.asList(1, 2, 4) ) { + tests.add(new Object[]{nct, "9da4cc89590c4c64a36f4a9c820f8609"}); + } + + return tests.toArray(new Object[][]{}); + } + + @Test(dataProvider = "NCTDataProvider") + public void testHCNCT(final int nct, final String md5) { + WalkerTestSpec spec = new WalkerTestSpec( + "-T HaplotypeCaller -R " + b37KGReference + " --no_cmdline_in_header -I " + + privateTestDir + "PCRFree.2x250.Illumina.20_10_11.bam -o %s " + + " -L 20:10,000,000-10,100,000 -G none -A -contamination 0.0 
-nct " + nct, 1, + Arrays.asList(md5)); + executeTest("HC test parallel HC with NCT with nct " + nct, spec); + } +} diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/KMerCounterCaseFixUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/KMerCounterCaseFixUnitTest.java index c049121a3..9b08e8214 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/KMerCounterCaseFixUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/KMerCounterCaseFixUnitTest.java @@ -1,48 +1,48 @@ /* - * By downloading the PROGRAM you agree to the following terms of use: - * - * BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY - * - * This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). - * - * WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and - * WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. - * NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: - * - * 1. DEFINITIONS - * 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. - * - * 2. LICENSE - * 2.1 Grant. 
Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. - * The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. - * 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. - * 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. - * - * 3. OWNERSHIP OF INTELLECTUAL PROPERTY - * LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. 
LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. - * Copyright 2012 Broad Institute, Inc. - * Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. - * LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. - * - * 4. INDEMNIFICATION - * LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. - * - * 5. NO REPRESENTATIONS OR WARRANTIES - * THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. 
- * IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. - * - * 6. ASSIGNMENT - * This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. - * - * 7. MISCELLANEOUS - * 7.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. - * 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. - * 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. - * 7.4 Notice. 
Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. - * 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. - * 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. - * 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. - */ +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). 
+* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. 
Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. 
INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. +* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. 
LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. 
This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. +*/ package org.broadinstitute.sting.gatk.walkers.haplotypecaller; @@ -50,6 +50,9 @@ import org.broadinstitute.sting.BaseTest; import org.testng.Assert; import org.testng.annotations.Test; +import java.util.HashSet; +import java.util.Set; + public class KMerCounterCaseFixUnitTest extends BaseTest { @Test public void testMyData() { @@ -76,6 +79,18 @@ public class KMerCounterCaseFixUnitTest extends BaseTest { testCounting(counter, "NNC", 0); Assert.assertNotNull(counter.toString()); + + assertCounts(counter, 5); + assertCounts(counter, 4, "ATG"); + assertCounts(counter, 3, "ATG", "ACC"); + assertCounts(counter, 2, "ATG", "ACC", "AAA"); + assertCounts(counter, 1, "ATG", "ACC", "AAA", "CTG", "NNA", "CCC"); + } + + private void assertCounts(final KMerCounter counter, final int minCount, final String ... 
expecteds) { + final Set expected = new HashSet(); + for ( final String one : expecteds ) expected.add(new Kmer(one)); + Assert.assertEquals(new HashSet(counter.getKmersWithCountsAtLeast(minCount)), expected); } private void testCounting(final KMerCounter counter, final String in, final int expectedCount) { diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/KmerUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/KmerUnitTest.java index 989c38628..116c987a6 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/KmerUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/KmerUnitTest.java @@ -47,13 +47,12 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller; import org.broadinstitute.sting.BaseTest; +import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; import org.testng.Assert; import org.testng.annotations.DataProvider; import org.testng.annotations.Test; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; +import java.util.*; public class KmerUnitTest extends BaseTest { @DataProvider(name = "KMerCreationData") @@ -130,4 +129,40 @@ public class KmerUnitTest extends BaseTest { } } } + + @Test + public void testDifferingPositions() { + final String bases = "ACGTCAGACGTACGTTTGACGTCAGACGTACGT"; + final Kmer baseKmer = new Kmer(bases.getBytes()); + + + final int NUM_TEST_CASES = 30; + + for (int test = 0; test < NUM_TEST_CASES; test++) { + + final int numBasesToChange = test % bases.length(); + + // changes numBasesToChange bases - spread regularly through read string + final int step = (numBasesToChange > 0?Math.min(bases.length() / numBasesToChange,1) : 1); + + final byte[] newBases = bases.getBytes().clone(); + int actualChangedBases =0; // could be different from numBasesToChange due to roundoff + for (int idx=0; idx < numBasesToChange; idx+=step) { + // now change given positions 
+ newBases[idx] = (newBases[idx] == (byte)'A'? (byte)'T':(byte)'A'); + actualChangedBases++; + } + + // compute changed positions + final int[] differingIndices = new int[newBases.length]; + final byte[] differingBases = new byte[newBases.length]; + final int numDiffs = baseKmer.getDifferingPositions(new Kmer(newBases),newBases.length,differingIndices,differingBases); + Assert.assertEquals(numDiffs,actualChangedBases); + for (int k=0; k < numDiffs; k++) { + final int idx = differingIndices[k]; + Assert.assertTrue(newBases[idx] != bases.getBytes()[idx]); + Assert.assertEquals(differingBases[idx],newBases[idx]); + } + } + } } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngineUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngineUnitTest.java new file mode 100644 index 000000000..2fda56665 --- /dev/null +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngineUnitTest.java @@ -0,0 +1,287 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). +* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. 
+* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. 
LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. 
NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. +* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. 
Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. 
+*/ + +package org.broadinstitute.sting.gatk.walkers.haplotypecaller; + +import net.sf.picard.reference.IndexedFastaSequenceFile; +import net.sf.samtools.Cigar; +import net.sf.samtools.CigarElement; +import net.sf.samtools.CigarOperator; +import net.sf.samtools.SAMFileHeader; +import org.broadinstitute.sting.BaseTest; +import org.broadinstitute.sting.gatk.walkers.haplotypecaller.readthreading.ReadThreadingAssembler; +import org.broadinstitute.sting.utils.GenomeLoc; +import org.broadinstitute.sting.utils.GenomeLocParser; +import org.broadinstitute.sting.utils.UnvalidatingGenomeLoc; +import org.broadinstitute.sting.utils.Utils; +import org.broadinstitute.sting.utils.activeregion.ActiveRegion; +import org.broadinstitute.sting.utils.collections.PrimitivePair; +import org.broadinstitute.sting.utils.fasta.CachingIndexedFastaSequenceFile; +import org.broadinstitute.sting.utils.haplotype.Haplotype; +import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; +import org.broadinstitute.variant.variantcontext.Allele; +import org.broadinstitute.variant.variantcontext.VariantContext; +import org.broadinstitute.variant.variantcontext.VariantContextBuilder; +import org.testng.Assert; +import org.testng.annotations.BeforeClass; +import org.testng.annotations.DataProvider; +import org.testng.annotations.Test; + +import java.io.File; +import java.io.FileNotFoundException; +import java.util.*; + +public class LocalAssemblyEngineUnitTest extends BaseTest { + private GenomeLocParser genomeLocParser; + private IndexedFastaSequenceFile seq; + private SAMFileHeader header; + + @BeforeClass + public void setup() throws FileNotFoundException { + seq = new CachingIndexedFastaSequenceFile(new File(b37KGReference)); + genomeLocParser = new GenomeLocParser(seq); + header = ArtificialSAMUtils.createArtificialSamHeader(seq.getSequenceDictionary()); + } + + private enum Assembler {DEBRUIJN_ASSEMBLER, READ_THREADING_ASSEMBLER} + private 
LocalAssemblyEngine createAssembler(final Assembler type) { + switch ( type ) { + case DEBRUIJN_ASSEMBLER: return new DeBruijnAssembler(); + case READ_THREADING_ASSEMBLER: return new ReadThreadingAssembler(); + default: throw new IllegalStateException("Unexpected " + type); + } + } + + @DataProvider(name = "AssembleIntervalsData") + public Object[][] makeAssembleIntervalsData() { + List tests = new ArrayList(); + + final String contig = "20"; + final int start = 10000000; + final int end = 10100000; + final int windowSize = 100; + final int stepSize = 200; + final int nReadsToUse = 5; + + for ( final Assembler assembler : Assembler.values() ) { + for ( int startI = start; startI < end; startI += stepSize) { + final int endI = startI + windowSize; + final GenomeLoc refLoc = genomeLocParser.createGenomeLoc(contig, startI, endI); + tests.add(new Object[]{assembler, refLoc, nReadsToUse}); + } + } + + return tests.toArray(new Object[][]{}); + } + + @DataProvider(name = "AssembleIntervalsWithVariantData") + public Object[][] makeAssembleIntervalsWithVariantData() { + List tests = new ArrayList(); + + final String contig = "20"; + final int start = 10000000; + final int end = 10001000; + final int windowSize = 100; + final int stepSize = 200; + final int variantStepSize = 1; + final int nReadsToUse = 5; + + for ( final Assembler assembler : Assembler.values() ) { + for ( int startI = start; startI < end; startI += stepSize) { + final int endI = startI + windowSize; + final GenomeLoc refLoc = genomeLocParser.createGenomeLoc(contig, startI, endI); + for ( int variantStart = windowSize / 2 - 10; variantStart < windowSize / 2 + 10; variantStart += variantStepSize ) { + tests.add(new Object[]{assembler, refLoc, nReadsToUse, variantStart}); + } + } + } + + return tests.toArray(new Object[][]{}); + } + + @Test(dataProvider = "AssembleIntervalsData") + public void testAssembleRef(final Assembler assembler, final GenomeLoc loc, final int nReadsToUse) { + final byte[] refBases = 
seq.getSubsequenceAt(loc.getContig(), loc.getStart(), loc.getStop()).getBases(); + + final List reads = new LinkedList(); + for ( int i = 0; i < nReadsToUse; i++ ) { + final byte[] bases = refBases.clone(); + final byte[] quals = Utils.dupBytes((byte) 30, refBases.length); + final String cigar = refBases.length + "M"; + final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, loc.getContig(), loc.getContigIndex(), loc.getStart(), bases, quals, cigar); + reads.add(read); + } + + // TODO -- generalize to all assemblers + final Haplotype refHaplotype = new Haplotype(refBases, true); + final List haplotypes = assemble(assembler, refBases, loc, reads); + Assert.assertEquals(haplotypes, Collections.singletonList(refHaplotype)); + } + + @Test(dataProvider = "AssembleIntervalsWithVariantData") + public void testAssembleRefAndSNP(final Assembler assembler, final GenomeLoc loc, final int nReadsToUse, final int variantSite) { + final byte[] refBases = seq.getSubsequenceAt(loc.getContig(), loc.getStart(), loc.getStop()).getBases(); + final Allele refBase = Allele.create(refBases[variantSite], true); + final Allele altBase = Allele.create((byte)(refBase.getBases()[0] == 'A' ? 
'C' : 'A'), false); + final VariantContextBuilder vcb = new VariantContextBuilder("x", loc.getContig(), variantSite, variantSite, Arrays.asList(refBase, altBase)); + testAssemblyWithVariant(assembler, refBases, loc, nReadsToUse, vcb.make()); + } + + @Test(dataProvider = "AssembleIntervalsWithVariantData") + public void testAssembleRefAndDeletion(final Assembler assembler, final GenomeLoc loc, final int nReadsToUse, final int variantSite) { + final byte[] refBases = seq.getSubsequenceAt(loc.getContig(), loc.getStart(), loc.getStop()).getBases(); + for ( int deletionLength = 1; deletionLength < 10; deletionLength++ ) { + final Allele refBase = Allele.create(new String(refBases).substring(variantSite, variantSite + deletionLength + 1), true); + final Allele altBase = Allele.create(refBase.getBases()[0], false); + final VariantContextBuilder vcb = new VariantContextBuilder("x", loc.getContig(), variantSite, variantSite + deletionLength, Arrays.asList(refBase, altBase)); + testAssemblyWithVariant(assembler, refBases, loc, nReadsToUse, vcb.make()); + } + } + + @Test(dataProvider = "AssembleIntervalsWithVariantData") + public void testAssembleRefAndInsertion(final Assembler assembler, final GenomeLoc loc, final int nReadsToUse, final int variantSite) { + final byte[] refBases = seq.getSubsequenceAt(loc.getContig(), loc.getStart(), loc.getStop()).getBases(); + for ( int insertionLength = 1; insertionLength < 10; insertionLength++ ) { + final Allele refBase = Allele.create(refBases[variantSite], false); // NOTE(review): isRef flags look swapped vs testAssembleRefAndDeletion — the single base is marked non-ref and the long allele below is marked ref, so getAlternateAllele(0) in testAssemblyWithVariant is the single base and the alt haplotype built there is SHORTER than ref (a deletion-like event, not an insertion). Confirm this is intentional before renaming/"fixing": a true inserted copy of adjacent reference bases would create tandem repeats (graph cycles) that may break the assemblers. + final Allele altBase = Allele.create(new String(refBases).substring(variantSite, variantSite + insertionLength + 1), true); + final VariantContextBuilder vcb = new VariantContextBuilder("x", loc.getContig(), variantSite, variantSite + insertionLength, Arrays.asList(refBase, altBase)); + testAssemblyWithVariant(assembler, refBases, loc, nReadsToUse, vcb.make()); + } + } + + private void testAssemblyWithVariant(final Assembler assembler, final byte[] refBases, final 
GenomeLoc loc, final int nReadsToUse, final VariantContext site) { + final String preRef = new String(refBases).substring(0, site.getStart()); + final String postRef = new String(refBases).substring(site.getEnd() + 1, refBases.length); + final byte[] altBases = (preRef + site.getAlternateAllele(0).getBaseString() + postRef).getBytes(); + +// logger.warn("ref " + new String(refBases)); +// logger.warn("alt " + new String(altBases)); + + final List reads = new LinkedList(); + for ( int i = 0; i < nReadsToUse; i++ ) { + final byte[] bases = altBases.clone(); + final byte[] quals = Utils.dupBytes((byte) 30, altBases.length); + final String cigar = altBases.length + "M"; + final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, loc.getContig(), loc.getContigIndex(), loc.getStart(), bases, quals, cigar); + reads.add(read); + } + + final Haplotype refHaplotype = new Haplotype(refBases, true); + final Haplotype altHaplotype = new Haplotype(altBases, false); + final List haplotypes = assemble(assembler, refBases, loc, reads); + Assert.assertEquals(haplotypes, Arrays.asList(refHaplotype, altHaplotype)); + } + + + private List assemble(final Assembler assembler, final byte[] refBases, final GenomeLoc loc, final List reads) { + final Haplotype refHaplotype = new Haplotype(refBases, true); + final Cigar c = new Cigar(); + c.add(new CigarElement(refHaplotype.getBases().length, CigarOperator.M)); + refHaplotype.setCigar(c); + + final ActiveRegion activeRegion = new ActiveRegion(loc, null, true, genomeLocParser, 0); + activeRegion.addAll(reads); + final LocalAssemblyEngine engine = createAssembler(assembler); +// logger.warn("Assembling " + activeRegion + " with " + engine); + return engine.runLocalAssembly(activeRegion, refHaplotype, refBases, loc, Collections.emptyList(), null); + } + + @DataProvider(name = "SimpleAssemblyTestData") + public Object[][] makeSimpleAssemblyTestData() { + List tests = new ArrayList(); + + final String contig = "20"; + final int 
start = 10000000; + final int windowSize = 200; + final int end = start + windowSize; + + final Map edgeExcludesByAssembler = new EnumMap<>(Assembler.class); + edgeExcludesByAssembler.put(Assembler.DEBRUIJN_ASSEMBLER, 26); + edgeExcludesByAssembler.put(Assembler.READ_THREADING_ASSEMBLER, 25); // TODO -- decrease to zero when the edge calling problem is fixed + + final String ref = new String(seq.getSubsequenceAt(contig, start, end).getBases()); + final GenomeLoc refLoc = genomeLocParser.createGenomeLoc(contig, start, end); + + for ( final Assembler assembler : Assembler.values() ) { + final int excludeVariantsWithXbp = edgeExcludesByAssembler.get(assembler); + for ( int snpPos = 0; snpPos < windowSize; snpPos++) { + if ( snpPos > excludeVariantsWithXbp && (windowSize - snpPos) >= excludeVariantsWithXbp ) { + final byte[] altBases = ref.getBytes(); + altBases[snpPos] = altBases[snpPos] == 'A' ? (byte)'C' : (byte)'A'; + final String alt = new String(altBases); + tests.add(new Object[]{"SNP at " + snpPos, assembler, refLoc, ref, alt}); + } + } + } + + return tests.toArray(new Object[][]{}); + } + + @Test(dataProvider = "SimpleAssemblyTestData") + public void testSimpleAssembly(final String name, final Assembler assembler, final GenomeLoc loc, final String ref, final String alt) { + final byte[] refBases = ref.getBytes(); + final byte[] altBases = alt.getBytes(); + + final List reads = new LinkedList<>(); + for ( int i = 0; i < 20; i++ ) { + final byte[] bases = altBases.clone(); + final byte[] quals = Utils.dupBytes((byte) 30, altBases.length); + final String cigar = altBases.length + "M"; + final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, loc.getContig(), loc.getContigIndex(), loc.getStart(), bases, quals, cigar); + reads.add(read); + } + + final Haplotype refHaplotype = new Haplotype(refBases, true); + final Haplotype altHaplotype = new Haplotype(altBases, false); + final List haplotypes = assemble(assembler, refBases, loc, reads); + 
Assert.assertTrue(haplotypes.size() > 0, "Failed to find ref haplotype"); + Assert.assertEquals(haplotypes.get(0), refHaplotype); + + Assert.assertEquals(haplotypes.size(), 2, "Failed to find single alt haplotype"); + Assert.assertEquals(haplotypes.get(1), altHaplotype); + } +} diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/ReadErrorCorrectorUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/ReadErrorCorrectorUnitTest.java new file mode 100644 index 000000000..e201b24fc --- /dev/null +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/ReadErrorCorrectorUnitTest.java @@ -0,0 +1,190 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). +* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. 
Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. 
LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. 
+* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. 
Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. 
+*/ + +package org.broadinstitute.sting.gatk.walkers.haplotypecaller; + +import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; +import org.testng.Assert; +import org.testng.annotations.Test; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +public class ReadErrorCorrectorUnitTest { + private static final boolean debug = true; + final String refChunk = "GCATAAACATGGCTCACTGC"; + final String refChunkHard = "AGCCTTGAACTCCTGGGCTCAAGTGATCCTCCTGCCTCAGTTTCCCATGTAGCTGGGACCACAGGTGGGGGCTCCACCCCTGGCTGATTTTTTTTTTTTTTTTTTTTTGAGATAGGGT"; + + @Test + public void TestBasicCorrectionSet() { + + final byte[] trueBases = refChunk.getBytes(); + final int numCorrections = 50; + final ReadErrorCorrector.CorrectionSet correctionSet = new ReadErrorCorrector.CorrectionSet(trueBases.length); + + int offset = 2; + for (int k=0; k < numCorrections; k++) { + // introduce one correction at a random offset in array. To make testing easier, we will replicate corrrection + final byte base = trueBases[offset]; + correctionSet.add(offset, base); + // skip to some other offset + offset += 7; + if (offset >= trueBases.length) + offset -= trueBases.length; + } + + for (int k=0; k < trueBases.length; k++) { + final byte corr = correctionSet.getConsensusCorrection(k); + Assert.assertEquals(corr, trueBases[k]); + } + } + + @Test + public void TestExtendedCorrectionSet() { + + final byte[] trueBases = refChunk.getBytes(); + final int numCorrections = 50; + final ReadErrorCorrector.CorrectionSet correctionSet = new ReadErrorCorrector.CorrectionSet(trueBases.length); + + for (int offset=0; offset < trueBases.length; offset++) { + // insert k corrections at offset k and make sure we get exactly k bases back + for (int k=0; k < offset; k++) + correctionSet.add(offset,trueBases[offset]); + + } + + for (int offset=0; offset < trueBases.length; offset++) { + 
Assert.assertEquals(correctionSet.get(offset).size(),offset); + } + } + + @Test + public void TestAddReadsToKmers() { + final int NUM_GOOD_READS = 500; + + final String bases = "AAAAAAAAAAAAAAA"; + final int READ_LENGTH = bases.length(); + final int kmerLengthForReadErrorCorrection = READ_LENGTH; + final List finalizedReadList = new ArrayList(NUM_GOOD_READS); + int offset = 0; + final byte[] quals = new byte[READ_LENGTH]; + + Arrays.fill(quals,(byte)30); + + for (int k=0; k < NUM_GOOD_READS; k++) { + final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(bases.getBytes(), quals,READ_LENGTH+"M"); + finalizedReadList.add(read); + } + + ReadErrorCorrector readErrorCorrector = new ReadErrorCorrector(kmerLengthForReadErrorCorrection,(byte)6,10, debug,refChunkHard.getBytes()); + readErrorCorrector.addReadsToKmers(finalizedReadList); + + // special trivial case: kmer length is equal to read length. + // K-mer counter should hold then exactly one kmer + Assert.assertEquals(readErrorCorrector.countsByKMer.getCountedKmers().size(), 1); + for (final KMerCounter.CountedKmer kmer : readErrorCorrector.countsByKMer.getCountedKmers()) { + Assert.assertTrue(Arrays.equals( kmer.getKmer().bases(),bases.getBytes())); + Assert.assertEquals(kmer.getCount(),NUM_GOOD_READS); + } + + // special case 2: kmers are all the same but length < read length. 
+ // Each kmer is added then readLength-kmerLength+1 times + final int KMER_LENGTH = 10; + readErrorCorrector = new ReadErrorCorrector(KMER_LENGTH,(byte)6,10, debug,refChunkHard.getBytes()); + readErrorCorrector.addReadsToKmers(finalizedReadList); + Assert.assertEquals(readErrorCorrector.countsByKMer.getCountedKmers().size(), 1); + for (final KMerCounter.CountedKmer kmer : readErrorCorrector.countsByKMer.getCountedKmers()) { + Assert.assertEquals(kmer.getCount(),NUM_GOOD_READS*(READ_LENGTH-KMER_LENGTH+1)); + } + + } + @Test + public void TestBasicErrorCorrection() { + final int NUM_GOOD_READS = 500; + final int NUM_BAD_READS = 10; + final int READ_LENGTH = 15; + final int kmerLengthForReadErrorCorrection = 10; + final List finalizedReadList = new ArrayList(NUM_GOOD_READS); + int offset = 0; + final byte[] quals = new byte[READ_LENGTH]; + + Arrays.fill(quals,(byte)30); + + for (int k=0; k < NUM_GOOD_READS; k++) { + final byte[] bases = Arrays.copyOfRange(refChunk.getBytes(),offset,offset+READ_LENGTH); + final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(bases, quals,READ_LENGTH+"M"); + finalizedReadList.add(read); + offset++; + if (offset >= refChunk.length()-READ_LENGTH) + offset = 0; + } + offset = 2; + // coverage profile is now perfectly triangular with "good" bases. Inject now bad bases with errors in them. 
+ for (int k=0; k < NUM_BAD_READS; k++) { + final byte[] bases = finalizedReadList.get(k).getReadBases().clone(); + bases[offset] = 'N'; + final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(bases, quals, READ_LENGTH + "M"); + finalizedReadList.add(read); + offset += 7; + if (offset >= READ_LENGTH) + offset = 4; // just some randomly circulating offset for error position + } + + // now correct all reads + final ReadErrorCorrector readErrorCorrector = new ReadErrorCorrector(kmerLengthForReadErrorCorrection,(byte)6,10, debug,refChunkHard.getBytes()); + readErrorCorrector.addReadsToKmers(finalizedReadList); + readErrorCorrector.correctReads(finalizedReadList); + + // check that corrected reads have exactly same content as original reads + for (int k=0; k < NUM_BAD_READS; k++) { + final byte[] badBases = finalizedReadList.get(k).getReadBases(); + final byte[] originalBases = finalizedReadList.get(k).getReadBases(); + Assert.assertTrue(Arrays.equals(badBases,originalBases)); + } + } +} diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseEdgeUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseEdgeUnitTest.java index 7df6ee6c8..ea1d120b6 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseEdgeUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseEdgeUnitTest.java @@ -83,7 +83,10 @@ public class BaseEdgeUnitTest extends BaseTest { e.setMultiplicity(mult + 1); Assert.assertEquals(e.getMultiplicity(), mult + 1); - final BaseEdge copy = new BaseEdge(e); + e.incMultiplicity(2); + Assert.assertEquals(e.getMultiplicity(), mult + 3); + + final BaseEdge copy = e.copy(); Assert.assertEquals(copy.isRef(), e.isRef()); Assert.assertEquals(copy.getMultiplicity(), e.getMultiplicity()); } diff --git 
a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseGraphUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseGraphUnitTest.java index c829488ba..f9cbc6c73 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseGraphUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseGraphUnitTest.java @@ -49,8 +49,8 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs; import org.broadinstitute.sting.BaseTest; import org.testng.Assert; import org.testng.annotations.BeforeMethod; +import org.testng.annotations.DataProvider; import org.testng.annotations.Test; -import scala.actors.threadpool.Arrays; import java.io.File; import java.util.*; @@ -312,4 +312,19 @@ public class BaseGraphUnitTest extends BaseTest { Assert.assertTrue(BaseGraph.graphEquals(graph, expectedGraph)); } + + @Test(enabled = true) + public void testGetBases() { + + final int kmerSize = 4; + final String testString = "AATGGGGGCAATACTA"; + + final List vertexes = new ArrayList<>(); + for ( int i = 0; i <= testString.length() - kmerSize; i++ ) { + vertexes.add(new DeBruijnVertex(testString.substring(i, i + kmerSize))); + } + + final String result = new String(new DeBruijnGraph().getBasesForPath(vertexes)); + Assert.assertEquals(result, testString.substring(kmerSize - 1)); + } } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/CommonSuffixMergerUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/CommonSuffixMergerUnitTest.java index 8682ae5e4..e1398e119 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/CommonSuffixMergerUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/CommonSuffixMergerUnitTest.java @@ -137,12 +137,12 @@ public class 
CommonSuffixMergerUnitTest extends BaseTest { public static void assertSameHaplotypes(final String name, final SeqGraph actual, final SeqGraph original) { try { final Set haplotypes = new HashSet(); - final List> originalPaths = new KBestPaths().getKBestPaths(original); - for ( final Path path : originalPaths ) + final List> originalPaths = new KBestPaths().getKBestPaths(original); + for ( final Path path : originalPaths ) haplotypes.add(new String(path.getBases())); - final List> splitPaths = new KBestPaths().getKBestPaths(actual); - for ( final Path path : splitPaths ) { + final List> splitPaths = new KBestPaths().getKBestPaths(actual); + for ( final Path path : splitPaths ) { final String h = new String(path.getBases()); Assert.assertTrue(haplotypes.contains(h), "Failed to find haplotype " + h); } @@ -166,4 +166,20 @@ public class CommonSuffixMergerUnitTest extends BaseTest { splitter.merge(data.graph, data.v); assertSameHaplotypes(String.format("suffixMerge.%s.%d", data.commonSuffix, data.graph.vertexSet().size()), data.graph, original); } + + @Test + public void testDoesntMergeSourceNodes() { + final SeqGraph g = new SeqGraph(); + final SeqVertex v1 = new SeqVertex("A"); + final SeqVertex v2 = new SeqVertex("A"); + final SeqVertex v3 = new SeqVertex("A"); + final SeqVertex top = new SeqVertex("T"); + final SeqVertex b = new SeqVertex("C"); + g.addVertices(top, v1, v2, v3, top, b); + g.addEdges(top, v1, b); + g.addEdges(v2, b); // v2 doesn't have previous node, cannot be merged + g.addEdges(top, v3, b); + final SharedSequenceMerger merger = new SharedSequenceMerger(); + Assert.assertFalse(merger.merge(g, b), "Shouldn't be able to merge shared vertices, when one is a source"); + } } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/CommonSuffixSplitterUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/CommonSuffixSplitterUnitTest.java index 1ed20e5f4..9703d76cb 100644 --- 
a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/CommonSuffixSplitterUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/CommonSuffixSplitterUnitTest.java @@ -154,16 +154,16 @@ public class CommonSuffixSplitterUnitTest extends BaseTest { original.addEdge(v3, v4, new BaseEdge(false, 34)); original.addEdge(v4, v2, new BaseEdge(false, 42)); - original.printGraph(new File("testSplitInfiniteCycleFailure.dot"), 0); +// original.printGraph(new File("testSplitInfiniteCycleFailure.dot"), 0); final SeqGraph graph = (SeqGraph)original.clone(); final boolean success = new CommonSuffixSplitter().split(graph, v2); Assert.assertTrue(success); for ( final SeqVertex v : graph.vertexSet() ) { - graph.printGraph(new File("testSplitInfiniteCycleFailure.first_split.dot"), 0); +// graph.printGraph(new File("testSplitInfiniteCycleFailure.first_split.dot"), 0); final boolean success2 = new CommonSuffixSplitter().split((SeqGraph)graph.clone(), v); - if ( success2 ) graph.printGraph(new File("testSplitInfiniteCycleFailure.fail.dot"), 0); +// if ( success2 ) graph.printGraph(new File("testSplitInfiniteCycleFailure.fail.dot"), 0); Assert.assertFalse(success2, "Shouldn't be able to split any vertices but CommonSuffixSplitter says it could for " + v); } } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/GraphUtilsUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/GraphUtilsUnitTest.java new file mode 100644 index 000000000..01a6b5dbb --- /dev/null +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/GraphUtilsUnitTest.java @@ -0,0 +1,120 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. 
with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). +* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. 
LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. 
INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. +* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. 
LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. 
This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. +*/ + +package org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs; + +import org.broadinstitute.sting.BaseTest; +import org.broadinstitute.sting.utils.collections.PrimitivePair; +import org.testng.Assert; +import org.testng.annotations.DataProvider; +import org.testng.annotations.Test; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +public class GraphUtilsUnitTest extends BaseTest { + @DataProvider(name = "findLongestUniqueMatchData") + public Object[][] makefindLongestUniqueMatchData() { + List tests = new ArrayList(); + + { // test all edge conditions + final String ref = "ACGT"; + for ( int start = 0; start < ref.length(); start++ ) { + for ( int end = start + 1; end <= ref.length(); end++ ) { + final String kmer = ref.substring(start, end); + tests.add(new Object[]{ref, kmer, end - 1, end - start}); + tests.add(new Object[]{ref, "N" + kmer, end - 1, end - start}); + tests.add(new Object[]{ref, "NN" + kmer, end - 1, end - start}); + tests.add(new Object[]{ref, kmer + "N", -1, 0}); + tests.add(new Object[]{ref, kmer + "NN", -1, 0}); + } + } + } + + { // multiple matches + final String ref = "AACCGGTT"; + for ( final String alt : Arrays.asList("A", "C", "G", "T") ) + tests.add(new Object[]{ref, alt, -1, 0}); + tests.add(new 
Object[]{ref, "AA", 1, 2}); + tests.add(new Object[]{ref, "CC", 3, 2}); + tests.add(new Object[]{ref, "GG", 5, 2}); + tests.add(new Object[]{ref, "TT", 7, 2}); + } + + { // complex matches that have unique substrings of lots of parts of kmer in the ref + final String ref = "ACGTACGTACGT"; + tests.add(new Object[]{ref, "ACGT", -1, 0}); + tests.add(new Object[]{ref, "TACGT", -1, 0}); + tests.add(new Object[]{ref, "GTACGT", -1, 0}); + tests.add(new Object[]{ref, "CGTACGT", -1, 0}); + tests.add(new Object[]{ref, "ACGTACGT", -1, 0}); + tests.add(new Object[]{ref, "TACGTACGT", 11, 9}); + tests.add(new Object[]{ref, "NTACGTACGT", 11, 9}); + tests.add(new Object[]{ref, "GTACGTACGT", 11, 10}); + tests.add(new Object[]{ref, "NGTACGTACGT", 11, 10}); + tests.add(new Object[]{ref, "CGTACGTACGT", 11, 11}); + } + + return tests.toArray(new Object[][]{}); + } + + /** + * Example testng test using MyDataProvider + */ + @Test(dataProvider = "findLongestUniqueMatchData") + public void testfindLongestUniqueMatch(final String seq, final String kmer, final int start, final int length) { + // adaptor this code to do whatever testing you want given the arguments start and size + final PrimitivePair.Int actual = GraphUtils.findLongestUniqueSuffixMatch(seq.getBytes(), kmer.getBytes()); + if ( start == -1 ) + Assert.assertNull(actual); + else { + Assert.assertNotNull(actual); + Assert.assertEquals(actual.first, start); + Assert.assertEquals(actual.second, length); + } + } +} diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/KBestPathsUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/KBestPathsUnitTest.java index d1bae74b2..d6709672a 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/KBestPathsUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/KBestPathsUnitTest.java @@ -114,7 +114,7 @@ public class 
KBestPathsUnitTest extends BaseTest { if ( addCycle ) graph.addEdge(middleBottom, middleBottom); // enumerate all possible paths - final List> paths = new KBestPaths(allowCycles).getKBestPaths(graph, starts, ends); + final List> paths = new KBestPaths(allowCycles).getKBestPaths(graph, starts, ends); final int expectedNumOfPaths = nStartNodes * nBranchesPerBubble * (addCycle && allowCycles ? 2 : 1) * nEndNodes; Assert.assertEquals(paths.size(), expectedNumOfPaths, "Didn't find the expected number of paths"); @@ -127,7 +127,7 @@ public class KBestPathsUnitTest extends BaseTest { // get the best path, and make sure it's the same as our optimal path overall final Path best = paths.get(0); - final List> justOne = new KBestPaths(allowCycles).getKBestPaths(graph, 1, starts, ends); + final List> justOne = new KBestPaths(allowCycles).getKBestPaths(graph, 1, starts, ends); Assert.assertEquals(justOne.size(), 1); Assert.assertTrue(justOne.get(0).pathsAreTheSame(best), "Best path from complete enumerate " + best + " not the same as from k = 1 search " + justOne.get(0)); } @@ -147,7 +147,7 @@ public class KBestPathsUnitTest extends BaseTest { graph.addEdges(v4, v2); // enumerate all possible paths - final List> paths = new KBestPaths(false).getKBestPaths(graph, v1, v5); + final List> paths = new KBestPaths(false).getKBestPaths(graph, v1, v5); Assert.assertEquals(paths.size(), 1, "Didn't find the expected number of paths"); } @@ -163,7 +163,7 @@ public class KBestPathsUnitTest extends BaseTest { graph.addEdges(v1, v2, v3, v3); // enumerate all possible paths - final List> paths = new KBestPaths(false).getKBestPaths(graph, v1, v3); + final List> paths = new KBestPaths(false).getKBestPaths(graph, v1, v3); Assert.assertEquals(paths.size(), 1, "Didn't find the expected number of paths"); } @@ -201,9 +201,9 @@ public class KBestPathsUnitTest extends BaseTest { graph.addEdge(v2Alt, v3, new BaseEdge(false, 5)); // Construct the test path - Path path = new Path(v, graph); - path = new 
Path(path, graph.getEdge(v, v2Alt)); - path = new Path(path, graph.getEdge(v2Alt, v3)); + Path path = new Path(v, graph); + path = new Path(path, graph.getEdge(v, v2Alt)); + path = new Path(path, graph.getEdge(v2Alt, v3)); // Construct the actual cigar string implied by the test path Cigar expectedCigar = new Cigar(); @@ -219,7 +219,8 @@ public class KBestPathsUnitTest extends BaseTest { } expectedCigar.add(new CigarElement(postRef.length(), CigarOperator.M)); - Assert.assertEquals(path.calculateCigar().toString(), AlignmentUtils.consolidateCigar(expectedCigar).toString(), "Cigar string mismatch"); + final String ref = preRef + v2Ref.getSequenceString() + postRef; + Assert.assertEquals(path.calculateCigar(ref.getBytes()).toString(), AlignmentUtils.consolidateCigar(expectedCigar).toString(), "Cigar string mismatch"); } @DataProvider(name = "GetBasesData") @@ -251,9 +252,9 @@ public class KBestPathsUnitTest extends BaseTest { } // enumerate all possible paths - final List> paths = new KBestPaths().getKBestPaths(graph); + final List> paths = new KBestPaths().getKBestPaths(graph); Assert.assertEquals(paths.size(), 1); - final Path path = paths.get(0); + final Path path = paths.get(0); Assert.assertEquals(new String(path.getBases()), Utils.join("", frags), "Path doesn't have the expected sequence"); } @@ -296,6 +297,8 @@ public class KBestPathsUnitTest extends BaseTest { SeqVertex v7 = new SeqVertex(postRef); SeqVertex postV = new SeqVertex(postAltOption); + final String ref = preRef + v2Ref.getSequenceString() + midRef1 + v4Ref.getSequenceString() + midRef2 + v6Ref.getSequenceString() + postRef; + graph.addVertex(preV); graph.addVertex(v); graph.addVertex(v2Ref); @@ -324,18 +327,18 @@ public class KBestPathsUnitTest extends BaseTest { graph.addEdge(v7, postV, new BaseEdge(false, 1)); // Construct the test path - Path path = new Path( (offRefBeginning ? preV : v), graph); + Path path = new Path( (offRefBeginning ? 
preV : v), graph); if( offRefBeginning ) { - path = new Path(path, graph.getEdge(preV, v)); + path = new Path(path, graph.getEdge(preV, v)); } - path = new Path(path, graph.getEdge(v, v2Alt)); - path = new Path(path, graph.getEdge(v2Alt, v3)); - path = new Path(path, graph.getEdge(v3, v4Ref)); - path = new Path(path, graph.getEdge(v4Ref, v5)); - path = new Path(path, graph.getEdge(v5, v6Alt)); - path = new Path(path, graph.getEdge(v6Alt, v7)); + path = new Path(path, graph.getEdge(v, v2Alt)); + path = new Path(path, graph.getEdge(v2Alt, v3)); + path = new Path(path, graph.getEdge(v3, v4Ref)); + path = new Path(path, graph.getEdge(v4Ref, v5)); + path = new Path(path, graph.getEdge(v5, v6Alt)); + path = new Path(path, graph.getEdge(v6Alt, v7)); if( offRefEnding ) { - path = new Path(path, graph.getEdge(v7,postV)); + path = new Path(path, graph.getEdge(v7,postV)); } // Construct the actual cigar string implied by the test path @@ -373,7 +376,9 @@ public class KBestPathsUnitTest extends BaseTest { expectedCigar.add(new CigarElement(postAltOption.length(), CigarOperator.I)); } - Assert.assertEquals(path.calculateCigar().toString(), AlignmentUtils.consolidateCigar(expectedCigar).toString(), "Cigar string mismatch"); + Assert.assertEquals(path.calculateCigar(ref.getBytes()).toString(), + AlignmentUtils.consolidateCigar(expectedCigar).toString(), + "Cigar string mismatch: ref = " + ref + " alt " + new String(path.getBases())); } @Test(enabled = !DEBUG) @@ -389,43 +394,46 @@ public class KBestPathsUnitTest extends BaseTest { graph.addEdges(new BaseEdge(true, 1), top, ref, bot); graph.addEdges(new BaseEdge(false, 1), top, alt, bot); - final KBestPaths pathFinder = new KBestPaths(); - final List> paths = pathFinder.getKBestPaths(graph, top, bot); + final KBestPaths pathFinder = new KBestPaths(); + final List> paths = pathFinder.getKBestPaths(graph, top, bot); Assert.assertEquals(paths.size(), 2); - final Path refPath = paths.get(0); - final Path altPath = paths.get(1); + 
final Path refPath = paths.get(0); + final Path altPath = paths.get(1); - Assert.assertEquals(refPath.calculateCigar().toString(), "10M"); - Assert.assertEquals(altPath.calculateCigar().toString(), "1M3I5M3D1M"); + final String refString = top.getSequenceString() + ref.getSequenceString() + bot.getSequenceString(); + Assert.assertEquals(refPath.calculateCigar(refString.getBytes()).toString(), "10M"); + Assert.assertEquals(altPath.calculateCigar(refString.getBytes()).toString(), "1M3I5M3D1M"); } @Test(enabled = !DEBUG) public void testHardSWPath() { // Construct the assembly graph SeqGraph graph = new SeqGraph(); - final SeqVertex top = new SeqVertex( "NNN"); - final SeqVertex bot = new SeqVertex( "NNN"); + final SeqVertex top = new SeqVertex( "NNN" ); + final SeqVertex bot = new SeqVertex( "NNN" ); final SeqVertex alt = new SeqVertex( "ACAGAGAGAGAGAGAGAGAGAGAGAGAGAGAGAGAGAGAGAGAGAGAGAGA" ); final SeqVertex ref = new SeqVertex( "TGTGTGTGTGTGTGACAGAGAGAGAGAGAGAGAGAGAGAGAGAGA" ); graph.addVertices(top, bot, alt, ref); graph.addEdges(new BaseEdge(true, 1), top, ref, bot); graph.addEdges(new BaseEdge(false, 1), top, alt, bot); - final KBestPaths pathFinder = new KBestPaths(); - final List> paths = pathFinder.getKBestPaths(graph, top, bot); + final KBestPaths pathFinder = new KBestPaths(); + final List> paths = pathFinder.getKBestPaths(graph, top, bot); Assert.assertEquals(paths.size(), 2); - final Path refPath = paths.get(0); - final Path altPath = paths.get(1); + final Path refPath = paths.get(0); + final Path altPath = paths.get(1); - logger.warn("RefPath : " + refPath + " cigar " + refPath.calculateCigar()); - logger.warn("AltPath : " + altPath + " cigar " + altPath.calculateCigar()); + final String refString = top.getSequenceString() + ref.getSequenceString() + bot.getSequenceString(); - Assert.assertEquals(refPath.calculateCigar().toString(), "51M"); - Assert.assertEquals(altPath.calculateCigar().toString(), "3M6I48M"); + logger.warn("RefPath : " + refPath + " 
cigar " + refPath.calculateCigar(refString.getBytes())); + logger.warn("AltPath : " + altPath + " cigar " + altPath.calculateCigar(refString.getBytes())); + + Assert.assertEquals(refPath.calculateCigar(refString.getBytes()).toString(), "51M"); + Assert.assertEquals(altPath.calculateCigar(refString.getBytes()).toString(), "3M6I48M"); } // ----------------------------------------------------------------- @@ -466,30 +474,87 @@ public class KBestPathsUnitTest extends BaseTest { // Construct the assembly graph SeqGraph graph = new SeqGraph(); - SeqVertex top = new SeqVertex(""); + final int padSize = 0; + SeqVertex top = new SeqVertex(Utils.dupString("N", padSize)); SeqVertex ref = new SeqVertex(prefix + refMid + end); SeqVertex alt = new SeqVertex(prefix + altMid + end); - SeqVertex bot = new SeqVertex(""); + SeqVertex bot = new SeqVertex(Utils.dupString("N", padSize)); graph.addVertices(top, ref, alt, bot); graph.addEdges(new BaseEdge(true, 1), top, ref, bot); graph.addEdges(new BaseEdge(false, 1), top, alt, bot); // Construct the test path - Path path = Path.makePath(Arrays.asList(top, alt, bot), graph); + Path path = Path.makePath(Arrays.asList(top, alt, bot), graph); Cigar expected = new Cigar(); + expected.add(new CigarElement(padSize, CigarOperator.M)); if ( ! prefix.equals("") ) expected.add(new CigarElement(prefix.length(), CigarOperator.M)); for ( final CigarElement elt : TextCigarCodec.getSingleton().decode(midCigar).getCigarElements() ) expected.add(elt); if ( ! end.equals("") ) expected.add(new CigarElement(end.length(), CigarOperator.M)); + expected.add(new CigarElement(padSize, CigarOperator.M)); expected = AlignmentUtils.consolidateCigar(expected); - final Cigar pathCigar = path.calculateCigar(); + final String refString = top.getSequenceString() + ref.getSequenceString() + bot.getSequenceString(); + final Cigar pathCigar = path.calculateCigar(refString.getBytes()); logger.warn("diffs: " + ref + " vs. 
" + alt + " cigar " + midCigar); logger.warn("Path " + path + " with cigar " + pathCigar); logger.warn("Expected cigar " + expected); - Assert.assertEquals(pathCigar, expected, "Cigar mismatch"); + Assert.assertEquals(pathCigar, expected, "Cigar mismatch: ref = " + refString + " vs alt = " + new String(path.getBases())); + } + + @Test(enabled = !DEBUG) + public void testLeftAlignCigarSequentially() { + String preRefString = "GATCGATCGATC"; + String postRefString = "TTT"; + String refString = "ATCGAGGAGAGCGCCCCG"; + String indelString1 = "X"; + String indelString2 = "YZ"; + int refIndel1 = 10; + int refIndel2 = 12; + + for ( final int indelSize1 : Arrays.asList(1, 2, 3, 4) ) { + for ( final int indelOp1 : Arrays.asList(1, -1) ) { + for ( final int indelSize2 : Arrays.asList(1, 2, 3, 4) ) { + for ( final int indelOp2 : Arrays.asList(1, -1) ) { + + Cigar expectedCigar = new Cigar(); + expectedCigar.add(new CigarElement(refString.length(), CigarOperator.M)); + expectedCigar.add(new CigarElement(indelSize1, (indelOp1 > 0 ? CigarOperator.I : CigarOperator.D))); + expectedCigar.add(new CigarElement((indelOp1 < 0 ? refIndel1 - indelSize1 : refIndel1), CigarOperator.M)); + expectedCigar.add(new CigarElement(refString.length(), CigarOperator.M)); + expectedCigar.add(new CigarElement(indelSize2 * 2, (indelOp2 > 0 ? CigarOperator.I : CigarOperator.D))); + expectedCigar.add(new CigarElement((indelOp2 < 0 ? (refIndel2 - indelSize2) * 2 : refIndel2 * 2), CigarOperator.M)); + expectedCigar.add(new CigarElement(refString.length(), CigarOperator.M)); + + Cigar givenCigar = new Cigar(); + givenCigar.add(new CigarElement(refString.length() + refIndel1/2, CigarOperator.M)); + givenCigar.add(new CigarElement(indelSize1, (indelOp1 > 0 ? CigarOperator.I : CigarOperator.D))); + givenCigar.add(new CigarElement((indelOp1 < 0 ? (refIndel1/2 - indelSize1) : refIndel1/2) + refString.length() + refIndel2/2 * 2, CigarOperator.M)); + givenCigar.add(new CigarElement(indelSize2 * 2, (indelOp2 > 0 ? 
CigarOperator.I : CigarOperator.D))); + givenCigar.add(new CigarElement((indelOp2 < 0 ? (refIndel2/2 - indelSize2) * 2 : refIndel2/2 * 2) + refString.length(), CigarOperator.M)); + + String theRef = preRefString + refString + Utils.dupString(indelString1, refIndel1) + refString + Utils.dupString(indelString2, refIndel2) + refString + postRefString; + String theRead = refString + Utils.dupString(indelString1, refIndel1 + indelOp1 * indelSize1) + refString + Utils.dupString(indelString2, refIndel2 + indelOp2 * indelSize2) + refString; + + Cigar calculatedCigar = Path.leftAlignCigarSequentially(AlignmentUtils.consolidateCigar(givenCigar), theRef.getBytes(), theRead.getBytes(), preRefString.length(), 0); + Assert.assertEquals(AlignmentUtils.consolidateCigar(calculatedCigar).toString(), AlignmentUtils.consolidateCigar(expectedCigar).toString(), "Cigar strings do not match!"); + } + } + } + } + } + + @Test(enabled = true) + public void testLeftAlignCigarSequentiallyAdjacentID() { + final String ref = "GTCTCTCTCTCTCTCTCTATATATATATATATATTT"; + final String hap = "GTCTCTCTCTCTCTCTCTCTCTATATATATATATTT"; + final Cigar originalCigar = TextCigarCodec.getSingleton().decode("18M4I12M4D2M"); + + final Cigar result = Path.leftAlignCigarSequentially(originalCigar, ref.getBytes(), hap.getBytes(), 0, 0); + logger.warn("Result is " + result); + Assert.assertEquals(originalCigar.getReferenceLength(), result.getReferenceLength(), "Reference lengths are different"); } } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/LowWeightChainPrunerUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/LowWeightChainPrunerUnitTest.java new file mode 100644 index 000000000..06d81499c --- /dev/null +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/LowWeightChainPrunerUnitTest.java @@ -0,0 +1,163 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* 
BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). +* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. 
LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. 
INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. +* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. 
LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. 
This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. +*/ + +package org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs; + +import org.broadinstitute.sting.BaseTest; +import org.testng.Assert; +import org.testng.annotations.DataProvider; +import org.testng.annotations.Test; + +import java.io.File; +import java.util.*; + +public class LowWeightChainPrunerUnitTest extends BaseTest { + @DataProvider(name = "pruneChainsData") + public Object[][] makePruneChainsData() { + List tests = new ArrayList<>(); + + final SeqVertex v1 = new SeqVertex("A"); + final SeqVertex v2 = new SeqVertex("C"); + final SeqVertex v3 = new SeqVertex("G"); + final SeqVertex v4 = new SeqVertex("T"); + final SeqVertex v5 = new SeqVertex("AA"); + final SeqVertex v6 = new SeqVertex("CC"); + + for ( final int edgeWeight : Arrays.asList(1, 2, 3) ) { + for ( final int pruneFactor : Arrays.asList(1, 2, 3, 4) ) { + for ( final boolean isRef : Arrays.asList(true, false)) { + { // just an isolated chain + final int nExpected = edgeWeight < pruneFactor && ! isRef ? 3 : 0; + SeqGraph graph = new SeqGraph(); + graph.addVertices(v1, v2, v3); + graph.addEdges(new BaseEdge(isRef, edgeWeight), v1, v2, v3); + tests.add(new Object[]{"combinatorial", graph, pruneFactor, nExpected > 0 ? 
Collections.emptySet() : graph.vertexSet()}); + } + } + } + } + + { // connects to ref chain + SeqGraph graph = new SeqGraph(); + graph.addVertices(v1, v2, v3); + graph.addVertices(v4, v5); + graph.addEdges(new BaseEdge(true, 1), v4, v5); + graph.addEdges(new BaseEdge(false, 1), v4, v1, v2, v3, v5); + tests.add(new Object[]{"bad internal branch", graph, 2, new HashSet<>(Arrays.asList(v4, v5))}); + } + + { // has bad cycle + SeqGraph graph = new SeqGraph(); + graph.addVertices(v1, v2, v3, v4); + graph.addEdges(new BaseEdge(false, 1), v4, v1, v2, v3, v1); + // note that we'll remove v4 because it's low weight + tests.add(new Object[]{"has bad cycle", graph, 2, Collections.emptySet()}); + } + + { // has good cycle + SeqGraph graph = new SeqGraph(); + graph.addVertices(v1, v2, v3, v4); + graph.addEdges(new BaseEdge(false, 3), v4, v1, v2, v3, v1); + // note that we'll remove v4 because it's low weight + tests.add(new Object[]{"has good cycle", graph, 2, graph.vertexSet()}); + } + + { // has branch + SeqGraph graph = new SeqGraph(); + graph.addVertices(v1, v2, v3, v4, v5, v6); + graph.addEdges(new BaseEdge(false, 1), v1, v2, v3, v4, v6); + graph.addEdges(new BaseEdge(false, 1), v1, v2, v3, v5, v6); + tests.add(new Object[]{"has two bad branches", graph, 2, Collections.emptySet()}); + } + + { // middle vertex above threshold => no one can be removed + SeqGraph graph = new SeqGraph(); + graph.addVertices(v1, v2, v3, v4, v5); + graph.addEdges(new BaseEdge(false, 1), v1, v2); + graph.addEdges(new BaseEdge(false, 3), v2, v3); + graph.addEdges(new BaseEdge(false, 1), v3, v4, v5); + tests.add(new Object[]{"middle vertex above factor", graph, 2, graph.vertexSet()}); + } + + { // the branching node has value > pruneFactor + SeqGraph graph = new SeqGraph(); + graph.addVertices(v1, v2, v3, v4, v5, v6); + graph.addEdges(new BaseEdge(false, 3), v1, v2); + graph.addEdges(new BaseEdge(false, 3), v2, v3); + graph.addEdges(new BaseEdge(false, 1), v3, v4, v6); + graph.addEdges(new 
BaseEdge(false, 3), v2, v5, v6); + tests.add(new Object[]{"branch node greater than pruneFactor", graph, 2, graph.vertexSet()}); + } + + { // A single isolated chain with weights all below pruning should be pruned + SeqGraph graph = new SeqGraph(); + graph.addVertices(v1, v2, v3, v4, v5); + graph.addEdges(new BaseEdge(false, 1), v1, v2, v3); + graph.addEdges(new BaseEdge(false, 5), v4, v5); + tests.add(new Object[]{"isolated chain", graph, 2, new LinkedHashSet<>(Arrays.asList(v4, v5))}); + } + + { // A chain with weights all below pruning should be pruned, even if it connects to another good chain + SeqGraph graph = new SeqGraph(); + graph.addVertices(v1, v2, v3, v4, v5, v6); + graph.addEdges(new BaseEdge(false, 1), v1, v2, v3, v5); + graph.addEdges(new BaseEdge(false, 5), v4, v5, v6); + tests.add(new Object[]{"bad chain branching into good one", graph, 2, new HashSet<>(Arrays.asList(v4, v5, v6))}); + } + + return tests.toArray(new Object[][]{}); + } + + @Test(dataProvider = "pruneChainsData", enabled = true) + public void testPruneChains(final String name, final SeqGraph graph, final int pruneFactor, final Set remainingVertices) { + final Set copy = new HashSet<>(remainingVertices); +// graph.printGraph(new File("in.dot"), 0); + final LowWeightChainPruner pruner = new LowWeightChainPruner<>(pruneFactor); + pruner.pruneLowWeightChains(graph); +// graph.printGraph(new File("out.dot"), 0); + Assert.assertEquals(graph.vertexSet(), copy); + } +} diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/MultiSampleEdgeUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/MultiSampleEdgeUnitTest.java new file mode 100644 index 000000000..fc40edc42 --- /dev/null +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/MultiSampleEdgeUnitTest.java @@ -0,0 +1,115 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - 
SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). +* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. 
LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. 
INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. +* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. 
LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. 
This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. +*/ + +package org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs; + +import org.apache.commons.lang.ArrayUtils; +import org.broadinstitute.sting.BaseTest; +import org.broadinstitute.sting.utils.MathUtils; +import org.broadinstitute.sting.utils.Utils; +import org.testng.Assert; +import org.testng.annotations.DataProvider; +import org.testng.annotations.Test; + +import java.util.*; + +public class MultiSampleEdgeUnitTest extends BaseTest { + + private class MultiplicityTestProvider { + final List countsPerSample; + final int numSamplesPruning; + public MultiplicityTestProvider(final List countsPerSample, final int numSamplesPruning) { + this.countsPerSample = countsPerSample; + this.numSamplesPruning = numSamplesPruning; + } + } + + @DataProvider(name = "MultiplicityData") + public Object[][] makeMultiplicityData() { + List tests = new ArrayList<>(); + + final List countsPerSample = Arrays.asList(0, 1, 2, 3, 4, 5); + for ( final int numSamplesPruning : Arrays.asList(1, 2, 3) ) { + for ( final int nSamples : Arrays.asList(1, 2, 3, 4, 5)) { + for ( final List perm : Utils.makePermutations(countsPerSample, nSamples, false) ) { + tests.add(new Object[]{new MultiplicityTestProvider(perm, numSamplesPruning)}); + } + } + } + + return tests.toArray(new Object[][]{}); + } + + /** + 
* Example testng test using MyDataProvider + */ + @Test(dataProvider = "MultiplicityData") + public void testMultiplicity(final MultiplicityTestProvider cfg) { + final MultiSampleEdge edge = new MultiSampleEdge(false, 0, cfg.numSamplesPruning); + Assert.assertEquals(edge.getMultiplicity(), 0); + Assert.assertEquals(edge.getPruningMultiplicity(), 0); + + int total = 0; + for ( int i = 0; i < cfg.countsPerSample.size(); i++ ) { + int countForSample = 0; + for ( int count = 0; count < cfg.countsPerSample.get(i); count++ ) { + edge.incMultiplicity(1); + total++; + countForSample++; + Assert.assertEquals(edge.getMultiplicity(), total); + Assert.assertEquals(edge.getCurrentSingleSampleMultiplicity(), countForSample); + } + edge.flushSingleSampleMultiplicity(); + } + + ArrayList counts = new ArrayList<>(cfg.countsPerSample); + counts.add(0); + Collections.sort(counts); + final int prune = counts.get(Math.max(counts.size() - cfg.numSamplesPruning, 0)); + Assert.assertEquals(edge.getMultiplicity(), total); + Assert.assertEquals(edge.getPruningMultiplicity(), prune); + } +} diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/PathUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/PathUnitTest.java new file mode 100644 index 000000000..ee07bea33 --- /dev/null +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/PathUnitTest.java @@ -0,0 +1,80 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). 
+* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. 
Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. 
INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. +* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. 
LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. 
This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. +*/ + +package org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs; + +import net.sf.samtools.Cigar; +import org.broadinstitute.sting.BaseTest; +import org.testng.Assert; +import org.testng.annotations.Test; + +public class PathUnitTest extends BaseTest { + @Test(enabled = true) + public void testAlignReallyLongDeletion() { + final String ref = "ATGGTGGCTCATACCTGTAATCCCAGCACTTTGGGAGGCCGAGGCGGGAACATCACCTGAGGCCAGGAGTTCAAAACCAGCCTGGCTAACATAGCAAAACCCCATCTCTAATGAAAATACAAAAATTAGCTGGGTGTGGTGGTGTCCGCCTGTAGTCCCAGCTACTCAGGAGACTAAGGCATGAGAATCACTTGAACCCAGGATGCAGAGGCTGTAGTGAGCCGAGATTGCACCACGGCTGCACTCCAGCCTGGGCAACAGAGCGAGACTCTGTCTCAAATAAAATAGCGTAACGTAACATAACATAACATAACATAACATAACATAACATAACATAACATAACATAACATAACACAACAACAAAATAAAATAACATAAATCATGTTGTTAGGAAAAAAATCAGTTATGCAGCTACATGCTATTTACAAGAGATATACCTTAAAATATAAGACACAGAGGCCGGGCGCGGTAGCTCATGCCTGTAATCCCAGCACTTTGGGAGGCTGAGGCAAGCGGATCATGAGGTCAGGAGATCGAGACCATCC"; + final String hap = "ATGGTGGCTCATACCTGTAATCCCAGCACTTTGGGAGGCTGAGGCAAGCGGATCATGAGGTCAGGAGATCGAGACCATCCT"; + + final SeqGraph graph = new SeqGraph(); + final SeqVertex v = new SeqVertex(hap); + graph.addVertex(v); + final Path path = new Path(v, graph); + final Cigar cigar = path.calculateCigar(ref.getBytes()); + Assert.assertNull(cigar, "Should have failed gracefully"); + } + + @Test(enabled = 
true) + public void testAlignReallyLongDeletion2() { + final String ref = "CGGCTAATTTTTGTATTTTTAGTAGAGACAGGGTTTCACCATGTTGGCCAGGCTGGTCTTGAACTCCTGACCTCAGGTGATCCACTCGCCTCGGTCTCCCAAAGTGTTGGGATTACAGGCATGAACCACTGCACCTGGCCTAGTGTTTGGGAAAACTATACTAGGAAAAGAATAGTTGCTTTAAGTCATTCTTTGATTATTCTGAGAATTGGCATATAGCTGCCATTATAACCTACTTTTGCTAAATATAATAATAATAATCATTATTTTTATTTTTTGAGACAGGGTCTTGTTTTGTCACCCCGGCTGGAGTGAAGTGGCGCAATCTCGGCTCACTGCAACCTCCACCTCCGGGTGCAAGCAATTCTCCTGCCTCAGCCTCTTGAGTAGCTAGGATTACAGGCACAAGCCATCATGCCCAGCTAATTTTTGTATTTTTAGTAGAGACAGGGTTTCACCATGTTGGTCAGGCTGGTCTTGAACTCCTGACCTCAGGT"; + final String hap = "CGGCTAATTTTTGTATTTTTAGTAGAGACAGGGTTTCACCATGTTGGTCAGGCTGGTCTTGAACTCCTGACCTCAGGT"; + + final SeqGraph graph = new SeqGraph(); + final SeqVertex v = new SeqVertex(hap); + graph.addVertex(v); + final Path path = new Path(v, graph); + final Cigar cigar = path.calculateCigar(ref.getBytes()); + Assert.assertEquals(cigar.toString(), "48M419D30M"); + } +} diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraphUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraphUnitTest.java index bd2e3cc2c..c72f426be 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraphUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraphUnitTest.java @@ -280,16 +280,15 @@ public class SeqGraphUnitTest extends BaseTest { all.addEdges(pre2, top, middle2, bottom, tail2); final SeqGraph expected = new SeqGraph(); + SeqVertex newPre1 = new SeqVertex(Utils.dupString("A", SeqGraph.MIN_COMMON_SEQUENCE_TO_MERGE_SOURCE_SINK_VERTICES) + "C"); + SeqVertex newPre2 = new SeqVertex(Utils.dupString("A", SeqGraph.MIN_COMMON_SEQUENCE_TO_MERGE_SOURCE_SINK_VERTICES) + "G"); + final SeqVertex newTop = new SeqVertex("TA"); final SeqVertex newMiddle1 = new SeqVertex("G"); final SeqVertex newMiddle2 = new SeqVertex("T"); final SeqVertex 
newBottom = new SeqVertex("C" + bottom.getSequenceString()); - final SeqVertex newTop = new SeqVertex(Utils.dupString("A", SeqGraph.MIN_COMMON_SEQUENCE_TO_MERGE_SOURCE_SINK_VERTICES)); - final SeqVertex newTopDown1 = new SeqVertex("G"); - final SeqVertex newTopDown2 = new SeqVertex("C"); - final SeqVertex newTopBottomMerged = new SeqVertex("TA"); - expected.addVertices(newTop, newTopDown1, newTopDown2, newTopBottomMerged, newMiddle1, newMiddle2, newBottom, tail1, tail2); - expected.addEdges(newTop, newTopDown1, newTopBottomMerged, newMiddle1, newBottom, tail1); - expected.addEdges(newTop, newTopDown2, newTopBottomMerged, newMiddle2, newBottom, tail2); + expected.addVertices(newPre1, newPre2, newTop, newMiddle1, newMiddle2, newBottom, tail1, tail2); + expected.addEdges(newPre1, newTop, newMiddle1, newBottom, tail1); + expected.addEdges(newPre2, newTop, newMiddle2, newBottom, tail2); tests.add(new Object[]{all.clone(), expected.clone()}); } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SharedVertexSequenceSplitterUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SharedVertexSequenceSplitterUnitTest.java index 2df783b19..5bc13f884 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SharedVertexSequenceSplitterUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SharedVertexSequenceSplitterUnitTest.java @@ -227,8 +227,8 @@ public class SharedVertexSequenceSplitterUnitTest extends BaseTest { } final Set haplotypes = new HashSet(); - final List> originalPaths = new KBestPaths().getKBestPaths((SeqGraph)graph.clone()); - for ( final Path path : originalPaths ) + final List> originalPaths = new KBestPaths().getKBestPaths((SeqGraph)graph.clone()); + for ( final Path path : originalPaths ) haplotypes.add(new String(path.getBases())); final SharedVertexSequenceSplitter splitter = new 
SharedVertexSequenceSplitter(graph, v); @@ -238,8 +238,8 @@ public class SharedVertexSequenceSplitterUnitTest extends BaseTest { splitter.updateGraph(top, bot); if ( PRINT_GRAPHS ) graph.printGraph(new File(Utils.join("_", strings) + ".updated.dot"), 0); - final List> splitPaths = new KBestPaths().getKBestPaths(graph); - for ( final Path path : splitPaths ) { + final List> splitPaths = new KBestPaths().getKBestPaths(graph); + for ( final Path path : splitPaths ) { final String h = new String(path.getBases()); Assert.assertTrue(haplotypes.contains(h), "Failed to find haplotype " + h); } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssemblerUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssemblerUnitTest.java new file mode 100644 index 000000000..8269b9c20 --- /dev/null +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssemblerUnitTest.java @@ -0,0 +1,214 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). +* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. 
+* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. 
LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as stated above for attribution purposes. +* +* 4. INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. 
NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. +* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. 
Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. 
+*/ + +package org.broadinstitute.sting.gatk.walkers.haplotypecaller.readthreading; + +import org.broadinstitute.sting.BaseTest; +import org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs.*; +import org.broadinstitute.sting.utils.Utils; +import org.broadinstitute.sting.utils.haplotype.Haplotype; +import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; +import org.testng.Assert; +import org.testng.annotations.Test; + +import java.io.File; +import java.util.*; + +public class ReadThreadingAssemblerUnitTest extends BaseTest { + private final static boolean DEBUG = false; + + private static class TestAssembler { + final ReadThreadingAssembler assembler; + + Haplotype refHaplotype; + final List reads = new LinkedList(); + + private TestAssembler(final int kmerSize) { + this.assembler = new ReadThreadingAssembler(100000, Arrays.asList(kmerSize)); + assembler.setJustReturnRawGraph(true); + assembler.setPruneFactor(0); + } + + public void addSequence(final byte[] bases, final boolean isRef) { + if ( isRef ) { + refHaplotype = new Haplotype(bases, true); + } else { + final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(bases, Utils.dupBytes((byte)30,bases.length), bases.length + "M"); + reads.add(read); + } + } + + public SeqGraph assemble() { + assembler.removePathsNotConnectedToRef = false; // needed to pass some of the tests + assembler.setRecoverDanglingTails(false); // needed to pass some of the tests + assembler.setDebugGraphTransformations(true); + final SeqGraph graph = assembler.assemble(reads, refHaplotype, Collections.emptyList()).get(0); + if ( DEBUG ) graph.printGraph(new File("test.dot"), 0); + return graph; + } + } + + private void assertLinearGraph(final TestAssembler assembler, final String seq) { + final SeqGraph graph = assembler.assemble(); + graph.simplifyGraph(); + Assert.assertEquals(graph.vertexSet().size(), 1); + 
Assert.assertEquals(graph.vertexSet().iterator().next().getSequenceString(), seq); + } + + private void assertSingleBubble(final TestAssembler assembler, final String one, final String two) { + final SeqGraph graph = assembler.assemble(); + graph.simplifyGraph(); + List> paths = new KBestPaths().getKBestPaths(graph); + Assert.assertEquals(paths.size(), 2); + final Set expected = new HashSet(Arrays.asList(one, two)); + for ( final Path path : paths ) { + final String seq = new String(path.getBases()); + Assert.assertTrue(expected.contains(seq)); + expected.remove(seq); + } + } + + @Test(enabled = ! DEBUG) + public void testRefCreation() { + final String ref = "ACGTAACCGGTT"; + final TestAssembler assembler = new TestAssembler(3); + assembler.addSequence(ref.getBytes(), true); + assertLinearGraph(assembler, ref); + } + + @Test(enabled = ! DEBUG) + public void testRefNonUniqueCreation() { + final String ref = "GAAAAT"; + final TestAssembler assembler = new TestAssembler(3); + assembler.addSequence(ref.getBytes(), true); + assertLinearGraph(assembler, ref); + } + + @Test(enabled = ! DEBUG) + public void testRefAltCreation() { + final TestAssembler assembler = new TestAssembler(3); + final String ref = "ACAACTGA"; + final String alt = "ACAGCTGA"; + assembler.addSequence(ref.getBytes(), true); + assembler.addSequence(alt.getBytes(), false); + assertSingleBubble(assembler, ref, alt); + } + + @Test(enabled = ! DEBUG) + public void testPartialReadsCreation() { + final TestAssembler assembler = new TestAssembler(3); + final String ref = "ACAACTGA"; + final String alt1 = "ACAGCT"; + final String alt2 = "GCTGA"; + assembler.addSequence(ref.getBytes(), true); + assembler.addSequence(alt1.getBytes(), false); + assembler.addSequence(alt2.getBytes(), false); + assertSingleBubble(assembler, ref, "ACAGCTGA"); + } + + @Test(enabled = ! 
DEBUG) + public void testStartInMiddle() { + final TestAssembler assembler = new TestAssembler(3); + final String ref = "CAAAATG"; + final String read = "AAATG"; + assembler.addSequence(ref.getBytes(), true); + assembler.addSequence(read.getBytes(), false); + assertLinearGraph(assembler, ref); + } + + @Test(enabled = ! DEBUG) + public void testStartInMiddleWithBubble() { + final TestAssembler assembler = new TestAssembler(3); + final String ref = "CAAAATGGGG"; + final String read = "AAATCGGG"; + assembler.addSequence(ref.getBytes(), true); + assembler.addSequence(read.getBytes(), false); + assertSingleBubble(assembler, ref, "CAAAATCGGG"); + } + + @Test(enabled = ! DEBUG) + public void testNoGoodStarts() { + final TestAssembler assembler = new TestAssembler(3); + final String ref = "CAAAATGGGG"; + final String read = "AAATCGGG"; + assembler.addSequence(ref.getBytes(), true); + assembler.addSequence(read.getBytes(), false); + assertSingleBubble(assembler, ref, "CAAAATCGGG"); + } + + + @Test(enabled = !DEBUG) + public void testCreateWithBasesBeforeRefSource() { + final TestAssembler assembler = new TestAssembler(3); + final String ref = "ACTG"; + final String read = "CTGGGACT"; + assembler.addSequence(ReadThreadingGraphUnitTest.getBytes(ref), true); + assembler.addSequence(ReadThreadingGraphUnitTest.getBytes(read), false); + assertLinearGraph(assembler, "ACTGGGACT"); + } + + @Test(enabled = !DEBUG) + public void testSingleIndelAsDoubleIndel3Reads() { + final TestAssembler assembler = new TestAssembler(25); + // The single indel spans two repetitive structures + final String ref = "GTTTTTCCTAGGCAAATGGTTTCTATAAAATTATGTGTGTGTGTCTCTCTCTGTGTGTGTGTGTGTGTGTGTGTGTATACCTAATCTCACACTCTTTTTTCTGG"; + final String read1 = "GTTTTTCCTAGGCAAATGGTTTCTATAAAATTATGTGTGTGTGTCTCT----------GTGTGTGTGTGTGTGTGTATACCTAATCTCACACTCTTTTTTCTGG"; + final String read2 = "GTTTTTCCTAGGCAAATGGTTTCTATAAAATTATGTGTGTGTGTCTCT----------GTGTGTGTGTGTGTGTGTATACCTAATCTCACACTCTTTTTTCTGG"; + 
assembler.addSequence(ReadThreadingGraphUnitTest.getBytes(ref), true); + assembler.addSequence(ReadThreadingGraphUnitTest.getBytes(read1), false); + assembler.addSequence(ReadThreadingGraphUnitTest.getBytes(read2), false); + + final SeqGraph graph = assembler.assemble(); + final KBestPaths pathFinder = new KBestPaths(); + final List> paths = pathFinder.getKBestPaths(graph); + Assert.assertEquals(paths.size(), 2); + final byte[] refPath = paths.get(0).getBases().length == ref.length() ? paths.get(0).getBases() : paths.get(1).getBases(); + final byte[] altPath = paths.get(0).getBases().length == ref.length() ? paths.get(1).getBases() : paths.get(0).getBases(); + Assert.assertEquals(refPath, ReadThreadingGraphUnitTest.getBytes(ref)); + Assert.assertEquals(altPath, ReadThreadingGraphUnitTest.getBytes(read1)); + } +} diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingGraphUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingGraphUnitTest.java new file mode 100644 index 000000000..ed91cccb3 --- /dev/null +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingGraphUnitTest.java @@ -0,0 +1,324 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). 
+* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. 
Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as stated above for attribution purposes. +* +* 4. 
INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. +* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. 
LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. 
This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. +*/ + +package org.broadinstitute.sting.gatk.walkers.haplotypecaller.readthreading; + +import org.broadinstitute.sting.BaseTest; +import org.broadinstitute.sting.gatk.walkers.haplotypecaller.Kmer; +import org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs.*; +import org.broadinstitute.sting.utils.Utils; +import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; +import org.testng.Assert; +import org.testng.annotations.DataProvider; +import org.testng.annotations.Test; + +import java.io.File; +import java.util.*; + +public class ReadThreadingGraphUnitTest extends BaseTest { + private final static boolean DEBUG = false; + + public static byte[] getBytes(final String alignment) { + return alignment.replace("-","").getBytes(); + } + + private void assertNonUniques(final ReadThreadingGraph assembler, String ... nonUniques) { + final Set actual = new HashSet<>(); + assembler.buildGraphIfNecessary(); + for ( final Kmer kmer : assembler.getNonUniqueKmers() ) actual.add(kmer.baseString()); + final Set expected = new HashSet<>(Arrays.asList(nonUniques)); + Assert.assertEquals(actual, expected); + } + + @Test(enabled = ! 
DEBUG) + public void testNonUniqueMiddle() { + final ReadThreadingGraph assembler = new ReadThreadingGraph(3); + final String ref = "GACACACAGTCA"; + final String read1 = "GACAC---GTCA"; + final String read2 = "CAC---GTCA"; + assembler.addSequence(getBytes(ref), true); + assembler.addSequence(getBytes(read1), false); + assembler.addSequence(getBytes(read2), false); + assertNonUniques(assembler, "ACA", "CAC"); + } + + @Test(enabled = ! DEBUG) + public void testReadsCreateNonUnique() { + final ReadThreadingGraph assembler = new ReadThreadingGraph(3); + final String ref = "GCAC--GTCA"; // CAC is unique + final String read1 = "GCACACGTCA"; // makes CAC non unique because it has a duplication + final String read2 = "CACGTCA"; // shouldn't be allowed to match CAC as start + assembler.addSequence(getBytes(ref), true); + assembler.addSequence(getBytes(read1), false); + assembler.addSequence(getBytes(read2), false); +// assembler.convertToSequenceGraph().printGraph(new File("test.dot"), 0); + + assertNonUniques(assembler, "CAC"); + //assertSingleBubble(assembler, ref, "CAAAATCGGG"); + } + + @Test(enabled = ! 
DEBUG) + public void testCountingOfStartEdges() { + final ReadThreadingGraph assembler = new ReadThreadingGraph(3); + final String ref = "NNNGTCAAA"; // ref has some bases before start + final String read1 = "GTCAAA"; // starts at first non N base + + assembler.addSequence(getBytes(ref), true); + assembler.addSequence(getBytes(read1), false); + assembler.buildGraphIfNecessary(); +// assembler.printGraph(new File("test.dot"), 0); + + for ( final MultiSampleEdge edge : assembler.edgeSet() ) { + final MultiDeBruijnVertex source = assembler.getEdgeSource(edge); + final MultiDeBruijnVertex target = assembler.getEdgeTarget(edge); + final boolean headerVertex = source.getSuffix() == 'N' || target.getSuffix() == 'N'; + if ( headerVertex ) { + Assert.assertEquals(edge.getMultiplicity(), 1, "Bases in the unique reference header should have multiplicity of 1"); + } else { + Assert.assertEquals(edge.getMultiplicity(), 2, "Should have multiplicity of 2 for any edge outside the ref header but got " + edge + " " + source + " -> " + target); + } + } + } + + @Test(enabled = !DEBUG) + public void testCountingOfStartEdgesWithMultiplePrefixes() { + final ReadThreadingGraph assembler = new ReadThreadingGraph(3); + assembler.increaseCountsThroughBranches = true; + final String ref = "NNNGTCAXX"; // ref has some bases before start + final String alt1 = "NNNCTCAXX"; // alt1 has SNP right after N + final String read = "TCAXX"; // starts right after SNP, but merges right before branch + + assembler.addSequence(getBytes(ref), true); + assembler.addSequence(getBytes(alt1), false); + assembler.addSequence(getBytes(read), false); + assembler.buildGraphIfNecessary(); + assembler.printGraph(new File("test.dot"), 0); + + final List oneCountVertices = Arrays.asList("NNN", "NNG", "NNC", "NGT", "NCT"); + final List threeCountVertices = Arrays.asList("CAX", "AXX"); + + for ( final MultiSampleEdge edge : assembler.edgeSet() ) { + final MultiDeBruijnVertex source = assembler.getEdgeSource(edge); + final 
MultiDeBruijnVertex target = assembler.getEdgeTarget(edge); + final int expected = oneCountVertices.contains(target.getSequenceString()) ? 1 : (threeCountVertices.contains(target.getSequenceString()) ? 3 : 2); + Assert.assertEquals(edge.getMultiplicity(), expected, "Bases at edge " + edge + " from " + source + " to " + target + " has bad multiplicity"); + } + } + + @Test(enabled = !DEBUG) + public void testCyclesInGraph() { + + // b37 20:12655200-12655850 + final String ref = "CAATTGTCATAGAGAGTGACAAATGTTTCAAAAGCTTATTGACCCCAAGGTGCAGCGGTGCACATTAGAGGGCACCTAAGACAGCCTACAGGGGTCAGAAAAGATGTCTCAGAGGGACTCACACCTGAGCTGAGTTGTGAAGGAAGAGCAGGATAGAATGAGCCAAAGATAAAGACTCCAGGCAAAAGCAAATGAGCCTGAGGGAAACTGGAGCCAAGGCAAGAGCAGCAGAAAAGAGCAAAGCCAGCCGGTGGTCAAGGTGGGCTACTGTGTATGCAGAATGAGGAAGCTGGCCAAGTAGACATGTTTCAGATGATGAACATCCTGTATACTAGATGCATTGGAACTTTTTTCATCCCCTCAACTCCACCAAGCCTCTGTCCACTCTTGGTACCTCTCTCCAAGTAGACATATTTCAGATCATGAACATCCTGTGTACTAGATGCATTGGAAATTTTTTCATCCCCTCAACTCCACCCAGCCTCTGTCCACACTTGGTACCTCTCTCTATTCATATCTCTGGCCTCAAGGAGGGTATTTGGCATTAGTAAATAAATTCCAGAGATACTAAAGTCAGATTTTCTAAGACTGGGTGAATGACTCCATGGAAGAAGTGAAAAAGAGGAAGTTGTAATAGGGAGACCTCTTCGG"; + + // SNP at 20:12655528 creates a cycle for small kmers + final String alt = "CAATTGTCATAGAGAGTGACAAATGTTTCAAAAGCTTATTGACCCCAAGGTGCAGCGGTGCACATTAGAGGGCACCTAAGACAGCCTACAGGGGTCAGAAAAGATGTCTCAGAGGGACTCACACCTGAGCTGAGTTGTGAAGGAAGAGCAGGATAGAATGAGCCAAAGATAAAGACTCCAGGCAAAAGCAAATGAGCCTGAGGGAAACTGGAGCCAAGGCAAGAGCAGCAGAAAAGAGCAAAGCCAGCCGGTGGTCAAGGTGGGCTACTGTGTATGCAGAATGAGGAAGCTGGCCAAGTAGACATGTTTCAGATGATGAACATCCTGTGTACTAGATGCATTGGAACTTTTTTCATCCCCTCAACTCCACCAAGCCTCTGTCCACTCTTGGTACCTCTCTCCAAGTAGACATATTTCAGATCATGAACATCCTGTGTACTAGATGCATTGGAAATTTTTTCATCCCCTCAACTCCACCCAGCCTCTGTCCACACTTGGTACCTCTCTCTATTCATATCTCTGGCCTCAAGGAGGGTATTTGGCATTAGTAAATAAATTCCAGAGATACTAAAGTCAGATTTTCTAAGACTGGGTGAATGACTCCATGGAAGAAGTGAAAAAGAGGAAGTTGTAATAGGGAGACCTCTTCGG"; + + final List reads = new ArrayList<>(); + for ( int index = 0; index < alt.length() - 100; index += 20 ) + 
reads.add(ArtificialSAMUtils.createArtificialRead(Arrays.copyOfRange(alt.getBytes(), index, index + 100), Utils.dupBytes((byte) 30, 100), 100 + "M")); + + // test that there are cycles detected for small kmer + final ReadThreadingGraph rtgraph25 = new ReadThreadingGraph(25); + rtgraph25.addSequence("ref", ref.getBytes(), null, true); + for ( final GATKSAMRecord read : reads ) + rtgraph25.addRead(read); + rtgraph25.buildGraphIfNecessary(); + Assert.assertTrue(rtgraph25.hasCycles()); + + // test that there are no cycles detected for large kmer + final ReadThreadingGraph rtgraph75 = new ReadThreadingGraph(75); + rtgraph75.addSequence("ref", ref.getBytes(), null, true); + for ( final GATKSAMRecord read : reads ) + rtgraph75.addRead(read); + rtgraph75.buildGraphIfNecessary(); + Assert.assertFalse(rtgraph75.hasCycles()); + } + + @Test(enabled = !DEBUG) + public void testNsInReadsAreNotUsedForGraph() { + + final int length = 100; + final byte[] ref = Utils.dupBytes((byte)'A', length); + + final ReadThreadingGraph rtgraph = new ReadThreadingGraph(25); + rtgraph.addSequence("ref", ref, null, true); + + // add reads with Ns at any position + for ( int i = 0; i < length; i++ ) { + final byte[] bases = ref.clone(); + bases[i] = 'N'; + final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(bases, Utils.dupBytes((byte) 30, length), length + "M"); + rtgraph.addRead(read); + } + rtgraph.buildGraphIfNecessary(); + + final SeqGraph graph = rtgraph.convertToSequenceGraph(); + final KBestPaths pathFinder = new KBestPaths<>(false); + Assert.assertEquals(pathFinder.getKBestPaths(graph, length, graph.getReferenceSourceVertex(), graph.getReferenceSinkVertex()).size(), 1); + } + + @DataProvider(name = "DanglingTails") + public Object[][] makeDanglingTailsData() { + List tests = new ArrayList(); + + // add 1M to the expected CIGAR because it includes the previous (common) base too + tests.add(new Object[]{"AAAAAAAAAA", "CAAA", "5M", true, 3}); // incomplete haplotype + 
tests.add(new Object[]{"AAAAAAAAAA", "CAAAAAAAAAA", "1M1I10M", true, 10}); // insertion + tests.add(new Object[]{"CCAAAAAAAAAA", "AAAAAAAAAA", "1M2D10M", true, 10}); // deletion + tests.add(new Object[]{"AAAAAAAA", "CAAAAAAA", "9M", true, 7}); // 1 snp + tests.add(new Object[]{"AAAAAAAA", "CAAGATAA", "9M", true, 2}); // several snps + tests.add(new Object[]{"AAAAA", "C", "1M4D1M", true, -1}); // funky SW alignment + tests.add(new Object[]{"AAAAA", "CA", "1M3D2M", true, 1}); // very little data + tests.add(new Object[]{"AAAAAAA", "CAAAAAC", "8M", true, -1}); // ends in mismatch + tests.add(new Object[]{"AAAAAA", "CGAAAACGAA", "1M2I4M2I2M", false, 0}); // alignment is too complex + + return tests.toArray(new Object[][]{}); + } + + @Test(dataProvider = "DanglingTails", enabled = !DEBUG) + public void testDanglingTails(final String refEnd, + final String altEnd, + final String cigar, + final boolean cigarIsGood, + final int mergePointDistanceFromSink) { + + final int kmerSize = 15; + + // construct the haplotypes + final String commonPrefix = "AAAAAAAAAACCCCCCCCCCGGGGGGGGGGTTTTTTTTTT"; + final String ref = commonPrefix + refEnd; + final String alt = commonPrefix + altEnd; + + // create the graph and populate it + final ReadThreadingGraph rtgraph = new ReadThreadingGraph(kmerSize); + rtgraph.addSequence("ref", ref.getBytes(), null, true); + final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(alt.getBytes(), Utils.dupBytes((byte) 30, alt.length()), alt.length() + "M"); + rtgraph.addRead(read); + rtgraph.buildGraphIfNecessary(); + + // confirm that we have just a single dangling tail + MultiDeBruijnVertex altSink = null; + for ( final MultiDeBruijnVertex v : rtgraph.vertexSet() ) { + if ( rtgraph.isSink(v) && !rtgraph.isReferenceNode(v) ) { + Assert.assertTrue(altSink == null, "We found more than one non-reference sink"); + altSink = v; + } + } + + Assert.assertTrue(altSink != null, "We did not find a non-reference sink"); + + // confirm that the SW 
alignment agrees with our expectations + final ReadThreadingGraph.DanglingTailMergeResult result = rtgraph.generateCigarAgainstReferencePath(altSink); + Assert.assertTrue(cigar.equals(result.cigar.toString()), "SW generated cigar = " + result.cigar.toString()); + + // confirm that the goodness of the cigar agrees with our expectations + Assert.assertEquals(rtgraph.cigarIsOkayToMerge(result.cigar), cigarIsGood); + + // confirm that the tail merging works as expected + if ( cigarIsGood ) { + final int mergeResult = rtgraph.mergeDanglingTail(result); + Assert.assertTrue(mergeResult == 1 || mergePointDistanceFromSink == -1); + + // confirm that we created the appropriate edge + if ( mergePointDistanceFromSink >= 0 ) { + MultiDeBruijnVertex v = altSink; + for ( int i = 0; i < mergePointDistanceFromSink; i++ ) { + if ( rtgraph.inDegreeOf(v) != 1 ) + Assert.fail("Encountered vertex with multiple sources"); + v = rtgraph.getEdgeSource(rtgraph.incomingEdgeOf(v)); + } + Assert.assertTrue(rtgraph.outDegreeOf(v) > 1); + } + } + } + + +// TODO -- update to use determineKmerSizeAndNonUniques directly +// @DataProvider(name = "KmerSizeData") +// public Object[][] makeKmerSizeDataProvider() { +// List tests = new ArrayList(); +// +// // this functionality can be adapted to provide input data for whatever you might want in your data +// tests.add(new Object[]{3, 3, 3, Arrays.asList("ACG"), Arrays.asList()}); +// tests.add(new Object[]{3, 4, 3, Arrays.asList("CAGACG"), Arrays.asList()}); +// +// tests.add(new Object[]{3, 3, 3, Arrays.asList("AAAAC"), Arrays.asList("AAA")}); +// tests.add(new Object[]{3, 4, 4, Arrays.asList("AAAAC"), Arrays.asList()}); +// tests.add(new Object[]{3, 5, 4, Arrays.asList("AAAAC"), Arrays.asList()}); +// tests.add(new Object[]{3, 4, 3, Arrays.asList("CAAA"), Arrays.asList()}); +// tests.add(new Object[]{3, 4, 4, Arrays.asList("CAAAA"), Arrays.asList()}); +// tests.add(new Object[]{3, 5, 4, Arrays.asList("CAAAA"), Arrays.asList()}); +// tests.add(new 
Object[]{3, 5, 5, Arrays.asList("ACGAAAAACG"), Arrays.asList()}); +// +// for ( int maxSize = 3; maxSize < 20; maxSize++ ) { +// for ( int dupSize = 3; dupSize < 20; dupSize++ ) { +// final int expectedSize = Math.min(maxSize, dupSize); +// final String dup = Utils.dupString("C", dupSize); +// final List nonUnique = dupSize > maxSize ? Arrays.asList(Utils.dupString("C", maxSize)) : Collections.emptyList(); +// tests.add(new Object[]{3, maxSize, expectedSize, Arrays.asList("ACGT", "A" + dup + "GT"), nonUnique}); +// tests.add(new Object[]{3, maxSize, expectedSize, Arrays.asList("A" + dup + "GT", "ACGT"), nonUnique}); +// } +// } +// +// return tests.toArray(new Object[][]{}); +// } +// +// /** +// * Example testng test using MyDataProvider +// */ +// @Test(dataProvider = "KmerSizeData") +// public void testDynamicKmerSizing(final int min, final int max, final int expectKmer, final List seqs, final List expectedNonUniques) { +// final ReadThreadingGraph assembler = new ReadThreadingGraph(min, max); +// for ( String seq : seqs ) assembler.addSequence(seq.getBytes(), false); +// assembler.buildGraphIfNecessary(); +// Assert.assertEquals(assembler.getKmerSize(), expectKmer); +// assertNonUniques(assembler, expectedNonUniques.toArray(new String[]{})); +// } + + +} diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/SequenceForKmersUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/SequenceForKmersUnitTest.java new file mode 100644 index 000000000..7c3160c30 --- /dev/null +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/SequenceForKmersUnitTest.java @@ -0,0 +1,80 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. 
with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). +* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. 
LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. 
INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. +* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. 
LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. 
This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. +*/ + +package org.broadinstitute.sting.gatk.walkers.haplotypecaller.readthreading; + +import org.broadinstitute.sting.BaseTest; +import org.broadinstitute.sting.utils.Utils; +import org.testng.Assert; +import org.testng.annotations.Test; + +public class SequenceForKmersUnitTest extends BaseTest { + @Test + public void testNoCount() { + final byte[] seq = "ACGT".getBytes(); + final SequenceForKmers sk = new SequenceForKmers("foo", seq, 0, seq.length, null, true); + Assert.assertEquals(sk.name, "foo"); + Assert.assertEquals(sk.sequence, seq); + Assert.assertEquals(sk.start, 0); + Assert.assertEquals(sk.stop, seq.length); + Assert.assertEquals(sk.isRef, true); + for ( int i = 0; i < seq.length; i++ ) + Assert.assertEquals(sk.getCount(i), 1); + } + + @Test + public void testWithCounts() { + final int len = 256; + final int[] counts = new int[len]; + for ( int i = 0; i < len; i++ ) counts[i] = i; + final byte[] seq = Utils.dupBytes((byte)'A', len); + + final SequenceForKmers sk = new SequenceForKmers("foo", seq, 0, seq.length, counts, true); + + for ( int i = 0; i < seq.length; i++ ) + Assert.assertEquals(sk.getCount(i), i); + } +} diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/indels/ConstrainedMateFixingManagerUnitTest.java 
b/protected/java/test/org/broadinstitute/sting/gatk/walkers/indels/ConstrainedMateFixingManagerUnitTest.java new file mode 100644 index 000000000..9bcd7a3a3 --- /dev/null +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/indels/ConstrainedMateFixingManagerUnitTest.java @@ -0,0 +1,108 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). +* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. 
+* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. 
+* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. 
+* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. 
Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. 
+*/ + +package org.broadinstitute.sting.gatk.walkers.indels; + +import net.sf.samtools.SAMFileHeader; +import net.sf.samtools.SAMRecord; +import org.broadinstitute.sting.BaseTest; +import org.broadinstitute.sting.utils.GenomeLocParser; +import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; +import org.testng.Assert; +import org.testng.annotations.BeforeClass; +import org.testng.annotations.Test; + +import java.util.List; + + +public class ConstrainedMateFixingManagerUnitTest extends BaseTest { + + private static SAMFileHeader header; + private static GenomeLocParser genomeLocParser; + + @BeforeClass + public void beforeClass() { + header = ArtificialSAMUtils.createArtificialSamHeader(3, 1, 100); + genomeLocParser = new GenomeLocParser(header.getSequenceDictionary()); + } + @Test + public void testSecondaryAlignmentsDoNotInterfere() { + final List properReads = ArtificialSAMUtils.createPair(header, "foo", 1, 10, 30, true, false); + final GATKSAMRecord read1 = properReads.get(0); + read1.setAlignmentStart(8); // move the read + read1.setFlags(99); // first in proper pair, mate negative strand + + final GATKSAMRecord read2Primary = properReads.get(1); + read2Primary.setFlags(147); // second in pair, mate unmapped, not primary alignment + + Assert.assertEquals(read1.getInferredInsertSize(), 21); + + final GATKSAMRecord read2NonPrimary = new GATKSAMRecord(read2Primary); + read2NonPrimary.setFlags(393); // second in proper pair, on reverse strand + + final ConstrainedMateFixingManager manager = new ConstrainedMateFixingManager(null, genomeLocParser, 1000, 1000, 1000); + manager.addRead(read1, true, false); + manager.addRead(read2NonPrimary, false, false); + manager.addRead(read2Primary, false, false); + + Assert.assertEquals(manager.getNReadsInQueue(), 3); + + for ( final SAMRecord read : manager.getReadsInQueueForTesting() ) { + if ( read.getFirstOfPairFlag() ) { + Assert.assertEquals(read.getFlags(), 
99); + Assert.assertEquals(read.getInferredInsertSize(), 23); + } else if ( read.getNotPrimaryAlignmentFlag() ) { + Assert.assertEquals(read.getFlags(), 393); + Assert.assertEquals(read.getInferredInsertSize(), -21); + } else { + Assert.assertEquals(read.getFlags(), 147); + Assert.assertEquals(read.getInferredInsertSize(), -23); + } + } + } + +} \ No newline at end of file diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/variantutils/CombineVariantsIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/variantutils/CombineVariantsIntegrationTest.java index 6c4072962..917cbd542 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/variantutils/CombineVariantsIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/variantutils/CombineVariantsIntegrationTest.java @@ -189,4 +189,15 @@ public class CombineVariantsIntegrationTest extends WalkerTest { Arrays.asList("aa926eae333208dc1f41fe69dc95d7a6")); cvExecuteTest("combineDBSNPDuplicateSites:", spec, true); } + + @Test + public void combineLeavesUnfilteredRecordsUnfiltered() { + WalkerTestSpec spec = new WalkerTestSpec( + "-T CombineVariants --no_cmdline_in_header -o %s " + + " -R " + b37KGReference + + " -V " + privateTestDir + "combineVariantsLeavesRecordsUnfiltered.vcf", + 1, + Arrays.asList("f8c014d0af7e014475a2a448dc1f9cef")); + cvExecuteTest("combineLeavesUnfilteredRecordsUnfiltered: ", spec, false); + } } \ No newline at end of file diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/variantutils/ConcordanceMetricsUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/variantutils/ConcordanceMetricsUnitTest.java old mode 100644 new mode 100755 index bca912d63..bd9ff4f80 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/variantutils/ConcordanceMetricsUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/variantutils/ConcordanceMetricsUnitTest.java 
@@ -567,8 +567,10 @@ public class ConcordanceMetricsUnitTest extends BaseTest { table[5] = new int[] {12, 0, 34, 20, 10, 0}; double EXPEC_NRS = 0.8969957; double EXPEC_NRD = 0.1071429; + double EXPEC_OGC = 0.92592592; // (100+150+50)/(100+5+1+150+7+3+50+2+6) Assert.assertEquals(EXPEC_NRS,metrics.getOverallNRS(),1e-7); Assert.assertEquals(EXPEC_NRD,metrics.getOverallNRD(),1e-7); + Assert.assertEquals(EXPEC_OGC,metrics.getOverallOGC(),1e-7); int EXPEC_EVAL_REF = 124; int EXPEC_EVAL_HET = 169; int EXPEC_EVAL_VAR = 62; diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/variantutils/GenotypeConcordanceIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/variantutils/GenotypeConcordanceIntegrationTest.java old mode 100644 new mode 100755 index 2ebb1d7d8..830b9169d --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/variantutils/GenotypeConcordanceIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/variantutils/GenotypeConcordanceIntegrationTest.java @@ -47,7 +47,6 @@ package org.broadinstitute.sting.gatk.walkers.variantutils; import org.broadinstitute.sting.WalkerTest; -import org.broadinstitute.sting.utils.exceptions.UserException; import org.testng.annotations.Test; import java.util.Arrays; @@ -65,7 +64,7 @@ public class GenotypeConcordanceIntegrationTest extends WalkerTest { WalkerTestSpec spec = new WalkerTestSpec( baseTestString("NA12878.Jan2013.haplotypeCaller.subset.indels.vcf", "NA12878.Jan2013.bestPractices.subset.indels.vcf"), 0, - Arrays.asList("0f29a0c6dc44066228c8cb204fd53ec0") + Arrays.asList("e4368146ffed2c6abf8265f5fbc5875d") ); executeTest("test indel concordance", spec); @@ -76,7 +75,7 @@ public class GenotypeConcordanceIntegrationTest extends WalkerTest { WalkerTestSpec spec = new WalkerTestSpec( baseTestString("GenotypeConcordanceNonOverlapTest_Eval.vcf", "GenotypeConcordanceNonOverlapTest_Comp.vcf"), 0, - Arrays.asList("fc725022d47b4b5f8a6ef87f0f1ffe89") + 
Arrays.asList("361e00e430f36e4237f888c97d40efca") ); executeTest("test non-overlapping samples", spec); @@ -87,7 +86,7 @@ public class GenotypeConcordanceIntegrationTest extends WalkerTest { WalkerTestSpec spec = new WalkerTestSpec( baseTestString("GenotypeConcordanceNonOverlapTest_Eval.vcf", "GenotypeConcordanceNonOverlapTest_Comp.vcf") + " -moltenize", 0, - Arrays.asList("370141088362d0ab7054be5249c49c11") + Arrays.asList("9573b763303d70405ea48ab1515a0802") ); executeTest("Test moltenized output",spec); @@ -98,7 +97,7 @@ public class GenotypeConcordanceIntegrationTest extends WalkerTest { WalkerTestSpec spec = new WalkerTestSpec( baseTestString("GenotypeConcordance.multipleRecordsTest1.eval.vcf","GenotypeConcordance.multipleRecordsTest1.comp.vcf"), 0, - Arrays.asList("352d59c4ac0cee5eb8ddbc9404b19ce9") + Arrays.asList("0105fcde492fe55ee12a4a4508238806") ); executeTest("test multiple records per site",spec); @@ -109,7 +108,7 @@ public class GenotypeConcordanceIntegrationTest extends WalkerTest { WalkerTestSpec spec = new WalkerTestSpec( baseTestString("genotypeConcordanceFilterTest.vcf","genotypeConcordanceFilterTest.vcf") + " -gfe 'GQ<30'", 0, - Arrays.asList("b7b495ccfa6d50a6be3e095d3f6d3c52") + Arrays.asList("d70a7a90900560f525b58004ba258111") ); executeTest("Test filtering on the EVAL rod",spec); @@ -120,7 +119,7 @@ public class GenotypeConcordanceIntegrationTest extends WalkerTest { WalkerTestSpec spec = new WalkerTestSpec( baseTestString("genotypeConcordanceFilterTest.vcf","genotypeConcordanceFilterTest.vcf") + " -gfc 'LX<0.50'", 0, - Arrays.asList("6406b16cde7960b8943edf594303afd6") + Arrays.asList("2b01ef6285eefc27d86f5f8050272e51") ); executeTest("Test filtering on the COMP rod", spec); @@ -131,7 +130,7 @@ public class GenotypeConcordanceIntegrationTest extends WalkerTest { WalkerTestSpec spec = new WalkerTestSpec( baseTestString("genotypeConcordanceFilterTest.vcf","genotypeConcordanceFilterTest.vcf") + " -gfc 'LX<0.52' -gfe 'DP<5' -gfe 'GQ<37'", 0, - 
Arrays.asList("26ffd06215b6177acce0ea9f35d73d31") + Arrays.asList("323fba26a65596f142cfa387ca464c32") ); executeTest("Test filtering on both rods",spec); diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/variantutils/LiftoverVariantsIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/variantutils/LiftoverVariantsIntegrationTest.java index 25f6f3d97..c17c9ca55 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/variantutils/LiftoverVariantsIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/variantutils/LiftoverVariantsIntegrationTest.java @@ -47,6 +47,7 @@ package org.broadinstitute.sting.gatk.walkers.variantutils; import org.broadinstitute.sting.WalkerTest; +import org.broadinstitute.sting.utils.exceptions.UserException; import org.testng.annotations.Test; import java.util.Arrays; @@ -91,4 +92,13 @@ public class LiftoverVariantsIntegrationTest extends WalkerTest { Arrays.asList("0909a953291a5e701194668c9b8833ab")); executeTest("test liftover filtering of indels", spec); } + + @Test + public void testLiftoverFailsWithNoOutput() { + WalkerTestSpec spec = new WalkerTestSpec( + "-T LiftoverVariants -R " + hg18Reference + " --variant:vcf " + privateTestDir + "liftover_test.vcf -chain " + validationDataLocation + "hg18ToHg19.broad.over.chain -dict /seq/references/Homo_sapiens_assembly19/v0/Homo_sapiens_assembly19.dict", + 0, + UserException.class); + executeTest("test liftover fails with no output", spec); + } } diff --git a/protected/java/test/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMapUnitTest.java b/protected/java/test/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMapUnitTest.java index 9530ea41f..651beffc8 100644 --- a/protected/java/test/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMapUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMapUnitTest.java 
@@ -233,7 +233,7 @@ public class PerReadAlleleLikelihoodMapUnitTest extends BaseTest { tests.add(new Object[]{100, 0.01, true, Arrays.asList(-5.0, -10.0)}); tests.add(new Object[]{100, 0.01, false, Arrays.asList(-5.0, -10.0, -3.0)}); tests.add(new Object[]{100, 0.01, false, Arrays.asList(-5.0, -10.0, -2.0)}); - tests.add(new Object[]{100, 0.01, true, Arrays.asList(-5.0, -10.0, -4.0)}); + tests.add(new Object[]{100, 0.01, true, Arrays.asList(-5.0, -10.0, -4.2)}); tests.add(new Object[]{100, 0.001, true, Arrays.asList(-5.0, -10.0)}); tests.add(new Object[]{100, 0.001, false, Arrays.asList(-5.0, -10.0, 0.0)}); @@ -243,7 +243,7 @@ public class PerReadAlleleLikelihoodMapUnitTest extends BaseTest { @Test(dataProvider = "PoorlyModelledReadData") public void testPoorlyModelledRead(final int readLen, final double maxErrorRatePerBase, final boolean expected, final List log10likelihoods) { final byte[] bases = Utils.dupBytes((byte)'A', readLen); - final byte[] quals = Utils.dupBytes((byte) 30, readLen); + final byte[] quals = Utils.dupBytes((byte) 40, readLen); final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(bases, quals, readLen + "M"); @@ -279,7 +279,7 @@ public class PerReadAlleleLikelihoodMapUnitTest extends BaseTest { final double likelihood = bad ? 
-100.0 : 0.0; final byte[] bases = Utils.dupBytes((byte)'A', readLen); - final byte[] quals = Utils.dupBytes((byte) 30, readLen); + final byte[] quals = Utils.dupBytes((byte) 40, readLen); final Allele allele = Allele.create(Utils.dupString("A", readI+1)); diff --git a/protected/java/test/org/broadinstitute/sting/utils/haplotypeBAMWriter/HaplotypeBAMWriterUnitTest.java b/protected/java/test/org/broadinstitute/sting/utils/haplotypeBAMWriter/HaplotypeBAMWriterUnitTest.java index 91a2988aa..0c76ad338 100644 --- a/protected/java/test/org/broadinstitute/sting/utils/haplotypeBAMWriter/HaplotypeBAMWriterUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/utils/haplotypeBAMWriter/HaplotypeBAMWriterUnitTest.java @@ -177,10 +177,10 @@ public class HaplotypeBAMWriterUnitTest extends BaseTest { final GATKSAMRecord originalReadCopy = (GATKSAMRecord)read.clone(); if ( expectedReadCigar == null ) { - Assert.assertNull(writer.createReadAlignedToRef(read, haplotype, refStart)); + Assert.assertNull(writer.createReadAlignedToRef(read, haplotype, refStart, true)); } else { final Cigar expectedCigar = TextCigarCodec.getSingleton().decode(expectedReadCigar); - final GATKSAMRecord alignedRead = writer.createReadAlignedToRef(read, haplotype, refStart); + final GATKSAMRecord alignedRead = writer.createReadAlignedToRef(read, haplotype, refStart, true); Assert.assertEquals(alignedRead.getReadName(), originalReadCopy.getReadName()); Assert.assertEquals(alignedRead.getAlignmentStart(), expectedReadStart); @@ -290,7 +290,7 @@ public class HaplotypeBAMWriterUnitTest extends BaseTest { @Test(dataProvider = "ComplexReadAlignedToRef", enabled = !DEBUG) public void testReadAlignedToRefComplexAlignment(final int testIndex, final GATKSAMRecord read, final String reference, final Haplotype haplotype, final int expectedMaxMismatches) throws Exception { final HaplotypeBAMWriter writer = new CalledHaplotypeBAMWriter(new MockBAMWriter()); - final GATKSAMRecord alignedRead = 
writer.createReadAlignedToRef(read, haplotype, 1); + final GATKSAMRecord alignedRead = writer.createReadAlignedToRef(read, haplotype, 1, true); if ( alignedRead != null ) { final int mismatches = AlignmentUtils.getMismatchCount(alignedRead, reference.getBytes(), alignedRead.getAlignmentStart() - 1).numMismatches; Assert.assertTrue(mismatches <= expectedMaxMismatches, diff --git a/protected/java/test/org/broadinstitute/sting/utils/recalibration/ReadGroupCovariateUnitTest.java b/protected/java/test/org/broadinstitute/sting/utils/recalibration/ReadGroupCovariateUnitTest.java index 0878fba82..0b2df6369 100644 --- a/protected/java/test/org/broadinstitute/sting/utils/recalibration/ReadGroupCovariateUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/utils/recalibration/ReadGroupCovariateUnitTest.java @@ -75,26 +75,37 @@ public class ReadGroupCovariateUnitTest { final String expected = "SAMPLE.1"; GATKSAMReadGroupRecord rg = new GATKSAMReadGroupRecord("MY.ID"); rg.setPlatformUnit(expected); - runTest(rg, expected); + runTest(rg, expected, covariate); } @Test(enabled = true) public void testMissingPlatformUnit() { final String expected = "MY.7"; GATKSAMReadGroupRecord rg = new GATKSAMReadGroupRecord(expected); - runTest(rg, expected); + runTest(rg, expected, covariate); } - private void runTest(GATKSAMReadGroupRecord rg, String expected) { + @Test(enabled = true) + public void testForceReadgroup() { + final RecalibrationArgumentCollection forcedRAC = new RecalibrationArgumentCollection(); + forcedRAC.FORCE_READGROUP = "FOO"; + final ReadGroupCovariate forcedCovariate = new ReadGroupCovariate(); + forcedCovariate.initialize(forcedRAC); + + final GATKSAMReadGroupRecord rg = new GATKSAMReadGroupRecord("NOT_FOO"); + runTest(rg, "FOO", forcedCovariate); + } + + private static void runTest(final GATKSAMReadGroupRecord rg, final String expected, final ReadGroupCovariate covariate) { GATKSAMRecord read = ReadUtils.createRandomRead(10); read.setReadGroup(rg); 
ReadCovariates readCovariates = new ReadCovariates(read.getReadLength(), 1); covariate.recordValues(read, readCovariates); - verifyCovariateArray(readCovariates.getMismatchesKeySet(), expected); + verifyCovariateArray(readCovariates.getMismatchesKeySet(), expected, covariate); } - private void verifyCovariateArray(int[][] values, String expected) { + private static void verifyCovariateArray(final int[][] values, final String expected, final ReadGroupCovariate covariate) { for (int[] value : values) { String actual = covariate.formatKey(value[0]); Assert.assertEquals(actual, expected); diff --git a/protected/java/test/org/broadinstitute/sting/utils/smithwaterman/GlobalEdgeGreedySWPairwiseAlignmentUnitTest.java b/protected/java/test/org/broadinstitute/sting/utils/smithwaterman/GlobalEdgeGreedySWPairwiseAlignmentUnitTest.java new file mode 100644 index 000000000..711a60436 --- /dev/null +++ b/protected/java/test/org/broadinstitute/sting/utils/smithwaterman/GlobalEdgeGreedySWPairwiseAlignmentUnitTest.java @@ -0,0 +1,259 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). +* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. 
DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. 
LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. 
BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. +* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. 
Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. 
+*/ + +package org.broadinstitute.sting.utils.smithwaterman; + +import net.sf.samtools.TextCigarCodec; +import org.broadinstitute.sting.BaseTest; +import org.broadinstitute.sting.utils.Utils; +import org.broadinstitute.sting.utils.sam.AlignmentUtils; +import org.testng.Assert; +import org.testng.annotations.DataProvider; +import org.testng.annotations.Test; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +public class GlobalEdgeGreedySWPairwiseAlignmentUnitTest extends BaseTest { + + private final static boolean DEBUG = false; + + @Test(enabled = !DEBUG) + public void testReadAlignedToRefComplexAlignment() { + final String reference = "AAAGGACTGACTG"; + final String read = "ACTGACTGACTG"; + final GlobalEdgeGreedySWPairwiseAlignment sw = new GlobalEdgeGreedySWPairwiseAlignment(reference.getBytes(), read.getBytes()); + Assert.assertEquals(sw.getCigar().toString(), "1M1D11M"); + } + + @Test(enabled = !DEBUG) + public void testIndelsAtStartAndEnd() { + final String match = "CCCCC"; + final String reference = "AAA" + match; + final String read = match + "GGG"; + final int expectedStart = 0; + final String expectedCigar = "3D5M3I"; + final GlobalEdgeGreedySWPairwiseAlignment sw = new GlobalEdgeGreedySWPairwiseAlignment(reference.getBytes(), read.getBytes()); + Assert.assertEquals(sw.getAlignmentStart2wrt1(), expectedStart); + Assert.assertEquals(sw.getCigar().toString(), expectedCigar); + } + + @Test(enabled = !DEBUG) + public void testDegenerateAlignmentWithIndelsAtBothEnds() { + logger.warn("testDegenerateAlignmentWithIndelsAtBothEnds"); + final String ref = "TGTGTGTGTGTGTGACAGAGAGAGAGAGAGAGAGAGAGAGAGAGA"; + final String alt = "ACAGAGAGAGAGAGAGAGAGAGAGAGAGAGAGAGAGAGAGAGAGAGAGAGA"; + final int expectedStart = 0; + final String expectedCigar = "6I45M"; + final GlobalEdgeGreedySWPairwiseAlignment sw = new GlobalEdgeGreedySWPairwiseAlignment(ref.getBytes(), alt.getBytes(), SWParameterSet.STANDARD_NGS); + 
Assert.assertEquals(sw.getAlignmentStart2wrt1(), expectedStart); + Assert.assertEquals(sw.getCigar().toString(), expectedCigar); + } + + @Test(enabled = !DEBUG) + public void testAlignReallyLongDeletion() { + final String ref = "CGGCTAATTTTTGTATTTTTAGTAGAGACAGGGTTTCACCATGTTGGCCAGGCTGGTCTTGAACTCCTGACCTCAGGTGATCCACTCGCCTCGGTCTCCCAAAGTGTTGGGATTACAGGCATGAACCACTGCACCTGGCCTAGTGTTTGGGAAAACTATACTAGGAAAAGAATAGTTGCTTTAAGTCATTCTTTGATTATTCTGAGAATTGGCATATAGCTGCCATTATAACCTACTTTTGCTAAATATAATAATAATAATCATTATTTTTATTTTTTGAGACAGGGTCTTGTTTTGTCACCCCGGCTGGAGTGAAGTGGCGCAATCTCGGCTCACTGCAACCTCCACCTCCGGGTGCAAGCAATTCTCCTGCCTCAGCCTCTTGAGTAGCTAGGATTACAGGCACAAGCCATCATGCCCAGCTAATTTTTGTATTTTTAGTAGAGACAGGGTTTCACCATGTTGGTCAGGCTGGTCTTGAACTCCTGACCTCAGGT"; + final String alt = "CGGCTAATTTTTGTATTTTTAGTAGAGACAGGGTTTCACCATGTTGGTCAGGCTGGTCTTGAACTCCTGACCTCAGGT"; + + final GlobalEdgeGreedySWPairwiseAlignment sw = new GlobalEdgeGreedySWPairwiseAlignment(ref.getBytes(), alt.getBytes(), SWParameterSet.STANDARD_NGS); + Assert.assertEquals(sw.getAlignmentStart2wrt1(), 0); + Assert.assertEquals(sw.getCigar().toString(), "47M419D31M"); + } + + public static final Parameters params = new Parameters(20.0, -10.0, -26.0, -1.1); + @DataProvider(name = "SWData") + public Object[][] makeSWData() { + List tests = new ArrayList(); + + // simple cases + tests.add(new Object[]{"A", "C", "1M"}); + tests.add(new Object[]{"AAA", "AAA", "3M"}); + tests.add(new Object[]{"AAA", "AGA", "3M"}); + tests.add(new Object[]{"AAA", "GAA", "3M"}); + tests.add(new Object[]{"AAA", "AAG", "3M"}); + + // small single indels + tests.add(new Object[]{"ACACACAC", "ACACAC", "6M2D"}); + tests.add(new Object[]{"ACACAC", "ACACACAC", "6M2I"}); + tests.add(new Object[]{"XXACACACXX", "XXACACACACXX", "8M2I2M"}); + tests.add(new Object[]{"XXACACACXX", "XXACACXX", "6M2D2M"}); + tests.add(new Object[]{"ACGT", "AACGT", "1I4M"}); + tests.add(new Object[]{"ACGT", "ACCGT", "2M1I2M"}); + tests.add(new Object[]{"ACGT", "ACGGT", "3M1I1M"}); + tests.add(new 
Object[]{"ACGT", "ACGTT", "4M1I"}); + tests.add(new Object[]{"ACGT", "CGT", "1D3M"}); + tests.add(new Object[]{"ACGT", "AGT", "1M1D2M"}); + tests.add(new Object[]{"ACGT", "ACT", "2M1D1M"}); + tests.add(new Object[]{"ACGT", "ACG", "3M1D"}); + + // mismatches through out the sequences + final String ref = "ACGTAACCGGTT"; + for ( int diff = 0; diff < ref.length(); diff++ ) { + final byte[] altBases = ref.getBytes(); + altBases[diff] = 'N'; + tests.add(new Object[]{ref, new String(altBases), ref.length() + "M"}); + } + for ( int diff1 = 0; diff1 < ref.length(); diff1++ ) { + for ( int diff2 = 0; diff2 < ref.length(); diff2++ ) { + final byte[] altBases = ref.getBytes(); + altBases[diff1] = 'N'; + altBases[diff2] = 'N'; + tests.add(new Object[]{ref, new String(altBases), ref.length() + "M"}); + } + } + + // prefixes and suffixes matching + final String totalPrefix = "ACG"; + final String totalSuffix = "GCT"; + for ( int prefixSize = 0; prefixSize < totalPrefix.length(); prefixSize++) { + for ( int suffixSize = 0; suffixSize < totalPrefix.length(); suffixSize++) { + if ( prefixSize + suffixSize == 0 ) + continue; + for ( int indelSize = 1; indelSize < 50; indelSize++ ) { + final String prefix = totalPrefix.substring(0, prefixSize); + final String suffix = totalSuffix.substring(0, suffixSize); + final String insert = Utils.dupString("N", indelSize); + tests.add(new Object[]{prefix + suffix, prefix + insert + suffix, prefix.length() + "M" + indelSize + "I" + suffix.length() + "M"}); + tests.add(new Object[]{prefix + insert + suffix, prefix + suffix, prefix.length() + "M" + indelSize + "D" + suffix.length() + "M"}); + } + } + } + + // larger indels with prefixes/suffixes + tests.add(new Object[]{"ACTGTTTTGAACATCAGTTATTTTAAACTTTTAAGTTGTTAGCACAGCAAAAGCAACAAAATTCTAAGTGCAGTAATCACTTTACTGCGTGGTCATATGAAATCAAGGCAATGTTATGAGTATTACTGGAAAGCTGGACAGAGTAACGGGAAAAGTGACTAAAACTATGC", 
"CCTGTTTTGAACATCAGTTATTTTAAACTTTTAAGTTGTTAGCACAGCAAAAGCAACAAAATTCTAAGTGCAGTAATCACTTTACTGCGTGGTCATATGAAATCAAGGCAATGTTATGAGTATTACTGGAAAGCTGGACAGAGTAACGGGAAAAGTGACT", "160M10D"}); + tests.add(new Object[]{"LLLLLTATTAAGTAGTGCTCTATGTTGTCAACTAATTTATTTCCCATTTCAAACATTAGTTGACATGTTTTCATTTCTCTTTTGGAAGGAAACAACTAAATATGTTATCAATCCATCATTTACTTGTACAATAAATAAAGTTCTAAATCACTGCACAGTGTAAAATGGCAAATAGACTTCCCCATAACACAAAGCCATCCTGAAAAGTTTTGTTCATTTTAGAAGRRRRR", "LLLLLARRRRR", "5M219D6M"}); + tests.add(new Object[]{"LLLLLTATTTTTTRRRRR", "LLLLLARRRRR", "5M7D6M"}); + + // systematic testing + for ( final int forwardMatches : Arrays.asList(0, 1, 5, 10)) { + for ( final int forwardMismatches : Arrays.asList(0, 1, 2)) { + for ( final int middleMatches : Arrays.asList(0, 1, 5, 10)) { + for ( final int delSize : Arrays.asList(0, 1, 2, 3 )) { + for ( final int insSize : Arrays.asList(0, 1, 2, 3 )) { + for ( final int reverseMismatches : Arrays.asList(0, 1, 2)) { + for ( final int reverseMatches : Arrays.asList(0, 1, 5, 10)) { + // if there is an insertion and deletion, they should cancel each other out (at least partially) + final int overlap = Math.min(delSize, insSize); + final int myDelSize = delSize - overlap; + final int myInsSize = insSize - overlap; + + // this case is too difficult to create a CIGAR for because SW will (legitimately) prefer to switch the indel and mismatches + final int totalMismatches = forwardMismatches + reverseMismatches; + if ( (myDelSize > 0 || myInsSize > 0 ) && (totalMismatches >= myDelSize || totalMismatches >= myInsSize) ) + continue; + + final StringBuilder refBuilder = new StringBuilder(); + final StringBuilder altBuilder = new StringBuilder(); + final StringBuilder cigarBuilder = new StringBuilder(); + + refBuilder.append(Utils.dupString('A', forwardMatches + forwardMismatches + middleMatches)); + altBuilder.append(Utils.dupString('A', forwardMatches)); + altBuilder.append(Utils.dupString('C', forwardMismatches)); + altBuilder.append(Utils.dupString('A', 
middleMatches)); + cigarBuilder.append(forwardMatches + forwardMismatches + middleMatches); + cigarBuilder.append("M"); + + if ( myDelSize > 0 ) { + refBuilder.append(Utils.dupString('G', myDelSize)); + cigarBuilder.append(myDelSize); + cigarBuilder.append("D"); + } + if ( myInsSize > 0 ) { + altBuilder.append(Utils.dupString('T', myInsSize)); + cigarBuilder.append(myInsSize); + cigarBuilder.append("I"); + } + if ( overlap > 0 ) { + refBuilder.append(Utils.dupString('G', overlap)); + altBuilder.append(Utils.dupString('T', overlap)); + cigarBuilder.append(overlap); + cigarBuilder.append("M"); + } + if ( delSize > 0 || insSize > 0 ) { + refBuilder.append(Utils.dupString('A', middleMatches)); + altBuilder.append(Utils.dupString('A', middleMatches)); + cigarBuilder.append(middleMatches); + cigarBuilder.append("M"); + } + + refBuilder.append(Utils.dupString('A', reverseMismatches + reverseMatches)); + altBuilder.append(Utils.dupString('C', reverseMismatches)); + altBuilder.append(Utils.dupString('A', reverseMatches)); + cigarBuilder.append(reverseMismatches + reverseMatches); + cigarBuilder.append("M"); + + if ( refBuilder.length() > 0 && altBuilder.length() > 0 ) + tests.add(new Object[]{refBuilder.toString(), altBuilder.toString(), cigarBuilder.toString()}); + } + } + } + } + } + } + } + + return tests.toArray(new Object[][]{}); + } + + @Test(dataProvider = "SWData", enabled = !DEBUG) + public void testSW(final String seq1, final String seq2, final String expectedCigar) { + final GlobalEdgeGreedySWPairwiseAlignment alignment = new GlobalEdgeGreedySWPairwiseAlignment(seq1.getBytes(), seq2.getBytes(), new Parameters(5.0, -5.0, -25.0, -1.0)); + Assert.assertEquals(alignment.getCigar(), AlignmentUtils.consolidateCigar(TextCigarCodec.getSingleton().decode(expectedCigar))); + } + + /** + * For debugging purposes only + */ + @Test(enabled = DEBUG) + public void testDebugging() { + final String ref = "A"; + final String alt = "C"; + + final GlobalEdgeGreedySWPairwiseAlignment 
sw = new GlobalEdgeGreedySWPairwiseAlignment(ref.getBytes(), alt.getBytes(), new Parameters(5.0, -5.0, -25.0, -1.0)); + Assert.assertEquals(sw.getCigar().toString(), "1M"); + } +} diff --git a/public/R/scripts/org/broadinstitute/sting/queue/util/queueJobReport.R b/public/R/scripts/org/broadinstitute/sting/queue/util/queueJobReport.R index 36e6343cb..2bc0a2fa5 100644 --- a/public/R/scripts/org/broadinstitute/sting/queue/util/queueJobReport.R +++ b/public/R/scripts/org/broadinstitute/sting/queue/util/queueJobReport.R @@ -3,6 +3,7 @@ library(ggplot2) library(gplots) library(tools) library(reshape) +library(plyr) # # Standard command line switch. Can we loaded interactively for development @@ -14,7 +15,7 @@ if ( onCMDLine ) { inputFileName = args[1] outputPDF = args[2] } else { - inputFileName = "Q-26618@gsa4.jobreport.txt" + inputFileName = "~/Desktop/broadLocal/projects/pipelinePerformance/FullProcessingPipeline.jobreport.txt" #inputFileName = "/humgen/gsa-hpprojects/dev/depristo/oneOffProjects/Q-25718@node1149.jobreport.txt" #inputFileName = "/humgen/gsa-hpprojects/dev/depristo/oneOffProjects/rodPerformanceGoals/history/report.082711.txt" outputPDF = NA @@ -35,13 +36,11 @@ allJobsFromReport <- function(report) { # # Creates segmentation plots of time (x) vs. 
job (y) with segments for the duration of the job # -plotJobsGantt <- function(gatkReport, sortOverall, includeText) { +plotJobsGantt <- function(gatkReport, sortOverall, title, includeText) { allJobs = allJobsFromReport(gatkReport) if ( sortOverall ) { - title = "All jobs, by analysis, by start time" allJobs = allJobs[order(allJobs$analysisName, allJobs$startTime, decreasing=T), ] } else { - title = "All jobs, sorted by start time" allJobs = allJobs[order(allJobs$startTime, decreasing=T), ] } allJobs$index = 1:nrow(allJobs) @@ -54,11 +53,11 @@ plotJobsGantt <- function(gatkReport, sortOverall, includeText) { p <- p + theme_bw() p <- p + geom_segment(aes(xend=relDoneTime, yend=index), size=1, arrow=arrow(length = unit(0.1, "cm"))) if ( includeText ) - p <- p + geom_text(aes(x=relDoneTime, label=ganttName, hjust=-0.2), size=2) - p <- p + xlim(0, maxRelTime * 1.1) + p <- p + geom_text(aes(x=relStartTime, label=ganttName, hjust=0, vjust=-1), size=2) + p <- p + xlim(0, maxRelTime * 1.3) p <- p + xlab(paste("Start time, relative to first job", RUNTIME_UNITS)) p <- p + ylab("Job number") - p <- p + opts(title=title) + p <- p + ggtitle(title) print(p) } @@ -182,6 +181,27 @@ plotTimeByHost <- function(gatkReportData) { plotMe("Jittered points", geom_jitter) } +mergeScattersForAnalysis <- function(table) { + #allJobs$ganttName = paste(allJobs$jobName, "@", allJobs$exechosts) + + ddply(table, .(analysisName, iteration), summarize, + jobName = analysisName[1], + exechosts = paste(length(exechosts), "hosts"), + formattedStartTime = "NA", + formattedDoneTime = "NA", + intermediate = intermediate[1], + startTime = min(startTime), + doneTime = min(startTime) + sum(runtime), + runtime = sum(runtime)) +} + +mergeScatters <- function(report) { + newReport = list() + for ( name in names(gatkReportData) ) { + newReport[[name]] = mergeScattersForAnalysis(gatkReportData[[name]]) + } + newReport +} # read the table gatkReportData <- gsa.read.gatkreport(inputFileName) @@ -192,13 +212,24 
@@ if ( ! is.na(outputPDF) ) { pdf(outputPDF, height=8.5, width=11) } -plotJobsGantt(gatkReportData, T, F) -plotJobsGantt(gatkReportData, F, F) +plotJobsGantt(gatkReportData, T, "All jobs, by analysis, by start time", F) +plotJobsGantt(gatkReportData, F, "All jobs, sorted by start time", F) plotProgressByTime(gatkReportData) + +# plots summarizing overall costs, merging scattered counts +merged.by.scatter = mergeScatters(gatkReportData) +plotJobsGantt(merged.by.scatter, F, "Jobs merged by scatter by start time", T) + +merged.as.df = do.call(rbind.data.frame, merged.by.scatter)[,c("analysisName", "runtime")] +merged.as.df$percent = merged.as.df$runtime / sum(merged.as.df$runtime) * 100 +merged.as.df.formatted = data.frame(analysisName=merged.as.df$analysisName,runtime=prettyNum(merged.as.df$runtime), percent=prettyNum(merged.as.df$percent,digits=2)) +textplot(merged.as.df.formatted[order(merged.as.df$runtime),], show.rownames=F) +title("Total runtime for each analysis") + plotTimeByHost(gatkReportData) for ( group in gatkReportData ) { - print(group) - plotGroup(group) + #print(group) + plotGroup(group) } if ( ! 
is.na(outputPDF) ) { diff --git a/public/R/scripts/org/broadinstitute/sting/utils/recalibration/BQSR.R b/public/R/scripts/org/broadinstitute/sting/utils/recalibration/BQSR.R index 8a9eecf48..b0055dd10 100644 --- a/public/R/scripts/org/broadinstitute/sting/utils/recalibration/BQSR.R +++ b/public/R/scripts/org/broadinstitute/sting/utils/recalibration/BQSR.R @@ -12,7 +12,27 @@ if ( interactive() ) { args <- commandArgs(TRUE) } data <- read.csv(args[1]) + +data$Recalibration = as.factor(sapply(as.character(data$Recalibration),function(x) { + xu = toupper(x); + if (xu == "ORIGINAL") "BEFORE" else + if (xu == "RECALIBRATED") "AFTER" else + if (xu == "RECALIBRATION") "BQSR" else + xu })); + gsa.report <- gsa.read.gatkreport(args[2]) + +gsa.report$Arguments$Value = as.character(gsa.report$Arguments$Value); +gsa.report$Arguments = subset(gsa.report$Arguments,subset= Argument != "plot_pdf_file"); +if (length(levels(data$Recalibration)) > 1) { + gsa.report$Arguments = subset(gsa.report$Arguments,subset= Argument != "recalibration_report"); +} +gsa.report$Arguments$Value[gsa.report$Argument$Value == "null"] = "None"; + +gsa.report.covariate.argnum = gsa.report$Arguments$Argument == "covariate"; +gsa.report$Arguments$Value[gsa.report.covariate.argnum] = sapply(strsplit(gsa.report$Arguments$Value[gsa.report.covariate.argnum],","),function(x) { + y = sub("(^.+)Covariate","\\1",x); paste(y,collapse=",") } ); + data <- within(data, EventType <- factor(EventType, levels = rev(levels(EventType)))) numRG = length(unique(data$ReadGroup)) @@ -54,31 +74,31 @@ for(cov in levels(data$CovariateName)) { # for each covariate in turn d=rbind(dSub, dIns, dDel) if( cov != "QualityScore" ) { - p <- ggplot(d, aes(x=CovariateValue,y=Accuracy,alpha=log10(Observations))) + + p <- ggplot(d, aes(x=CovariateValue,y=Accuracy,alpha=log10(Observations))) + ylim(min(-10,d$Accuracy),max(10,d$Accuracy)) + geom_abline(intercept=0, slope=0, linetype=2) + xlab(paste(cov,"Covariate")) + ylab("Quality Score 
Accuracy") + blankTheme if(cov == "Cycle") { - b <- p + geom_point(aes(color=Recalibration)) + scale_color_manual(values=c("maroon1","blue")) + facet_grid(.~EventType) + + b <- p + geom_point(aes(color=Recalibration)) + scale_color_manual(values=c("BEFORE"="maroon1","AFTER"="blue","BQSR"="black")) + facet_grid(.~EventType) + opts(axis.text.x=theme_text(angle=90, hjust=0)) p <- ggplot(d, aes(x=CovariateValue,y=AverageReportedQuality,alpha=log10(Observations))) + xlab(paste(cov,"Covariate")) + - ylab("Mean Quality Score") + + ylab("Mean Quality Score") + ylim(0,max(42,d$AverageReportedQuality)) + blankTheme - e <- p + geom_point(aes(color=Recalibration)) + scale_color_manual(values=c("maroon1","blue")) + facet_grid(.~EventType) + + e <- p + geom_point(aes(color=Recalibration)) + scale_color_manual(values=c("BEFORE"="maroon1","AFTER"="blue","BQSR"="black")) + facet_grid(.~EventType) + opts(axis.text.x=theme_text(angle=90, hjust=0)) } else { - c <- p + geom_point(aes(color=Recalibration)) + scale_color_manual(values=c("maroon1","blue")) + facet_grid(.~EventType) + + c <- p + geom_point(aes(color=Recalibration)) + scale_color_manual(values=c("BEFORE"="maroon1","AFTER"="blue","BQSR"="black")) + facet_grid(.~EventType) + opts(axis.text.x=theme_text(angle=90, hjust=0)) + xlab(paste(cov,"Covariate (3 base suffix)")) p <- ggplot(d, aes(x=CovariateValue,y=AverageReportedQuality,alpha=log10(Observations))) + xlab(paste(cov,"Covariate (3 base suffix)")) + ylab("Mean Quality Score") + blankTheme - f <- p + geom_point(aes(color=Recalibration)) + scale_color_manual(values=c("maroon1","blue")) + facet_grid(.~EventType) + + f <- p + geom_point(aes(color=Recalibration)) + scale_color_manual(values=c("BEFORE"="maroon1","AFTER"="blue","BQSR"="black")) + facet_grid(.~EventType) + opts(axis.text.x=theme_text(angle=90, hjust=0)) } @@ -88,14 +108,14 @@ for(cov in levels(data$CovariateName)) { # for each covariate in turn xlab("Reported Quality Score") + ylab("Empirical Quality Score") + 
blankTheme - a <- p + geom_point(aes(color=Recalibration)) + scale_color_manual(values=c("maroon1","blue")) + facet_grid(.~EventType) + a <- p + geom_point(aes(color=Recalibration)) + scale_color_manual(values=c("BEFORE"="maroon1","AFTER"="blue","BQSR"="black")) + facet_grid(.~EventType) p <- ggplot(d, aes(x=CovariateValue)) + xlab(paste(cov,"Covariate")) + ylab("No. of Observations (area normalized)") + blankTheme d <- p + geom_histogram(aes(fill=Recalibration,weight=Observations,y=..ndensity..),alpha=0.6,binwidth=1,position="identity") - d <- d + scale_fill_manual(values=c("maroon1","blue")) + d <- d + scale_fill_manual(values=c("BEFORE"="maroon1","AFTER"="blue","BQSR"="black")) d <- d + facet_grid(.~EventType) # d <- d + scale_y_continuous(formatter="comma") } diff --git a/public/R/src/org/broadinstitute/sting/utils/R/gsalib/DESCRIPTION b/public/R/src/org/broadinstitute/sting/utils/R/gsalib/DESCRIPTION index 6116e8c66..ecf76a95b 100644 --- a/public/R/src/org/broadinstitute/sting/utils/R/gsalib/DESCRIPTION +++ b/public/R/src/org/broadinstitute/sting/utils/R/gsalib/DESCRIPTION @@ -3,8 +3,11 @@ Type: Package Title: Utility functions Version: 1.0 Date: 2010-10-02 +Imports: gplots, ggplot2, png Author: Kiran Garimella -Maintainer: Kiran Garimella +Maintainer: Mauricio Carneiro +BugReports: http://gatkforums.broadinstitute.org Description: Utility functions for GATK NGS analyses License: BSD LazyLoad: yes +NeedsCompilation: no diff --git a/public/R/src/org/broadinstitute/sting/utils/R/gsalib/NAMESPACE b/public/R/src/org/broadinstitute/sting/utils/R/gsalib/NAMESPACE new file mode 100644 index 000000000..0bfe475b4 --- /dev/null +++ b/public/R/src/org/broadinstitute/sting/utils/R/gsalib/NAMESPACE @@ -0,0 +1 @@ +exportPattern("^[^\\.]") \ No newline at end of file diff --git a/public/R/src/org/broadinstitute/sting/utils/R/gsalib/data/tearsheetdrop.jpg b/public/R/src/org/broadinstitute/sting/utils/R/gsalib/data/tearsheetdrop.jpg deleted file mode 100755 index 
c9d480fa0..000000000 Binary files a/public/R/src/org/broadinstitute/sting/utils/R/gsalib/data/tearsheetdrop.jpg and /dev/null differ diff --git a/public/java/src/org/broadinstitute/sting/gatk/GenomeAnalysisEngine.java b/public/java/src/org/broadinstitute/sting/gatk/GenomeAnalysisEngine.java index 82bee7826..c4f1a286d 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/GenomeAnalysisEngine.java +++ b/public/java/src/org/broadinstitute/sting/gatk/GenomeAnalysisEngine.java @@ -60,6 +60,7 @@ import org.broadinstitute.sting.utils.classloader.PluginManager; import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; import org.broadinstitute.sting.utils.exceptions.UserException; import org.broadinstitute.sting.utils.interval.IntervalUtils; +import org.broadinstitute.sting.utils.progressmeter.ProgressMeter; import org.broadinstitute.sting.utils.recalibration.BQSRArgumentSet; import org.broadinstitute.sting.utils.threading.ThreadEfficiencyMonitor; @@ -169,6 +170,14 @@ public class GenomeAnalysisEngine { this.walker = walker; } + /** + * The short name of the current GATK walker as a string + * @return a non-null String + */ + public String getWalkerName() { + return getWalkerName(walker.getClass()); + } + /** * A processed collection of SAM reader identifiers. */ @@ -194,6 +203,11 @@ public class GenomeAnalysisEngine { */ private ThreadEfficiencyMonitor threadEfficiencyMonitor = null; + /** + * The global progress meter we are using to track our progress through the genome + */ + private ProgressMeter progressMeter = null; + /** * Set the reference metadata files to use for this traversal. * @param referenceMetaDataFiles Collection of files and descriptors over which to traverse. 
@@ -202,6 +216,12 @@ public class GenomeAnalysisEngine { this.referenceMetaDataFiles = referenceMetaDataFiles; } + /** + * The maximum runtime of this engine, in nanoseconds, set during engine initialization + * from the GATKArgumentCollection command line value + */ + private long runtimeLimitInNanoseconds = -1; + /** * Static random number generator and seed. */ @@ -252,6 +272,9 @@ public class GenomeAnalysisEngine { if (args.BQSR_RECAL_FILE != null) setBaseRecalibration(args); + // setup the runtime limits + setupRuntimeLimits(args); + // Determine how the threads should be divided between CPU vs. IO. determineThreadAllocation(); @@ -278,9 +301,11 @@ public class GenomeAnalysisEngine { // create the output streams initializeOutputStreams(microScheduler.getOutputTracker()); - logger.info("Creating shard strategy for " + readsDataSource.getReaderIDs().size() + " BAM files"); + // Initializing the shard iterator / BAM schedule might take some time, so let the user know vaguely what's going on + logger.info("Preparing for traversal" + + (readsDataSource.getReaderIDs().size() > 0 ? String.format(" over %d BAM files", readsDataSource.getReaderIDs().size()) : "")); Iterable shardStrategy = getShardStrategy(readsDataSource,microScheduler.getReference(),intervals); - logger.info("Done creating shard strategy"); + logger.info("Done preparing for traversal"); // execute the microscheduler, storing the results return microScheduler.execute(this.walker, shardStrategy); @@ -327,11 +352,18 @@ public class GenomeAnalysisEngine { * @return A collection of available filters. 
*/ public Collection createFilters() { - final List filters = WalkerManager.getReadFilters(walker,this.getFilterManager()); + final List filters = new LinkedList<>(); + + // First add the user requested filters if (this.getArguments().readGroupBlackList != null && this.getArguments().readGroupBlackList.size() > 0) filters.add(new ReadGroupBlackListFilter(this.getArguments().readGroupBlackList)); for(final String filterName: this.getArguments().readFilters) filters.add(this.getFilterManager().createByName(filterName)); + + // now add the walker default filters. This ordering is critical important if + // users need to apply filters that fix up reads that would be removed by default walker filters + filters.addAll(WalkerManager.getReadFilters(walker,this.getFilterManager())); + return Collections.unmodifiableList(filters); } @@ -448,9 +480,8 @@ public class GenomeAnalysisEngine { DownsamplingMethod commandLineMethod = argCollection.getDownsamplingMethod(); DownsamplingMethod walkerMethod = WalkerManager.getDownsamplingMethod(walker); - DownsamplingMethod defaultMethod = DownsamplingMethod.getDefaultDownsamplingMethod(walker); - DownsamplingMethod method = commandLineMethod != null ? commandLineMethod : (walkerMethod != null ? walkerMethod : defaultMethod); + DownsamplingMethod method = commandLineMethod != null ? commandLineMethod : walkerMethod; method.checkCompatibilityWithWalker(walker); return method; } @@ -570,9 +601,9 @@ public class GenomeAnalysisEngine { if (readsDataSource.getSortOrder() != SAMFileHeader.SortOrder.coordinate) throw new UserException.MissortedBAM(SAMFileHeader.SortOrder.coordinate, "Active region walkers can only traverse coordinate-sorted data. 
Please resort your input BAM file(s) or set the Sort Order tag in the header appropriately."); if(intervals == null) - return readsDataSource.createShardIteratorOverMappedReads(new LocusShardBalancer()); + return readsDataSource.createShardIteratorOverMappedReads(new ActiveRegionShardBalancer()); else - return readsDataSource.createShardIteratorOverIntervals(((ActiveRegionWalker)walker).extendIntervals(intervals, this.genomeLocParser, this.getReferenceDataSource().getReference()), new LocusShardBalancer()); + return readsDataSource.createShardIteratorOverIntervals(((ActiveRegionWalker)walker).extendIntervals(intervals, this.genomeLocParser, this.getReferenceDataSource().getReference()), new ActiveRegionShardBalancer()); } else if(walker instanceof ReadWalker || walker instanceof ReadPairWalker || walker instanceof DuplicateWalker) { // Apply special validation to read pair walkers. @@ -1067,22 +1098,52 @@ public class GenomeAnalysisEngine { return CommandLineUtils.createApproximateCommandLineArgumentString(parsingEngine,argumentProviders); } + // ------------------------------------------------------------------------------------- + // + // code for working with progress meter + // + // ------------------------------------------------------------------------------------- + + /** + * Register the global progress meter with this engine + * + * Calling this function more than once will result in an IllegalStateException + * + * @param meter a non-null progress meter + */ + public void registerProgressMeter(final ProgressMeter meter) { + if ( meter == null ) throw new IllegalArgumentException("Meter cannot be null"); + if ( progressMeter != null ) throw new IllegalStateException("Progress meter already set"); + + progressMeter = meter; + } + + /** + * Get the progress meter being used by this engine. 
May be null if no meter has been registered yet + * @return a potentially null pointer to the progress meter + */ + public ProgressMeter getProgressMeter() { + return progressMeter; + } + /** * Does the current runtime in unit exceed the runtime limit, if one has been provided? * - * @param runtime the runtime of this GATK instance in minutes - * @param unit the time unit of runtime * @return false if not limit was requested or if runtime <= the limit, true otherwise */ - public boolean exceedsRuntimeLimit(final long runtime, final TimeUnit unit) { + public boolean exceedsRuntimeLimit() { + if ( progressMeter == null ) + // not yet initialized or not set because of testing + return false; + + final long runtime = progressMeter.getRuntimeInNanosecondsUpdatedPeriodically(); if ( runtime < 0 ) throw new IllegalArgumentException("runtime must be >= 0 but got " + runtime); if ( getArguments().maxRuntime == NO_RUNTIME_LIMIT ) return false; else { - final long actualRuntimeNano = TimeUnit.NANOSECONDS.convert(runtime, unit); final long maxRuntimeNano = getRuntimeLimitInNanoseconds(); - return actualRuntimeNano > maxRuntimeNano; + return runtime > maxRuntimeNano; } } @@ -1090,9 +1151,22 @@ public class GenomeAnalysisEngine { * @return the runtime limit in nanoseconds, or -1 if no limit was specified */ public long getRuntimeLimitInNanoseconds() { - if ( getArguments().maxRuntime == NO_RUNTIME_LIMIT ) - return -1; - else - return TimeUnit.NANOSECONDS.convert(getArguments().maxRuntime, getArguments().maxRuntimeUnits); + return runtimeLimitInNanoseconds; + } + + /** + * Setup the runtime limits for this engine, updating the runtimeLimitInNanoseconds + * as appropriate + * + * @param args the GATKArgumentCollection to retrieve our runtime limits from + */ + private void setupRuntimeLimits(final GATKArgumentCollection args) { + if ( args.maxRuntime == NO_RUNTIME_LIMIT ) + runtimeLimitInNanoseconds = -1; + else if (args.maxRuntime < 0 ) + throw new 
UserException.BadArgumentValue("maxRuntime", "must be >= 0 or == -1 (meaning no limit) but received negative value " + args.maxRuntime); + else { + runtimeLimitInNanoseconds = TimeUnit.NANOSECONDS.convert(args.maxRuntime, args.maxRuntimeUnits); + } } } diff --git a/public/java/src/org/broadinstitute/sting/gatk/ReadMetrics.java b/public/java/src/org/broadinstitute/sting/gatk/ReadMetrics.java index aadb57985..29372abcd 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/ReadMetrics.java +++ b/public/java/src/org/broadinstitute/sting/gatk/ReadMetrics.java @@ -40,37 +40,27 @@ public class ReadMetrics implements Cloneable { private long nRecords; // How many reads have we processed, along with those skipped for various reasons private long nReads; - private long nSkippedReads; - private long nUnmappedReads; - private long nNotPrimary; - private long nBadAlignments; - private long nSkippedIndels; - private long nDuplicates; - private Map counter = new HashMap(); + + // keep track of filtered records by filter type (class) + private Map filterCounter = new HashMap<>(); /** * Combines these metrics with a set of other metrics, storing the results in this class. * @param metrics The metrics to fold into this class. */ - public void incrementMetrics(ReadMetrics metrics) { + public synchronized void incrementMetrics(ReadMetrics metrics) { nRecords += metrics.nRecords; nReads += metrics.nReads; - nSkippedReads += metrics.nSkippedReads; - nUnmappedReads += metrics.nUnmappedReads; - nNotPrimary += metrics.nNotPrimary; - nBadAlignments += metrics.nBadAlignments; - nSkippedIndels += metrics.nSkippedIndels; - nDuplicates += metrics.nDuplicates; - for(Map.Entry counterEntry: metrics.counter.entrySet()) { - Class counterType = counterEntry.getKey(); - long newValue = (counter.containsKey(counterType) ? 
counter.get(counterType) : 0) + counterEntry.getValue(); - counter.put(counterType,newValue); + for(Map.Entry counterEntry: metrics.filterCounter.entrySet()) { + final String counterType = counterEntry.getKey(); + final long newValue = (filterCounter.containsKey(counterType) ? filterCounter.get(counterType) : 0) + counterEntry.getValue(); + filterCounter.put(counterType, newValue); } } /** * Create a copy of the given read metrics. - * @return + * @return a non-null clone */ public ReadMetrics clone() { ReadMetrics newMetrics; @@ -82,33 +72,18 @@ public class ReadMetrics implements Cloneable { } newMetrics.nRecords = nRecords; newMetrics.nReads = nReads; - newMetrics.nSkippedReads = nSkippedReads; - newMetrics.nUnmappedReads = nUnmappedReads; - newMetrics.nNotPrimary = nNotPrimary; - newMetrics.nBadAlignments = nBadAlignments; - newMetrics.nSkippedIndels = nSkippedIndels; - newMetrics.nDuplicates = nDuplicates; - newMetrics.counter = new HashMap(counter); + newMetrics.filterCounter = new HashMap<>(filterCounter); return newMetrics; } - public void incrementFilter(SamRecordFilter filter) { - long c = 0; - if ( counter.containsKey(filter.getClass()) ) { - c = counter.get(filter.getClass()); - } - - counter.put(filter.getClass(), c + 1L); + public void setFilterCount(final String filter, final long count) { + filterCounter.put(filter, count); } public Map getCountsByFilter() { - final TreeMap sortedCounts = new TreeMap(); - for(Map.Entry counterEntry: counter.entrySet()) { - sortedCounts.put(counterEntry.getKey().getSimpleName(),counterEntry.getValue()); - } - return sortedCounts; + return new TreeMap<>(filterCounter); } /** @@ -143,95 +118,4 @@ public class ReadMetrics implements Cloneable { public void incrementNumReadsSeen() { nReads++; } - - /** - * Gets the cumulative number of reads skipped in the course of this run. - * @return Cumulative number of reads skipped in the course of this run. 
- */ - public long getNumSkippedReads() { - return nSkippedReads; - } - - /** - * Increments the cumulative number of reads skipped in the course of this run. - */ - public void incrementNumSkippedReads() { - nSkippedReads++; - } - - /** - * Gets the number of unmapped reads skipped in the course of this run. - * @return The number of unmapped reads skipped. - */ - public long getNumUnmappedReads() { - return nUnmappedReads; - } - - /** - * Increments the number of unmapped reads skipped in the course of this run. - */ - public void incrementNumUnmappedReads() { - nUnmappedReads++; - } - - /** - * - * @return - */ - public long getNumNonPrimaryReads() { - return nNotPrimary; - } - - /** - * - */ - public void incrementNumNonPrimaryReads() { - nNotPrimary++; - } - - /** - * - * @return - */ - public long getNumBadAlignments() { - return nBadAlignments; - } - - /** - * - */ - public void incrementNumBadAlignments() { - nBadAlignments++; - } - - /** - * - * @return - */ - public long getNumSkippedIndels() { - return nSkippedIndels; - } - - /** - * - */ - public void incrementNumSkippedIndels() { - nSkippedIndels++; - } - - /** - * - * @return - */ - public long getNumDuplicates() { - return nDuplicates; - } - - /** - * - */ - public void incrementNumDuplicates() { - nDuplicates++; - } - } diff --git a/public/java/src/org/broadinstitute/sting/gatk/arguments/GATKArgumentCollection.java b/public/java/src/org/broadinstitute/sting/gatk/arguments/GATKArgumentCollection.java index 8d1fa4638..b5113fdea 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/arguments/GATKArgumentCollection.java +++ b/public/java/src/org/broadinstitute/sting/gatk/arguments/GATKArgumentCollection.java @@ -69,8 +69,8 @@ public class GATKArgumentCollection { // // -------------------------------------------------------------------------------------------------------------- - @Argument(fullName = "phone_home", shortName = "et", doc="What kind of GATK run report should we generate? 
STANDARD is the default, can be NO_ET so nothing is posted to the run repository. Please see " + UserException.PHONE_HOME_DOCS_URL + " for details.", required = false) - public GATKRunReport.PhoneHomeOption phoneHomeType = GATKRunReport.PhoneHomeOption.STANDARD; + @Argument(fullName = "phone_home", shortName = "et", doc="What kind of GATK run report should we generate? AWS is the default, can be NO_ET so nothing is posted to the run repository. Please see " + UserException.PHONE_HOME_DOCS_URL + " for details.", required = false) + public GATKRunReport.PhoneHomeOption phoneHomeType = GATKRunReport.PhoneHomeOption.AWS; @Argument(fullName = "gatk_key", shortName = "K", doc="GATK Key file. Required if running with -et NO_ET. Please see " + UserException.PHONE_HOME_DOCS_URL + " for details.", required = false) public File gatkKeyFile = null; @@ -125,7 +125,14 @@ public class GATKArgumentCollection { @Argument(fullName = "downsample_to_fraction", shortName = "dfrac", doc = "Fraction [0.0-1.0] of reads to downsample to", required = false) public Double downsampleFraction = null; - @Argument(fullName = "downsample_to_coverage", shortName = "dcov", doc = "Coverage [integer] to downsample to at any given locus; note that downsampled reads are randomly selected from all possible reads at a locus. For non-locus-based traversals (eg., ReadWalkers), this sets the maximum number of reads at each alignment start position.", required = false) + @Argument(fullName = "downsample_to_coverage", shortName = "dcov", + doc = "Coverage [integer] to downsample to. For locus-based traversals (eg., LocusWalkers and ActiveRegionWalkers)," + + "this controls the maximum depth of coverage at each locus. For non-locus-based traversals (eg., ReadWalkers), " + + "this controls the maximum number of reads sharing the same alignment start position. 
Note that the " + + "coverage target is an approximate goal that is not guaranteed to be met exactly: the GATK's approach " + + "to downsampling is based on even representation of reads from all alignment start positions, and the " + + "downsampling algorithm will under some circumstances retain slightly more coverage than requested.", + required = false) public Integer downsampleCoverage = null; /** @@ -180,6 +187,12 @@ public class GATKArgumentCollection { @Argument(fullName = "allow_potentially_misencoded_quality_scores", shortName="allowPotentiallyMisencodedQuals", doc="Do not fail when encountering base qualities that are too high and that seemingly indicate a problem with the base quality encoding of the BAM file", required = false) public boolean ALLOW_POTENTIALLY_MISENCODED_QUALS = false; + @Argument(fullName="useOriginalQualities", shortName = "OQ", doc = "If set, use the original base quality scores from the OQ tag when present instead of the standard scores", required=false) + public Boolean useOriginalBaseQualities = false; + + @Argument(fullName="defaultBaseQualities", shortName = "DBQ", doc = "If reads are missing some or all base quality scores, this value will be used for all base quality scores", required=false) + public byte defaultBaseQualities = -1; + // -------------------------------------------------------------------------------------------------------------- // // performance log arguments @@ -194,9 +207,6 @@ public class GATKArgumentCollection { @Argument(fullName = "performanceLog", shortName="PF", doc="If provided, a GATK runtime performance log will be written to this file", required = false) public File performanceLog = null; - @Argument(fullName="useOriginalQualities", shortName = "OQ", doc = "If set, use the original base quality scores from the OQ tag when present instead of the standard scores", required=false) - public Boolean useOriginalBaseQualities = false; - // 
-------------------------------------------------------------------------------------------------------------- // // BQSR arguments @@ -260,9 +270,6 @@ public class GATKArgumentCollection { // // -------------------------------------------------------------------------------------------------------------- - @Argument(fullName="defaultBaseQualities", shortName = "DBQ", doc = "If reads are missing some or all base quality scores, this value will be used for all base quality scores", required=false) - public byte defaultBaseQualities = -1; - @Argument(fullName = "validation_strictness", shortName = "S", doc = "How strict should we be with validation", required = false) public SAMFileReader.ValidationStringency strictnessLevel = SAMFileReader.ValidationStringency.SILENT; diff --git a/public/java/src/org/broadinstitute/sting/gatk/arguments/ValidationExclusion.java b/public/java/src/org/broadinstitute/sting/gatk/arguments/ValidationExclusion.java index f8f56f89e..75a68d978 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/arguments/ValidationExclusion.java +++ b/public/java/src/org/broadinstitute/sting/gatk/arguments/ValidationExclusion.java @@ -36,6 +36,8 @@ public class ValidationExclusion { // our validation options public enum TYPE { + ALLOW_N_CIGAR_READS, // ignore the presence of N operators in CIGARs: do not blow up and process reads that contain one or more N operators. + // This exclusion does not have effect on reads that get filtered {@see MalformedReadFilter}. 
ALLOW_UNINDEXED_BAM, // allow bam files that do not have an index; we'll traverse them using monolithic shard ALLOW_UNSET_BAM_SORT_ORDER, // assume that the bam is sorted, even if the SO (sort-order) flag is not set NO_READ_ORDER_VERIFICATION, // do not validate that the reads are in order as we take them from the bam file diff --git a/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/IntervalOverlappingRODsFromStream.java b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/IntervalOverlappingRODsFromStream.java index fe3a0c6ce..3aff745fa 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/IntervalOverlappingRODsFromStream.java +++ b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/IntervalOverlappingRODsFromStream.java @@ -72,8 +72,6 @@ class IntervalOverlappingRODsFromStream { /** * Get the list of RODs overlapping loc from this stream of RODs. * - * Sequential calls to this function must obey the rule that loc2.getStart >= loc1.getStart - * * @param loc the interval to query * @return a non-null RODRecordList containing the overlapping RODs, which may be empty */ @@ -84,7 +82,6 @@ class IntervalOverlappingRODsFromStream { if ( lastQuery != null && loc.getStart() < lastQuery.getStart() ) throw new IllegalArgumentException(String.format("BUG: query interval (%s) starts before the previous interval %s", loc, lastQuery)); - trimCurrentFeaturesToLoc(loc); readOverlappingFutureFeatures(loc); return new RODRecordListImpl(name, subsetToOverlapping(loc, currentFeatures), loc); } @@ -128,11 +125,14 @@ class IntervalOverlappingRODsFromStream { /** * Update function. Remove all elements of currentFeatures that end before loc * + * Must be called by clients periodically when they know they they will never ask for data before + * loc, so that the running cache of RODs doesn't grow out of control. 
+ * * @param loc the location to use */ @Requires("loc != null") @Ensures("currentFeatures.size() <= old(currentFeatures.size())") - private void trimCurrentFeaturesToLoc(final GenomeLoc loc) { + public void trimCurrentFeaturesToLoc(final GenomeLoc loc) { final ListIterator it = currentFeatures.listIterator(); while ( it.hasNext() ) { final GATKFeature feature = it.next(); diff --git a/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/IntervalReferenceOrderedView.java b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/IntervalReferenceOrderedView.java new file mode 100644 index 000000000..5e884ce53 --- /dev/null +++ b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/IntervalReferenceOrderedView.java @@ -0,0 +1,184 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+*/ + +package org.broadinstitute.sting.gatk.datasources.providers; + +import net.sf.picard.util.PeekableIterator; +import org.broadinstitute.sting.gatk.contexts.ReferenceContext; +import org.broadinstitute.sting.gatk.datasources.reads.ReadShard; +import org.broadinstitute.sting.gatk.datasources.rmd.ReferenceOrderedDataSource; +import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; +import org.broadinstitute.sting.gatk.refdata.utils.LocationAwareSeekableRODIterator; +import org.broadinstitute.sting.gatk.refdata.utils.RODRecordList; +import org.broadinstitute.sting.utils.GenomeLoc; +import org.broadinstitute.sting.utils.GenomeLocParser; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + +/** + * a ROD view that allows for requests for RODs that overlap intervals on the genome to produce a RefMetaDataTracker + */ +public class IntervalReferenceOrderedView implements ReferenceOrderedView { + /** a list of the RMDDataState (location->iterators) */ + private final List states = new ArrayList<>(1); + + /** + * Used to get genome locs for reads + */ + protected final GenomeLocParser genomeLocParser; + + /** + * The total extent of all reads in this span. We create iterators from our RODs + * from the start of this span, to the end. 
+ */ + private final GenomeLoc shardSpan; + + /** + * Create a new IntervalReferenceOrderedView taking data from provider and capable of + * servicing ROD overlap requests within the genomic interval span + * + * @param provider a ShardDataProvider to give us data + * @param span a GenomeLoc span, or null indicating take the entire genome + */ + public IntervalReferenceOrderedView(final ShardDataProvider provider, final GenomeLoc span) { + if ( provider == null ) throw new IllegalArgumentException("provider cannot be null"); + if ( provider.hasReferenceOrderedData() && span == null ) throw new IllegalArgumentException("span cannot be null when provider has reference ordered data"); + + this.genomeLocParser = provider.getGenomeLocParser(); + this.shardSpan = span; + provider.register(this); + + // conditional to optimize the case where we don't have any ROD data + if ( provider.hasReferenceOrderedData() && ! shardSpan.isUnmapped() ) { + for (final ReferenceOrderedDataSource dataSource : provider.getReferenceOrderedData()) + states.add(new RMDDataState(dataSource, dataSource.seek(shardSpan))); + } + } + + /** + * Testing constructor + */ + protected IntervalReferenceOrderedView(final GenomeLocParser genomeLocParser, + final GenomeLoc shardSpan, + final List names, + final List> featureSources) { + this.genomeLocParser = genomeLocParser; + this.shardSpan = shardSpan; + for ( int i = 0; i < names.size(); i++ ) + states.add(new RMDDataState(names.get(i), featureSources.get(i))); + } + + public Collection> getConflictingViews() { + List> classes = new ArrayList<>(); + classes.add(ManagingReferenceOrderedView.class); + return classes; + } + + /** + * Get a RefMetaDataTracker containing bindings for all RODs overlapping the start position of loc + * @param loc a GenomeLoc of size == 1 + * @return a non-null RefMetaDataTracker + */ + @Override + public RefMetaDataTracker getReferenceOrderedDataAtLocus(GenomeLoc loc) { + if ( loc == null ) throw new 
IllegalArgumentException("loc cannot be null"); + if ( loc.size() != 1 ) throw new IllegalArgumentException("GenomeLoc must have size == 1 but got " + loc); + return getReferenceOrderedDataForInterval(loc); + } + + /** + * Get a RefMetaDataTracker containing bindings for all RODs overlapping interval + * + * @param interval a non=null interval + * @return a non-null RefMetaDataTracker + */ + public RefMetaDataTracker getReferenceOrderedDataForInterval(final GenomeLoc interval) { + if ( interval == null ) throw new IllegalArgumentException("Interval cannot be null"); + + if ( states.isEmpty() || shardSpan.isUnmapped() ) // optimization for no bindings (common for read walkers) + return RefMetaDataTracker.EMPTY_TRACKER; + else { + final List bindings = new ArrayList<>(states.size()); + for ( final RMDDataState state : states ) + bindings.add(state.stream.getOverlapping(interval)); + return new RefMetaDataTracker(bindings); + } + } + + /** + * Trim down all of the ROD managers so that they only hold ROD bindings wit start >= startOfDataToKeep.getStart() + * + * @param startOfDataToKeep a non-null genome loc + */ + public void trimCurrentFeaturesToLoc(final GenomeLoc startOfDataToKeep) { + if ( startOfDataToKeep == null ) throw new IllegalArgumentException("startOfDataToKeep cannot be null"); + + for ( final RMDDataState state : states ) + state.stream.trimCurrentFeaturesToLoc(startOfDataToKeep); + } + + /** + * Closes the current view. + */ + public void close() { + for (final RMDDataState state : states) + state.close(); + + // Clear out the existing data so that post-close() accesses to this data will fail-fast. + states.clear(); + } + + /** + * Models the traversal state of a given ROD lane. 
+ */ + private static class RMDDataState { + public final ReferenceOrderedDataSource dataSource; + public final IntervalOverlappingRODsFromStream stream; + private final LocationAwareSeekableRODIterator iterator; + + public RMDDataState(ReferenceOrderedDataSource dataSource, LocationAwareSeekableRODIterator iterator) { + this.dataSource = dataSource; + this.iterator = iterator; + this.stream = new IntervalOverlappingRODsFromStream(dataSource.getName(), new PeekableIterator<>(iterator)); + } + + /** + * For testing + */ + public RMDDataState(final String name, final PeekableIterator iterator) { + this.dataSource = null; + this.iterator = null; + this.stream = new IntervalOverlappingRODsFromStream(name, new PeekableIterator<>(iterator)); + } + + public void close() { + if ( dataSource != null ) + dataSource.close( iterator ); + } + } +} + diff --git a/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/LocusReferenceView.java b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/LocusReferenceView.java index d5b7d0487..b5efbc693 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/LocusReferenceView.java +++ b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/LocusReferenceView.java @@ -176,13 +176,13 @@ public class LocusReferenceView extends ReferenceView { /** * Gets the reference context associated with this particular point or extended interval on the genome. - * @param genomeLoc Region for which to retrieve the base(s). If region spans beyond contig end or beoynd current bounds, it will be trimmed down. + * @param genomeLoc Region for which to retrieve the base(s). If region spans beyond contig end or beyond current bounds, it will be trimmed down. * @return The base at the position represented by this genomeLoc. 
*/ public ReferenceContext getReferenceContext( GenomeLoc genomeLoc ) { //validateLocation( genomeLoc ); - GenomeLoc window = genomeLocParser.createGenomeLoc( genomeLoc.getContig(), bounds.getContigIndex(), + GenomeLoc window = genomeLocParser.createGenomeLoc( genomeLoc.getContig(), genomeLoc.getContigIndex(), getWindowStart(genomeLoc), getWindowStop(genomeLoc) ); int refStart = -1; diff --git a/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/ManagingReferenceOrderedView.java b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/ManagingReferenceOrderedView.java index 09b72f5eb..50f2369cb 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/ManagingReferenceOrderedView.java +++ b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/ManagingReferenceOrderedView.java @@ -76,7 +76,8 @@ public class ManagingReferenceOrderedView implements ReferenceOrderedView { * @param loc Locus at which to track. * @return A tracker containing information about this locus. */ - public RefMetaDataTracker getReferenceOrderedDataAtLocus( GenomeLoc loc, ReferenceContext referenceContext ) { + @Override + public RefMetaDataTracker getReferenceOrderedDataAtLocus( GenomeLoc loc ) { if ( states.isEmpty() ) return RefMetaDataTracker.EMPTY_TRACKER; else { diff --git a/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/ReadBasedReferenceOrderedView.java b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/ReadBasedReferenceOrderedView.java index 52f490972..84e27c953 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/ReadBasedReferenceOrderedView.java +++ b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/ReadBasedReferenceOrderedView.java @@ -42,52 +42,9 @@ import java.util.Collection; import java.util.List; /** a ROD view for reads. 
This provides the Read traversals a way of getting a RefMetaDataTracker */ -public class ReadBasedReferenceOrderedView implements View { - // a list of the RMDDataState (location->iterators) - private final List states = new ArrayList(1); - private final static RefMetaDataTracker EMPTY_TRACKER = new RefMetaDataTracker(); - - /** - * Used to get genome locs for reads - */ - private final GenomeLocParser genomeLocParser; - - /** - * The total extent of all reads in this span. We create iterators from our RODs - * from the start of this span, to the end. - */ - private final GenomeLoc shardSpan; - +public class ReadBasedReferenceOrderedView extends IntervalReferenceOrderedView { public ReadBasedReferenceOrderedView(final ShardDataProvider provider) { - this.genomeLocParser = provider.getGenomeLocParser(); - // conditional to optimize the case where we don't have any ROD data - this.shardSpan = provider.getReferenceOrderedData() != null ? ((ReadShard)provider.getShard()).getReadsSpan() : null; - provider.register(this); - - if ( provider.getReferenceOrderedData() != null && ! shardSpan.isUnmapped() ) { - for (ReferenceOrderedDataSource dataSource : provider.getReferenceOrderedData()) - states.add(new RMDDataState(dataSource, dataSource.seek(shardSpan))); - } - } - - - /** - * Testing constructor - */ - protected ReadBasedReferenceOrderedView(final GenomeLocParser genomeLocParser, - final GenomeLoc shardSpan, - final List names, - final List> featureSources) { - this.genomeLocParser = genomeLocParser; - this.shardSpan = shardSpan; - for ( int i = 0; i < names.size(); i++ ) - states.add(new RMDDataState(names.get(i), featureSources.get(i))); - } - - public Collection> getConflictingViews() { - List> classes = new ArrayList>(); - classes.add(ManagingReferenceOrderedView.class); - return classes; + super(provider, provider.hasReferenceOrderedData() ? 
((ReadShard)provider.getShard()).getReadsSpan() : null); } /** @@ -101,60 +58,11 @@ public class ReadBasedReferenceOrderedView implements View { @Ensures("result != null") public RefMetaDataTracker getReferenceOrderedDataForRead(final SAMRecord rec) { if ( rec.getReadUnmappedFlag() ) - // empty RODs for unmapped reads - return new RefMetaDataTracker(); - else - return getReferenceOrderedDataForInterval(genomeLocParser.createGenomeLoc(rec)); - } - - @Requires({"interval != null", "shardSpan == null || shardSpan.isUnmapped() || shardSpan.containsP(interval)"}) - @Ensures("result != null") - public RefMetaDataTracker getReferenceOrderedDataForInterval(final GenomeLoc interval) { - if ( states.isEmpty() || shardSpan.isUnmapped() ) // optimization for no bindings (common for read walkers) - return EMPTY_TRACKER; + return RefMetaDataTracker.EMPTY_TRACKER; else { - final List bindings = new ArrayList(states.size()); - for ( final RMDDataState state : states ) - bindings.add(state.stream.getOverlapping(interval)); - return new RefMetaDataTracker(bindings); - } - } - - /** - * Closes the current view. - */ - public void close() { - for (final RMDDataState state : states) - state.close(); - - // Clear out the existing data so that post-close() accesses to this data will fail-fast. - states.clear(); - } - - /** Models the traversal state of a given ROD lane. 
*/ - private static class RMDDataState { - public final ReferenceOrderedDataSource dataSource; - public final IntervalOverlappingRODsFromStream stream; - private final LocationAwareSeekableRODIterator iterator; - - public RMDDataState(ReferenceOrderedDataSource dataSource, LocationAwareSeekableRODIterator iterator) { - this.dataSource = dataSource; - this.iterator = iterator; - this.stream = new IntervalOverlappingRODsFromStream(dataSource.getName(), new PeekableIterator(iterator)); - } - - /** - * For testing - */ - public RMDDataState(final String name, final PeekableIterator iterator) { - this.dataSource = null; - this.iterator = null; - this.stream = new IntervalOverlappingRODsFromStream(name, new PeekableIterator(iterator)); - } - - public void close() { - if ( dataSource != null ) - dataSource.close( iterator ); + final GenomeLoc readSpan = genomeLocParser.createGenomeLoc(rec); + trimCurrentFeaturesToLoc(readSpan); + return getReferenceOrderedDataForInterval(readSpan); } } } diff --git a/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/ReferenceOrderedView.java b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/ReferenceOrderedView.java index fa83dff82..85c20a6c3 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/ReferenceOrderedView.java +++ b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/ReferenceOrderedView.java @@ -25,10 +25,9 @@ package org.broadinstitute.sting.gatk.datasources.providers; -import org.broadinstitute.sting.gatk.contexts.ReferenceContext; import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; import org.broadinstitute.sting.utils.GenomeLoc; public interface ReferenceOrderedView extends View { - RefMetaDataTracker getReferenceOrderedDataAtLocus( GenomeLoc loc, ReferenceContext refContext ); + RefMetaDataTracker getReferenceOrderedDataAtLocus( GenomeLoc loc ); } diff --git 
a/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/RodLocusView.java b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/RodLocusView.java index 3fb4c7352..1b6c14628 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/RodLocusView.java +++ b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/RodLocusView.java @@ -98,7 +98,8 @@ public class RodLocusView extends LocusView implements ReferenceOrderedView { rodQueue = new RODMergingIterator(iterators); } - public RefMetaDataTracker getReferenceOrderedDataAtLocus( GenomeLoc loc, ReferenceContext referenceContext ) { + @Override + public RefMetaDataTracker getReferenceOrderedDataAtLocus( GenomeLoc loc ) { // special case the interval again -- add it into the ROD if ( interval != null ) { allTracksHere.add(interval); } return new RefMetaDataTracker(allTracksHere); diff --git a/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/ActiveRegionShardBalancer.java b/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/ActiveRegionShardBalancer.java new file mode 100644 index 000000000..febdc788e --- /dev/null +++ b/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/ActiveRegionShardBalancer.java @@ -0,0 +1,85 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. 
+* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. +*/ + +package org.broadinstitute.sting.gatk.datasources.reads; + +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; + +/** + * ActiveRegionShardBalancer + * + * Merges all of the file pointer information for a single contig index into a single + * combined shard. The purpose of doing this is to ensure that the HaplotypeCaller, which + * doesn't support TreeReduction by construction, gets all of the data on a single + * contig together so that the NanoSchedule runs efficiently + */ +public class ActiveRegionShardBalancer extends ShardBalancer { + /** + * Convert iterators of file pointers into balanced iterators of shards. + * @return An iterator over balanced shards. + */ + public Iterator iterator() { + return new Iterator() { + public boolean hasNext() { + return filePointers.hasNext(); + } + + public Shard next() { + FilePointer current = getCombinedFilePointersOnSingleContig(); + + // FilePointers have already been combined as necessary at the IntervalSharder level. No + // need to do so again here. 
+ + return new LocusShard(parser,readsDataSource,current.getLocations(),current.fileSpans); + } + + public void remove() { + throw new UnsupportedOperationException("Unable to remove from shard balancing iterator"); + } + }; + } + + /** + * Combine all of the file pointers in the filePointers iterator into a single combined + * FilePointer that spans all of the file pointers on a single contig + * @return a non-null FilePointer + */ + private FilePointer getCombinedFilePointersOnSingleContig() { + FilePointer current = filePointers.next(); + + final List toCombine = new LinkedList<>(); + toCombine.add(current); + + while ( filePointers.hasNext() && + current.isRegionUnmapped == filePointers.peek().isRegionUnmapped && + (current.getContigIndex() == filePointers.peek().getContigIndex() || current.isRegionUnmapped) ) { + toCombine.add(filePointers.next()); + } + + return FilePointer.union(toCombine, parser); + } +} diff --git a/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/FilePointer.java b/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/FilePointer.java index 56bf5197d..517903da3 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/FilePointer.java +++ b/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/FilePointer.java @@ -407,10 +407,10 @@ public class FilePointer { @Override public String toString() { StringBuilder builder = new StringBuilder(); - builder.append("FilePointer:%n"); + builder.append("FilePointer:\n"); builder.append("\tlocations = {"); builder.append(Utils.join(";",locations)); - builder.append("}%n\tregions = %n"); + builder.append("}\n\tregions = \n"); for(Map.Entry entry: fileSpans.entrySet()) { builder.append(entry.getKey()); builder.append("= {"); diff --git a/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/ReadShardBalancer.java b/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/ReadShardBalancer.java index 7772dbc1f..dc1b80efd 100644 --- 
a/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/ReadShardBalancer.java +++ b/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/ReadShardBalancer.java @@ -177,7 +177,9 @@ public class ReadShardBalancer extends ShardBalancer { currentContigFilePointer = null; List nextContigFilePointers = new ArrayList(); - logger.info("Loading BAM index data for next contig"); + if ( filePointers.hasNext() ) { + logger.info("Loading BAM index data"); + } while ( filePointers.hasNext() ) { @@ -215,8 +217,8 @@ public class ReadShardBalancer extends ShardBalancer { } if ( currentContigFilePointer != null ) { - logger.info("Done loading BAM index data for next contig"); - logger.debug(String.format("Next contig FilePointer: %s", currentContigFilePointer)); + logger.info("Done loading BAM index data"); + logger.debug(String.format("Next FilePointer: %s", currentContigFilePointer)); } } diff --git a/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/SAMDataSource.java b/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/SAMDataSource.java index 1223dd2af..a36667ec4 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/SAMDataSource.java +++ b/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/SAMDataSource.java @@ -440,9 +440,8 @@ public class SAMDataSource { * @return Cumulative read metrics. */ public ReadMetrics getCumulativeReadMetrics() { - synchronized(readMetrics) { - return readMetrics.clone(); - } + // don't return a clone here because the engine uses a pointer to this object + return readMetrics; } /** @@ -450,9 +449,7 @@ public class SAMDataSource { * @param readMetrics The 'incremental' read metrics, to be incorporated into the cumulative metrics. 
*/ public void incorporateReadMetrics(final ReadMetrics readMetrics) { - synchronized(this.readMetrics) { - this.readMetrics.incrementMetrics(readMetrics); - } + this.readMetrics.incrementMetrics(readMetrics); } public StingSAMIterator seek(Shard shard) { @@ -548,7 +545,10 @@ public class SAMDataSource { MergingSamRecordIterator mergingIterator = readers.createMergingIterator(iteratorMap); - return applyDecoratingIterators(shard.getReadMetrics(), + // The readMetrics object being passed in should be that of this dataSource and NOT the shard: the dataSource's + // metrics is intended to keep track of the reads seen (and hence passed to the CountingFilteringIterator when + // we apply the decorators), whereas the shard's metrics is used to keep track the "records" seen. + return applyDecoratingIterators(readMetrics, enableVerification, readProperties.useOriginalBaseQualities(), new ReleasingIterator(readers,StingSAMIteratorAdapter.adapt(mergingIterator)), @@ -625,12 +625,15 @@ public class SAMDataSource { byte defaultBaseQualities, boolean isLocusBasedTraversal ) { - // ************************************************************************************************ // - // * NOTE: ALL FILTERING/DOWNSAMPLING SHOULD BE DONE BEFORE ANY ITERATORS THAT MODIFY THE READS! * // - // * (otherwise we will process something that we may end up throwing away) * // - // ************************************************************************************************ // + // Always apply the ReadFormattingIterator before both ReadFilters and ReadTransformers. At a minimum, + // this will consolidate the cigar strings into canonical form. This has to be done before the read + // filtering, because not all read filters will behave correctly with things like zero-length cigar + // elements. If useOriginalBaseQualities is true or defaultBaseQualities >= 0, this iterator will also + // modify the base qualities. 
+ wrappedIterator = new ReadFormattingIterator(wrappedIterator, useOriginalBaseQualities, defaultBaseQualities); - // Filters: + // Read Filters: these are applied BEFORE downsampling, so that we downsample within the set of reads + // that actually survive filtering. Otherwise we could get much less coverage than requested. wrappedIterator = StingSAMIteratorAdapter.adapt(new CountingFilteringIterator(readMetrics,wrappedIterator,supplementalFilters)); // Downsampling: @@ -654,11 +657,8 @@ public class SAMDataSource { if (!noValidationOfReadOrder && enableVerification) wrappedIterator = new VerifyingSamIterator(wrappedIterator); - if (useOriginalBaseQualities || defaultBaseQualities >= 0) - // only wrap if we are replacing the original qualities or using a default base quality - wrappedIterator = new ReadFormattingIterator(wrappedIterator, useOriginalBaseQualities, defaultBaseQualities); - - // set up read transformers + // Read transformers: these are applied last, so that we don't bother transforming reads that get discarded + // by the read filters or downsampler. 
for ( final ReadTransformer readTransformer : readTransformers ) { if ( readTransformer.enabled() && readTransformer.getApplicationTime() == ReadTransformer.ApplicationTime.ON_INPUT ) wrappedIterator = new ReadTransformingIterator(wrappedIterator, readTransformer); diff --git a/public/java/src/org/broadinstitute/sting/gatk/datasources/reference/ReferenceDataSource.java b/public/java/src/org/broadinstitute/sting/gatk/datasources/reference/ReferenceDataSource.java index 01edd44ba..edd3d324c 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/datasources/reference/ReferenceDataSource.java +++ b/public/java/src/org/broadinstitute/sting/gatk/datasources/reference/ReferenceDataSource.java @@ -68,8 +68,8 @@ public class ReferenceDataSource { final File indexFile = new File(fastaFile.getAbsolutePath() + ".fai"); // determine the name for the dict file - final String fastaExt = fastaFile.getAbsolutePath().endsWith("fa") ? ".fa" : ".fasta"; - final File dictFile = new File(fastaFile.getAbsolutePath().replace(fastaExt, ".dict")); + final String fastaExt = fastaFile.getAbsolutePath().endsWith("fa") ? "\\.fa$" : "\\.fasta$"; + final File dictFile = new File(fastaFile.getAbsolutePath().replaceAll(fastaExt, ".dict")); // It's an error if either the fai or dict file does not exist. The user is now responsible // for creating these files. 
diff --git a/public/java/src/org/broadinstitute/sting/gatk/downsampling/Downsampler.java b/public/java/src/org/broadinstitute/sting/gatk/downsampling/Downsampler.java index 23b16cff2..466ade1ed 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/downsampling/Downsampler.java +++ b/public/java/src/org/broadinstitute/sting/gatk/downsampling/Downsampler.java @@ -25,19 +25,27 @@ package org.broadinstitute.sting.gatk.downsampling; +import org.broadinstitute.sting.utils.locusiterator.AlignmentStateMachine; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; + import java.util.Collection; import java.util.List; /** * The basic downsampler API, with no reads-specific operations. * - * Downsamplers that extend this interface rather than the ReadsDownsampler interface can handle + * Downsamplers that extend this class rather than the ReadsDownsampler class can handle * any kind of item, however they cannot be wrapped within a DownsamplingReadsIterator or a * PerSampleDownsamplingReadsIterator. * * @author David Roazen */ -public interface Downsampler { +public abstract class Downsampler { + + /** + * Number of items discarded by this downsampler since the last call to resetStats() + */ + protected int numDiscardedItems = 0; /** * Submit one item to the downsampler for consideration. Some downsamplers will be able to determine @@ -46,7 +54,7 @@ public interface Downsampler { * * @param item the individual item to submit to the downsampler for consideration */ - public void submit( T item ); + public abstract void submit( final T item ); /** * Submit a collection of items to the downsampler for consideration. 
Should be equivalent to calling @@ -54,21 +62,29 @@ public interface Downsampler { * * @param items the collection of items to submit to the downsampler for consideration */ - public void submit( Collection items ); + public void submit( final Collection items ) { + if ( items == null ) { + throw new IllegalArgumentException("submitted items must not be null"); + } + + for ( final T item : items ) { + submit(item); + } + } /** * Are there items that have survived the downsampling process waiting to be retrieved? * * @return true if this downsampler has > 0 finalized items, otherwise false */ - public boolean hasFinalizedItems(); + public abstract boolean hasFinalizedItems(); /** * Return (and *remove*) all items that have survived downsampling and are waiting to be retrieved. * * @return a list of all finalized items this downsampler contains, or an empty list if there are none */ - public List consumeFinalizedItems(); + public abstract List consumeFinalizedItems(); /** * Are there items stored in this downsampler that it doesn't yet know whether they will @@ -76,7 +92,7 @@ public interface Downsampler { * * @return true if this downsampler has > 0 pending items, otherwise false */ - public boolean hasPendingItems(); + public abstract boolean hasPendingItems(); /** * Peek at the first finalized item stored in this downsampler (or null if there are no finalized items) @@ -84,7 +100,7 @@ public interface Downsampler { * @return the first finalized item in this downsampler (the item is not removed from the downsampler by this call), * or null if there are none */ - public T peekFinalized(); + public abstract T peekFinalized(); /** * Peek at the first pending item stored in this downsampler (or null if there are no pending items) @@ -92,7 +108,7 @@ public interface Downsampler { * @return the first pending item stored in this downsampler (the item is not removed from the downsampler by this call), * or null if there are none */ - public T peekPending(); + public 
abstract T peekPending(); /** * Get the current number of items in this downsampler @@ -103,7 +119,7 @@ public interface Downsampler { * * @return a positive integer */ - public int size(); + public abstract int size(); /** * Returns the number of items discarded (so far) during the downsampling process @@ -111,21 +127,46 @@ public interface Downsampler { * @return the number of items that have been submitted to this downsampler and discarded in the process of * downsampling */ - public int getNumberOfDiscardedItems(); + public int getNumberOfDiscardedItems() { + return numDiscardedItems; + } /** * Used to tell the downsampler that no more items will be submitted to it, and that it should * finalize any pending items. */ - public void signalEndOfInput(); + public abstract void signalEndOfInput(); /** * Empty the downsampler of all finalized/pending items */ - public void clear(); + public abstract void clearItems(); /** * Reset stats in the downsampler such as the number of discarded items *without* clearing the downsampler of items */ - public void reset(); + public void resetStats() { + numDiscardedItems = 0; + } + + /** + * Indicates whether an item should be excluded from elimination during downsampling. By default, + * all items representing reduced reads are excluded from downsampling, but individual downsamplers + * may override if they are able to handle reduced reads correctly. Downsamplers should check + * the return value of this method before discarding an item. + * + * @param item The item to test + * @return true if the item should not be subject to elimination during downsampling, otherwise false + */ + protected boolean doNotDiscardItem( final Object item ) { + // Use getClass() rather than instanceof for performance reasons. Ugly but fast. 
+ if ( item.getClass() == GATKSAMRecord.class ) { + return ((GATKSAMRecord)item).isReducedRead(); + } + else if ( item.getClass() == AlignmentStateMachine.class ) { + return ((AlignmentStateMachine)item).isReducedRead(); + } + + return false; + } } diff --git a/public/java/src/org/broadinstitute/sting/gatk/downsampling/DownsamplingMethod.java b/public/java/src/org/broadinstitute/sting/gatk/downsampling/DownsamplingMethod.java index 5aa27608d..8e92b1ff3 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/downsampling/DownsamplingMethod.java +++ b/public/java/src/org/broadinstitute/sting/gatk/downsampling/DownsamplingMethod.java @@ -61,20 +61,10 @@ public class DownsamplingMethod { public static final DownsampleType DEFAULT_DOWNSAMPLING_TYPE = DownsampleType.BY_SAMPLE; /** - * Default target coverage for locus-based traversals + * Don't allow dcov values below this threshold for locus-based traversals (ie., Locus + * and ActiveRegion walkers), as they can result in problematic downsampling artifacts */ - public static final int DEFAULT_LOCUS_TRAVERSAL_DOWNSAMPLING_COVERAGE = 1000; - - /** - * Default downsampling method for locus-based traversals - */ - public static final DownsamplingMethod DEFAULT_LOCUS_TRAVERSAL_DOWNSAMPLING_METHOD = - new DownsamplingMethod(DEFAULT_DOWNSAMPLING_TYPE, DEFAULT_LOCUS_TRAVERSAL_DOWNSAMPLING_COVERAGE, null); - - /** - * Default downsampling method for read-based traversals - */ - public static final DownsamplingMethod DEFAULT_READ_TRAVERSAL_DOWNSAMPLING_METHOD = NONE; + public static final int MINIMUM_SAFE_COVERAGE_TARGET_FOR_LOCUS_BASED_TRAVERSALS = 200; public DownsamplingMethod( DownsampleType type, Integer toCoverage, Double toFraction ) { @@ -118,6 +108,16 @@ public class DownsamplingMethod { if ( isLocusTraversal && type == DownsampleType.ALL_READS && toCoverage != null ) { throw new UserException("Downsampling to coverage with the ALL_READS method for locus-based traversals (eg., LocusWalkers) is not currently supported 
(though it is supported for ReadWalkers)."); } + + // For locus traversals, ensure that the dcov value (if present) is not problematically low + if ( isLocusTraversal && type != DownsampleType.NONE && toCoverage != null && + toCoverage < MINIMUM_SAFE_COVERAGE_TARGET_FOR_LOCUS_BASED_TRAVERSALS ) { + throw new UserException(String.format("Locus-based traversals (ie., Locus and ActiveRegion walkers) require " + + "a minimum -dcov value of %d when downsampling to coverage. Values less " + + "than this can produce problematic downsampling artifacts while providing " + + "only insignificant improvements in memory usage in most cases.", + MINIMUM_SAFE_COVERAGE_TARGET_FOR_LOCUS_BASED_TRAVERSALS)); + } } public String toString() { @@ -139,13 +139,4 @@ public class DownsamplingMethod { return builder.toString(); } - - public static DownsamplingMethod getDefaultDownsamplingMethod( Walker walker ) { - if ( walker instanceof LocusWalker || walker instanceof ActiveRegionWalker ) { - return DEFAULT_LOCUS_TRAVERSAL_DOWNSAMPLING_METHOD; - } - else { - return DEFAULT_READ_TRAVERSAL_DOWNSAMPLING_METHOD; - } - } } diff --git a/public/java/src/org/broadinstitute/sting/gatk/downsampling/FractionalDownsampler.java b/public/java/src/org/broadinstitute/sting/gatk/downsampling/FractionalDownsampler.java index 1cede9c33..c40f8019e 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/downsampling/FractionalDownsampler.java +++ b/public/java/src/org/broadinstitute/sting/gatk/downsampling/FractionalDownsampler.java @@ -30,7 +30,6 @@ import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; import java.util.ArrayList; -import java.util.Collection; import java.util.List; /** @@ -41,13 +40,11 @@ import java.util.List; * * @author David Roazen */ -public class FractionalDownsampler implements ReadsDownsampler { +public class FractionalDownsampler extends ReadsDownsampler { private ArrayList selectedReads; - private int 
cutoffForInclusion; - - private int numDiscardedItems; + private final int cutoffForInclusion; private static final int RANDOM_POOL_SIZE = 10000; @@ -57,18 +54,19 @@ public class FractionalDownsampler implements ReadsDownsamp * @param fraction Fraction of reads to preserve, between 0.0 (inclusive) and 1.0 (inclusive). * Actual number of reads preserved may differ randomly. */ - public FractionalDownsampler( double fraction ) { + public FractionalDownsampler( final double fraction ) { if ( fraction < 0.0 || fraction > 1.0 ) { throw new ReviewedStingException("Fraction of reads to include must be between 0.0 and 1.0, inclusive"); } cutoffForInclusion = (int)(fraction * RANDOM_POOL_SIZE); - clear(); - reset(); + clearItems(); + resetStats(); } - public void submit( T newRead ) { - if ( GenomeAnalysisEngine.getRandomGenerator().nextInt(10000) < cutoffForInclusion ) { + @Override + public void submit( final T newRead ) { + if ( GenomeAnalysisEngine.getRandomGenerator().nextInt(10000) < cutoffForInclusion || doNotDiscardItem(newRead) ) { selectedReads.add(newRead); } else { @@ -76,61 +74,56 @@ public class FractionalDownsampler implements ReadsDownsamp } } - public void submit( Collection newReads ) { - for ( T read : newReads ) { - submit(read); - } - } - + @Override public boolean hasFinalizedItems() { return selectedReads.size() > 0; } + @Override public List consumeFinalizedItems() { // pass by reference rather than make a copy, for speed List downsampledItems = selectedReads; - clear(); + clearItems(); return downsampledItems; } + @Override public boolean hasPendingItems() { return false; } + @Override public T peekFinalized() { return selectedReads.isEmpty() ? 
null : selectedReads.get(0); } + @Override public T peekPending() { return null; } - public int getNumberOfDiscardedItems() { - return numDiscardedItems; - } - @Override public int size() { return selectedReads.size(); } + @Override public void signalEndOfInput() { // NO-OP } - public void clear() { + @Override + public void clearItems() { selectedReads = new ArrayList(); } - public void reset() { - numDiscardedItems = 0; - } - + @Override public boolean requiresCoordinateSortOrder() { return false; } - public void signalNoMoreReadsBefore( T read ) { + @Override + public void signalNoMoreReadsBefore( final T read ) { // NO-OP } } diff --git a/public/java/src/org/broadinstitute/sting/gatk/downsampling/LevelingDownsampler.java b/public/java/src/org/broadinstitute/sting/gatk/downsampling/LevelingDownsampler.java index 4ff729537..3ce4d09d6 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/downsampling/LevelingDownsampler.java +++ b/public/java/src/org/broadinstitute/sting/gatk/downsampling/LevelingDownsampler.java @@ -46,16 +46,15 @@ import java.util.*; * * @author David Roazen */ -public class LevelingDownsampler, E> implements Downsampler { +public class LevelingDownsampler, E> extends Downsampler { private final int minElementsPerStack; + private final int targetSize; private List groups; private boolean groupsAreFinalized; - private int numDiscardedItems; - /** * Construct a LevelingDownsampler * @@ -65,7 +64,7 @@ public class LevelingDownsampler, E> implements Downsampler * this value -- if it does, items are removed from Lists evenly until the total size * is <= this value */ - public LevelingDownsampler( int targetSize ) { + public LevelingDownsampler( final int targetSize ) { this(targetSize, 1); } @@ -79,55 +78,58 @@ public class LevelingDownsampler, E> implements Downsampler * if a stack has only 3 elements and minElementsPerStack is 3, no matter what * we'll not reduce this stack below 3. 
*/ - public LevelingDownsampler(final int targetSize, final int minElementsPerStack) { + public LevelingDownsampler( final int targetSize, final int minElementsPerStack ) { if ( targetSize < 0 ) throw new IllegalArgumentException("targetSize must be >= 0 but got " + targetSize); if ( minElementsPerStack < 0 ) throw new IllegalArgumentException("minElementsPerStack must be >= 0 but got " + minElementsPerStack); this.targetSize = targetSize; this.minElementsPerStack = minElementsPerStack; - clear(); - reset(); + clearItems(); + resetStats(); } - public void submit( T item ) { + @Override + public void submit( final T item ) { groups.add(item); } - public void submit( Collection items ){ + @Override + public void submit( final Collection items ){ groups.addAll(items); } + @Override public boolean hasFinalizedItems() { return groupsAreFinalized && groups.size() > 0; } + @Override public List consumeFinalizedItems() { if ( ! hasFinalizedItems() ) { return new ArrayList(); } // pass by reference rather than make a copy, for speed - List toReturn = groups; - clear(); + final List toReturn = groups; + clearItems(); return toReturn; } + @Override public boolean hasPendingItems() { return ! groupsAreFinalized && groups.size() > 0; } + @Override public T peekFinalized() { return hasFinalizedItems() ? groups.get(0) : null; } + @Override public T peekPending() { return hasPendingItems() ? 
groups.get(0) : null; } - public int getNumberOfDiscardedItems() { - return numDiscardedItems; - } - @Override public int size() { int s = 0; @@ -137,26 +139,24 @@ public class LevelingDownsampler, E> implements Downsampler return s; } + @Override public void signalEndOfInput() { levelGroups(); groupsAreFinalized = true; } - public void clear() { + @Override + public void clearItems() { groups = new ArrayList(); groupsAreFinalized = false; } - public void reset() { - numDiscardedItems = 0; - } - private void levelGroups() { + final int[] groupSizes = new int[groups.size()]; int totalSize = 0; - int[] groupSizes = new int[groups.size()]; int currentGroupIndex = 0; - for ( T group : groups ) { + for ( final T group : groups ) { groupSizes[currentGroupIndex] = group.size(); totalSize += groupSizes[currentGroupIndex]; currentGroupIndex++; @@ -191,20 +191,18 @@ public class LevelingDownsampler, E> implements Downsampler // Now we actually go through and reduce each group to its new count as specified in groupSizes currentGroupIndex = 0; - for ( T group : groups ) { + for ( final T group : groups ) { downsampleOneGroup(group, groupSizes[currentGroupIndex]); currentGroupIndex++; } } - private void downsampleOneGroup( T group, int numItemsToKeep ) { + private void downsampleOneGroup( final T group, final int numItemsToKeep ) { if ( numItemsToKeep >= group.size() ) { return; } - numDiscardedItems += group.size() - numItemsToKeep; - - BitSet itemsToKeep = new BitSet(group.size()); + final BitSet itemsToKeep = new BitSet(group.size()); for ( Integer selectedIndex : MathUtils.sampleIndicesWithoutReplacement(group.size(), numItemsToKeep) ) { itemsToKeep.set(selectedIndex); } @@ -213,12 +211,13 @@ public class LevelingDownsampler, E> implements Downsampler // If our group is a linked list, we can remove the desired items in a single O(n) pass with an iterator if ( group instanceof LinkedList ) { - Iterator iter = group.iterator(); + final Iterator iter = group.iterator(); while 
( iter.hasNext() ) { - iter.next(); + final E item = iter.next(); - if ( ! itemsToKeep.get(currentIndex) ) { + if ( ! itemsToKeep.get(currentIndex) && ! doNotDiscardItem(item) ) { iter.remove(); + numDiscardedItems++; } currentIndex++; @@ -227,14 +226,15 @@ public class LevelingDownsampler, E> implements Downsampler // If it's not a linked list, it's more efficient to copy the desired items into a new list and back rather // than suffer O(n^2) of item shifting else { - List keptItems = new ArrayList(numItemsToKeep); + final List keptItems = new ArrayList(group.size()); - for ( E item : group ) { - if ( itemsToKeep.get(currentIndex) ) { + for ( final E item : group ) { + if ( itemsToKeep.get(currentIndex) || doNotDiscardItem(item) ) { keptItems.add(item); } currentIndex++; } + numDiscardedItems += group.size() - keptItems.size(); group.clear(); group.addAll(keptItems); } diff --git a/public/java/src/org/broadinstitute/sting/gatk/downsampling/PassThroughDownsampler.java b/public/java/src/org/broadinstitute/sting/gatk/downsampling/PassThroughDownsampler.java index 3aaed6c73..1eabf5038 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/downsampling/PassThroughDownsampler.java +++ b/public/java/src/org/broadinstitute/sting/gatk/downsampling/PassThroughDownsampler.java @@ -27,7 +27,6 @@ package org.broadinstitute.sting.gatk.downsampling; import net.sf.samtools.SAMRecord; -import java.util.Collection; import java.util.LinkedList; import java.util.List; @@ -39,25 +38,21 @@ import java.util.List; * * @author David Roazen */ -public class PassThroughDownsampler implements ReadsDownsampler { +public class PassThroughDownsampler extends ReadsDownsampler { private LinkedList selectedReads; public PassThroughDownsampler() { - clear(); + clearItems(); } + @Override public void submit( T newRead ) { // All reads pass-through, no reads get downsampled selectedReads.add(newRead); } - public void submit( Collection newReads ) { - for ( T read : newReads ) { - submit(read); - 
} - } - + @Override public boolean hasFinalizedItems() { return ! selectedReads.isEmpty(); } @@ -66,50 +61,50 @@ public class PassThroughDownsampler implements ReadsDownsam * Note that this list is a linked list and so doesn't support fast random access * @return */ + @Override public List consumeFinalizedItems() { // pass by reference rather than make a copy, for speed - List downsampledItems = selectedReads; - clear(); + final List downsampledItems = selectedReads; + clearItems(); return downsampledItems; } + @Override public boolean hasPendingItems() { return false; } + @Override public T peekFinalized() { return selectedReads.isEmpty() ? null : selectedReads.getFirst(); } + @Override public T peekPending() { return null; } - public int getNumberOfDiscardedItems() { - return 0; - } - @Override public int size() { return selectedReads.size(); } + @Override public void signalEndOfInput() { // NO-OP } - public void clear() { + @Override + public void clearItems() { selectedReads = new LinkedList(); } - public void reset() { - // NO-OP - } - + @Override public boolean requiresCoordinateSortOrder() { return false; } + @Override public void signalNoMoreReadsBefore( T read ) { // NO-OP } diff --git a/public/java/src/org/broadinstitute/sting/gatk/downsampling/ReadsDownsampler.java b/public/java/src/org/broadinstitute/sting/gatk/downsampling/ReadsDownsampler.java index a878d7553..a8df014e5 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/downsampling/ReadsDownsampler.java +++ b/public/java/src/org/broadinstitute/sting/gatk/downsampling/ReadsDownsampler.java @@ -32,14 +32,14 @@ import net.sf.samtools.SAMRecord; * * @author David Roazen */ -public interface ReadsDownsampler extends Downsampler { +public abstract class ReadsDownsampler extends Downsampler { /** * Does this downsampler require that reads be fed to it in coordinate order? 
* * @return true if reads must be submitted to this downsampler in coordinate order, otherwise false */ - public boolean requiresCoordinateSortOrder(); + public abstract boolean requiresCoordinateSortOrder(); /** * Tell this downsampler that no more reads located before the provided read (according to @@ -52,5 +52,5 @@ public interface ReadsDownsampler extends Downsampler { * @param read the downsampler will assume that no reads located before this read will ever * be submitted to it in the future */ - public void signalNoMoreReadsBefore( T read ); + public abstract void signalNoMoreReadsBefore( final T read ); } diff --git a/public/java/src/org/broadinstitute/sting/gatk/downsampling/ReservoirDownsampler.java b/public/java/src/org/broadinstitute/sting/gatk/downsampling/ReservoirDownsampler.java index 0e6bbfcb6..ff085d17b 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/downsampling/ReservoirDownsampler.java +++ b/public/java/src/org/broadinstitute/sting/gatk/downsampling/ReservoirDownsampler.java @@ -39,7 +39,12 @@ import java.util.*; * * @author David Roazen */ -public class ReservoirDownsampler implements ReadsDownsampler { +public class ReservoirDownsampler extends ReadsDownsampler { + + /** + * size of our reservoir -- ie., the maximum number of reads from the stream that will be retained + * (not including any undiscardable items) + */ private final int targetSampleSize; /** @@ -58,17 +63,33 @@ public class ReservoirDownsampler implements ReadsDownsampl */ private List reservoir; + /** + * Certain items (eg., reduced reads) cannot be discarded at all during downsampling. We store + * these items separately so as not to impact the fair selection of items for inclusion in the + * reservoir. These items are returned (and cleared) along with any items in the reservoir in + * calls to consumeFinalizedItems(). + */ + private List undiscardableItems; + + /** + * Are we currently using a linked list for the reservoir? 
+ */ private boolean isLinkedList; - private int totalReadsSeen; + /** + * Count of the number of reads seen that were actually eligible for discarding. Used by the reservoir downsampling + * algorithm to ensure that all discardable reads have an equal chance of making it into the reservoir. + */ + private int totalDiscardableReadsSeen; - private int numDiscardedItems; /** * Construct a ReservoirDownsampler * * @param targetSampleSize Size of the reservoir used by this downsampler. Number of items retained - * after downsampling will be min(totalReads, targetSampleSize) + * after downsampling will be min(totalDiscardableReads, targetSampleSize) + any + * undiscardable reads (eg., reduced reads). + * * @param expectFewOverflows if true, this downsampler will be optimized for the case * where most of the time we won't fill up anything like the * targetSampleSize elements. If this is false, we will allocate @@ -76,15 +97,15 @@ public class ReservoirDownsampler implements ReadsDownsampl * the cost of allocation if we often use targetSampleSize or more * elements. */ - public ReservoirDownsampler ( final int targetSampleSize, final boolean expectFewOverflows) { + public ReservoirDownsampler ( final int targetSampleSize, final boolean expectFewOverflows ) { if ( targetSampleSize <= 0 ) { throw new ReviewedStingException("Cannot do reservoir downsampling with a sample size <= 0"); } this.targetSampleSize = targetSampleSize; this.expectFewOverflows = expectFewOverflows; - clear(); - reset(); + clearItems(); + resetStats(); } /** @@ -93,15 +114,21 @@ public class ReservoirDownsampler implements ReadsDownsampl * @param targetSampleSize Size of the reservoir used by this downsampler. 
Number of items retained * after downsampling will be min(totalReads, targetSampleSize) */ - public ReservoirDownsampler ( int targetSampleSize ) { + public ReservoirDownsampler ( final int targetSampleSize ) { this(targetSampleSize, false); } + @Override + public void submit ( final T newRead ) { + if ( doNotDiscardItem(newRead) ) { + undiscardableItems.add(newRead); + return; + } - public void submit ( T newRead ) { - totalReadsSeen++; + // Only count reads that are actually eligible for discarding for the purposes of the reservoir downsampling algorithm + totalDiscardableReadsSeen++; - if ( totalReadsSeen <= targetSampleSize ) { + if ( totalDiscardableReadsSeen <= targetSampleSize ) { reservoir.add(newRead); } else { @@ -110,7 +137,7 @@ public class ReservoirDownsampler implements ReadsDownsampl isLinkedList = false; } - final int randomSlot = GenomeAnalysisEngine.getRandomGenerator().nextInt(totalReadsSeen); + final int randomSlot = GenomeAnalysisEngine.getRandomGenerator().nextInt(totalDiscardableReadsSeen); if ( randomSlot < targetSampleSize ) { reservoir.set(randomSlot, newRead); } @@ -118,49 +145,46 @@ public class ReservoirDownsampler implements ReadsDownsampl } } - public void submit ( Collection newReads ) { - for ( T read : newReads ) { - submit(read); - } - } - + @Override public boolean hasFinalizedItems() { - return reservoir.size() > 0; + return ! reservoir.isEmpty() || ! undiscardableItems.isEmpty(); } + @Override public List consumeFinalizedItems() { - if ( reservoir.isEmpty() ) { - // if there's nothing here, don't both allocating a new list completely + if ( ! 
hasFinalizedItems() ) { + // if there's nothing here, don't bother allocating a new list return Collections.emptyList(); } else { - // pass by reference rather than make a copy, for speed - List downsampledItems = reservoir; - clear(); + // pass reservoir by reference rather than make a copy, for speed + final List downsampledItems = reservoir; + downsampledItems.addAll(undiscardableItems); + clearItems(); return downsampledItems; } } + @Override public boolean hasPendingItems() { return false; } + @Override public T peekFinalized() { - return reservoir.isEmpty() ? null : reservoir.get(0); + return ! reservoir.isEmpty() ? reservoir.get(0) : (! undiscardableItems.isEmpty() ? undiscardableItems.get(0) : null); } + @Override public T peekPending() { return null; } - public int getNumberOfDiscardedItems() { - return numDiscardedItems; + @Override + public int size() { + return reservoir.size() + undiscardableItems.size(); } @Override - public int size() { - return reservoir.size(); - } - public void signalEndOfInput() { // NO-OP } @@ -168,25 +192,27 @@ public class ReservoirDownsampler implements ReadsDownsampl /** * Clear the data structures used to hold information */ - public void clear() { + @Override + public void clearItems() { // if we aren't expecting many overflows, allocate a linked list not an arraylist reservoir = expectFewOverflows ? 
new LinkedList() : new ArrayList(targetSampleSize); + // there's no possibility of overflow with the undiscardable items, so we always use a linked list for them + undiscardableItems = new LinkedList<>(); + // it's a linked list if we allocate one isLinkedList = expectFewOverflows; - // an internal stat used by the downsampling process, so not cleared by reset() below - totalReadsSeen = 0; - } - - public void reset() { - numDiscardedItems = 0; + // an internal stat used by the downsampling process, so not cleared by resetStats() below + totalDiscardableReadsSeen = 0; } + @Override public boolean requiresCoordinateSortOrder() { return false; } + @Override public void signalNoMoreReadsBefore( T read ) { // NO-OP } diff --git a/public/java/src/org/broadinstitute/sting/gatk/downsampling/SimplePositionalDownsampler.java b/public/java/src/org/broadinstitute/sting/gatk/downsampling/SimplePositionalDownsampler.java index 7c6c043c2..897e2c05e 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/downsampling/SimplePositionalDownsampler.java +++ b/public/java/src/org/broadinstitute/sting/gatk/downsampling/SimplePositionalDownsampler.java @@ -35,11 +35,11 @@ import java.util.*; * * @author David Roazen */ -public class SimplePositionalDownsampler implements ReadsDownsampler { +public class SimplePositionalDownsampler extends ReadsDownsampler { - private int targetCoverage; + private final int targetCoverage; - private ReservoirDownsampler reservoir; + private final ReservoirDownsampler reservoir; private int currentContigIndex; @@ -51,97 +51,93 @@ public class SimplePositionalDownsampler implements ReadsDo private ArrayList finalizedReads; - private int numDiscardedItems; /** * Construct a SimplePositionalDownsampler * * @param targetCoverage Maximum number of reads that may share any given alignment start position */ - public SimplePositionalDownsampler( int targetCoverage ) { + public SimplePositionalDownsampler( final int targetCoverage ) { this.targetCoverage = 
targetCoverage; reservoir = new ReservoirDownsampler(targetCoverage); finalizedReads = new ArrayList(); - clear(); - reset(); + clearItems(); + resetStats(); } - public void submit( T newRead ) { + @Override + public void submit( final T newRead ) { updatePositionalState(newRead); if ( unmappedReadsReached ) { // don't downsample the unmapped reads at the end of the stream finalizedReads.add(newRead); } else { - int reservoirPreviouslyDiscardedItems = reservoir.getNumberOfDiscardedItems(); + final int reservoirPreviouslyDiscardedItems = reservoir.getNumberOfDiscardedItems(); + // our reservoir downsampler will call doNotDiscardItem() for us to exclude items from elimination as appropriate reservoir.submit(newRead); numDiscardedItems += reservoir.getNumberOfDiscardedItems() - reservoirPreviouslyDiscardedItems; } } - public void submit( Collection newReads ) { - for ( T read : newReads ) { - submit(read); - } - } - + @Override public boolean hasFinalizedItems() { return finalizedReads.size() > 0; } + @Override public List consumeFinalizedItems() { // pass by reference rather than make a copy, for speed - List toReturn = finalizedReads; + final List toReturn = finalizedReads; finalizedReads = new ArrayList(); return toReturn; } + @Override public boolean hasPendingItems() { return reservoir.hasFinalizedItems(); } + @Override public T peekFinalized() { return finalizedReads.isEmpty() ? 
null : finalizedReads.get(0); } + @Override public T peekPending() { return reservoir.peekFinalized(); } - public int getNumberOfDiscardedItems() { - return numDiscardedItems; - } - @Override public int size() { return finalizedReads.size() + reservoir.size(); } + @Override public void signalEndOfInput() { finalizeReservoir(); } - public void clear() { - reservoir.clear(); - reservoir.reset(); + @Override + public void clearItems() { + reservoir.clearItems(); + reservoir.resetStats(); finalizedReads.clear(); positionEstablished = false; unmappedReadsReached = false; } - public void reset() { - numDiscardedItems = 0; - } - + @Override public boolean requiresCoordinateSortOrder() { return true; } - public void signalNoMoreReadsBefore( T read ) { + @Override + public void signalNoMoreReadsBefore( final T read ) { updatePositionalState(read); } - private void updatePositionalState( T newRead ) { + private void updatePositionalState( final T newRead ) { if ( readIsPastCurrentPosition(newRead) ) { if ( reservoir.hasFinalizedItems() ) { finalizeReservoir(); @@ -155,13 +151,13 @@ public class SimplePositionalDownsampler implements ReadsDo } } - private void setCurrentPosition( T read ) { + private void setCurrentPosition( final T read ) { currentContigIndex = read.getReferenceIndex(); currentAlignmentStart = read.getAlignmentStart(); positionEstablished = true; } - private boolean readIsPastCurrentPosition( T read ) { + private boolean readIsPastCurrentPosition( final T read ) { return ! 
positionEstablished || read.getReferenceIndex() > currentContigIndex || read.getAlignmentStart() > currentAlignmentStart || @@ -170,6 +166,6 @@ public class SimplePositionalDownsampler implements ReadsDo private void finalizeReservoir() { finalizedReads.addAll(reservoir.consumeFinalizedItems()); - reservoir.reset(); + reservoir.resetStats(); } } diff --git a/public/java/src/org/broadinstitute/sting/gatk/executive/LinearMicroScheduler.java b/public/java/src/org/broadinstitute/sting/gatk/executive/LinearMicroScheduler.java index 415049228..dc46849df 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/executive/LinearMicroScheduler.java +++ b/public/java/src/org/broadinstitute/sting/gatk/executive/LinearMicroScheduler.java @@ -37,7 +37,6 @@ import org.broadinstitute.sting.gatk.io.DirectOutputTracker; import org.broadinstitute.sting.gatk.io.OutputTracker; import org.broadinstitute.sting.gatk.resourcemanagement.ThreadAllocation; import org.broadinstitute.sting.gatk.traversals.TraversalEngine; -import org.broadinstitute.sting.gatk.traversals.TraverseActiveRegions; import org.broadinstitute.sting.gatk.walkers.Walker; import org.broadinstitute.sting.utils.SampleUtils; import org.broadinstitute.sting.utils.threading.ThreadEfficiencyMonitor; @@ -114,12 +113,6 @@ public class LinearMicroScheduler extends MicroScheduler { done = walker.isDone(); } - // Special function call to empty out the work queue. 
Ugly for now but will be cleaned up when we eventually push this functionality more into the engine - if( traversalEngine instanceof TraverseActiveRegions) { - final Object result = ((TraverseActiveRegions) traversalEngine).endTraversal(walker, accumulator.getReduceInit()); - accumulator.accumulate(null, result); // Assumes only used with StandardAccumulator - } - Object result = accumulator.finishTraversal(); outputTracker.close(); diff --git a/public/java/src/org/broadinstitute/sting/gatk/executive/MicroScheduler.java b/public/java/src/org/broadinstitute/sting/gatk/executive/MicroScheduler.java index dc9dfd77e..7077db49c 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/executive/MicroScheduler.java +++ b/public/java/src/org/broadinstitute/sting/gatk/executive/MicroScheduler.java @@ -52,7 +52,6 @@ import javax.management.ObjectName; import java.io.File; import java.lang.management.ManagementFactory; import java.util.*; -import java.util.concurrent.TimeUnit; /** @@ -120,8 +119,6 @@ public abstract class MicroScheduler implements MicroSchedulerMBean { */ ThreadEfficiencyMonitor threadEfficiencyMonitor = null; - final ProgressMeter progressMeter; - /** * MicroScheduler factory function. Create a microscheduler appropriate for reducing the * selected walker. 
@@ -146,8 +143,6 @@ public abstract class MicroScheduler implements MicroSchedulerMBean { logger.warn(String.format("Number of requested GATK threads %d is more than the number of " + "available processors on this machine %d", threadAllocation.getTotalNumThreads(), Runtime.getRuntime().availableProcessors())); -// if ( threadAllocation.getNumDataThreads() > 1 && threadAllocation.getNumCPUThreadsPerDataThread() > 1) -// throw new UserException("The GATK currently doesn't support running with both -nt > 1 and -nct > 1"); } if ( threadAllocation.getNumDataThreads() > 1 ) { @@ -206,14 +201,14 @@ public abstract class MicroScheduler implements MicroSchedulerMBean { availableTraversalEngines.add(traversalEngine); } - // Create our progress meter - this.progressMeter = new ProgressMeter(progressLogFile, + // Create the progress meter, and register it with the analysis engine + engine.registerProgressMeter(new ProgressMeter(progressLogFile, availableTraversalEngines.peek().getTraversalUnits(), - engine.getRegionsOfGenomeBeingProcessed()); + engine.getRegionsOfGenomeBeingProcessed())); // Now that we have a progress meter, go through and initialize the traversal engines for ( final TraversalEngine traversalEngine : allCreatedTraversalEngines ) - traversalEngine.initialize(engine, walker, progressMeter); + traversalEngine.initialize(engine, walker, engine.getProgressMeter()); // JMX does not allow multiple instances with the same ObjectName to be registered with the same platform MXBean. 
// To get around this limitation and since we have no job identifier at this point, register a simple counter that @@ -245,7 +240,7 @@ public abstract class MicroScheduler implements MicroSchedulerMBean { } else if (walker instanceof ReadPairWalker) { return new TraverseReadPairs(); } else if (walker instanceof ActiveRegionWalker) { - return new TraverseActiveRegions(); + return new TraverseActiveRegions(threadAllocation.getNumCPUThreadsPerDataThread()); } else { throw new UnsupportedOperationException("Unable to determine traversal type, the walker is an unknown type."); } @@ -282,7 +277,7 @@ public abstract class MicroScheduler implements MicroSchedulerMBean { * @return true if we should abort execution, or false otherwise */ protected boolean abortExecution() { - final boolean abort = engine.exceedsRuntimeLimit(progressMeter.getRuntimeInNanoseconds(), TimeUnit.NANOSECONDS); + final boolean abort = engine.exceedsRuntimeLimit(); if ( abort ) { final AutoFormattingTime aft = new AutoFormattingTime(engine.getRuntimeLimitInNanoseconds(), -1, 4); logger.info("Aborting execution (cleanly) because the runtime has exceeded the requested maximum " + aft); @@ -308,7 +303,7 @@ public abstract class MicroScheduler implements MicroSchedulerMBean { * Currently only starts the progress meter timer running, but other start up activities could be incorporated */ protected void startingExecution() { - progressMeter.start(); + engine.getProgressMeter().start(); } /** @@ -330,7 +325,7 @@ public abstract class MicroScheduler implements MicroSchedulerMBean { * Must be called by subclasses when execute is done */ protected void executionIsDone() { - progressMeter.notifyDone(engine.getCumulativeMetrics().getNumIterations()); + engine.getProgressMeter().notifyDone(engine.getCumulativeMetrics().getNumIterations()); printReadFilteringStats(); shutdownTraversalEngines(); @@ -347,12 +342,6 @@ public abstract class MicroScheduler implements MicroSchedulerMBean { * pointers to the traversal 
engines */ public synchronized void shutdownTraversalEngines() { - // no longer applicable because engines are allocated to keys now -// if ( availableTraversalEngines.size() != allCreatedTraversalEngines.size() ) -// throw new IllegalStateException("Shutting down TraversalEngineCreator but not all engines " + -// "have been returned. Expected " + allCreatedTraversalEngines.size() + " but only " + availableTraversalEngines.size() -// + " have been returned"); - for ( final TraversalEngine te : allCreatedTraversalEngines) te.shutdown(); @@ -378,7 +367,7 @@ public abstract class MicroScheduler implements MicroSchedulerMBean { for ( final long countsByFilter : cumulativeMetrics.getCountsByFilter().values()) nSkippedReads += countsByFilter; - logger.info(String.format("%d reads were filtered out during traversal out of %d total (%.2f%%)", + logger.info(String.format("%d reads were filtered out during the traversal out of approximately %d total reads (%.2f%%)", nSkippedReads, cumulativeMetrics.getNumReadsSeen(), 100.0 * MathUtils.ratio(nSkippedReads, cumulativeMetrics.getNumReadsSeen()))); diff --git a/public/java/src/org/broadinstitute/sting/gatk/filters/CountingFilteringIterator.java b/public/java/src/org/broadinstitute/sting/gatk/filters/CountingFilteringIterator.java index 3e50632d9..1942fc19a 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/filters/CountingFilteringIterator.java +++ b/public/java/src/org/broadinstitute/sting/gatk/filters/CountingFilteringIterator.java @@ -1,28 +1,28 @@ -/* -* Copyright (c) 2012 The Broad Institute -* -* Permission is hereby granted, free of charge, to any person -* obtaining a copy of this software and associated documentation -* files (the "Software"), to deal in the Software without -* restriction, including without limitation the rights to use, -* copy, modify, merge, publish, distribute, sublicense, and/or sell -* copies of the Software, and to permit persons to whom the -* Software is furnished to do so, subject to 
the following -* conditions: -* -* The above copyright notice and this permission notice shall be -* included in all copies or substantial portions of the Software. -* -* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR -* THE USE OR OTHER DEALINGS IN THE SOFTWARE. -*/ - +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+*/ + package org.broadinstitute.sting.gatk.filters; import net.sf.picard.filter.SamRecordFilter; @@ -31,9 +31,7 @@ import net.sf.samtools.util.CloseableIterator; import net.sf.samtools.util.CloserUtil; import org.broadinstitute.sting.gatk.ReadMetrics; -import java.util.Collection; -import java.util.Iterator; -import java.util.NoSuchElementException; +import java.util.*; /** * Filtering Iterator which takes a filter and an iterator and iterates @@ -41,11 +39,30 @@ import java.util.NoSuchElementException; * @author Mark DePristo */ public class CountingFilteringIterator implements CloseableIterator { - private final ReadMetrics runtimeMetrics; + private final ReadMetrics globalRuntimeMetrics; + private final ReadMetrics privateRuntimeMetrics; private final Iterator iterator; - private final Collection filters; + private final List filters = new ArrayList<>(); private SAMRecord next = null; + // wrapper around ReadFilters to count the number of filtered reads + private final class CountingReadFilter extends ReadFilter { + protected final ReadFilter readFilter; + protected long counter = 0L; + + public CountingReadFilter(final ReadFilter readFilter) { + this.readFilter = readFilter; + } + + @Override + public boolean filterOut(final SAMRecord record) { + final boolean result = readFilter.filterOut(record); + if ( result ) + counter++; + return result; + } + } + /** * Constructor * @@ -54,9 +71,11 @@ public class CountingFilteringIterator implements CloseableIterator { * @param filters the filter (which may be a FilterAggregator) */ public CountingFilteringIterator(ReadMetrics metrics, Iterator iterator, Collection filters) { - this.runtimeMetrics = metrics; + this.globalRuntimeMetrics = metrics; + privateRuntimeMetrics = new ReadMetrics(); this.iterator = iterator; - this.filters = filters; + for ( final ReadFilter filter : filters ) + this.filters.add(new CountingReadFilter(filter)); next = getNextRecord(); } @@ -95,6 +114,11 @@ public class CountingFilteringIterator 
implements CloseableIterator { public void close() { CloserUtil.close(iterator); + + // update the global metrics with all the data we collected here + globalRuntimeMetrics.incrementMetrics(privateRuntimeMetrics); + for ( final CountingReadFilter filter : filters ) + globalRuntimeMetrics.setFilterCount(filter.readFilter.getClass().getSimpleName(), filter.counter); } /** @@ -105,12 +129,14 @@ public class CountingFilteringIterator implements CloseableIterator { private SAMRecord getNextRecord() { while (iterator.hasNext()) { SAMRecord record = iterator.next(); - runtimeMetrics.incrementNumReadsSeen(); + + // update only the private copy of the metrics so that we don't need to worry about race conditions + // that can arise when trying to update the global copy; it was agreed that this is the cleanest solution. + privateRuntimeMetrics.incrementNumReadsSeen(); boolean filtered = false; for(SamRecordFilter filter: filters) { if(filter.filterOut(record)) { - runtimeMetrics.incrementFilter(filter); filtered = true; break; } diff --git a/public/java/src/org/broadinstitute/sting/gatk/filters/MalformedReadFilter.java b/public/java/src/org/broadinstitute/sting/gatk/filters/MalformedReadFilter.java index f7d1d0297..3167ba139 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/filters/MalformedReadFilter.java +++ b/public/java/src/org/broadinstitute/sting/gatk/filters/MalformedReadFilter.java @@ -25,14 +25,16 @@ package org.broadinstitute.sting.gatk.filters; -import net.sf.samtools.SAMFileHeader; -import net.sf.samtools.SAMRecord; -import net.sf.samtools.SAMSequenceRecord; -import net.sf.samtools.SAMTagUtil; +import net.sf.samtools.*; import org.broadinstitute.sting.commandline.Argument; import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; +import org.broadinstitute.sting.gatk.ReadProperties; +import org.broadinstitute.sting.gatk.arguments.ValidationExclusion; +import org.broadinstitute.sting.gatk.datasources.reads.SAMDataSource; import 
org.broadinstitute.sting.utils.exceptions.UserException; +import java.util.Collections; + /** * Filter out malformed reads. * @@ -40,20 +42,46 @@ import org.broadinstitute.sting.utils.exceptions.UserException; * @version 0.1 */ public class MalformedReadFilter extends ReadFilter { + + + private static final String FILTER_READS_WITH_N_CIGAR_ARGUMENT_FULL_NAME = "filter_reads_with_N_cigar" ; + private SAMFileHeader header; + @Argument(fullName = FILTER_READS_WITH_N_CIGAR_ARGUMENT_FULL_NAME, shortName = "filterRNC", doc = "filter out reads with CIGAR containing the N operator, instead of stop processing and report an error.", required = false) + boolean filterReadsWithNCigar = false; + + @Argument(fullName = "filter_mismatching_base_and_quals", shortName = "filterMBQ", doc = "if a read has mismatching number of bases and base qualities, filter out the read instead of blowing up.", required = false) boolean filterMismatchingBaseAndQuals = false; @Argument(fullName = "filter_bases_not_stored", shortName = "filterNoBases", doc = "if a read has no stored bases (i.e. 
a '*'), filter out the read instead of blowing up.", required = false) boolean filterBasesNotStored = false; + /** + * Indicates the applicable validation exclusions + */ + private boolean allowNCigars; + @Override - public void initialize(GenomeAnalysisEngine engine) { - this.header = engine.getSAMFileHeader(); + public void initialize(final GenomeAnalysisEngine engine) { + header = engine.getSAMFileHeader(); + ValidationExclusion validationExclusions = null; + final SAMDataSource rds = engine.getReadsDataSource(); + if (rds != null) { + final ReadProperties rps = rds.getReadsInfo(); + if (rps != null) { + validationExclusions = rps.getValidationExclusionList(); + } + } + if (validationExclusions == null) { + allowNCigars = false; + } else { + allowNCigars = validationExclusions.contains(ValidationExclusion.TYPE.ALLOW_N_CIGAR_READS); + } } - public boolean filterOut(SAMRecord read) { + public boolean filterOut(final SAMRecord read) { // slowly changing the behavior to blow up first and filtering out if a parameter is explicitly provided return !checkInvalidAlignmentStart(read) || !checkInvalidAlignmentEnd(read) || @@ -61,7 +89,8 @@ public class MalformedReadFilter extends ReadFilter { !checkHasReadGroup(read) || !checkMismatchingBasesAndQuals(read, filterMismatchingBaseAndQuals) || !checkCigarDisagreesWithAlignment(read) || - !checkSeqStored(read, filterBasesNotStored); + !checkSeqStored(read, filterBasesNotStored) || + !checkCigarIsSupported(read,filterReadsWithNCigar,allowNCigars); } private static boolean checkHasReadGroup(final SAMRecord read) { @@ -80,7 +109,7 @@ public class MalformedReadFilter extends ReadFilter { * @param read The read to validate. * @return true if read start is valid, false otherwise. 
*/ - private static boolean checkInvalidAlignmentStart( SAMRecord read ) { + private static boolean checkInvalidAlignmentStart(final SAMRecord read ) { // read is not flagged as 'unmapped', but alignment start is NO_ALIGNMENT_START if( !read.getReadUnmappedFlag() && read.getAlignmentStart() == SAMRecord.NO_ALIGNMENT_START ) return false; @@ -95,7 +124,7 @@ public class MalformedReadFilter extends ReadFilter { * @param read The read to validate. * @return true if read end is valid, false otherwise. */ - private static boolean checkInvalidAlignmentEnd( SAMRecord read ) { + private static boolean checkInvalidAlignmentEnd(final SAMRecord read ) { // Alignment aligns to negative number of bases in the reference. if( !read.getReadUnmappedFlag() && read.getAlignmentEnd() != -1 && (read.getAlignmentEnd()-read.getAlignmentStart()+1)<0 ) return false; @@ -108,11 +137,11 @@ public class MalformedReadFilter extends ReadFilter { * @param read The read to verify. * @return true if alignment agrees with header, false othrewise. */ - private static boolean checkAlignmentDisagreesWithHeader( SAMFileHeader header, SAMRecord read ) { + private static boolean checkAlignmentDisagreesWithHeader(final SAMFileHeader header, final SAMRecord read ) { // Read is aligned to nonexistent contig if( read.getReferenceIndex() == SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX && read.getAlignmentStart() != SAMRecord.NO_ALIGNMENT_START ) return false; - SAMSequenceRecord contigHeader = header.getSequence( read.getReferenceIndex() ); + final SAMSequenceRecord contigHeader = header.getSequence( read.getReferenceIndex() ); // Read is aligned to a point after the end of the contig if( !read.getReadUnmappedFlag() && read.getAlignmentStart() > contigHeader.getSequenceLength() ) return false; @@ -124,7 +153,7 @@ public class MalformedReadFilter extends ReadFilter { * @param read The read to validate. * @return true if cigar agrees with alignment, false otherwise. 
*/ - private static boolean checkCigarDisagreesWithAlignment(SAMRecord read) { + private static boolean checkCigarDisagreesWithAlignment(final SAMRecord read) { // Read has a valid alignment start, but the CIGAR string is empty if( !read.getReadUnmappedFlag() && read.getAlignmentStart() != -1 && @@ -134,19 +163,81 @@ public class MalformedReadFilter extends ReadFilter { return true; } + /** + * Check for unsupported CIGAR operators. + * Currently the N operator is not supported. + * @param read The read to validate. + * @param filterReadsWithNCigar whether the offending read should just + * be silently filtered or not. + * @param allowNCigars whether reads that contain N operators in their CIGARs + * can be processed or an exception should be thrown instead. + * @throws UserException.UnsupportedCigarOperatorException + * if {@link #filterReadsWithNCigar} is false and + * the input read has some unsupported operation. + * @return true if the read CIGAR operations are + * fully supported, otherwise false, as long as + * no exception has been thrown. + */ + private static boolean checkCigarIsSupported(final SAMRecord read, final boolean filterReadsWithNCigar, final boolean allowNCigars) { + if( containsNOperator(read)) { + if (! filterReadsWithNCigar && !allowNCigars) { + throw new UserException.UnsupportedCigarOperatorException( + CigarOperator.N,read, + "Perhaps you are" + + " trying to use RNA-Seq data?" + + " While we are currently actively working to" + + " support this data type unfortunately the" + + " GATK cannot be used with this data in its" + + " current form. 
You have the option of either" + + " filtering out all reads with operator " + + CigarOperator.N + " in their CIGAR string" + + " (please add --" + + FILTER_READS_WITH_N_CIGAR_ARGUMENT_FULL_NAME + + " to your command line) or" + + " assume the risk of processing those reads as they" + + " are including the pertinent unsafe flag (please add -U" + + ' ' + ValidationExclusion.TYPE.ALLOW_N_CIGAR_READS + + " to your command line). Notice however that if you were" + + " to choose the latter, an unspecified subset of the" + + " analytical outputs of an unspecified subset of the tools" + + " will become unpredictable. Consequently the GATK team" + + " might well not be able to provide you with the usual support" + + " with any issue regarding any output"); + } + return ! filterReadsWithNCigar; + } + return true; + } + + private static boolean containsNOperator(final SAMRecord read) { + final Cigar cigar = read.getCigar(); + if (cigar == null) { + return false; + } + for (final CigarElement ce : cigar.getCigarElements()) { + if (ce.getOperator() == CigarOperator.N) { + return true; + } + } + return false; + } + /** * Check if the read has the same number of bases and base qualities * @param read the read to validate * @return true if they have the same number. False otherwise. */ - private static boolean checkMismatchingBasesAndQuals(SAMRecord read, boolean filterMismatchingBaseAndQuals) { - boolean result; + private static boolean checkMismatchingBasesAndQuals(final SAMRecord read, final boolean filterMismatchingBaseAndQuals) { + final boolean result; if (read.getReadLength() == read.getBaseQualities().length) result = true; else if (filterMismatchingBaseAndQuals) result = false; else - throw new UserException.MalformedBAM(read, String.format("BAM file has a read with mismatching number of bases and base qualities. 
" You can use --defaultBaseQualities to assign a default base quality for all reads, but this can be dangerous if you don't know what you are doing."
foundCommandLineHeaderLine ) - vcfHeader.addMetaDataLine(commandLineArgHeaderLine); + // Always add the header line, as the current format allows multiple entries + final VCFHeaderLine commandLineArgHeaderLine = GATKVCFUtils.getCommandLineArgumentHeaderLine(engine, argumentSources); + vcfHeader.addMetaDataLine(commandLineArgHeaderLine); } if ( UPDATE_CONTIG_HEADERS ) @@ -275,13 +271,4 @@ public class VariantContextWriterStub implements Stub, Var getOutputFile() != null && // that are going to disk engine.getArguments().generateShadowBCF; // and we actually want to do it } - - /** - * Gets the appropriately formatted header for a VCF file - * @return VCF file header. - */ - private VCFHeaderLine getCommandLineArgumentHeaderLine() { - CommandLineExecutable executable = JVMUtils.getObjectOfType(argumentSources,CommandLineExecutable.class); - return new VCFHeaderLine(executable.getAnalysisName(), "\"" + engine.createApproximateCommandLineArgumentString(argumentSources.toArray()) + "\""); - } } diff --git a/public/java/src/org/broadinstitute/sting/gatk/iterators/ReadFormattingIterator.java b/public/java/src/org/broadinstitute/sting/gatk/iterators/ReadFormattingIterator.java index c3b4aaa0a..f9d2f4802 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/iterators/ReadFormattingIterator.java +++ b/public/java/src/org/broadinstitute/sting/gatk/iterators/ReadFormattingIterator.java @@ -27,6 +27,7 @@ package org.broadinstitute.sting.gatk.iterators; import net.sf.samtools.SAMRecord; import org.apache.log4j.Logger; +import org.broadinstitute.sting.utils.sam.AlignmentUtils; /** * An iterator which does post-processing of a read, including potentially wrapping @@ -104,6 +105,10 @@ public class ReadFormattingIterator implements StingSAMIterator { public SAMRecord next() { SAMRecord rec = wrappedIterator.next(); + // Always consolidate the cigar string into canonical form, collapsing zero-length / repeated cigar elements. 
+ // Downstream code (like LocusIteratorByState) cannot necessarily handle non-consolidated cigar strings. + rec.setCigar(AlignmentUtils.consolidateCigar(rec.getCigar())); + // if we are using default quals, check if we need them, and add if necessary. // 1. we need if reads are lacking or have incomplete quality scores // 2. we add if defaultBaseQualities has a positive value diff --git a/public/java/src/org/broadinstitute/sting/gatk/phonehome/GATKRunReport.java b/public/java/src/org/broadinstitute/sting/gatk/phonehome/GATKRunReport.java index de84809bd..67d72189c 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/phonehome/GATKRunReport.java +++ b/public/java/src/org/broadinstitute/sting/gatk/phonehome/GATKRunReport.java @@ -78,22 +78,6 @@ public class GATKRunReport { private static final DateFormat DATE_FORMAT = new SimpleDateFormat("yyyy/MM/dd HH.mm.ss"); - /** - * The root file system directory where we keep common report data - */ - private final static File REPORT_DIR = new File("/humgen/gsa-hpprojects/GATK/reports"); - - /** - * The full path to the direct where submitted (and uncharacterized) report files are written - */ - private final static File REPORT_SUBMIT_DIR = new File(REPORT_DIR.getAbsolutePath() + "/submitted"); - - /** - * Full path to the sentinel file that controls whether reports are written out. If this file doesn't - * exist, no long will be written - */ - private final static File REPORT_SENTINEL = new File(REPORT_DIR.getAbsolutePath() + "/ENABLE"); - /** * our log */ @@ -181,8 +165,6 @@ public class GATKRunReport { public enum PhoneHomeOption { /** Disable phone home */ NO_ET, - /** Standard option. Writes to local repository if it can be found, or S3 otherwise */ - STANDARD, /** Forces the report to go to S3 */ AWS, /** Force output to STDOUT. 
For debugging only */ @@ -365,14 +347,9 @@ public class GATKRunReport { switch (type) { case NO_ET: // don't do anything return false; - case STANDARD: case AWS: - if ( type == PhoneHomeOption.STANDARD && repositoryIsOnline() ) { - return postReportToLocalDisk(getLocalReportFullPath()) != null; - } else { - wentToAWS = true; - return postReportToAWSS3() != null; - } + wentToAWS = true; + return postReportToAWSS3() != null; case STDOUT: return postReportToStream(System.out); default: @@ -404,50 +381,6 @@ public class GATKRunReport { } } - /** - * Get the full path as a file where we'll write this report to local disl - * @return a non-null File - */ - @Ensures("result != null") - protected File getLocalReportFullPath() { - return new File(REPORT_SUBMIT_DIR, getReportFileName()); - } - - /** - * Is the local GATKRunReport repository available for writing reports? - * - * @return true if and only if the common run report repository is available and online to receive reports - */ - private boolean repositoryIsOnline() { - return REPORT_SENTINEL.exists(); - } - - - /** - * Main entry point to writing reports to disk. Posts the XML report to the common GATK run report repository. - * If this process fails for any reason, all exceptions are handled and this routine merely prints a warning. - * That is, postReport() is guarenteed not to fail for any reason. 
- * - * @return the path where the file was written, or null if any failure occurred - */ - @Requires("destination != null") - private File postReportToLocalDisk(final File destination) { - try { - final BufferedOutputStream out = new BufferedOutputStream( - new GZIPOutputStream( - new FileOutputStream(destination))); - postReportToStream(out); - out.close(); - logger.debug("Wrote report to " + destination); - return destination; - } catch ( Exception e ) { - // we catch everything, and no matter what eat the error - exceptDuringRunReport("Couldn't read report file", e); - destination.delete(); - return null; - } - } - // --------------------------------------------------------------------------- // // Code for sending reports to s3 diff --git a/public/java/src/org/broadinstitute/sting/gatk/traversals/TAROrderedReadCache.java b/public/java/src/org/broadinstitute/sting/gatk/traversals/TAROrderedReadCache.java index 80da8f8eb..424bd489e 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/traversals/TAROrderedReadCache.java +++ b/public/java/src/org/broadinstitute/sting/gatk/traversals/TAROrderedReadCache.java @@ -43,17 +43,42 @@ import java.util.List; * Time: 11:23 AM */ public class TAROrderedReadCache { - final int maxCapacity; - final Downsampler downsampler; + private final int maxCapacity; + private ArrayList undownsampledCache; + private Downsampler downsampler; + + private static final int UNDOWNSAMPLED_CACHE_MAX_INITIAL_SIZE = 10000; /** * Create a new empty ReadCache * @param maxCapacity the max capacity of the read cache. 
*/ - public TAROrderedReadCache(int maxCapacity) { + public TAROrderedReadCache( final int maxCapacity ) { if ( maxCapacity < 0 ) throw new IllegalArgumentException("maxCapacity must be >= 0 but got " + maxCapacity); this.maxCapacity = maxCapacity; - this.downsampler = new ReservoirDownsampler(maxCapacity); + + // The one we're not currently using will always be null: + initializeUndownsampledCache(); + this.downsampler = null; + } + + /** + * Moves all reads over to the downsampler, causing it to be used from this point on. Should be called + * when the undownsampledCache fills up and we need to start discarding reads. Since the + * ReservoirDownsampler doesn't preserve relative ordering, pop operations become expensive + * after this point, as they require a O(n log n) sort. + */ + private void activateDownsampler() { + downsampler = new ReservoirDownsampler<>(maxCapacity, false); + downsampler.submit(undownsampledCache); + undownsampledCache = null; // preferable to the O(n) clear() method + } + + /** + * Allocate the undownsampled cache used when we have fewer than maxCapacity items + */ + private void initializeUndownsampledCache() { + undownsampledCache = new ArrayList<>(Math.min(maxCapacity + 1, UNDOWNSAMPLED_CACHE_MAX_INITIAL_SIZE)); } /** @@ -68,18 +93,31 @@ public class TAROrderedReadCache { * Add a single read to this cache. Assumed to be in sorted order w.r.t. the previously added reads * @param read a read to add */ - public void add(final GATKSAMRecord read) { + public void add( final GATKSAMRecord read ) { if ( read == null ) throw new IllegalArgumentException("Read cannot be null"); - downsampler.submit(read); + + if ( downsampler != null ) { + downsampler.submit(read); + } + else { + undownsampledCache.add(read); + + // No more room in the undownsampledCache? Time to start downsampling + if ( undownsampledCache.size() > maxCapacity ) { + activateDownsampler(); + } + } } /** * Add a collection of reads to this cache. 
Assumed to be in sorted order w.r.t. the previously added reads and each other * @param reads a collection of reads to add */ - public void addAll(final List reads) { + public void addAll( final List reads ) { if ( reads == null ) throw new IllegalArgumentException("Reads cannot be null"); - downsampler.submit(reads); + for ( final GATKSAMRecord read : reads ) { + add(read); + } } /** @@ -87,40 +125,44 @@ public class TAROrderedReadCache { * @return a positive integer */ public int size() { - return downsampler.size(); + return downsampler != null ? downsampler.size() : undownsampledCache.size(); } /** * How many reads were discarded since the last call to popCurrentReads - * @return + * + * @return number of items discarded during downsampling since last pop operation */ public int getNumDiscarded() { - return downsampler.getNumberOfDiscardedItems(); + return downsampler != null ? downsampler.getNumberOfDiscardedItems() : 0; } /** * Removes all reads currently in the cache, and returns them in sorted order (w.r.t. alignmentStart) * - * Flushes this cache, so after this call the cache will contain no reads and all downsampling stats will - * be reset. + * Flushes this cache, so after this call the cache will contain no reads, and we'll be in the same + * initial state as the constructor would put us in, with a non-null undownsampledCache and a null + * downsampler. 
* * @return a list of GATKSAMRecords in this cache */ public List popCurrentReads() { - final List maybeUnordered = downsampler.consumeFinalizedItems(); + final List poppedReads; - final List ordered; - if ( downsampler.getNumberOfDiscardedItems() == 0 ) { - // haven't discarded anything, so the reads are ordered properly - ordered = maybeUnordered; - } else { - // we need to sort these damn things: O(n log n) - ordered = new ArrayList(maybeUnordered); - Collections.sort(ordered, new AlignmentStartComparator()); + if ( downsampler == null ) { + poppedReads = undownsampledCache; // avoid making a copy here, since we're going to allocate a new cache + } + else { + // If we triggered the downsampler, we need to sort the reads before returning them, + // since the ReservoirDownsampler is not guaranteed to preserve relative ordering of items. + // After consuming the downsampled items in this call to popCurrentReads(), we switch back + // to using the undownsampledCache until we fill up again. 
+ poppedReads = downsampler.consumeFinalizedItems(); // avoid making a copy here + Collections.sort(poppedReads, new AlignmentStartComparator()); + downsampler = null; } - // reset the downsampler stats so getNumberOfDiscardedItems is 0 - downsampler.reset(); - return ordered; + initializeUndownsampledCache(); + return poppedReads; } } diff --git a/public/java/src/org/broadinstitute/sting/gatk/traversals/TraversalEngine.java b/public/java/src/org/broadinstitute/sting/gatk/traversals/TraversalEngine.java index 0811e5e70..529b3ef17 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/traversals/TraversalEngine.java +++ b/public/java/src/org/broadinstitute/sting/gatk/traversals/TraversalEngine.java @@ -100,15 +100,6 @@ public abstract class TraversalEngine,Provide // by default there's nothing to do } - /** - * Update the cumulative traversal metrics according to the data in this shard - * - * @param shard a non-null shard - */ - public void updateCumulativeMetrics(final Shard shard) { - updateCumulativeMetrics(shard.getReadMetrics()); - } - /** * Update the cumulative traversal metrics according to the data in this shard * diff --git a/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseActiveRegions.java b/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseActiveRegions.java index 1daaaf1da..b85365366 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseActiveRegions.java +++ b/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseActiveRegions.java @@ -29,24 +29,32 @@ import com.google.java.contract.Ensures; import com.google.java.contract.Requires; import org.apache.log4j.Logger; import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; -import org.broadinstitute.sting.gatk.WalkerManager; import org.broadinstitute.sting.gatk.contexts.AlignmentContext; import org.broadinstitute.sting.gatk.contexts.ReferenceContext; import org.broadinstitute.sting.gatk.datasources.providers.*; import 
org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; import org.broadinstitute.sting.gatk.walkers.ActiveRegionTraversalParameters; import org.broadinstitute.sting.gatk.walkers.ActiveRegionWalker; -import org.broadinstitute.sting.gatk.walkers.DataSource; import org.broadinstitute.sting.gatk.walkers.Walker; import org.broadinstitute.sting.utils.GenomeLoc; import org.broadinstitute.sting.utils.SampleUtils; import org.broadinstitute.sting.utils.Utils; -import org.broadinstitute.sting.utils.activeregion.*; +import org.broadinstitute.sting.utils.activeregion.ActiveRegion; +import org.broadinstitute.sting.utils.activeregion.ActivityProfile; +import org.broadinstitute.sting.utils.activeregion.ActivityProfileState; +import org.broadinstitute.sting.utils.activeregion.BandPassActivityProfile; +import org.broadinstitute.sting.utils.nanoScheduler.NSMapFunction; +import org.broadinstitute.sting.utils.nanoScheduler.NSProgressFunction; +import org.broadinstitute.sting.utils.nanoScheduler.NSReduceFunction; +import org.broadinstitute.sting.utils.nanoScheduler.NanoScheduler; import org.broadinstitute.sting.utils.progressmeter.ProgressMeter; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import java.io.PrintStream; -import java.util.*; +import java.util.Collection; +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; /** * Implement active region traversal @@ -67,7 +75,8 @@ import java.util.*; * variable spanOfLastReadSeen * */ -public class TraverseActiveRegions extends TraversalEngine,LocusShardDataProvider> { +public final class TraverseActiveRegions extends TraversalEngine,LocusShardDataProvider> { + private final static boolean DEBUG = false; protected final static Logger logger = Logger.getLogger(TraversalEngine.class); protected final static boolean LOG_READ_CARRYING = false; @@ -81,10 +90,49 @@ public class TraverseActiveRegions extends TraversalEngine walker; + + final NanoScheduler nanoScheduler; + + /** + * Data to use in the 
ActiveRegionWalker.map function produced by the NanoScheduler input iterator + */ + private static class MapData { + public ActiveRegion activeRegion; + public RefMetaDataTracker tracker; + + private MapData(ActiveRegion activeRegion, RefMetaDataTracker tracker) { + this.activeRegion = activeRegion; + this.tracker = tracker; + } + } + + /** + * Create a single threaded active region traverser + */ + public TraverseActiveRegions() { + this(1); + } + + /** + * Create an active region traverser that uses nThreads for getting its work done + * @param nThreads number of threads + */ + public TraverseActiveRegions(final int nThreads) { + nanoScheduler = new NanoScheduler<>(nThreads); + nanoScheduler.setProgressFunction(new NSProgressFunction() { + @Override + public void progress(MapData lastActiveRegion) { + if ( lastActiveRegion != null ) + // note, need to use getStopLocation so we don't give an interval to ProgressMeterDaemon + printProgress(lastActiveRegion.activeRegion.getLocation().getStopLocation()); + } + }); + } /** * Have the debugging output streams been initialized already? @@ -98,7 +146,7 @@ public class TraverseActiveRegions extends TraversalEngine)walker; if ( this.walker.wantsExtendedReads() && ! this.walker.wantsNonPrimaryReads() ) { throw new IllegalArgumentException("Active region walker " + this.walker + " requested extended events but not " + "non-primary reads, an inconsistent state. 
Please modify the walker"); @@ -113,13 +161,6 @@ public class TraverseActiveRegions extends TraversalEngine extends TraversalEngine extends TraversalEngine walker, - final LocusShardDataProvider dataProvider, - final LocusView locusView) { - if ( WalkerManager.getWalkerDataSource(walker) != DataSource.REFERENCE_ORDERED_DATA ) - return new ManagingReferenceOrderedView( dataProvider ); - else - return (RodLocusView)locusView; - } - - // ------------------------------------------------------------------------------------- // // Actual traverse function @@ -217,66 +266,140 @@ public class TraverseActiveRegions extends TraversalEngine activeRegionIterator = new ActiveRegionIterator(dataProvider); + final TraverseActiveRegionMap myMap = new TraverseActiveRegionMap(); + final TraverseActiveRegionReduce myReduce = new TraverseActiveRegionReduce(); + final T result = nanoScheduler.execute(activeRegionIterator, myMap, sum, myReduce); - // We keep processing while the next reference location is within the interval - final GenomeLoc locOfLastReadAtTraversalStart = spanOfLastSeenRead(); - - while( locusView.hasNext() ) { - final AlignmentContext locus = locusView.next(); - final GenomeLoc location = locus.getLocation(); - - rememberLastLocusLocation(location); - - // get all of the new reads that appear in the current pileup, and them to our list of reads - // provided we haven't seen them before - final Collection reads = locusView.getLIBS().transferReadsFromAllPreviousPileups(); - for( final GATKSAMRecord read : reads ) { - if ( ! appearedInLastShard(locOfLastReadAtTraversalStart, read) ) { - rememberLastReadLocation(read); - myReads.add(read); - } - } - - // skip this location -- it's not part of our engine intervals - if ( outsideEngineIntervals(location) ) - continue; - - // we've move across some interval boundary, restart profile - final boolean flushProfile = ! 
activityProfile.isEmpty() - && ( activityProfile.getContigIndex() != location.getContigIndex() - || location.getStart() != activityProfile.getStop() + 1); - sum = processActiveRegions(walker, sum, flushProfile, false); - - dataProvider.getShard().getReadMetrics().incrementNumIterations(); - - // create reference context. Note that if we have a pileup of "extended events", the context will - // hold the (longest) stretch of deleted reference bases (if deletions are present in the pileup). - final ReferenceContext refContext = referenceView.getReferenceContext(location); - - // Iterate forward to get all reference ordered data covering this location - final RefMetaDataTracker tracker = referenceOrderedDataView.getReferenceOrderedDataAtLocus(locus.getLocation(), refContext); - - // Call the walkers isActive function for this locus and add them to the list to be integrated later - addIsActiveResult(walker, tracker, refContext, locus); - - maxReadsInMemory = Math.max(myReads.size(), maxReadsInMemory); - printProgress(location); - } - - updateCumulativeMetrics(dataProvider.getShard()); - - return sum; + return result; } - /** - * Special function called in LinearMicroScheduler to empty out the work queue. 
- * Ugly for now but will be cleaned up when we push this functionality more into the engine - */ - public T endTraversal(final Walker walker, T sum) { - return processActiveRegions((ActiveRegionWalker)walker, sum, true, true); + private class ActiveRegionIterator implements Iterator { + private final LocusShardDataProvider dataProvider; + private LinkedList readyActiveRegions = new LinkedList<>(); + private boolean done = false; + private final LocusView locusView; + private final LocusReferenceView referenceView; + private final GenomeLoc locOfLastReadAtTraversalStart; + private final IntervalReferenceOrderedView referenceOrderedDataView; + private final GenomeLoc currentWindow; + private final boolean processRemainingActiveRegions; + + public ActiveRegionIterator( final LocusShardDataProvider dataProvider ) { + this.dataProvider = dataProvider; + locusView = new AllLocusView(dataProvider); + referenceView = new LocusReferenceView( walker, dataProvider ); + + // The data shard may carry a number of locations to process (due to being indexed together). + // This value is just the interval we are processing within the entire provider + currentWindow = dataProvider.getLocus(); + final int currentWindowPos = dataProvider.getShard().getGenomeLocs().indexOf(currentWindow); + if ( currentWindowPos == -1 ) throw new IllegalStateException("Data provider " + dataProvider + " didn't have our current window in it " + currentWindow); + processRemainingActiveRegions = currentWindowPos == dataProvider.getShard().getGenomeLocs().size() - 1; + + // the rodSpan covers all of the bases in the activity profile, including all of the bases + // through the current window interval. This is because we may issue a query to get data for an + // active region spanning before the current interval as far back as the start of the current profile, + // if we have pending work to do that finalizes in this interval. + final GenomeLoc rodSpan = activityProfile.getSpan() == null ? 
currentWindow : activityProfile.getSpan().endpointSpan(currentWindow); + if ( ! dataProvider.getShard().getLocation().containsP(rodSpan) ) throw new IllegalStateException("Rod span " + rodSpan + " isn't contained within the data shard " + dataProvider.getShard().getLocation() + ", meaning we wouldn't get all of the data we need"); + referenceOrderedDataView = new IntervalReferenceOrderedView( dataProvider, rodSpan ); + + // We keep processing while the next reference location is within the interval + locOfLastReadAtTraversalStart = spanOfLastSeenRead(); + + // load in the workQueue the present regions that span the current contig, if it's different from the last one + if ( walkerHasPresetRegions && ( lastRegionProcessed == null || ! currentWindow.onSameContig(lastRegionProcessed)) ) { + loadPresetRegionsForContigToWorkQueue(currentWindow.getContig()); + } + + // remember the last region we processed for sanity checking later + lastRegionProcessed = currentWindow; + } + + @Override public void remove() { throw new UnsupportedOperationException("Cannot remove from ActiveRegionIterator"); } + + @Override + public MapData next() { + return readyActiveRegions.pop(); + } + @Override + public boolean hasNext() { + if ( engine.exceedsRuntimeLimit() ) // too much time has been dedicated to doing work, just stop + return false; + if ( ! 
// get all of the new reads that appear in the current pileup, and add them to our list of reads
+ final ReferenceContext refContext = referenceView.getReferenceContext(location); + + // Iterate forward to get all reference ordered data covering this location + final RefMetaDataTracker tracker = referenceOrderedDataView.getReferenceOrderedDataAtLocus(locus.getLocation()); + + // Call the walkers isActive function for this locus and add them to the list to be integrated later + addIsActiveResult(walker, tracker, refContext, locus); + + maxReadsInMemory = Math.max(myReads.size(), maxReadsInMemory); + printProgress(location); + + if ( ! newActiveRegions.isEmpty() ) { + readyActiveRegions.addAll(newActiveRegions); + if ( DEBUG ) + for ( final MapData region : newActiveRegions ) + logger.info("Adding region to queue for processing " + region.activeRegion); + return true; + } + } + + if ( processRemainingActiveRegions ) { + // we've run out of stuff to process, and since shards now span entire contig boundaries + // we should finalize our regions. This allows us to continue to use our referenceOrderedDataView + // which would otherwise be shut down. Only followed when the microschedule says that we're + // inside of the last window in the current shard + readyActiveRegions.addAll(prepActiveRegionsForProcessing(walker, true, true, referenceOrderedDataView)); + } + + return ! readyActiveRegions.isEmpty(); + } + } } // ------------------------------------------------------------------------------------- @@ -438,7 +561,7 @@ public class TraverseActiveRegions extends TraversalEngine extends TraversalEngine walker, T sum, final boolean flushActivityProfile, final boolean forceAllRegionsToBeActive) { + private List prepActiveRegionsForProcessing(final ActiveRegionWalker walker, + final boolean flushActivityProfile, + final boolean forceAllRegionsToBeActive, + final IntervalReferenceOrderedView referenceOrderedDataView) { if ( ! 
walkerHasPresetRegions ) { // We don't have preset regions, so we get our regions from the activity profile final Collection activeRegions = activityProfile.popReadyActiveRegions(getActiveRegionExtension(), getMinRegionSize(), getMaxRegionSize(), flushActivityProfile); @@ -513,22 +639,26 @@ public class TraverseActiveRegions extends TraversalEngine readyRegions = new LinkedList<>(); while( workQueue.peek() != null ) { final ActiveRegion activeRegion = workQueue.peek(); if ( forceAllRegionsToBeActive || regionCompletelyWithinDeadZone(activeRegion) ) { writeActivityProfile(activeRegion.getSupportingStates()); writeActiveRegion(activeRegion); - sum = processActiveRegion( workQueue.remove(), sum, walker ); + readyRegions.add(prepActiveRegionForProcessing(workQueue.remove(), walker, referenceOrderedDataView)); } else { break; } } - return sum; + return readyRegions; + } - private T processActiveRegion(final ActiveRegion activeRegion, final T sum, final ActiveRegionWalker walker) { - final List stillLive = new LinkedList(); + private MapData prepActiveRegionForProcessing(final ActiveRegion activeRegion, + final ActiveRegionWalker walker, + final IntervalReferenceOrderedView referenceOrderedDataView) { + final List stillLive = new LinkedList<>(); for ( final GATKSAMRecord read : myReads.popCurrentReads() ) { boolean killed = false; final GenomeLoc readLoc = this.engine.getGenomeLocParser().createGenomeLoc( read ); @@ -561,7 +691,28 @@ public class TraverseActiveRegions extends TraversalEngine { + @Override + public M apply(final MapData mapData) { + if ( DEBUG ) logger.info("Executing walker.map for " + mapData.activeRegion + " in thread " + Thread.currentThread().getName()); + return walker.map(mapData.activeRegion, mapData.tracker); + } + } + + private class TraverseActiveRegionReduce implements NSReduceFunction { + @Override + public T apply(M one, T sum) { + return walker.reduce(one, sum); + } } } diff --git 
a/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseDuplicates.java b/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseDuplicates.java index efa042fdb..17f23de8f 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseDuplicates.java +++ b/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseDuplicates.java @@ -196,7 +196,6 @@ public class TraverseDuplicates extends TraversalEngine extends TraversalEngine, final TraverseResults result = traverse( walker, locusView, referenceView, referenceOrderedDataView, sum ); sum = result.reduceResult; dataProvider.getShard().getReadMetrics().incrementNumIterations(result.numIterations); - updateCumulativeMetrics(dataProvider.getShard()); } // We have a final map call to execute here to clean up the skipped based from the @@ -165,7 +164,7 @@ public class TraverseLociNano extends TraversalEngine, @Override public boolean hasNext() { - return locusView.hasNext(); + return locusView.hasNext() && ! 
engine.exceedsRuntimeLimit(); } @Override @@ -180,7 +179,7 @@ public class TraverseLociNano extends TraversalEngine, final ReferenceContext refContext = referenceView.getReferenceContext(location); // Iterate forward to get all reference ordered data covering this location - final RefMetaDataTracker tracker = referenceOrderedDataView.getReferenceOrderedDataAtLocus(location, refContext); + final RefMetaDataTracker tracker = referenceOrderedDataView.getReferenceOrderedDataAtLocus(location); numIterations++; return new MapData(locus, refContext, tracker); diff --git a/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseReadPairs.java b/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseReadPairs.java index aed88509e..764011a48 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseReadPairs.java +++ b/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseReadPairs.java @@ -90,7 +90,6 @@ public class TraverseReadPairs extends TraversalEngine extends TraversalEngine, final Iterator aggregatedInputs = aggregateMapData(dataProvider); final T result = nanoScheduler.execute(aggregatedInputs, myMap, sum, myReduce); - updateCumulativeMetrics(dataProvider.getShard()); - return result; } @@ -133,7 +131,7 @@ public class TraverseReadsNano extends TraversalEngine, final ReadBasedReferenceOrderedView rodView = new ReadBasedReferenceOrderedView(dataProvider); final Iterator readIterator = reads.iterator(); - @Override public boolean hasNext() { return readIterator.hasNext(); } + @Override public boolean hasNext() { return ! 
engine.exceedsRuntimeLimit() && readIterator.hasNext(); } @Override public MapData next() { diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/ActiveRegionWalker.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/ActiveRegionWalker.java index 9595b8f42..962f81d0d 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/ActiveRegionWalker.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/ActiveRegionWalker.java @@ -31,6 +31,7 @@ import org.broad.tribble.Feature; import org.broadinstitute.sting.commandline.*; import org.broadinstitute.sting.gatk.contexts.AlignmentContext; import org.broadinstitute.sting.gatk.contexts.ReferenceContext; +import org.broadinstitute.sting.gatk.downsampling.DownsampleType; import org.broadinstitute.sting.gatk.filters.*; import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; import org.broadinstitute.sting.utils.GenomeLoc; @@ -57,6 +58,7 @@ import java.util.*; @PartitionBy(PartitionType.READ) @ActiveRegionTraversalParameters(extension=50,maxRegion=1500) @ReadFilters({UnmappedReadFilter.class, NotPrimaryAlignmentFilter.class, DuplicateReadFilter.class, FailsVendorQualityCheckFilter.class, MappingQualityUnavailableFilter.class}) +@Downsample(by = DownsampleType.BY_SAMPLE, toCoverage = 1000) @RemoveProgramRecords public abstract class ActiveRegionWalker extends Walker { /** diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/LocusWalker.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/LocusWalker.java index 788bf11f9..9997723b8 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/LocusWalker.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/LocusWalker.java @@ -27,6 +27,7 @@ package org.broadinstitute.sting.gatk.walkers; import org.broadinstitute.sting.gatk.contexts.AlignmentContext; import org.broadinstitute.sting.gatk.contexts.ReferenceContext; +import org.broadinstitute.sting.gatk.downsampling.DownsampleType; import 
org.broadinstitute.sting.gatk.filters.DuplicateReadFilter; import org.broadinstitute.sting.gatk.filters.FailsVendorQualityCheckFilter; import org.broadinstitute.sting.gatk.filters.NotPrimaryAlignmentFilter; @@ -44,6 +45,7 @@ import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; @Requires({DataSource.READS,DataSource.REFERENCE}) @PartitionBy(PartitionType.LOCUS) @ReadFilters({UnmappedReadFilter.class,NotPrimaryAlignmentFilter.class,DuplicateReadFilter.class,FailsVendorQualityCheckFilter.class}) +@Downsample(by = DownsampleType.BY_SAMPLE, toCoverage = 1000) @RemoveProgramRecords public abstract class LocusWalker extends Walker { // Do we actually want to operate on the context? diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/Walker.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/Walker.java index 522414c00..40485596d 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/Walker.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/Walker.java @@ -29,6 +29,7 @@ import net.sf.samtools.SAMSequenceDictionary; import org.apache.log4j.Logger; import org.broadinstitute.sting.gatk.CommandLineGATK; import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; +import org.broadinstitute.sting.gatk.downsampling.DownsampleType; import org.broadinstitute.sting.gatk.filters.MalformedReadFilter; import org.broadinstitute.sting.gatk.iterators.ReadTransformer; import org.broadinstitute.sting.gatk.samples.Sample; @@ -50,6 +51,7 @@ import java.util.List; */ @ReadFilters(MalformedReadFilter.class) @PartitionBy(PartitionType.NONE) +@Downsample(by = DownsampleType.NONE) @BAQMode(QualityMode = BAQ.QualityMode.OVERWRITE_QUALS, ApplicationTime = ReadTransformer.ApplicationTime.ON_INPUT) @BQSRMode(ApplicationTime = ReadTransformer.ApplicationTime.ON_INPUT) @DocumentedGATKFeature(groupName = "Uncategorized", extraDocs = {CommandLineGATK.class}) diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/SnpEff.java 
b/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/SnpEff.java index 288196d1b..8c068d3e4 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/SnpEff.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/SnpEff.java @@ -49,7 +49,7 @@ import java.util.regex.Pattern; * *

See http://snpeff.sourceforge.net/ for more information on the SnpEff tool

. * - *

For each variant, this tol chooses one of the effects of highest biological impact from the SnpEff + *

For each variant, this tool chooses one of the effects of highest biological impact from the SnpEff * output file (which must be provided on the command line via --snpEffFile filename.vcf), * and adds annotations on that effect.

* diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/VariantAnnotator.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/VariantAnnotator.java index f2bd6c14c..10ba4ca17 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/VariantAnnotator.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/VariantAnnotator.java @@ -180,9 +180,6 @@ public class VariantAnnotator extends RodWalker implements Ann @Argument(fullName="MendelViolationGenotypeQualityThreshold",shortName="mvq",required=false,doc="The genotype quality threshold in order to annotate mendelian violation ratio") public double minGenotypeQualityP = 0.0; - @Argument(fullName="requireStrictAlleleMatch", shortName="strict", doc="If provided only comp tracks that exactly match both reference and alternate alleles will be counted as concordant", required=false) - protected boolean requireStrictAlleleMatch = false; - private VariantAnnotatorEngine engine; /** @@ -204,7 +201,6 @@ public class VariantAnnotator extends RodWalker implements Ann else engine = new VariantAnnotatorEngine(annotationGroupsToUse, annotationsToUse, annotationsToExclude, this, getToolkit()); engine.initializeExpressions(expressionsToUse); - engine.setRequireStrictAlleleMatch(requireStrictAlleleMatch); // setup the header fields // note that if any of the definitions conflict with our new ones, then we want to overwrite the old ones diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/VariantAnnotatorEngine.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/VariantAnnotatorEngine.java index 695868bb1..078a36dd9 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/VariantAnnotatorEngine.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/VariantAnnotatorEngine.java @@ -34,26 +34,23 @@ import org.broadinstitute.sting.gatk.contexts.ReferenceContext; import 
org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.*; import org.broadinstitute.sting.utils.GenomeLoc; -import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; -import org.broadinstitute.sting.utils.variant.GATKVCFUtils; -import org.broadinstitute.variant.vcf.*; import org.broadinstitute.sting.utils.exceptions.UserException; +import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; import org.broadinstitute.variant.variantcontext.*; +import org.broadinstitute.variant.vcf.*; import java.util.*; public class VariantAnnotatorEngine { - private List requestedInfoAnnotations = Collections.emptyList(); private List requestedGenotypeAnnotations = Collections.emptyList(); - private List requestedExpressions = new ArrayList(); + private List requestedExpressions = new ArrayList<>(); - private final HashMap, String> dbAnnotations = new HashMap, String>(); private final AnnotatorCompatible walker; private final GenomeAnalysisEngine toolkit; - private boolean requireStrictAlleleMatch = false; + VariantOverlapAnnotator variantOverlapAnnotator = null; protected static class VAExpression { @@ -85,7 +82,7 @@ public class VariantAnnotatorEngine { requestedInfoAnnotations = AnnotationInterfaceManager.createAllInfoFieldAnnotations(); requestedGenotypeAnnotations = AnnotationInterfaceManager.createAllGenotypeAnnotations(); excludeAnnotations(annotationsToExclude); - initializeDBs(); + initializeDBs(toolkit); } // use this constructor if you want to select specific annotations (and/or interfaces) @@ -93,14 +90,7 @@ public class VariantAnnotatorEngine { this.walker = walker; this.toolkit = toolkit; initializeAnnotations(annotationGroupsToUse, annotationsToUse, annotationsToExclude); - initializeDBs(); - } - - // experimental constructor for active region traversal - public VariantAnnotatorEngine(GenomeAnalysisEngine toolkit) { - this.walker = null; - this.toolkit = toolkit; - 
requestedInfoAnnotations = AnnotationInterfaceManager.createInfoFieldAnnotations(Arrays.asList("ActiveRegionBasedAnnotation"), Collections.emptyList()); + initializeDBs(toolkit); } // select specific expressions to use @@ -138,16 +128,19 @@ public class VariantAnnotatorEngine { requestedGenotypeAnnotations = tempRequestedGenotypeAnnotations; } - private void initializeDBs() { - + private void initializeDBs(final GenomeAnalysisEngine engine) { // check to see whether comp rods were included - final RodBinding dbsnp = walker.getDbsnpRodBinding(); - if ( dbsnp != null && dbsnp.isBound() ) - dbAnnotations.put(dbsnp, VCFConstants.DBSNP_KEY); + RodBinding dbSNPBinding = walker.getDbsnpRodBinding(); + if ( dbSNPBinding != null && ! dbSNPBinding.isBound() ) + dbSNPBinding = null; - final List> comps = walker.getCompRodBindings(); - for ( RodBinding rod : comps ) - dbAnnotations.put(rod, rod.getName()); + final Map, String> overlapBindings = new LinkedHashMap<>(); + for ( final RodBinding b : walker.getCompRodBindings()) + if ( b.isBound() ) overlapBindings.put(b, b.getName()); + if ( dbSNPBinding != null && ! 
overlapBindings.keySet().contains(VCFConstants.DBSNP_KEY) ) + overlapBindings.put(dbSNPBinding, VCFConstants.DBSNP_KEY); // add overlap detection with DBSNP by default + + variantOverlapAnnotator = new VariantOverlapAnnotator(dbSNPBinding, overlapBindings, engine.getGenomeLocParser()); } public void invokeAnnotationInitializationMethods( Set headerLines ) { @@ -161,14 +154,13 @@ public class VariantAnnotatorEngine { } public Set getVCFAnnotationDescriptions() { - Set descriptions = new HashSet(); for ( InfoFieldAnnotation annotation : requestedInfoAnnotations ) descriptions.addAll(annotation.getDescriptions()); for ( GenotypeAnnotation annotation : requestedGenotypeAnnotations ) descriptions.addAll(annotation.getDescriptions()); - for ( String db : dbAnnotations.values() ) { + for ( String db : variantOverlapAnnotator.getOverlapNames() ) { if ( VCFStandardHeaderLines.getInfoLine(db, false) != null ) descriptions.add(VCFStandardHeaderLines.getInfoLine(db)); else @@ -178,10 +170,6 @@ public class VariantAnnotatorEngine { return descriptions; } - public void setRequireStrictAlleleMatch( final boolean requireStrictAlleleMatch ) { - this.requireStrictAlleleMatch = requireStrictAlleleMatch; - } - public VariantContext annotateContext(final RefMetaDataTracker tracker, final ReferenceContext ref, final Map stratifiedContexts, @@ -192,13 +180,10 @@ public class VariantAnnotatorEngine { public VariantContext annotateContext(final RefMetaDataTracker tracker, final ReferenceContext ref, final Map stratifiedContexts, - VariantContext vc, + final VariantContext vc, final Map perReadAlleleLikelihoodMap) { Map infoAnnotations = new LinkedHashMap(vc.getAttributes()); - // annotate db occurrences - vc = annotateDBs(tracker, ref.getLocus(), vc, infoAnnotations); - // annotate expressions where available annotateExpressions(tracker, ref.getLocus(), infoAnnotations); @@ -213,11 +198,16 @@ public class VariantAnnotatorEngine { VariantContextBuilder builder = new 
VariantContextBuilder(vc).attributes(infoAnnotations); // annotate genotypes, creating another new VC in the process - return builder.genotypes(annotateGenotypes(tracker, ref, stratifiedContexts, vc, perReadAlleleLikelihoodMap)).make(); + final VariantContext annotated = builder.genotypes(annotateGenotypes(tracker, ref, stratifiedContexts, vc, perReadAlleleLikelihoodMap)).make(); + + // annotate db occurrences + return annotateDBs(tracker, annotated); } - public VariantContext annotateContext(final Map perReadAlleleLikelihoodMap, VariantContext vc) { - Map infoAnnotations = new LinkedHashMap(vc.getAttributes()); + public VariantContext annotateContextForActiveRegion(final RefMetaDataTracker tracker, + final Map perReadAlleleLikelihoodMap, + final VariantContext vc) { + final Map infoAnnotations = new LinkedHashMap<>(vc.getAttributes()); // go through all the requested info annotationTypes for ( InfoFieldAnnotation annotationType : requestedInfoAnnotations ) { @@ -231,76 +221,26 @@ public class VariantAnnotatorEngine { } // generate a new annotated VC - VariantContextBuilder builder = new VariantContextBuilder(vc).attributes(infoAnnotations); + final VariantContextBuilder builder = new VariantContextBuilder(vc).attributes(infoAnnotations); // annotate genotypes, creating another new VC in the process - return builder.genotypes(annotateGenotypes(null, null, null, vc, perReadAlleleLikelihoodMap)).make(); + final VariantContext annotated = builder.genotypes(annotateGenotypes(null, null, null, vc, perReadAlleleLikelihoodMap)).make(); + + // annotate db occurrences + return annotateDBs(tracker, annotated); } /** * Annotate the ID field and other DBs for the given Variant Context * * @param tracker ref meta data tracker (cannot be null) - * @param loc location of the vc * @param vc variant context to annotate * @return non-null annotated version of vc */ - @Requires({"tracker != null && loc != null && vc != null"}) - @Ensures("result != null") - public VariantContext 
annotateDBs(final RefMetaDataTracker tracker, final GenomeLoc loc, VariantContext vc) { - final Map newInfoAnnotations = new HashMap(0); - vc = annotateDBs(tracker, loc, vc, newInfoAnnotations); - - if ( !newInfoAnnotations.isEmpty() ) { - final VariantContextBuilder builder = new VariantContextBuilder(vc).attributes(newInfoAnnotations); - vc = builder.make(); - } - - return vc; - } - - /** - * Annotate the ID field and other DBs for the given Variant Context - * - * @param tracker ref meta data tracker (cannot be null) - * @param loc location of the vc - * @param vc variant context to annotate - * @param infoAnnotations info annotation map to populate - * @return non-null annotated version of vc - */ @Requires({"tracker != null && loc != null && vc != null && infoAnnotations != null"}) @Ensures("result != null") - private VariantContext annotateDBs(final RefMetaDataTracker tracker, final GenomeLoc loc, VariantContext vc, final Map infoAnnotations) { - for ( Map.Entry, String> dbSet : dbAnnotations.entrySet() ) { - if ( dbSet.getValue().equals(VCFConstants.DBSNP_KEY) ) { - final String rsID = GATKVCFUtils.rsIDOfFirstRealVariant(tracker.getValues(dbSet.getKey(), loc), vc.getType()); - - // add the ID if appropriate - if ( rsID != null ) { - // put the DB key into the INFO field - infoAnnotations.put(VCFConstants.DBSNP_KEY, true); - - if ( vc.emptyID() ) { - vc = new VariantContextBuilder(vc).id(rsID).make(); - } else if ( walker.alwaysAppendDbsnpId() && vc.getID().indexOf(rsID) == -1 ) { - final String newRsID = vc.getID() + VCFConstants.ID_FIELD_SEPARATOR + rsID; - vc = new VariantContextBuilder(vc).id(newRsID).make(); - } - } - } else { - boolean overlapsComp = false; - for ( VariantContext comp : tracker.getValues(dbSet.getKey(), loc) ) { - if ( !comp.isFiltered() && ( !requireStrictAlleleMatch || comp.getAlleles().equals(vc.getAlleles()) ) ) { - overlapsComp = true; - break; - } - } - if ( overlapsComp ) - infoAnnotations.put(dbSet.getValue(), overlapsComp); - } 
- } - - return vc; + private VariantContext annotateDBs(final RefMetaDataTracker tracker, VariantContext vc) { + return variantOverlapAnnotator.annotateOverlaps(tracker, variantOverlapAnnotator.annotateRsID(tracker, vc)); } private void annotateExpressions(final RefMetaDataTracker tracker, final GenomeLoc loc, final Map infoAnnotations) { diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/VariantOverlapAnnotator.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/VariantOverlapAnnotator.java new file mode 100644 index 000000000..07af4bd74 --- /dev/null +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/VariantOverlapAnnotator.java @@ -0,0 +1,224 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+*/ + +package org.broadinstitute.sting.gatk.walkers.annotator; + +import org.broadinstitute.sting.commandline.RodBinding; +import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; +import org.broadinstitute.sting.utils.GenomeLoc; +import org.broadinstitute.sting.utils.GenomeLocParser; +import org.broadinstitute.variant.variantcontext.Allele; +import org.broadinstitute.variant.variantcontext.VariantContext; +import org.broadinstitute.variant.variantcontext.VariantContextBuilder; +import org.broadinstitute.variant.vcf.VCFConstants; + +import java.util.*; + +/** + * Annotate the ID field and attribute overlap FLAGs for a VariantContext against a RefMetaDataTracker or a list + * of VariantContexts + */ +public final class VariantOverlapAnnotator { + final RodBinding dbSNPBinding; + final Map, String> overlapBindings; + final GenomeLocParser genomeLocParser; + + /** + * Create a new VariantOverlapAnnotator without overlap bindings + * + * @see #VariantOverlapAnnotator(org.broadinstitute.sting.commandline.RodBinding, java.util.Map, org.broadinstitute.sting.utils.GenomeLocParser) + */ + public VariantOverlapAnnotator(RodBinding dbSNPBinding, GenomeLocParser genomeLocParser) { + this(dbSNPBinding, Collections., String>emptyMap(), genomeLocParser); + } + + /** + * Create a new VariantOverlapAnnotator + * + * @param dbSNPBinding the RodBinding to use for updating ID field values, or null if that behavior isn't desired + * @param overlapBindings a map of RodBindings / name to use for overlap annotation. Each binding will be used to + * add name => true for variants that overlap with variants found to a + * RefMetaDataTracker at each location. 
Can be empty but not null + * @param genomeLocParser the genome loc parser we'll use to create GenomeLocs for VariantContexts + */ + public VariantOverlapAnnotator(RodBinding dbSNPBinding, Map, String> overlapBindings, GenomeLocParser genomeLocParser) { + if ( overlapBindings == null ) throw new IllegalArgumentException("overlapBindings cannot be null"); + if ( genomeLocParser == null ) throw new IllegalArgumentException("genomeLocParser cannot be null"); + + this.dbSNPBinding = dbSNPBinding; + this.overlapBindings = overlapBindings; + this.genomeLocParser = genomeLocParser; + } + + /** + * Update rsID in vcToAnnotate with rsIDs from dbSNPBinding fetched from tracker + * @see #annotateOverlap(java.util.List, String, org.broadinstitute.variant.variantcontext.VariantContext) + * + * @param tracker non-null tracker, which we will use to update the rsID of vcToAnnotate + * for VariantContexts bound to dbSNPBinding that start at vcToAnnotate + * @param vcToAnnotate a variant context to annotate + * @return a VariantContext (may be == to vcToAnnotate) with updated rsID value + */ + public VariantContext annotateRsID(final RefMetaDataTracker tracker, final VariantContext vcToAnnotate) { + if ( dbSNPBinding != null ) { + final GenomeLoc loc = getLoc(vcToAnnotate); + return annotateRsID(tracker.getValues(dbSNPBinding, loc), vcToAnnotate); + } else { + return vcToAnnotate; + } + } + + /** + * Update rsID of vcToAnnotate with rsID match found in vcsAtLoc, if one exists + * + * @param vcsAtLoc a list of variant contexts starting at this location to use as sources for rsID values + * @param vcToAnnotate a variant context to annotate + * @return a VariantContext (may be == to vcToAnnotate) with updated rsID value + */ + public VariantContext annotateRsID(final List vcsAtLoc, final VariantContext vcToAnnotate ) { + final String rsID = getRsID(vcsAtLoc, vcToAnnotate); + + // add the ID if appropriate + if ( rsID != null ) { + final VariantContextBuilder vcb = new 
VariantContextBuilder(vcToAnnotate); + + if ( ! vcToAnnotate.hasID() ) { + return vcb.id(rsID).make(); + } else if ( ! vcToAnnotate.getID().contains(rsID) ) { + return vcb.id(vcToAnnotate.getID() + VCFConstants.ID_FIELD_SEPARATOR + rsID).make(); + } // falling through to return VC lower down + } + + // nothing to do, just return vc + return vcToAnnotate; + } + + private GenomeLoc getLoc(final VariantContext vc) { + return genomeLocParser.createGenomeLoc(vc); + } + + /** + * Add overlap attributes to vcToAnnotate against all overlapBindings in tracker + * + * @see #annotateOverlap(java.util.List, String, org.broadinstitute.variant.variantcontext.VariantContext) + * for more information + * + * @param tracker non-null tracker, which we will use to update the rsID of vcToAnnotate + * for VariantContexts bound to dbSNPBinding that start at vcToAnnotate + * @param vcToAnnotate a variant context to annotate + * @return a VariantContext (may be == to vcToAnnotate) with updated overlaps update fields value + */ + public VariantContext annotateOverlaps(final RefMetaDataTracker tracker, final VariantContext vcToAnnotate) { + if ( overlapBindings.isEmpty() ) return vcToAnnotate; + + VariantContext annotated = vcToAnnotate; + final GenomeLoc loc = getLoc(vcToAnnotate); + for ( final Map.Entry, String> overlapBinding : overlapBindings.entrySet() ) { + annotated = annotateOverlap(tracker.getValues(overlapBinding.getKey(), loc), overlapBinding.getValue(), vcToAnnotate); + } + + return annotated; + } + + /** + * Add overlaps flag attributes to vcToAnnotate binding overlapTestVCs.getSource() => true if + * an overlapping variant context can be found in overlapTestVCs with vcToAnnotate + * + * Overlaps here means that the reference alleles are the same and at least one alt + * allele in vcToAnnotate is equals to one of the alt alleles in overlapTestVCs + * + * @param overlapTestVCs a non-null list of potential overlaps that start at vcToAnnotate + * @param attributeKey the key to 
set to true in the attribute map for vcToAnnotate if it overlaps + * @param vcToAnnotate a non-null VariantContext to annotate + * @return a VariantContext (may be == to vcToAnnotate) with the attributeKey flag set if an overlap was found + */ + public VariantContext annotateOverlap(final List overlapTestVCs, final String attributeKey, VariantContext vcToAnnotate) { + if ( overlapBindings.isEmpty() ) return vcToAnnotate; + + final boolean overlaps = overlaps(overlapTestVCs, vcToAnnotate); + if ( overlaps ) { + return new VariantContextBuilder(vcToAnnotate).attribute(attributeKey, true).make(); + } else { + return vcToAnnotate; + } + } + + /** + * Returns the ID field of the first VariantContext in rsIDSourceVCs that has the same reference allele + * as vcToAnnotate and all of the alternative alleles in vcToAnnotate. + * + * Doesn't require vcToAnnotate to be a complete match, so + * + * A/C/G in VC in rsIDSourceVCs + * + * would match a VC with A/C but not A/T. Also we don't require all alleles to match + * so we would also match A/C/T to A/C/G. + * + * Will only match rsIDSourceVCs that aren't failing filters. + * + * @param rsIDSourceVCs a non-null list of potential overlaps that start at vcToAnnotate + * @param vcToAnnotate a non-null VariantContext to annotate + * @return a String to use for the rsID from rsIDSourceVCs if one matches, or null if none matches + */ + private String getRsID(final List rsIDSourceVCs, final VariantContext vcToAnnotate) { + if ( rsIDSourceVCs == null ) throw new IllegalArgumentException("rsIDSourceVCs cannot be null"); + if ( vcToAnnotate == null ) throw new IllegalArgumentException("vcToAnnotate cannot be null"); + + for ( final VariantContext vcComp : rsIDSourceVCs ) { + if ( vcComp.isFiltered() ) continue; // don't process any failed VCs + + if ( ! 
vcComp.getChr().equals(vcToAnnotate.getChr()) || vcComp.getStart() != vcToAnnotate.getStart() ) + throw new IllegalArgumentException("source rsID VariantContext " + vcComp + " doesn't start at the same position as vcToAnnotate " + vcToAnnotate); + + if ( vcToAnnotate.getReference().equals(vcComp.getReference()) ) { + for ( final Allele allele : vcToAnnotate.getAlternateAlleles() ) { + if ( vcComp.getAlternateAlleles().contains(allele) ) + return vcComp.getID(); + } + } + } + + return null; + } + + /** + * Does vcToAnnotate overlap with any of the records in potentialOverlaps? + * + * @param potentialOverlaps a non-null list of potential overlaps that start at vcToAnnotate + * @param vcToAnnotate a non-null VariantContext to annotate + * @return true if vcToAnnotate overlaps (position and all alt alleles) with some variant in potentialOverlaps + */ + private boolean overlaps(final List potentialOverlaps, final VariantContext vcToAnnotate) { + return getRsID(potentialOverlaps, vcToAnnotate) != null; + } + + /** + * Get the collection of the RodBinding names for those being used for overlap detection + * @return a non-null collection of Strings + */ + public Collection getOverlapNames() { + return overlapBindings.values(); + } +} diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/beagle/BeagleOutputToVCF.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/beagle/BeagleOutputToVCF.java index 15bd79586..7d5ad9b8a 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/beagle/BeagleOutputToVCF.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/beagle/BeagleOutputToVCF.java @@ -129,6 +129,9 @@ public class BeagleOutputToVCF extends RodWalker { private final double MIN_PROB_ERROR = 0.000001; private final double MAX_GENOTYPE_QUALITY = -6.0; + private final static String BEAGLE_MONO_FILTER_STRING = "BGL_SET_TO_MONOMORPHIC"; + private final static String ORIGINAL_ALT_ALLELE_INFO_KEY = "OriginalAltAllele"; + public void 
initialize() { // setup the header fields @@ -138,10 +141,8 @@ public class BeagleOutputToVCF extends RodWalker { hInfo.add(new VCFFormatHeaderLine("OG",1, VCFHeaderLineType.String, "Original Genotype input to Beagle")); hInfo.add(new VCFInfoHeaderLine("R2", 1, VCFHeaderLineType.Float, "r2 Value reported by Beagle on each site")); hInfo.add(new VCFInfoHeaderLine("NumGenotypesChanged", 1, VCFHeaderLineType.Integer, "The number of genotypes changed by Beagle")); - hInfo.add(new VCFFilterHeaderLine("BGL_RM_WAS_A", "This 'A' site was set to monomorphic by Beagle")); - hInfo.add(new VCFFilterHeaderLine("BGL_RM_WAS_C", "This 'C' site was set to monomorphic by Beagle")); - hInfo.add(new VCFFilterHeaderLine("BGL_RM_WAS_G", "This 'G' site was set to monomorphic by Beagle")); - hInfo.add(new VCFFilterHeaderLine("BGL_RM_WAS_T", "This 'T' site was set to monomorphic by Beagle")); + hInfo.add(new VCFInfoHeaderLine(ORIGINAL_ALT_ALLELE_INFO_KEY, 1, VCFHeaderLineType.String, "The original alt allele for a site set to monomorphic by Beagle")); + hInfo.add(new VCFFilterHeaderLine(BEAGLE_MONO_FILTER_STRING, "This site was set to monomorphic by Beagle")); if ( comp.isBound() ) { hInfo.add(new VCFInfoHeaderLine("ACH", 1, VCFHeaderLineType.Integer, "Allele Count from Comparison ROD at this site")); @@ -335,9 +336,8 @@ public class BeagleOutputToVCF extends RodWalker { final VariantContextBuilder builder = new VariantContextBuilder(vc_input).source("outputvcf").genotypes(genotypes); if ( ! ( beagleVarCounts > 0 || DONT_FILTER_MONOMORPHIC_SITES ) ) { - Set removedFilters = vc_input.filtersWereApplied() ? 
new HashSet(vc_input.getFilters()) : new HashSet(1); - removedFilters.add(String.format("BGL_RM_WAS_%s",vc_input.getAlternateAllele(0))); - builder.alleles(new HashSet(Arrays.asList(vc_input.getReference()))).filters(removedFilters); + builder.attribute(ORIGINAL_ALT_ALLELE_INFO_KEY, vc_input.getAlternateAllele(0)); + builder.alleles(Collections.singleton(vc_input.getReference())).filter(BEAGLE_MONO_FILTER_STRING); } // re-compute chromosome counts diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/qc/CountReads.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/qc/CountReads.java index 825fcac90..45beea28f 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/qc/CountReads.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/qc/CountReads.java @@ -66,11 +66,16 @@ import org.broadinstitute.sting.utils.sam.GATKSAMRecord; */ @DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_QC, extraDocs = {CommandLineGATK.class} ) @Requires({DataSource.READS, DataSource.REFERENCE}) -public class CountReads extends ReadWalker implements NanoSchedulable { +public class CountReads extends ReadWalker implements NanoSchedulable { public Integer map(ReferenceContext ref, GATKSAMRecord read, RefMetaDataTracker tracker) { return 1; } - @Override public Integer reduceInit() { return 0; } - @Override public Integer reduce(Integer value, Integer sum) { return value + sum; } + @Override public Long reduceInit() { return 0L; } + + public Long reduce(Integer value, Long sum) { return (long) value + sum; } + + public void onTraversalDone(Long result) { + logger.info("CountReads counted " + result + " reads in the traversal"); + } } diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/qc/Pileup.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/qc/Pileup.java index bc98c670a..23bbf1460 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/qc/Pileup.java +++ 
b/public/java/src/org/broadinstitute/sting/gatk/walkers/qc/Pileup.java @@ -26,10 +26,7 @@ package org.broadinstitute.sting.gatk.walkers.qc; import org.broad.tribble.Feature; -import org.broadinstitute.sting.commandline.Argument; -import org.broadinstitute.sting.commandline.Input; -import org.broadinstitute.sting.commandline.Output; -import org.broadinstitute.sting.commandline.RodBinding; +import org.broadinstitute.sting.commandline.*; import org.broadinstitute.sting.gatk.CommandLineGATK; import org.broadinstitute.sting.gatk.contexts.AlignmentContext; import org.broadinstitute.sting.gatk.contexts.ReferenceContext; @@ -96,6 +93,10 @@ public class Pileup extends LocusWalker implements TreeReducibl @Input(fullName="metadata",shortName="metadata",doc="Add these ROD bindings to the output Pileup", required=false) public List> rods = Collections.emptyList(); + @Hidden + @Argument(fullName="outputInsertLength",shortName = "outputInsertLength",doc="Add a column which contains the length of the insert each base comes from.",required=false) + public boolean outputInsertLength=false; + @Override public String map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) { final String rods = getReferenceOrderedData( tracker ); @@ -104,6 +105,8 @@ public class Pileup extends LocusWalker implements TreeReducibl final StringBuilder s = new StringBuilder(); s.append(String.format("%s %s", basePileup.getPileupString((char)ref.getBase()), rods)); + if ( outputInsertLength ) + s.append(" ").append(insertLengthOutput(basePileup)); if ( SHOW_VERBOSE ) s.append(" ").append(createVerboseOutput(basePileup)); s.append("\n"); @@ -143,6 +146,18 @@ public class Pileup extends LocusWalker implements TreeReducibl return rodString; } + private static String insertLengthOutput(final ReadBackedPileup pileup) { + + Integer[] insertSizes=new Integer[pileup.depthOfCoverage()]; + + int i=0; + for ( PileupElement p : pileup ) { + insertSizes[i]=p.getRead().getInferredInsertSize(); + 
++i; + } + return Utils.join(",",insertSizes); + } + private static String createVerboseOutput(final ReadBackedPileup pileup) { final StringBuilder sb = new StringBuilder(); diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/readutils/PrintReads.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/readutils/PrintReads.java index a28523369..c7ed0bffd 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/readutils/PrintReads.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/readutils/PrintReads.java @@ -96,7 +96,7 @@ import java.util.*; * -T PrintReads \ * -o output.bam \ * -I input.bam \ - * -ds 0.25 + * -dfrac 0.25 * * */ @@ -124,12 +124,6 @@ public class PrintReads extends ReadWalker impleme @Argument(fullName = "number", shortName = "n", doc="Print the first n reads from the file, discarding the rest", required = false) int nReadsToPrint = -1; - /** - * Downsamples the bam file by the given ratio, printing only approximately the given percentage of reads. The downsampling is balanced (over the entire coverage) - */ - @Argument(fullName = "downsample_coverage", shortName = "ds", doc="Downsample BAM to desired coverage", required = false) - public double downsampleRatio = 1.0; - /** * Only reads from samples listed in the provided file(s) will be included in the output. */ @@ -237,8 +231,7 @@ public class PrintReads extends ReadWalker impleme nReadsToPrint--; // n > 0 means there are still reads to be printed. } - // if downsample option is turned off (= 1) then don't waste time getting the next random number. 
- return (downsampleRatio == 1 || random.nextDouble() < downsampleRatio); + return true; } /** diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/ConcordanceMetrics.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/ConcordanceMetrics.java old mode 100644 new mode 100755 index b3b4857b6..848261d73 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/ConcordanceMetrics.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/ConcordanceMetrics.java @@ -27,6 +27,7 @@ package org.broadinstitute.sting.gatk.walkers.variantutils; import com.google.java.contract.Requires; import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; +import org.broadinstitute.sting.utils.exceptions.UserException; import org.broadinstitute.sting.utils.variant.GATKVariantContextUtils; import org.broadinstitute.variant.variantcontext.*; import org.broadinstitute.variant.vcf.VCFHeader; @@ -81,10 +82,23 @@ public class ConcordanceMetrics { return Collections.unmodifiableMap(nrd); } + public Map getPerSampleOGC() { + Map ogc = new HashMap(perSampleGenotypeConcordance.size()); + for ( Map.Entry sampleTable : perSampleGenotypeConcordance.entrySet() ) { + ogc.put(sampleTable.getKey(),calculateOGC(sampleTable.getValue())); + } + + return Collections.unmodifiableMap(ogc); + } + public Double getOverallNRD() { return calculateNRD(overallGenotypeConcordance); } + public Double getOverallOGC() { + return calculateOGC(overallGenotypeConcordance); + } + public Map getPerSampleNRS() { Map nrs = new HashMap(perSampleGenotypeConcordance.size()); for ( Map.Entry sampleTable : perSampleGenotypeConcordance.entrySet() ) { @@ -110,6 +124,11 @@ public class ConcordanceMetrics { for ( String sample : perSampleGenotypeConcordance.keySet() ) { Genotype evalGenotype = eval.getGenotype(sample); Genotype truthGenotype = truth.getGenotype(sample); + // ensure genotypes are either no-call ("."), missing (empty 
alleles), or diploid + if ( ( ! evalGenotype.isNoCall() && evalGenotype.getPloidy() != 2 && evalGenotype.getPloidy() > 0) || + ( ! truthGenotype.isNoCall() && truthGenotype.getPloidy() != 2 && truthGenotype.getPloidy() > 0) ) { + throw new UserException(String.format("Concordance Metrics is currently only implemented for DIPLOID genotypes, found eval ploidy: %d, comp ploidy: %d",evalGenotype.getPloidy(),truthGenotype.getPloidy())); + } perSampleGenotypeConcordance.get(sample).update(evalGenotype,truthGenotype,alleleTruth,truthRef); overallGenotypeConcordance.update(evalGenotype,truthGenotype,alleleTruth,truthRef); } @@ -136,10 +155,32 @@ public class ConcordanceMetrics { return total == 0 ? 1.0 : 1.0 - ( (double) correct)/( (double) total); } + private static double calculateOGC(int[][] concordanceCounts) { + int correct = 0; + int total = 0; + correct += concordanceCounts[GenotypeType.HOM_REF.ordinal()][GenotypeType.HOM_REF.ordinal()]; + correct += concordanceCounts[GenotypeType.HET.ordinal()][GenotypeType.HET.ordinal()]; + correct += concordanceCounts[GenotypeType.HOM_VAR.ordinal()][GenotypeType.HOM_VAR.ordinal()]; + total += correct; + total += concordanceCounts[GenotypeType.HOM_REF.ordinal()][GenotypeType.HET.ordinal()]; + total += concordanceCounts[GenotypeType.HOM_REF.ordinal()][GenotypeType.HOM_VAR.ordinal()]; + total += concordanceCounts[GenotypeType.HET.ordinal()][GenotypeType.HOM_REF.ordinal()]; + total += concordanceCounts[GenotypeType.HET.ordinal()][GenotypeType.HOM_VAR.ordinal()]; + total += concordanceCounts[GenotypeType.HOM_VAR.ordinal()][GenotypeType.HOM_REF.ordinal()]; + total += concordanceCounts[GenotypeType.HOM_VAR.ordinal()][GenotypeType.HET.ordinal()]; + // OGC is by definition correct/total + // note: if there are no observations (so the ratio is NaN), set this to 100% + return total == 0 ? 
1.0 : ( (double) correct)/( (double) total); + } + private static double calculateNRS(GenotypeConcordanceTable table) { return calculateNRS(table.getTable()); } + private static double calculateOGC(GenotypeConcordanceTable table) { + return calculateOGC(table.getTable()); + } + private static double calculateNRS(int[][] concordanceCounts) { long confirmedVariant = 0; long unconfirmedVariant = 0; diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/GenotypeConcordance.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/GenotypeConcordance.java old mode 100644 new mode 100755 index 35213af34..da8b20c66 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/GenotypeConcordance.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/GenotypeConcordance.java @@ -25,7 +25,10 @@ package org.broadinstitute.sting.gatk.walkers.variantutils; -import org.broadinstitute.sting.commandline.*; +import org.broadinstitute.sting.commandline.Argument; +import org.broadinstitute.sting.commandline.Input; +import org.broadinstitute.sting.commandline.Output; +import org.broadinstitute.sting.commandline.RodBinding; import org.broadinstitute.sting.gatk.CommandLineGATK; import org.broadinstitute.sting.gatk.contexts.AlignmentContext; import org.broadinstitute.sting.gatk.contexts.ReferenceContext; @@ -64,8 +67,58 @@ import java.util.*; * *

Output

* Genotype Concordance writes a GATK report to the specified file (via -o) , consisting of multiple tables of counts - * and proportions. These tables may be optionally moltenized via the -moltenize argument. + * and proportions. These tables may be optionally moltenized via the -moltenize argument. That is, the standard table * + * Sample NO_CALL_HOM_REF NO_CALL_HET NO_CALL_HOM_VAR (...) + * NA12878 0.003 0.001 0.000 (...) + * NA12891 0.005 0.000 0.000 (...) + * + * would instead be displayed + * + * NA12878 NO_CALL_HOM_REF 0.003 + * NA12878 NO_CALL_HET 0.001 + * NA12878 NO_CALL_HOM_VAR 0.000 + * NA12891 NO_CALL_HOM_REF 0.005 + * NA12891 NO_CALL_HET 0.000 + * NA12891 NO_CALL_HOM_VAR 0.000 + * (...) + * + * + * These tables are constructed on a per-sample basis, and include counts of eval vs comp genotype states, and the + * number of times the alternate alleles between the eval and comp sample did not match up. + * + * In addition, Genotype Concordance produces site-level allelic concordance. For strictly bi-allelic VCFs, + * only the ALLELES_MATCH, EVAL_ONLY, TRUTH_ONLY fields will be populated, but where multi-allelic sites are involved + * counts for EVAL_SUBSET_TRUTH and EVAL_SUPERSET_TRUTH will be generated. + * + * For example, in the following situation + * eval: ref - A alt - C + * comp: ref - A alt - C,T + * then the site is tabulated as EVAL_SUBSET_TRUTH. Were the situation reversed, it would be EVAL_SUPERSET_TRUTH. + * However, in the case where eval has both C and T alternate alleles, both must be observed in the genotypes + * (that is, there must be at least one of (0/1,1/1) and at least one of (0/2,1/2,2/2) in the genotype field). If + * one of the alleles has no observations in the genotype fields of the eval, the site-level concordance is + * tabulated as though that allele were not present in the record. + * + *

Monomorphic Records

+ * A site which has an alternate allele, but which is monomorphic in samples, is treated as not having been + * discovered, and will be recorded in the TRUTH_ONLY column (if a record exists in the comp VCF), or not at all + * (if no record exists in the comp VCF). + * + * That is, in the situation + * eval: ref - A alt - C genotypes - 0/0 0/0 0/0 ... 0/0 + * comp: ref - A alt - C ... 0/0 0/0 ... + * is equivalent to + * eval: ref - A alt - . genotypes - 0/0 0/0 0/0 ... 0/0 + * comp: ref - A alt - C ... 0/0 0/0 ... + * + * When a record is present in the comp VCF the *genotypes* for the monomorphic site will still be used to evaluate + * per-sample genotype concordance counts. + * + *

Filtered Records

+ * Filtered records are treated as though they were not present in the VCF, unless -ignoreSiteFilters is provided, + * in which case all records are used. There is currently no way to assess concordance metrics on filtered sites + * exclusively. SelectVariants can be used to extract filtered sites, and VariantFiltration used to un-filter them. */ @DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_VARMANIP, extraDocs = {CommandLineGATK.class} ) public class GenotypeConcordance extends RodWalker>,ConcordanceMetrics> { @@ -269,7 +322,7 @@ public class GenotypeConcordance extends RodWalker entry : metrics.getPerSampleGenotypeConcordance().entrySet() ) { ConcordanceMetrics.GenotypeConcordanceTable table = entry.getValue(); @@ -298,19 +352,19 @@ public class GenotypeConcordance extends RodWalker nrdEntry : metrics.getPerSampleNRD().entrySet() ) { concordanceSummary.set(nrdEntry.getKey(),"Non-Reference_Discrepancy",nrdEntry.getValue()); } + for ( Map.Entry ogcEntry : metrics.getPerSampleOGC().entrySet() ) { + concordanceSummary.set(ogcEntry.getKey(),"Overall_Genotype_Concordance",ogcEntry.getValue()); + } concordanceSummary.set("ALL_NRS_NRD","Sample","ALL"); concordanceSummary.set("ALL_NRS_NRD","Non-Reference_Sensitivity",metrics.getOverallNRS()); concordanceSummary.set("ALL_NRS_NRD","Non-Reference_Discrepancy",metrics.getOverallNRD()); + concordanceSummary.set("ALL_NRS_NRD","Overall_Genotype_Concordance",metrics.getOverallOGC()); for (ConcordanceMetrics.SiteConcordanceType type : ConcordanceMetrics.SiteConcordanceType.values() ) { @@ -408,6 +466,7 @@ public class GenotypeConcordance extends RodWalker nrdEntry : metrics.getPerSampleNRD().entrySet() ) { concordanceSummary.set(nrdEntry.getKey(),"Non-Reference Discrepancy",nrdEntry.getValue()); } + for ( Map.Entry ogcEntry : metrics.getPerSampleOGC().entrySet() ) { + concordanceSummary.set(ogcEntry.getKey(),"Overall_Genotype_Concordance",ogcEntry.getValue()); + } concordanceSummary.set("ALL","Sample","ALL"); 
concordanceSummary.set("ALL","Non-Reference Sensitivity",metrics.getOverallNRS()); concordanceSummary.set("ALL","Non-Reference Discrepancy",metrics.getOverallNRD()); + concordanceSummary.set("ALL","Overall_Genotype_Concordance",metrics.getOverallOGC()); for (ConcordanceMetrics.SiteConcordanceType type : ConcordanceMetrics.SiteConcordanceType.values() ) { siteConcordance.set("Comparison",type.toString(),metrics.getOverallSiteConcordance().get(type)); diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/LiftoverVariants.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/LiftoverVariants.java index 17d50f101..0e38869c6 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/LiftoverVariants.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/LiftoverVariants.java @@ -54,7 +54,11 @@ import java.io.File; import java.util.*; /** - * Lifts a VCF file over from one build to another. Note that the resulting VCF could be mis-sorted. + * Lifts a VCF file over from one build to another. + * + * Important note: the resulting VCF is not guaranteed to be valid according to the official specification. The file could + * possibly be mis-sorted and the header may not be complete. LiftoverVariants is intended to be the first of two processing steps + * for the liftover; the second step, FilterLiftedVariants, will produce a valid well-behaved VCF file. 
*/ @DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_VARMANIP, extraDocs = {CommandLineGATK.class} ) public class LiftoverVariants extends RodWalker { @@ -62,7 +66,7 @@ public class LiftoverVariants extends RodWalker { @ArgumentCollection protected StandardVariantContextInputArgumentCollection variantCollection = new StandardVariantContextInputArgumentCollection(); - @Output(doc="File to which variants should be written") + @Output(doc="File to which variants should be written", required=true, defaultToStdout=false) protected File file = null; protected VariantContextWriter writer = null; diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/VariantsToBinaryPed.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/VariantsToBinaryPed.java index 8d16e6ca2..c414b443e 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/VariantsToBinaryPed.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/VariantsToBinaryPed.java @@ -176,6 +176,7 @@ public class VariantsToBinaryPed extends RodWalker { // Cut down on memory. 
try { File temp = File.createTempFile("VariantsToBPed_"+sample, ".tmp"); + temp.deleteOnExit(); printMap.put(sample,new PrintStream(temp)); tempFiles.put(sample,temp); } catch (IOException e) { diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/VariantsToVCF.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/VariantsToVCF.java index 60809134a..dbb68961f 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/VariantsToVCF.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/VariantsToVCF.java @@ -39,6 +39,7 @@ import org.broadinstitute.sting.gatk.refdata.utils.GATKFeature; import org.broadinstitute.sting.gatk.walkers.Reference; import org.broadinstitute.sting.gatk.walkers.RodWalker; import org.broadinstitute.sting.gatk.walkers.Window; +import org.broadinstitute.sting.gatk.walkers.annotator.VariantOverlapAnnotator; import org.broadinstitute.sting.utils.BaseUtils; import org.broadinstitute.sting.utils.GenomeLoc; import org.broadinstitute.sting.utils.SampleUtils; @@ -112,24 +113,21 @@ public class VariantsToVCF extends RodWalker { // for dealing with indels in hapmap CloseableIterator dbsnpIterator = null; + VariantOverlapAnnotator variantOverlapAnnotator = null; public void initialize() { vcfwriter = VariantContextWriterFactory.sortOnTheFly(baseWriter, 40, false); + variantOverlapAnnotator = new VariantOverlapAnnotator(dbsnp.dbsnp, getToolkit().getGenomeLocParser()); } public Integer map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) { if ( tracker == null || !BaseUtils.isRegularBase(ref.getBase()) ) return 0; - String rsID = dbsnp == null ? 
null : GATKVCFUtils.rsIDOfFirstRealVariant(tracker.getValues(dbsnp.dbsnp, context.getLocation()), VariantContext.Type.SNP); - Collection contexts = getVariantContexts(tracker, ref); for ( VariantContext vc : contexts ) { VariantContextBuilder builder = new VariantContextBuilder(vc); - if ( rsID != null && vc.emptyID() ) { - builder.id(rsID).make(); - } // set the appropriate sample name if necessary if ( sampleName != null && vc.hasGenotypes() && vc.hasGenotype(variants.getName()) ) { @@ -137,7 +135,8 @@ public class VariantsToVCF extends RodWalker { builder.genotypes(g); } - writeRecord(builder.make(), tracker, ref.getLocus()); + final VariantContext withID = variantOverlapAnnotator.annotateRsID(tracker, builder.make()); + writeRecord(withID, tracker, ref.getLocus()); } return 1; diff --git a/public/java/src/org/broadinstitute/sting/tools/CatVariants.java b/public/java/src/org/broadinstitute/sting/tools/CatVariants.java index ad77b2548..b59786d15 100644 --- a/public/java/src/org/broadinstitute/sting/tools/CatVariants.java +++ b/public/java/src/org/broadinstitute/sting/tools/CatVariants.java @@ -144,15 +144,13 @@ public class CatVariants extends CommandLineProgram { BasicConfigurator.configure(); logger.setLevel(Level.INFO); - if ( ! refFile.getName().endsWith(".fasta")) { - throw new UserException("Reference file "+refFile+"name must end with .fasta"); + final ReferenceSequenceFile ref; + try { + ref = ReferenceSequenceFileFactory.getReferenceSequenceFile(refFile); + } catch ( Exception e ) { + throw new UserException("Couldn't load provided reference sequence file " + refFile, e); } - if ( ! 
refFile.exists() ) { - throw new UserException(String.format("Reference file %s does not exist", refFile.getAbsolutePath())); - } - - // Comparator>> comparator = new PositionComparator(); Comparator> positionComparator = new PositionComparator(); @@ -203,8 +201,6 @@ public class CatVariants extends CommandLineProgram { if (!(outputFile.getName().endsWith(".vcf") || outputFile.getName().endsWith(".VCF"))){ throw new UserException(String.format("Output file %s should be .vcf", outputFile)); } - ReferenceSequenceFile ref = ReferenceSequenceFileFactory.getReferenceSequenceFile(refFile); - FileOutputStream outputStream = new FileOutputStream(outputFile); EnumSet options = EnumSet.of(Options.INDEX_ON_THE_FLY); diff --git a/public/java/src/org/broadinstitute/sting/utils/MathUtils.java b/public/java/src/org/broadinstitute/sting/utils/MathUtils.java index 38c131bc6..07aff5983 100644 --- a/public/java/src/org/broadinstitute/sting/utils/MathUtils.java +++ b/public/java/src/org/broadinstitute/sting/utils/MathUtils.java @@ -29,8 +29,8 @@ import com.google.java.contract.Ensures; import com.google.java.contract.Requires; import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; -import org.broadinstitute.sting.utils.exceptions.UserException; +import javax.annotation.Nullable; import java.math.BigDecimal; import java.util.*; @@ -54,15 +54,19 @@ public class MathUtils { private static final double JACOBIAN_LOG_TABLE_INV_STEP = 1.0 / JACOBIAN_LOG_TABLE_STEP; private static final double MAX_JACOBIAN_TOLERANCE = 8.0; private static final int JACOBIAN_LOG_TABLE_SIZE = (int) (MAX_JACOBIAN_TOLERANCE / JACOBIAN_LOG_TABLE_STEP) + 1; - private static final int MAXN = 70000; + private static final int MAXN = 70_000; private static final int LOG10_CACHE_SIZE = 4 * MAXN; // we need to be able to go up to 2*(2N) when calculating some of the coefficients /** * The smallest log10 value we'll emit from normalizeFromLog10 
and other functions * where the real-space value is 0.0. */ - public final static double LOG10_P_OF_ZERO = -1000000.0; - public final static double FAIR_BINOMIAL_PROB_LOG10_0_5 = Math.log10(0.5); + public static final double LOG10_P_OF_ZERO = -1000000.0; + public static final double FAIR_BINOMIAL_PROB_LOG10_0_5 = Math.log10(0.5); + public static final double LOG_ONE_HALF = -Math.log10(2.0); + public static final double LOG_ONE_THIRD = -Math.log10(3.0); + private static final double NATURAL_LOG_OF_TEN = Math.log(10.0); + private static final double SQUARE_ROOT_OF_TWO_TIMES_PI = Math.sqrt(2.0 * Math.PI); static { log10Cache = new double[LOG10_CACHE_SIZE]; @@ -203,15 +207,16 @@ public class MathUtils { } /** - * Converts a real space array of probabilities into a log10 array + * Converts a real space array of numbers (typically probabilities) into a log10 array * * @param prRealSpace * @return */ public static double[] toLog10(final double[] prRealSpace) { double[] log10s = new double[prRealSpace.length]; - for (int i = 0; i < prRealSpace.length; i++) + for (int i = 0; i < prRealSpace.length; i++) { log10s[i] = Math.log10(prRealSpace[i]); + } return log10s; } @@ -227,6 +232,9 @@ public class MathUtils { return maxValue; for (int i = start; i < finish; i++) { + if ( Double.isNaN(log10p[i]) || log10p[i] == Double.POSITIVE_INFINITY ) { + throw new IllegalArgumentException("log10p: Values must be non-infinite and non-NAN"); + } sum += Math.pow(10.0, log10p[i] - maxValue); } @@ -235,9 +243,6 @@ public class MathUtils { public static double sumLog10(final double[] log10values) { return Math.pow(10.0, log10sumLog10(log10values)); - // double s = 0.0; - // for ( double v : log10values) s += Math.pow(10.0, v); - // return s; } public static double log10sumLog10(final double[] log10values) { @@ -301,12 +306,50 @@ public class MathUtils { return 1; } - public static double NormalDistribution(final double mean, final double sd, final double x) { + /** + * Calculate f(x) = 
Normal(x | mu = mean, sigma = sd) + * @param mean the desired mean of the Normal distribution + * @param sd the desired standard deviation of the Normal distribution + * @param x the value to evaluate + * @return a well-formed double + */ + public static double normalDistribution(final double mean, final double sd, final double x) { + if( sd < 0 ) + throw new IllegalArgumentException("sd: Standard deviation of normal must be >0"); + if ( ! wellFormedDouble(mean) || ! wellFormedDouble(sd) || ! wellFormedDouble(x) ) + throw new IllegalArgumentException("mean, sd, or, x : Normal parameters must be well formatted (non-INF, non-NAN)"); double a = 1.0 / (sd * Math.sqrt(2.0 * Math.PI)); double b = Math.exp(-1.0 * (Math.pow(x - mean, 2.0) / (2.0 * sd * sd))); return a * b; } + /** + * Calculate f(x) = log10 ( Normal(x | mu = mean, sigma = sd) ) + * @param mean the desired mean of the Normal distribution + * @param sd the desired standard deviation of the Normal distribution + * @param x the value to evaluate + * @return a well-formed double + */ + + public static double normalDistributionLog10(final double mean, final double sd, final double x) { + if( sd < 0 ) + throw new IllegalArgumentException("sd: Standard deviation of normal must be >0"); + if ( ! wellFormedDouble(mean) || ! wellFormedDouble(sd) || ! wellFormedDouble(x) ) + throw new IllegalArgumentException("mean, sd, or, x : Normal parameters must be well formatted (non-INF, non-NAN)"); + final double a = -1.0 * Math.log10(sd * SQUARE_ROOT_OF_TWO_TIMES_PI); + final double b = -1.0 * (square(x - mean) / (2.0 * square(sd))) / NATURAL_LOG_OF_TEN; + return a + b; + } + + /** + * Calculate f(x) = x^2 + * @param x the value to square + * @return x * x + */ + public static double square(final double x) { + return x * x; + } + /** * Calculates the log10 of the binomial coefficient. Designed to prevent * overflows even with very large numbers. 
@@ -323,6 +366,13 @@ public class MathUtils { * @see #binomialCoefficient(int, int) with log10 applied to result */ public static double log10BinomialCoefficient(final int n, final int k) { + if ( n < 0 ) { + throw new IllegalArgumentException("n: Must have non-negative number of trials"); + } + if ( k > n || k < 0 ) { + throw new IllegalArgumentException("k: Must have non-negative number of successes, and no more successes than number of trials"); + } + return log10Factorial(n) - log10Factorial(k) - log10Factorial(n - k); } @@ -346,6 +396,8 @@ public class MathUtils { * @see #binomialProbability(int, int, double) with log10 applied to result */ public static double log10BinomialProbability(final int n, final int k, final double log10p) { + if ( log10p > 1e-18 ) + throw new IllegalArgumentException("log10p: Log-probability must be 0 or less"); double log10OneMinusP = Math.log10(1 - Math.pow(10, log10p)); return log10BinomialCoefficient(n, k) + log10p * k + log10OneMinusP * (n - k); } @@ -364,9 +416,35 @@ public class MathUtils { return log10BinomialCoefficient(n, k) + (n * FAIR_BINOMIAL_PROB_LOG10_0_5); } + /** A memoization container for {@link #binomialCumulativeProbability(int, int, int)}. Synchronized to accommodate multithreading. */ + private static final Map BINOMIAL_CUMULATIVE_PROBABILITY_MEMOIZATION_CACHE = + Collections.synchronizedMap(new LRUCache(10_000)); + + /** + * Primitive integer-triplet bijection into long. Returns null when the bijection function fails (in lieu of an exception), which will + * happen when: any value is negative or larger than a short. This method is optimized for speed; it is not intended to serve as a + * utility function. 
+ */ + @Nullable + static Long fastGenerateUniqueHashFromThreeIntegers(final int one, final int two, final int three) { + if (one < 0 || two < 0 || three < 0 || Short.MAX_VALUE < one || Short.MAX_VALUE < two || Short.MAX_VALUE < three) { + return null; + } else { + long result = 0; + result += (short) one; + result <<= 16; + result += (short) two; + result <<= 16; + result += (short) three; + return result; + } + } + /** * Performs the cumulative sum of binomial probabilities, where the probability calculation is done in log space. * Assumes that the probability of a successful hit is fair (i.e. 0.5). + * + * This pure function is memoized because of its expensive BigDecimal calculations. * * @param n number of attempts for the number of hits * @param k_start start (inclusive) of the cumulant sum (over hits) @@ -377,23 +455,41 @@ public class MathUtils { if ( k_end > n ) throw new IllegalArgumentException(String.format("Value for k_end (%d) is greater than n (%d)", k_end, n)); - double cumProb = 0.0; - double prevProb; - BigDecimal probCache = BigDecimal.ZERO; - - for (int hits = k_start; hits <= k_end; hits++) { - prevProb = cumProb; - final double probability = binomialProbability(n, hits); - cumProb += probability; - if (probability > 0 && cumProb - prevProb < probability / 2) { // loss of precision - probCache = probCache.add(new BigDecimal(prevProb)); - cumProb = 0.0; - hits--; // repeat loop - // prevProb changes at start of loop - } + // Fetch cached value, if applicable. 
+ final Long memoizationKey = fastGenerateUniqueHashFromThreeIntegers(n, k_start, k_end); + final Double memoizationCacheResult; + if (memoizationKey != null) { + memoizationCacheResult = BINOMIAL_CUMULATIVE_PROBABILITY_MEMOIZATION_CACHE.get(memoizationKey); + } else { + memoizationCacheResult = null; } - return probCache.add(new BigDecimal(cumProb)).doubleValue(); + final double result; + if (memoizationCacheResult != null) { + result = memoizationCacheResult; + } else { + double cumProb = 0.0; + double prevProb; + BigDecimal probCache = BigDecimal.ZERO; + + for (int hits = k_start; hits <= k_end; hits++) { + prevProb = cumProb; + final double probability = binomialProbability(n, hits); + cumProb += probability; + if (probability > 0 && cumProb - prevProb < probability / 2) { // loss of precision + probCache = probCache.add(new BigDecimal(prevProb)); + cumProb = 0.0; + hits--; // repeat loop + // prevProb changes at start of loop + } + } + + result = probCache.add(new BigDecimal(cumProb)).doubleValue(); + if (memoizationKey != null) { + BINOMIAL_CUMULATIVE_PROBABILITY_MEMOIZATION_CACHE.put(memoizationKey, result); + } + } + return result; } /** @@ -405,10 +501,20 @@ public class MathUtils { * @return */ public static double log10MultinomialCoefficient(final int n, final int[] k) { + if ( n < 0 ) + throw new IllegalArgumentException("n: Must have non-negative number of trials"); double denominator = 0.0; + int sum = 0; for (int x : k) { + if ( x < 0 ) + throw new IllegalArgumentException("x element of k: Must have non-negative observations of group"); + if ( x > n ) + throw new IllegalArgumentException("x element of k, n: Group observations must be bounded by k"); denominator += log10Factorial(x); + sum += x; } + if ( sum != n ) + throw new IllegalArgumentException("k and n: Sum of observations in multinomial must sum to total number of trials"); return log10Factorial(n) - denominator; } @@ -423,9 +529,11 @@ public class MathUtils { */ public static double 
log10MultinomialProbability(final int n, final int[] k, final double[] log10p) { if (log10p.length != k.length) - throw new UserException.BadArgumentValue("p and k", "Array of log10 probabilities must have the same size as the array of number of sucesses: " + log10p.length + ", " + k.length); + throw new IllegalArgumentException("p and k: Array of log10 probabilities must have the same size as the array of number of sucesses: " + log10p.length + ", " + k.length); double log10Prod = 0.0; for (int i = 0; i < log10p.length; i++) { + if ( log10p[i] > 1e-18 ) + throw new IllegalArgumentException("log10p: Log-probability must be <= 0"); log10Prod += log10p[i] * k[i]; } return log10MultinomialCoefficient(n, k) + log10Prod; @@ -468,7 +576,7 @@ public class MathUtils { */ public static double multinomialProbability(final int[] k, final double[] p) { if (p.length != k.length) - throw new UserException.BadArgumentValue("p and k", "Array of log10 probabilities must have the same size as the array of number of sucesses: " + p.length + ", " + k.length); + throw new IllegalArgumentException("p and k: Array of log10 probabilities must have the same size as the array of number of sucesses: " + p.length + ", " + k.length); int n = 0; double[] log10P = new double[p.length]; @@ -791,11 +899,8 @@ public class MathUtils { break; sum += x; i++; - //System.out.printf(" %d/%d", sum, i); } - //System.out.printf("Sum = %d, n = %d, maxI = %d, avg = %f%n", sum, i, maxI, (1.0 * sum) / i); - return (1.0 * sum) / i; } @@ -1291,7 +1396,7 @@ public class MathUtils { } /** - * Compute in a numerical correct way the quanity log10(1-x) + * Compute in a numerical correct way the quantity log10(1-x) * * Uses the approximation log10(1-x) = log10(1/x - 1) + log10(x) to avoid very quick underflow * in 1-x when x is very small diff --git a/public/java/src/org/broadinstitute/sting/utils/Utils.java b/public/java/src/org/broadinstitute/sting/utils/Utils.java index ff0ea958c..75bd6a3d1 100644 --- 
a/public/java/src/org/broadinstitute/sting/utils/Utils.java +++ b/public/java/src/org/broadinstitute/sting/utils/Utils.java @@ -27,6 +27,7 @@ package org.broadinstitute.sting.utils; import com.google.java.contract.Ensures; import com.google.java.contract.Requires; +import net.sf.samtools.CigarOperator; import net.sf.samtools.SAMFileHeader; import net.sf.samtools.SAMProgramRecord; import org.apache.log4j.Logger; @@ -294,7 +295,7 @@ public class Utils { */ public static String dupString(final String s, int nCopies) { if ( s == null || s.equals("") ) throw new IllegalArgumentException("Bad s " + s); - if ( nCopies < 1 ) throw new IllegalArgumentException("nCopies must be >= 1 but got " + nCopies); + if ( nCopies < 0 ) throw new IllegalArgumentException("nCopies must be >= 0 but got " + nCopies); final StringBuilder b = new StringBuilder(); for ( int i = 0; i < nCopies; i++ ) @@ -682,6 +683,36 @@ public class Utils { return denom == 0 ? "NA" : String.format("%.2f", num / (1.0 * denom)); } + /** + * Adds elements from an array into a collection. + * + * In the event of an exception being thrown due to some element, dest might have been modified by + * the successful addition of elements before that one. + * + * @param dest the destination collection which cannot be null and should be able to accept + * the input elements. + * @param elements the elements to add to dest + * @param <T> the collection element type. + * @throws UnsupportedOperationException if the add operation + * is not supported by dest. + * @throws ClassCastException if the class of any of the elements + * prevents it from being added to dest. + * @throws NullPointerException if any of the elements is null and dest + * does not permit null elements + * @throws IllegalArgumentException if some property of any of the elements + * prevents it from being added to this collection + * @throws IllegalStateException if any of the elements cannot be added at this + * time due to insertion restrictions. 
+ * @return true if the collection was modified as a result. + */ + public static boolean addAll(Collection dest, T ... elements) { + boolean result = false; + for (final T e : elements) { + result = dest.add(e) | result; + } + return result; + } + /** * Create a constant map that maps each value in values to itself */ @@ -748,4 +779,60 @@ public class Utils { if ( suffix == null ) throw new IllegalArgumentException("suffix cannot be null"); return new String(big).endsWith(new String(suffix)); } + + /** + * Get the length of the longest common prefix of seq1 and seq2 + * @param seq1 non-null byte array + * @param seq2 non-null byte array + * @param maxLength the maximum allowed length to return + * @return the length of the longest common prefix of seq1 and seq2, >= 0 + */ + public static int longestCommonPrefix(final byte[] seq1, final byte[] seq2, final int maxLength) { + if ( seq1 == null ) throw new IllegalArgumentException("seq1 is null"); + if ( seq2 == null ) throw new IllegalArgumentException("seq2 is null"); + if ( maxLength < 0 ) throw new IllegalArgumentException("maxLength < 0 " + maxLength); + + final int end = Math.min(seq1.length, Math.min(seq2.length, maxLength)); + for ( int i = 0; i < end; i++ ) { + if ( seq1[i] != seq2[i] ) + return i; + } + return end; + } + + /** + * Get the length of the longest common suffix of seq1 and seq2 + * @param seq1 non-null byte array + * @param seq2 non-null byte array + * @param maxLength the maximum allowed length to return + * @return the length of the longest common suffix of seq1 and seq2, >= 0 + */ + public static int longestCommonSuffix(final byte[] seq1, final byte[] seq2, final int maxLength) { + if ( seq1 == null ) throw new IllegalArgumentException("seq1 is null"); + if ( seq2 == null ) throw new IllegalArgumentException("seq2 is null"); + if ( maxLength < 0 ) throw new IllegalArgumentException("maxLength < 0 " + maxLength); + + final int end = Math.min(seq1.length, Math.min(seq2.length, maxLength)); + 
for ( int i = 0; i < end; i++ ) { + if ( seq1[seq1.length - i - 1] != seq2[seq2.length - i - 1] ) + return i; + } + return end; + } + + /** + * Trim any number of bases from the front and/or back of an array + * + * @param seq the sequence to trim + * @param trimFromFront how much to trim from the front + * @param trimFromBack how much to trim from the back + * @return a non-null array; can be the original array (i.e. not a copy) + */ + public static byte[] trimArray(final byte[] seq, final int trimFromFront, final int trimFromBack) { + if ( trimFromFront + trimFromBack > seq.length ) + throw new IllegalArgumentException("trimming total is larger than the original array"); + + // don't perform array copies if we need to copy everything anyways + return ( trimFromFront == 0 && trimFromBack == 0 ) ? seq : Arrays.copyOfRange(seq, trimFromFront, seq.length - trimFromBack); + } } diff --git a/public/java/src/org/broadinstitute/sting/utils/activeregion/ActiveRegion.java b/public/java/src/org/broadinstitute/sting/utils/activeregion/ActiveRegion.java index 2f4c1b55d..7f2fe6833 100644 --- a/public/java/src/org/broadinstitute/sting/utils/activeregion/ActiveRegion.java +++ b/public/java/src/org/broadinstitute/sting/utils/activeregion/ActiveRegion.java @@ -336,13 +336,17 @@ public class ActiveRegion implements HasGenomeLocation { /** * Remove all of the reads in readsToRemove from this active region - * @param readsToRemove the collection of reads we want to remove + * @param readsToRemove the set of reads we want to remove */ - public void removeAll( final Collection readsToRemove ) { - reads.removeAll(readsToRemove); + public void removeAll( final Set readsToRemove ) { + final Iterator it = reads.iterator(); spanIncludingReads = extendedLoc; - for ( final GATKSAMRecord read : reads ) { - spanIncludingReads = spanIncludingReads.union( genomeLocParser.createGenomeLoc(read) ); + while ( it.hasNext() ) { + final GATKSAMRecord read = it.next(); + if ( readsToRemove.contains(read) 
) + it.remove(); + else + spanIncludingReads = spanIncludingReads.union( genomeLocParser.createGenomeLoc(read) ); } } diff --git a/public/java/src/org/broadinstitute/sting/utils/activeregion/BandPassActivityProfile.java b/public/java/src/org/broadinstitute/sting/utils/activeregion/BandPassActivityProfile.java index f2bc86dfc..f352bc332 100644 --- a/public/java/src/org/broadinstitute/sting/utils/activeregion/BandPassActivityProfile.java +++ b/public/java/src/org/broadinstitute/sting/utils/activeregion/BandPassActivityProfile.java @@ -31,7 +31,6 @@ import org.broadinstitute.sting.utils.GenomeLocParser; import org.broadinstitute.sting.utils.GenomeLocSortedSet; import org.broadinstitute.sting.utils.MathUtils; -import java.util.ArrayList; import java.util.Collection; import java.util.LinkedList; @@ -108,7 +107,7 @@ public class BandPassActivityProfile extends ActivityProfile { final int bandSize = 2 * filterSize + 1; final double[] kernel = new double[bandSize]; for( int iii = 0; iii < bandSize; iii++ ) { - kernel[iii] = MathUtils.NormalDistribution(filterSize, sigma, iii); + kernel[iii] = MathUtils.normalDistribution(filterSize, sigma, iii); } return MathUtils.normalizeFromRealSpace(kernel); } diff --git a/public/java/src/org/broadinstitute/sting/utils/classloader/JVMUtils.java b/public/java/src/org/broadinstitute/sting/utils/classloader/JVMUtils.java index 2f5115dfa..8f4958f6c 100644 --- a/public/java/src/org/broadinstitute/sting/utils/classloader/JVMUtils.java +++ b/public/java/src/org/broadinstitute/sting/utils/classloader/JVMUtils.java @@ -248,4 +248,62 @@ public class JVMUtils { interfaces.add(interfaceClass.getSimpleName()); return Utils.join(", ", interfaces); } + + /** + * Returns the Class that invoked the specified "callee" class by examining the runtime stack. + * The calling class is defined as the first class below the callee class on the stack. 
+ * + * For example, given callee == MyClass and the following runtime stack: + * + * JVMUtils.getCallingClass(MyClass) <-- top + * MyClass.foo() + * MyClass.bar() + * OtherClass.foo() + * OtherClass.bar() + * etc. + * + * this method would return OtherClass, since its methods invoked the methods in MyClass. + * + * Considers only the occurrence of the callee class on the stack that is closest to the top + * (even if there are multiple, non-contiguous occurrences). + * + * @param callee Class object for the class whose calling class we want to locate + * @return Class object for the class that invoked the callee class, or null if + * no calling class was found + * @throws IllegalArgumentException if the callee class is not found on the runtime stack + * @throws IllegalStateException if we get an error while trying to load the Class object for the calling + * class reported on the runtime stack + */ + public static Class getCallingClass( final Class callee ) { + final StackTraceElement[] stackTrace = new Throwable().getStackTrace(); + final String calleeClassName = callee.getName(); + + // Start examining the stack at the second-from-the-top position, to remove + // this method call (ie., the call to getCallingClass() itself) from consideration. + int stackTraceIndex = 1; + + // Find the first occurrence of the callee on the runtime stack. Need to use String comparison + // unfortunately, due to limitations of the StackTraceElement class. + while ( stackTraceIndex < stackTrace.length && ! 
stackTrace[stackTraceIndex].getClassName().equals(calleeClassName) ) { + stackTraceIndex++; + } + + // Make sure we actually found the callee class on the stack + if ( stackTraceIndex == stackTrace.length ) { + throw new IllegalArgumentException(String.format("Specified callee %s is not present on the call stack", callee.getSimpleName())); + } + + // Now find the caller class, which will be the class below the callee on the stack + while ( stackTraceIndex < stackTrace.length && stackTrace[stackTraceIndex].getClassName().equals(calleeClassName) ) { + stackTraceIndex++; + } + + try { + return stackTraceIndex < stackTrace.length ? Class.forName(stackTrace[stackTraceIndex].getClassName()) : null; + } + catch ( ClassNotFoundException e ) { + throw new IllegalStateException(String.format("Could not find caller class %s from the runtime stack in the classpath", + stackTrace[stackTraceIndex].getClassName())); + } + } } diff --git a/public/java/src/org/broadinstitute/sting/utils/clipping/ClippingOp.java b/public/java/src/org/broadinstitute/sting/utils/clipping/ClippingOp.java index f51881e0b..836c16a7e 100644 --- a/public/java/src/org/broadinstitute/sting/utils/clipping/ClippingOp.java +++ b/public/java/src/org/broadinstitute/sting/utils/clipping/ClippingOp.java @@ -35,6 +35,7 @@ import org.broadinstitute.sting.utils.exceptions.UserException; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import java.util.Iterator; +import java.util.List; import java.util.Stack; import java.util.Vector; @@ -193,9 +194,17 @@ public class ClippingOp { unclippedCigar.add(new CigarElement(matchesCount, CigarOperator.MATCH_OR_MISMATCH)); unclipped.setCigar(unclippedCigar); - unclipped.setAlignmentStart(read.getAlignmentStart() + calculateAlignmentStartShift(read.getCigar(), unclippedCigar)); + final int newStart = read.getAlignmentStart() + calculateAlignmentStartShift(read.getCigar(), unclippedCigar); + unclipped.setAlignmentStart(newStart); - return unclipped; + if ( newStart <= 0 ) 
{ + // if the start of the unclipped read occurs before the contig, + // we must hard clip away the bases since we cannot represent reads with + // negative or 0 alignment start values in the SAMRecord (e.g., 0 means unaligned) + return hardClip(unclipped, 0, - newStart); + } else { + return unclipped; + } } /** @@ -334,7 +343,24 @@ public class ClippingOp { return newCigar; } - @Requires({"start <= stop", "start == 0 || stop == read.getReadLength() - 1"}) + /** + * Hard clip bases from read, from start to stop in base coordinates + * + * If start == 0, then we will clip from the front of the read, otherwise we clip + * from the right. If start == 0 and stop == 10, this would clip out the first + * 10 bases of the read. + * + * Note that this function works with reads with negative alignment starts, in order to + * allow us to hardClip reads that have had their soft clips reverted and so might have + * negative alignment starts + * + * Works properly with reduced reads and insertion/deletion base qualities + * + * @param read a non-null read + * @param start a start >= 0 and < read.length + * @param stop a stop >= 0 and < read.length. + * @return a cloned version of read that has been properly trimmed down + */ private GATKSAMRecord hardClip(GATKSAMRecord read, int start, int stop) { final int firstBaseAfterSoftClips = read.getAlignmentStart() - read.getSoftStart(); final int lastBaseBeforeSoftClips = read.getSoftEnd() - read.getSoftStart(); @@ -342,7 +368,6 @@ public class ClippingOp { if (start == firstBaseAfterSoftClips && stop == lastBaseBeforeSoftClips) // note that if the read has no soft clips, these constants will be 0 and read length - 1 (beauty of math). return GATKSAMRecord.emptyRead(read); - // If the read is unmapped there is no Cigar string and neither should we create a new cigar string CigarShift cigarShift = (read.getReadUnmappedFlag()) ? 
new CigarShift(new Cigar(), 0, 0) : hardClipCigar(read.getCigar(), start, stop); @@ -356,7 +381,7 @@ public class ClippingOp { System.arraycopy(read.getReadBases(), copyStart, newBases, 0, newLength); System.arraycopy(read.getBaseQualities(), copyStart, newQuals, 0, newLength); - GATKSAMRecord hardClippedRead; + final GATKSAMRecord hardClippedRead; try { hardClippedRead = (GATKSAMRecord) read.clone(); } catch (CloneNotSupportedException e) { @@ -559,26 +584,34 @@ public class ClippingOp { return new CigarShift(cleanCigar, shiftFromStart, shiftFromEnd); } + /** + * Compute the offset of the first "real" position in the cigar on the genome + * + * This is defined as a first position after a run of Hs followed by a run of Ss + * + * @param cigar A non-null cigar + * @return the offset (from 0) of the first on-genome base + */ + private int calcHardSoftOffset(final Cigar cigar) { + final List elements = cigar.getCigarElements(); + + int size = 0; + int i = 0; + while ( i < elements.size() && elements.get(i).getOperator() == CigarOperator.HARD_CLIP ) { + size += elements.get(i).getLength(); + i++; + } + while ( i < elements.size() && elements.get(i).getOperator() == CigarOperator.SOFT_CLIP ) { + size += elements.get(i).getLength(); + i++; + } + + return size; + } + private int calculateAlignmentStartShift(Cigar oldCigar, Cigar newCigar) { - int newShift = 0; - int oldShift = 0; - - boolean readHasStarted = false; // if the new cigar is composed of S and H only, we have to traverse the entire old cigar to calculate the shift - for (CigarElement cigarElement : newCigar.getCigarElements()) { - if (cigarElement.getOperator() == CigarOperator.HARD_CLIP || cigarElement.getOperator() == CigarOperator.SOFT_CLIP) - newShift += cigarElement.getLength(); - else { - readHasStarted = true; - break; - } - } - - for (CigarElement cigarElement : oldCigar.getCigarElements()) { - if (cigarElement.getOperator() == CigarOperator.HARD_CLIP || cigarElement.getOperator() == 
CigarOperator.SOFT_CLIP) - oldShift += cigarElement.getLength(); - else if (readHasStarted) - break; - } + final int newShift = calcHardSoftOffset(newCigar); + final int oldShift = calcHardSoftOffset(oldCigar); return newShift - oldShift; } diff --git a/public/java/src/org/broadinstitute/sting/utils/exceptions/UserException.java b/public/java/src/org/broadinstitute/sting/utils/exceptions/UserException.java index 3abe5a7f4..6126116c2 100644 --- a/public/java/src/org/broadinstitute/sting/utils/exceptions/UserException.java +++ b/public/java/src/org/broadinstitute/sting/utils/exceptions/UserException.java @@ -25,6 +25,7 @@ package org.broadinstitute.sting.utils.exceptions; +import net.sf.samtools.CigarOperator; import net.sf.samtools.SAMFileHeader; import net.sf.samtools.SAMRecord; import net.sf.samtools.SAMSequenceDictionary; @@ -87,6 +88,19 @@ public class UserException extends ReviewedStingException { } } + public static class UnsupportedCigarOperatorException extends UserException { + public UnsupportedCigarOperatorException(final CigarOperator co, final SAMRecord read, final String message) { + super(String.format( + "Unsupported CIGAR operator %s in read %s at %s:%d. 
%s", + co, + read.getReadName(), + read.getReferenceName(), + read.getAlignmentStart(), + message)); + } + } + + public static class MalformedGenomeLoc extends UserException { public MalformedGenomeLoc(String message, GenomeLoc loc) { super(String.format("Badly formed genome loc: %s: %s", message, loc)); @@ -457,4 +471,10 @@ public class UserException extends ReviewedStingException { super(message,innerException); } } + + public static class IncompatibleRecalibrationTableParameters extends UserException { + public IncompatibleRecalibrationTableParameters(String s) { + super(s); + } + } } diff --git a/public/java/src/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMap.java b/public/java/src/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMap.java index 150e24c51..70be85f54 100644 --- a/public/java/src/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMap.java +++ b/public/java/src/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMap.java @@ -42,13 +42,13 @@ import java.util.*; * For each read, this holds underlying alleles represented by an aligned read, and corresponding relative likelihood. */ public class PerReadAlleleLikelihoodMap { - protected final List alleles; - protected final Map> likelihoodReadMap; + /** A set of all of the allele, so we can efficiently determine if an allele is already present */ + private final Set allelesSet = new HashSet<>(); + /** A list of the unique allele, as an ArrayList so we can call get(i) efficiently */ + protected final List alleles = new ArrayList<>(); + protected final Map> likelihoodReadMap = new LinkedHashMap<>(); - public PerReadAlleleLikelihoodMap() { - likelihoodReadMap = new LinkedHashMap>(); - alleles = new ArrayList(); - } + public PerReadAlleleLikelihoodMap() { } /** * Add a new entry into the Read -> ( Allele -> Likelihood ) map of maps. 
@@ -61,18 +61,20 @@ public class PerReadAlleleLikelihoodMap { if ( a == null ) throw new IllegalArgumentException("Cannot add a null allele to the allele likelihood map"); if ( likelihood == null ) throw new IllegalArgumentException("Likelihood cannot be null"); if ( likelihood > 0.0 ) throw new IllegalArgumentException("Likelihood must be negative (L = log(p))"); + Map likelihoodMap = likelihoodReadMap.get(read); if (likelihoodMap == null){ // LinkedHashMap will ensure iterating through alleles will be in consistent order - likelihoodMap = new LinkedHashMap(); + likelihoodMap = new LinkedHashMap<>(); + likelihoodReadMap.put(read,likelihoodMap); } - likelihoodReadMap.put(read,likelihoodMap); likelihoodMap.put(a,likelihood); - if (!alleles.contains(a)) + if (!allelesSet.contains(a)) { + allelesSet.add(a); alleles.add(a); - + } } public ReadBackedPileup createPerAlleleDownsampledBasePileup(final ReadBackedPileup pileup, final double downsamplingFraction) { @@ -165,6 +167,7 @@ public class PerReadAlleleLikelihoodMap { } public void clear() { + allelesSet.clear(); alleles.clear(); likelihoodReadMap.clear(); } @@ -218,7 +221,7 @@ public class PerReadAlleleLikelihoodMap { final int count = ReadUtils.getMeanRepresentativeReadCount(read); final double likelihood_iii = entry.getValue().get(iii_allele); final double likelihood_jjj = entry.getValue().get(jjj_allele); - haplotypeLikelihood += count * (MathUtils.approximateLog10SumLog10(likelihood_iii, likelihood_jjj) + LOG_ONE_HALF); + haplotypeLikelihood += count * (MathUtils.approximateLog10SumLog10(likelihood_iii, likelihood_jjj) + MathUtils.LOG_ONE_HALF); // fast exit. 
If this diploid pair is already worse than the max, just stop and look at the next pair if ( haplotypeLikelihood < maxElement ) break; @@ -238,7 +241,6 @@ public class PerReadAlleleLikelihoodMap { return new MostLikelyAllele(alleles.get(hap1), alleles.get(hap2), maxElement, maxElement); } - private static final double LOG_ONE_HALF = -Math.log10(2.0); /** * Given a map from alleles to likelihoods, find the allele with the largest likelihood. @@ -319,7 +321,7 @@ public class PerReadAlleleLikelihoodMap { * @return the list of reads removed from this map because they are poorly modelled */ public List filterPoorlyModelledReads(final double maxErrorRatePerBase) { - final List removedReads = new LinkedList(); + final List removedReads = new LinkedList<>(); final Iterator>> it = likelihoodReadMap.entrySet().iterator(); while ( it.hasNext() ) { final Map.Entry> record = it.next(); @@ -354,8 +356,8 @@ public class PerReadAlleleLikelihoodMap { * @return true if none of the log10 likelihoods imply that the read truly originated from one of the haplotypes */ protected boolean readIsPoorlyModelled(final GATKSAMRecord read, final Collection log10Likelihoods, final double maxErrorRatePerBase) { - final double maxErrorsForRead = Math.ceil(read.getReadLength() * maxErrorRatePerBase); - final double log10QualPerBase = -3.0; + final double maxErrorsForRead = Math.min(2.0, Math.ceil(read.getReadLength() * maxErrorRatePerBase)); + final double log10QualPerBase = -4.0; final double log10MaxLikelihoodForTrueAllele = maxErrorsForRead * log10QualPerBase; for ( final double log10Likelihood : log10Likelihoods ) @@ -364,4 +366,12 @@ public class PerReadAlleleLikelihoodMap { return true; } + + /** + * Get an unmodifiable set of the unique alleles in this PerReadAlleleLikelihoodMap + * @return a non-null unmodifiable map + */ + public Set getAllelesSet() { + return Collections.unmodifiableSet(allelesSet); + } } diff --git a/public/java/src/org/broadinstitute/sting/utils/haplotype/Haplotype.java 
b/public/java/src/org/broadinstitute/sting/utils/haplotype/Haplotype.java index bacee7942..1f932b222 100644 --- a/public/java/src/org/broadinstitute/sting/utils/haplotype/Haplotype.java +++ b/public/java/src/org/broadinstitute/sting/utils/haplotype/Haplotype.java @@ -46,7 +46,6 @@ public class Haplotype extends Allele { private EventMap eventMap = null; private Cigar cigar; private int alignmentStartHapwrtRef; - private Event artificialEvent = null; private double score = 0; /** @@ -93,11 +92,6 @@ public class Haplotype extends Allele { super(allele, true); } - protected Haplotype( final byte[] bases, final Event artificialEvent ) { - this(bases, false); - this.artificialEvent = artificialEvent; - } - public Haplotype( final byte[] bases, final GenomeLoc loc ) { this(bases, false); this.genomeLocation = loc; @@ -189,7 +183,7 @@ public class Haplotype extends Allele { } /** - * Get the cigar for this haplotype. Note that cigar is guarenteed to be consolidated + * Get the cigar for this haplotype. 
Note that the cigar is guaranteed to be consolidated * in that multiple adjacent equal operates will have been merged * @return the cigar of this haplotype */ @@ -223,30 +217,6 @@ public class Haplotype extends Allele { throw new IllegalArgumentException("Read length " + length() + " not equal to the read length of the cigar " + cigar.getReadLength()); } - public boolean isArtificialHaplotype() { - return artificialEvent != null; - } - - public Event getArtificialEvent() { - return artificialEvent; - } - - public Allele getArtificialRefAllele() { - return artificialEvent.ref; - } - - public Allele getArtificialAltAllele() { - return artificialEvent.alt; - } - - public int getArtificialAllelePosition() { - return artificialEvent.pos; - } - - public void setArtificialEvent( final Event artificialEvent ) { - this.artificialEvent = artificialEvent; - } - @Requires({"refInsertLocation >= 0"}) public Haplotype insertAllele( final Allele refAllele, final Allele altAllele, final int refInsertLocation, final int genomicInsertLocation ) { // refInsertLocation is in ref haplotype offset coordinates NOT genomic coordinates @@ -260,7 +230,7 @@ public class Haplotype extends Allele { newHaplotypeBases = ArrayUtils.addAll(newHaplotypeBases, ArrayUtils.subarray(myBases, 0, haplotypeInsertLocation)); // bases before the variant newHaplotypeBases = ArrayUtils.addAll(newHaplotypeBases, altAllele.getBases()); // the alt allele of the variant newHaplotypeBases = ArrayUtils.addAll(newHaplotypeBases, ArrayUtils.subarray(myBases, haplotypeInsertLocation + refAllele.length(), myBases.length)); // bases after the variant - return new Haplotype(newHaplotypeBases, new Event(refAllele, altAllele, genomicInsertLocation)); + return new Haplotype(newHaplotypeBases); } public static LinkedHashMap makeHaplotypeListFromAlleles(final List alleleList, diff --git a/public/java/src/org/broadinstitute/sting/utils/help/DocletUtils.java b/public/java/src/org/broadinstitute/sting/utils/help/DocletUtils.java 
new file mode 100644 index 000000000..1e9a37cb7 --- /dev/null +++ b/public/java/src/org/broadinstitute/sting/utils/help/DocletUtils.java @@ -0,0 +1,76 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. +*/ + +package org.broadinstitute.sting.utils.help; + +import com.sun.javadoc.FieldDoc; +import com.sun.javadoc.PackageDoc; +import com.sun.javadoc.ProgramElementDoc; +import org.broadinstitute.sting.utils.classloader.JVMUtils; + +import java.lang.reflect.Field; + +/** + * Methods in the class must ONLY be used by doclets, since the com.sun.javadoc.* classes are not + * available on all systems, and we don't want the GATK proper to depend on them. 
+ */ +public class DocletUtils { + + protected static boolean assignableToClass(ProgramElementDoc classDoc, Class lhsClass, boolean requireConcrete) { + try { + Class type = getClassForDoc(classDoc); + return lhsClass.isAssignableFrom(type) && (!requireConcrete || JVMUtils.isConcrete(type)); + } catch (Throwable t) { + // Ignore errors. + return false; + } + } + + protected static Class getClassForDoc(ProgramElementDoc doc) throws ClassNotFoundException { + return Class.forName(getClassName(doc)); + } + + protected static Field getFieldForFieldDoc(FieldDoc fieldDoc) { + try { + Class clazz = getClassForDoc(fieldDoc.containingClass()); + return JVMUtils.findField(clazz, fieldDoc.name()); + } catch (ClassNotFoundException e) { + throw new RuntimeException(e); + } + } + + /** + * Reconstitute the class name from the given class JavaDoc object. + * + * @param doc the Javadoc model for the given class. + * @return The (string) class name of the given class. + */ + protected static String getClassName(ProgramElementDoc doc) { + PackageDoc containingPackage = doc.containingPackage(); + return containingPackage.name().length() > 0 ? + String.format("%s.%s", containingPackage.name(), doc.name()) : + String.format("%s", doc.name()); + } +} \ No newline at end of file diff --git a/public/java/src/org/broadinstitute/sting/utils/help/GATKDoclet.java b/public/java/src/org/broadinstitute/sting/utils/help/GATKDoclet.java index 677bbf2e5..63cb0900a 100644 --- a/public/java/src/org/broadinstitute/sting/utils/help/GATKDoclet.java +++ b/public/java/src/org/broadinstitute/sting/utils/help/GATKDoclet.java @@ -352,7 +352,7 @@ public class GATKDoclet { private Class getClassForClassDoc(ClassDoc doc) { try { // todo -- what do I need the ? extends Object to pass the compiler? 
- return (Class) HelpUtils.getClassForDoc(doc); + return (Class) DocletUtils.getClassForDoc(doc); } catch (ClassNotFoundException e) { //logger.warn("Couldn't find class for ClassDoc " + doc); // we got a classdoc for a class we can't find. Maybe in a library or something diff --git a/public/java/src/org/broadinstitute/sting/utils/help/GenericDocumentationHandler.java b/public/java/src/org/broadinstitute/sting/utils/help/GenericDocumentationHandler.java index 1711a3923..02c269495 100644 --- a/public/java/src/org/broadinstitute/sting/utils/help/GenericDocumentationHandler.java +++ b/public/java/src/org/broadinstitute/sting/utils/help/GenericDocumentationHandler.java @@ -68,7 +68,7 @@ public class GenericDocumentationHandler extends DocumentedGATKFeatureHandler { @Override public boolean includeInDocs(ClassDoc doc) { try { - Class type = HelpUtils.getClassForDoc(doc); + Class type = DocletUtils.getClassForDoc(doc); boolean hidden = !getDoclet().showHiddenFeatures() && type.isAnnotationPresent(Hidden.class); return !hidden && JVMUtils.isConcrete(type); } catch (ClassNotFoundException e) { @@ -157,7 +157,7 @@ public class GenericDocumentationHandler extends DocumentedGATKFeatureHandler { root.put("arguments", args); try { // loop over all of the arguments according to the parsing engine - for (final ArgumentSource argumentSource : parsingEngine.extractArgumentSources(HelpUtils.getClassForDoc(toProcess.classDoc))) { + for (final ArgumentSource argumentSource : parsingEngine.extractArgumentSources(DocletUtils.getClassForDoc(toProcess.classDoc))) { // todo -- why can you have multiple ones? 
ArgumentDefinition argDef = argumentSource.createArgumentDefinitions().get(0); FieldDoc fieldDoc = getFieldDoc(toProcess.classDoc, argumentSource.field.getName()); @@ -663,7 +663,7 @@ public class GenericDocumentationHandler extends DocumentedGATKFeatureHandler { if (fieldDoc.name().equals(name)) return fieldDoc; - Field field = HelpUtils.getFieldForFieldDoc(fieldDoc); + Field field = DocletUtils.getFieldForFieldDoc(fieldDoc); if (field == null) throw new RuntimeException("Could not find the field corresponding to " + fieldDoc + ", presumably because the field is inaccessible"); if (field.isAnnotationPresent(ArgumentCollection.class)) { diff --git a/public/java/src/org/broadinstitute/sting/utils/help/HelpUtils.java b/public/java/src/org/broadinstitute/sting/utils/help/HelpUtils.java index 9a23fd022..74516672d 100644 --- a/public/java/src/org/broadinstitute/sting/utils/help/HelpUtils.java +++ b/public/java/src/org/broadinstitute/sting/utils/help/HelpUtils.java @@ -25,57 +25,20 @@ package org.broadinstitute.sting.utils.help; -import com.sun.javadoc.FieldDoc; -import com.sun.javadoc.PackageDoc; -import com.sun.javadoc.ProgramElementDoc; import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.AnnotationType; import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.GenotypeAnnotation; import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.InfoFieldAnnotation; import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.StandardAnnotation; -import org.broadinstitute.sting.utils.classloader.JVMUtils; import org.broadinstitute.sting.utils.classloader.PluginManager; -import java.lang.reflect.Field; import java.util.List; +/** + * NON-javadoc/doclet help-related utility methods should go here. Anything with a com.sun.javadoc.* dependency + * should go into DocletUtils for use only by doclets. 
+ */ public class HelpUtils { - protected static boolean assignableToClass(ProgramElementDoc classDoc, Class lhsClass, boolean requireConcrete) { - try { - Class type = getClassForDoc(classDoc); - return lhsClass.isAssignableFrom(type) && (!requireConcrete || JVMUtils.isConcrete(type)); - } catch (Throwable t) { - // Ignore errors. - return false; - } - } - - protected static Class getClassForDoc(ProgramElementDoc doc) throws ClassNotFoundException { - return Class.forName(getClassName(doc)); - } - - protected static Field getFieldForFieldDoc(FieldDoc fieldDoc) { - try { - Class clazz = getClassForDoc(fieldDoc.containingClass()); - return JVMUtils.findField(clazz, fieldDoc.name()); - } catch (ClassNotFoundException e) { - throw new RuntimeException(e); - } - } - - /** - * Reconstitute the class name from the given class JavaDoc object. - * - * @param doc the Javadoc model for the given class. - * @return The (string) class name of the given class. - */ - protected static String getClassName(ProgramElementDoc doc) { - PackageDoc containingPackage = doc.containingPackage(); - return containingPackage.name().length() > 0 ? - String.format("%s.%s", containingPackage.name(), doc.name()) : - String.format("%s", doc.name()); - } - /** * Simple method to print a list of available annotations. 
*/ @@ -98,5 +61,4 @@ public class HelpUtils { System.out.println("\t" + c.getSimpleName()); System.out.println(); } - -} \ No newline at end of file +} diff --git a/public/java/src/org/broadinstitute/sting/utils/help/ResourceBundleExtractorDoclet.java b/public/java/src/org/broadinstitute/sting/utils/help/ResourceBundleExtractorDoclet.java index 0f2383b4b..ac85d7aff 100644 --- a/public/java/src/org/broadinstitute/sting/utils/help/ResourceBundleExtractorDoclet.java +++ b/public/java/src/org/broadinstitute/sting/utils/help/ResourceBundleExtractorDoclet.java @@ -108,7 +108,7 @@ public class ResourceBundleExtractorDoclet { if(isRequiredJavadocMissing(currentClass) && isWalker(currentClass)) undocumentedWalkers.add(currentClass.name()); - renderHelpText(HelpUtils.getClassName(currentClass),currentClass); + renderHelpText(DocletUtils.getClassName(currentClass),currentClass); } for(PackageDoc currentPackage: packages) @@ -173,7 +173,7 @@ public class ResourceBundleExtractorDoclet { * @return True if the class of the given name is a walker. False otherwise. */ protected static boolean isWalker(ClassDoc classDoc) { - return HelpUtils.assignableToClass(classDoc, Walker.class, true); + return DocletUtils.assignableToClass(classDoc, Walker.class, true); } /** diff --git a/public/java/src/org/broadinstitute/sting/utils/locusiterator/AlignmentStateMachine.java b/public/java/src/org/broadinstitute/sting/utils/locusiterator/AlignmentStateMachine.java index c4b566582..86f3500be 100644 --- a/public/java/src/org/broadinstitute/sting/utils/locusiterator/AlignmentStateMachine.java +++ b/public/java/src/org/broadinstitute/sting/utils/locusiterator/AlignmentStateMachine.java @@ -123,6 +123,15 @@ public class AlignmentStateMachine { return getRead().getReferenceIndex(); } + /** + * Is our read a reduced read? 
+ * + * @return true if the read we encapsulate is a reduced read, otherwise false + */ + public boolean isReducedRead() { + return read.isReducedRead(); + } + /** * Is this the left edge state? I.e., one that is before or after the current read? * @return true if this state is an edge state, false otherwise diff --git a/public/java/src/org/broadinstitute/sting/utils/locusiterator/PerSampleReadStateManager.java b/public/java/src/org/broadinstitute/sting/utils/locusiterator/PerSampleReadStateManager.java index 2caaf9d27..669e76adc 100644 --- a/public/java/src/org/broadinstitute/sting/utils/locusiterator/PerSampleReadStateManager.java +++ b/public/java/src/org/broadinstitute/sting/utils/locusiterator/PerSampleReadStateManager.java @@ -167,7 +167,7 @@ final class PerSampleReadStateManager implements Iterable // use returned List directly rather than make a copy, for efficiency's sake readStatesByAlignmentStart = flattenByAlignmentStart(levelingDownsampler.consumeFinalizedItems()); - levelingDownsampler.reset(); + levelingDownsampler.resetStats(); } return nStatesAdded; diff --git a/public/java/src/org/broadinstitute/sting/utils/locusiterator/SamplePartitioner.java b/public/java/src/org/broadinstitute/sting/utils/locusiterator/SamplePartitioner.java index 49a8d10aa..9122beebb 100644 --- a/public/java/src/org/broadinstitute/sting/utils/locusiterator/SamplePartitioner.java +++ b/public/java/src/org/broadinstitute/sting/utils/locusiterator/SamplePartitioner.java @@ -164,8 +164,8 @@ class SamplePartitioner { @Ensures("doneSubmittingReads == false") public void reset() { for ( final Downsampler downsampler : readsBySample.values() ) { - downsampler.clear(); - downsampler.reset(); + downsampler.clearItems(); + downsampler.resetStats(); } doneSubmittingReads = false; } diff --git a/public/java/src/org/broadinstitute/sting/utils/pairhmm/Log10PairHMM.java b/public/java/src/org/broadinstitute/sting/utils/pairhmm/Log10PairHMM.java index ab6c321e8..ddc1a4559 100644 --- 
a/public/java/src/org/broadinstitute/sting/utils/pairhmm/Log10PairHMM.java +++ b/public/java/src/org/broadinstitute/sting/utils/pairhmm/Log10PairHMM.java @@ -38,7 +38,7 @@ import java.util.Arrays; * User: rpoplin, carneiro * Date: 3/1/12 */ -public final class Log10PairHMM extends PairHMM { +public final class Log10PairHMM extends N2MemoryPairHMM { /** * Should we use exact log10 calculation (true), or an approximation (false)? */ diff --git a/public/java/src/org/broadinstitute/sting/utils/pairhmm/N2MemoryPairHMM.java b/public/java/src/org/broadinstitute/sting/utils/pairhmm/N2MemoryPairHMM.java new file mode 100644 index 000000000..a091a0716 --- /dev/null +++ b/public/java/src/org/broadinstitute/sting/utils/pairhmm/N2MemoryPairHMM.java @@ -0,0 +1,91 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+*/ + +package org.broadinstitute.sting.utils.pairhmm; + +import com.google.java.contract.Requires; +import org.apache.log4j.Logger; +import org.broadinstitute.sting.utils.MathUtils; + +import java.util.Arrays; + +/** + * Superclass for PairHMM that want to use a full read x haplotype matrix for their match, insertion, and deletion matrix + * + * User: rpoplin + * Date: 10/16/12 + */ +abstract class N2MemoryPairHMM extends PairHMM { + protected double[][] transition = null; // The transition probabilities cache + protected double[][] prior = null; // The prior probabilities cache + protected double[][] matchMatrix = null; + protected double[][] insertionMatrix = null; + protected double[][] deletionMatrix = null; + + /** + * Initialize this PairHMM, making it suitable to run against a read and haplotype with given lengths + * + * Note: Do not worry about padding, just provide the true max length of the read and haplotype. The HMM will take care of the padding. + * + * @param haplotypeMaxLength the max length of haplotypes we want to use with this PairHMM + * @param readMaxLength the max length of reads we want to use with this PairHMM + */ + public void initialize( final int readMaxLength, final int haplotypeMaxLength ) { + super.initialize(readMaxLength, haplotypeMaxLength); + + matchMatrix = new double[paddedMaxReadLength][paddedMaxHaplotypeLength]; + insertionMatrix = new double[paddedMaxReadLength][paddedMaxHaplotypeLength]; + deletionMatrix = new double[paddedMaxReadLength][paddedMaxHaplotypeLength]; + } + + /** + * Print out the core hmm matrices for debugging + */ + protected void dumpMatrices() { + dumpMatrix("matchMetricArray", matchMatrix); + dumpMatrix("insertionMatrix", insertionMatrix); + dumpMatrix("deletionMatrix", deletionMatrix); + } + + /** + * Print out in a human readable form the matrix for debugging + * @param name the name of this matrix + * @param matrix the matrix of values + */ + @Requires({"name != null", "matrix != null"}) + private void 
dumpMatrix(final String name, final double[][] matrix) { + System.out.printf("%s%n", name); + for ( int i = 0; i < matrix.length; i++) { + System.out.printf("\t%s[%d]", name, i); + for ( int j = 0; j < matrix[i].length; j++ ) { + if ( Double.isInfinite(matrix[i][j]) ) + System.out.printf(" %15s", String.format("%f", matrix[i][j])); + else + System.out.printf(" % 15.5e", matrix[i][j]); + } + System.out.println(); + } + } +} diff --git a/public/java/src/org/broadinstitute/sting/utils/pairhmm/PairHMM.java b/public/java/src/org/broadinstitute/sting/utils/pairhmm/PairHMM.java index 6b57a1354..85ac97f95 100644 --- a/public/java/src/org/broadinstitute/sting/utils/pairhmm/PairHMM.java +++ b/public/java/src/org/broadinstitute/sting/utils/pairhmm/PairHMM.java @@ -40,8 +40,6 @@ import java.util.Arrays; public abstract class PairHMM { protected final static Logger logger = Logger.getLogger(PairHMM.class); - protected double[][] transition = null; // The transition probabilities cache - protected double[][] prior = null; // The prior probabilities cache protected boolean constantsAreInitialized = false; protected byte[] previousHaplotypeBases; @@ -52,12 +50,9 @@ public abstract class PairHMM { /* PairHMM as implemented for the UnifiedGenotyper. 
Uses log10 sum functions accurate to only 1E-4 */ ORIGINAL, /* Optimized version of the PairHMM which caches per-read computations and operations in real space to avoid costly sums of log10'ed likelihoods */ - LOGLESS_CACHING + LOGLESS_CACHING, } - protected double[][] matchMatrix = null; - protected double[][] insertionMatrix = null; - protected double[][] deletionMatrix = null; protected int maxHaplotypeLength, maxReadLength; protected int paddedMaxReadLength, paddedMaxHaplotypeLength; protected int paddedReadLength, paddedHaplotypeLength; @@ -82,18 +77,12 @@ public abstract class PairHMM { paddedMaxReadLength = readMaxLength + 1; paddedMaxHaplotypeLength = haplotypeMaxLength + 1; - matchMatrix = new double[paddedMaxReadLength][paddedMaxHaplotypeLength]; - insertionMatrix = new double[paddedMaxReadLength][paddedMaxHaplotypeLength]; - deletionMatrix = new double[paddedMaxReadLength][paddedMaxHaplotypeLength]; - previousHaplotypeBases = null; constantsAreInitialized = false; initialized = true; } - - /** * Compute the total probability of read arising from haplotypeBases given base substitution, insertion, and deletion * probabilities. 
@@ -152,44 +141,15 @@ public abstract class PairHMM { * To be overloaded by subclasses to actually do calculation for #computeReadLikelihoodGivenHaplotypeLog10 */ @Requires({"readBases.length == readQuals.length", "readBases.length == insertionGOP.length", "readBases.length == deletionGOP.length", - "readBases.length == overallGCP.length", "matchMatrix!=null", "insertionMatrix!=null", "deletionMatrix!=null"}) + "readBases.length == overallGCP.length", "matchMatrix!=null", "insertionMatrix!=null", "deletionMatrix!=null"}) protected abstract double subComputeReadLikelihoodGivenHaplotypeLog10( final byte[] haplotypeBases, - final byte[] readBases, - final byte[] readQuals, - final byte[] insertionGOP, - final byte[] deletionGOP, - final byte[] overallGCP, - final int hapStartIndex, - final boolean recacheReadValues ); - - /** - * Print out the core hmm matrices for debugging - */ - protected void dumpMatrices() { - dumpMatrix("matchMetricArray", matchMatrix); - dumpMatrix("insertionMatrix", insertionMatrix); - dumpMatrix("deletionMatrix", deletionMatrix); - } - - /** - * Print out in a human readable form the matrix for debugging - * @param name the name of this matrix - * @param matrix the matrix of values - */ - @Requires({"name != null", "matrix != null"}) - private void dumpMatrix(final String name, final double[][] matrix) { - System.out.printf("%s%n", name); - for ( int i = 0; i < matrix.length; i++) { - System.out.printf("\t%s[%d]", name, i); - for ( int j = 0; j < matrix[i].length; j++ ) { - if ( Double.isInfinite(matrix[i][j]) ) - System.out.printf(" %15s", String.format("%f", matrix[i][j])); - else - System.out.printf(" % 15.5e", matrix[i][j]); - } - System.out.println(); - } - } + final byte[] readBases, + final byte[] readQuals, + final byte[] insertionGOP, + final byte[] deletionGOP, + final byte[] overallGCP, + final int hapStartIndex, + final boolean recacheReadValues ); /** * Compute the first position at which two haplotypes differ diff --git 
a/public/java/src/org/broadinstitute/sting/utils/progressmeter/ProgressMeter.java b/public/java/src/org/broadinstitute/sting/utils/progressmeter/ProgressMeter.java index f76490552..9d1011c8f 100644 --- a/public/java/src/org/broadinstitute/sting/utils/progressmeter/ProgressMeter.java +++ b/public/java/src/org/broadinstitute/sting/utils/progressmeter/ProgressMeter.java @@ -149,6 +149,12 @@ public class ProgressMeter { private Position position = new Position(PositionStatus.STARTING); private long nTotalRecordsProcessed = 0; + /** + * The elapsed time in nanosecond, updated by the daemon thread, so that + * we don't pay any system call overhead to determine the the elapsed time. + */ + private long elapsedTimeInNanosecondUpdatedByDaemon = 0; + final ProgressMeterDaemon progressMeterDaemon; /** @@ -225,6 +231,36 @@ public class ProgressMeter { return timer.getElapsedTimeNano(); } + /** + * This function is just like getRuntimeInNanoseconds but it doesn't actually query the + * system timer to determine the value, but rather uses a local variable in this meter + * that is updated by the daemon thread. This means that the result is ridiculously imprecise + * for a nanosecond value (as it's only updated each pollingFrequency of the daemon) but + * it is free for clients to access, which can be critical when one wants to do tests like: + * + * for some work unit: + * do unit if getRuntimeInNanosecondsUpdatedPeriodically < X + * + * and have this operation eventually timeout but don't want to pay the system call time to + * ensure that the loop exits as soon as the elapsed time exceeds X + * + * @return the current runtime in nanoseconds + */ + @Ensures("result >= 0") + public long getRuntimeInNanosecondsUpdatedPeriodically() { + return elapsedTimeInNanosecondUpdatedByDaemon; + } + + /** + * Update the period runtime variable to the current runtime in nanoseconds. 
Should only + * be called by the daemon thread + */ + protected void updateElapsedTimeInNanoseconds() { + elapsedTimeInNanosecondUpdatedByDaemon = getRuntimeInNanoseconds(); + } + + + /** * Utility routine that prints out process information (including timing) every N records or * every M seconds, for N and M set in global variables. diff --git a/public/java/src/org/broadinstitute/sting/utils/progressmeter/ProgressMeterDaemon.java b/public/java/src/org/broadinstitute/sting/utils/progressmeter/ProgressMeterDaemon.java index 30abef8b8..38316e537 100644 --- a/public/java/src/org/broadinstitute/sting/utils/progressmeter/ProgressMeterDaemon.java +++ b/public/java/src/org/broadinstitute/sting/utils/progressmeter/ProgressMeterDaemon.java @@ -100,6 +100,7 @@ public final class ProgressMeterDaemon extends Thread { public void run() { while (! done) { meter.printProgress(false); + meter.updateElapsedTimeInNanoseconds(); try { Thread.sleep(getPollFrequencyMilliseconds()); } catch (InterruptedException e) { diff --git a/public/java/src/org/broadinstitute/sting/utils/runtime/ProcessSettings.java b/public/java/src/org/broadinstitute/sting/utils/runtime/ProcessSettings.java index 8aafd6034..659523641 100644 --- a/public/java/src/org/broadinstitute/sting/utils/runtime/ProcessSettings.java +++ b/public/java/src/org/broadinstitute/sting/utils/runtime/ProcessSettings.java @@ -25,7 +25,6 @@ package org.broadinstitute.sting.utils.runtime; -import com.sun.corba.se.spi.orbutil.fsm.Input; import java.io.File; import java.util.Map; diff --git a/public/java/src/org/broadinstitute/sting/utils/sam/AlignmentUtils.java b/public/java/src/org/broadinstitute/sting/utils/sam/AlignmentUtils.java index e48d1ca4c..762ce4858 100644 --- a/public/java/src/org/broadinstitute/sting/utils/sam/AlignmentUtils.java +++ b/public/java/src/org/broadinstitute/sting/utils/sam/AlignmentUtils.java @@ -664,7 +664,7 @@ public final class AlignmentUtils { if ( numIndels == 0 ) return cigar; if ( numIndels == 1 ) - 
return leftAlignSingleIndel(cigar, refSeq, readSeq, refIndex, readIndex); + return leftAlignSingleIndel(cigar, refSeq, readSeq, refIndex, readIndex, true); // if we got here then there is more than 1 indel in the alignment if ( doNotThrowExceptionForMultipleIndels ) @@ -709,10 +709,11 @@ public final class AlignmentUtils { * @param readSeq read sequence * @param refIndex 0-based alignment start position on ref * @param readIndex 0-based alignment start position on read + * @param cleanupCigar if true, we'll cleanup the resulting cigar element, removing 0 length elements and deletions from the first cigar position * @return a non-null cigar, in which the single indel is guaranteed to be placed at the leftmost possible position across a repeat (if any) */ @Ensures("result != null") - public static Cigar leftAlignSingleIndel(Cigar cigar, final byte[] refSeq, final byte[] readSeq, final int refIndex, final int readIndex) { + public static Cigar leftAlignSingleIndel(Cigar cigar, final byte[] refSeq, final byte[] readSeq, final int refIndex, final int readIndex, final boolean cleanupCigar) { ensureLeftAlignmentHasGoodArguments(cigar, refSeq, readSeq, refIndex, readIndex); int indexOfIndel = -1; @@ -751,7 +752,7 @@ public final class AlignmentUtils { cigar = newCigar; i = -1; if (reachedEndOfRead) - cigar = cleanUpCigar(cigar); + cigar = cleanupCigar ? 
cleanUpCigar(cigar) : cigar; } if (reachedEndOfRead) @@ -799,6 +800,23 @@ public final class AlignmentUtils { return new Cigar(elements); } + /** + * Removing a trailing deletion from the incoming cigar if present + * + * @param c the cigar we want to update + * @return a non-null Cigar + */ + @Requires("c != null") + @Ensures("result != null") + public static Cigar removeTrailingDeletions(final Cigar c) { + + final List elements = c.getCigarElements(); + if ( elements.get(elements.size() - 1).getOperator() != CigarOperator.D ) + return c; + + return new Cigar(elements.subList(0, elements.size() - 1)); + } + /** * Move the indel in a given cigar string one base to the left * @@ -933,7 +951,7 @@ public final class AlignmentUtils { */ public static Cigar trimCigarByBases(final Cigar cigar, final int start, final int end) { if ( start < 0 ) throw new IllegalArgumentException("Start must be >= 0 but got " + start); - if ( end < start ) throw new IllegalArgumentException("End " + end + " is < start start " + start); + if ( end < start ) throw new IllegalArgumentException("End " + end + " is < start = " + start); if ( end > cigar.getReadLength() ) throw new IllegalArgumentException("End is beyond the cigar's read length " + end + " for cigar " + cigar ); final Cigar result = trimCigar(cigar, start, end, false); @@ -961,7 +979,7 @@ public final class AlignmentUtils { int pos = 0; for ( final CigarElement elt : cigar.getCigarElements() ) { - if ( pos > end ) break; + if ( pos > end && (byReference || elt.getOperator() != CigarOperator.D) ) break; switch ( elt.getOperator() ) { case D: diff --git a/public/java/src/org/broadinstitute/sting/utils/sam/ArtificialBAMBuilder.java b/public/java/src/org/broadinstitute/sting/utils/sam/ArtificialBAMBuilder.java index bf3045c71..8d496ab96 100644 --- a/public/java/src/org/broadinstitute/sting/utils/sam/ArtificialBAMBuilder.java +++ b/public/java/src/org/broadinstitute/sting/utils/sam/ArtificialBAMBuilder.java @@ -182,6 +182,11 @@ 
public class ArtificialBAMBuilder { try { final File file = File.createTempFile("tempBAM", ".bam"); file.deleteOnExit(); + + // Register the bam index file for deletion on exit as well: + new File(file.getAbsolutePath().replace(".bam", ".bai")).deleteOnExit(); + new File(file.getAbsolutePath() + ".bai").deleteOnExit(); + return makeBAMFile(file); } catch ( IOException e ) { throw new RuntimeException(e); diff --git a/public/java/src/org/broadinstitute/sting/utils/sam/ArtificialSAMUtils.java b/public/java/src/org/broadinstitute/sting/utils/sam/ArtificialSAMUtils.java index b8367a7df..055f8630b 100644 --- a/public/java/src/org/broadinstitute/sting/utils/sam/ArtificialSAMUtils.java +++ b/public/java/src/org/broadinstitute/sting/utils/sam/ArtificialSAMUtils.java @@ -324,6 +324,31 @@ public class ArtificialSAMUtils { return Arrays.asList(left, right); } + /** + * Create an artificial reduced read based on the parameters. The cigar string will be *M, where * is the + * length of the read. The base counts specified in the baseCounts array will be stored fully encoded in + * the RR attribute. + * + * @param header the SAM header to associate the read with + * @param name the name of the read + * @param refIndex the reference index, i.e. 
what chromosome to associate it with + * @param alignmentStart where to start the alignment + * @param length the length of the read + * @param baseCounts reduced base counts to encode in the RR attribute; length must match the read length + * @return the artificial reduced read + */ + public static GATKSAMRecord createArtificialReducedRead( final SAMFileHeader header, + final String name, + final int refIndex, + final int alignmentStart, + final int length, + final int[] baseCounts ) { + final GATKSAMRecord read = createArtificialRead(header, name, refIndex, alignmentStart, length); + read.setReducedReadCounts(baseCounts); + read.setReducedReadCountsTag(); + return read; + } + /** * Create a collection of identical artificial reads based on the parameters. The cigar string for each * read will be *M, where * is the length of the read. diff --git a/public/java/src/org/broadinstitute/sting/utils/sam/ReadUtils.java b/public/java/src/org/broadinstitute/sting/utils/sam/ReadUtils.java index 0eed80f3a..cf1c9cb8e 100644 --- a/public/java/src/org/broadinstitute/sting/utils/sam/ReadUtils.java +++ b/public/java/src/org/broadinstitute/sting/utils/sam/ReadUtils.java @@ -28,6 +28,7 @@ package org.broadinstitute.sting.utils.sam; import com.google.java.contract.Ensures; import com.google.java.contract.Requires; import net.sf.samtools.*; +import org.apache.log4j.Logger; import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; import org.broadinstitute.sting.utils.BaseUtils; import org.broadinstitute.sting.utils.GenomeLoc; @@ -35,6 +36,7 @@ import org.broadinstitute.sting.utils.MathUtils; import org.broadinstitute.sting.utils.NGSPlatform; import org.broadinstitute.sting.utils.collections.Pair; import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; +import org.broadinstitute.sting.utils.exceptions.UserException; import java.io.File; import java.util.*; @@ -47,6 +49,7 @@ import java.util.*; * @version 0.1 */ public class ReadUtils { + private final static Logger 
logger = Logger.getLogger(ReadUtils.class); private static final String OFFSET_OUT_OF_BOUNDS_EXCEPTION = "Offset cannot be greater than read length %d : %d"; private static final String OFFSET_NOT_ZERO_EXCEPTION = "We ran past the end of the read and never found the offset, something went wrong!"; @@ -150,11 +153,18 @@ public class ReadUtils { * @return a SAMFileWriter with the compression level if it is a bam. */ public static SAMFileWriter createSAMFileWriterWithCompression(SAMFileHeader header, boolean presorted, String file, int compression) { + validateCompressionLevel(compression); if (file.endsWith(".bam")) return new SAMFileWriterFactory().makeBAMWriter(header, presorted, new File(file), compression); return new SAMFileWriterFactory().makeSAMOrBAMWriter(header, presorted, new File(file)); } + public static int validateCompressionLevel(final int requestedCompressionLevel) { + if ( requestedCompressionLevel < 0 || requestedCompressionLevel > 9 ) + throw new UserException.BadArgumentValue("compress", "Compression level must be 0-9 but got " + requestedCompressionLevel); + return requestedCompressionLevel; + } + /** * is this base inside the adaptor of the read? 
* @@ -209,7 +219,16 @@ public class ReadUtils { if (insertSize == 0 || read.getReadUnmappedFlag()) // no adaptors in reads with mates in another chromosome or unmapped pairs return CANNOT_COMPUTE_ADAPTOR_BOUNDARY; - + + if ( read.getReadPairedFlag() && read.getReadNegativeStrandFlag() == read.getMateNegativeStrandFlag() ) { + // note that the read.getProperPairFlag() is not reliably set, so many reads may have this tag but still be overlapping +// logger.info(String.format("Read %s start=%d end=%d insert=%d mateStart=%d readNeg=%b mateNeg=%b not properly paired, returning CANNOT_COMPUTE_ADAPTOR_BOUNDARY", +// read.getReadName(), read.getAlignmentStart(), read.getAlignmentEnd(), insertSize, read.getMateAlignmentStart(), +// read.getReadNegativeStrandFlag(), read.getMateNegativeStrandFlag())); + return CANNOT_COMPUTE_ADAPTOR_BOUNDARY; + } + + int adaptorBoundary; // the reference coordinate for the adaptor boundary (effectively the first base IN the adaptor, closest to the read) if (read.getReadNegativeStrandFlag()) adaptorBoundary = read.getMateAlignmentStart() - 1; // case 1 (see header) @@ -218,7 +237,7 @@ public class ReadUtils { if ( (adaptorBoundary < read.getAlignmentStart() - MAXIMUM_ADAPTOR_LENGTH) || (adaptorBoundary > read.getAlignmentEnd() + MAXIMUM_ADAPTOR_LENGTH) ) adaptorBoundary = CANNOT_COMPUTE_ADAPTOR_BOUNDARY; // we are being conservative by not allowing the adaptor boundary to go beyond what we belive is the maximum size of an adaptor - + return adaptorBoundary; } public static int CANNOT_COMPUTE_ADAPTOR_BOUNDARY = Integer.MIN_VALUE; @@ -413,9 +432,9 @@ public class ReadUtils { // clipping the left tail and first base is insertion, go to the next read coordinate // with the same reference coordinate. Advance to the next cigar element, or to the // end of the read if there is no next element. 
- Pair firstElementIsInsertion = readStartsWithInsertion(cigar); - if (readCoord == 0 && tail == ClippingTail.LEFT_TAIL && firstElementIsInsertion.getFirst()) - readCoord = Math.min(firstElementIsInsertion.getSecond().getLength(), cigar.getReadLength() - 1); + final CigarElement firstElementIsInsertion = readStartsWithInsertion(cigar); + if (readCoord == 0 && tail == ClippingTail.LEFT_TAIL && firstElementIsInsertion != null) + readCoord = Math.min(firstElementIsInsertion.getLength(), cigar.getReadLength() - 1); return readCoord; } @@ -584,25 +603,28 @@ public class ReadUtils { } /** - * Checks if a read starts with an insertion. It looks beyond Hard and Soft clips - * if there are any. - * - * @param read - * @return A pair with the answer (true/false) and the element or null if it doesn't exist + * @see #readStartsWithInsertion(net.sf.samtools.Cigar, boolean) with ignoreClipOps set to true */ - public static Pair readStartsWithInsertion(GATKSAMRecord read) { - return readStartsWithInsertion(read.getCigar()); + public static CigarElement readStartsWithInsertion(final Cigar cigarForRead) { + return readStartsWithInsertion(cigarForRead, true); } - public static Pair readStartsWithInsertion(final Cigar cigar) { - for (CigarElement cigarElement : cigar.getCigarElements()) { - if (cigarElement.getOperator() == CigarOperator.INSERTION) - return new Pair(true, cigarElement); + /** + * Checks if a read starts with an insertion. + * + * @param cigarForRead the CIGAR to evaluate + * @param ignoreClipOps should we ignore S and H operators when evaluating whether an I operator is at the beginning? 
+ * @return the element if it's a leading insertion or null otherwise + */ + public static CigarElement readStartsWithInsertion(final Cigar cigarForRead, final boolean ignoreClipOps) { + for ( final CigarElement cigarElement : cigarForRead.getCigarElements() ) { + if ( cigarElement.getOperator() == CigarOperator.INSERTION ) + return cigarElement; - else if (cigarElement.getOperator() != CigarOperator.HARD_CLIP && cigarElement.getOperator() != CigarOperator.SOFT_CLIP) + else if ( !ignoreClipOps || (cigarElement.getOperator() != CigarOperator.HARD_CLIP && cigarElement.getOperator() != CigarOperator.SOFT_CLIP) ) break; } - return new Pair(false, null); + return null; } /** diff --git a/public/java/src/org/broadinstitute/sting/utils/smithwaterman/GlobalEdgeGreedySWPairwiseAlignment.java b/public/java/src/org/broadinstitute/sting/utils/smithwaterman/GlobalEdgeGreedySWPairwiseAlignment.java new file mode 100644 index 000000000..27ead2e48 --- /dev/null +++ b/public/java/src/org/broadinstitute/sting/utils/smithwaterman/GlobalEdgeGreedySWPairwiseAlignment.java @@ -0,0 +1,217 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. +*/ + +package org.broadinstitute.sting.utils.smithwaterman; + +import net.sf.samtools.Cigar; +import net.sf.samtools.CigarElement; +import org.broadinstitute.sting.utils.Utils; +import org.broadinstitute.sting.utils.sam.AlignmentUtils; + +import java.util.*; + +/** + * Pairwise discrete Smith-Waterman alignment with an edge greedy implementation + * + * ************************************************************************ + * **** IMPORTANT NOTE: **** + * **** This class assumes that all bytes come from UPPERCASED chars! **** + * ************************************************************************ + * + * User: ebanks + */ +public final class GlobalEdgeGreedySWPairwiseAlignment extends SWPairwiseAlignment { + + private final static boolean DEBUG_MODE = false; + + /** + * Create a new greedy SW pairwise aligner + * + * @param reference the reference sequence we want to align + * @param alternate the alternate sequence we want to align + * @param parameters the SW parameters to use + */ + public GlobalEdgeGreedySWPairwiseAlignment(final byte[] reference, final byte[] alternate, final Parameters parameters) { + super(reference, alternate, parameters); + } + + /** + * Create a new SW pairwise aligner + * + * After creating the object the two sequences are aligned with an internal call to align(seq1, seq2) + * + * @param reference the reference sequence we want to align + * @param alternate the alternate sequence we want to align + * @param namedParameters the named parameter set to get our parameters from + */ + public GlobalEdgeGreedySWPairwiseAlignment(final byte[] reference, final byte[] alternate, final SWParameterSet namedParameters) { + this(reference, alternate, 
namedParameters.parameters); + } + + /** + * @see #GlobalEdgeGreedySWPairwiseAlignment(byte[], byte[], SWParameterSet) with original default parameters + */ + public GlobalEdgeGreedySWPairwiseAlignment(byte[] reference, byte[] alternate) { + this(reference, alternate, SWParameterSet.ORIGINAL_DEFAULT); + } + + /** + * Aligns the alternate sequence to the reference sequence + * + * @param reference ref sequence + * @param alternate alt sequence + */ + @Override + protected void align(final byte[] reference, final byte[] alternate) { + if ( reference == null || reference.length == 0 ) + throw new IllegalArgumentException("Non-null, non-empty reference sequences are required for the Smith-Waterman calculation"); + if ( alternate == null || alternate.length == 0 ) + throw new IllegalArgumentException("Non-null, non-empty alternate sequences are required for the Smith-Waterman calculation"); + + final int forwardEdgeMatch = Utils.longestCommonPrefix(reference, alternate, Integer.MAX_VALUE); + + // edge case: one sequence is a strict prefix of the other + if ( forwardEdgeMatch == reference.length || forwardEdgeMatch == alternate.length ) { + alignmentResult = new SWPairwiseAlignmentResult(makeCigarForStrictPrefixAndSuffix(reference, alternate, forwardEdgeMatch, 0), 0); + return; + } + + int reverseEdgeMatch = Utils.longestCommonSuffix(reference, alternate, Integer.MAX_VALUE); + + // edge case: one sequence is a strict suffix of the other + if ( reverseEdgeMatch == reference.length || reverseEdgeMatch == alternate.length ) { + alignmentResult = new SWPairwiseAlignmentResult(makeCigarForStrictPrefixAndSuffix(reference, alternate, 0, reverseEdgeMatch), 0); + return; + } + + final int sizeOfRefToAlign = reference.length - forwardEdgeMatch - reverseEdgeMatch; + final int sizeOfAltToAlign = alternate.length - forwardEdgeMatch - reverseEdgeMatch; + + // edge case: one sequence is a strict subset of the other accounting for both prefix and suffix + final int minSizeToAlign = 
Math.min(sizeOfRefToAlign, sizeOfAltToAlign); + if ( minSizeToAlign < 0 ) + reverseEdgeMatch += minSizeToAlign; + if ( sizeOfRefToAlign <= 0 || sizeOfAltToAlign <= 0 ) { + alignmentResult = new SWPairwiseAlignmentResult(makeCigarForStrictPrefixAndSuffix(reference, alternate, forwardEdgeMatch, reverseEdgeMatch), 0); + return; + } + + final byte[] refToAlign = Utils.trimArray(reference, forwardEdgeMatch, reverseEdgeMatch); + final byte[] altToAlign = Utils.trimArray(alternate, forwardEdgeMatch, reverseEdgeMatch); + + final double[] sw = new double[(sizeOfRefToAlign+1)*(sizeOfAltToAlign+1)]; + if ( keepScoringMatrix ) SW = sw; + final int[] btrack = new int[(sizeOfRefToAlign+1)*(sizeOfAltToAlign+1)]; + + calculateMatrix(refToAlign, altToAlign, sw, btrack, OVERHANG_STRATEGY.INDEL); + + if ( DEBUG_MODE ) { + System.out.println(new String(refToAlign) + " vs. " + new String(altToAlign)); + debugMatrix(sw, sizeOfRefToAlign+1, sizeOfAltToAlign+1); + System.out.println("----"); + debugMatrix(btrack, sizeOfRefToAlign + 1, sizeOfAltToAlign + 1); + System.out.println(); + } + + alignmentResult = calculateCigar(forwardEdgeMatch, reverseEdgeMatch, sizeOfRefToAlign, sizeOfAltToAlign, sw, btrack); + } + + private void debugMatrix(final double[] matrix, final int dim1, final int dim2) { + for ( int i = 0; i < dim1; i++ ) { + for ( int j = 0; j < dim2; j++ ) + System.out.print(String.format("%.1f ", matrix[i * dim2 + j])); + System.out.println(); + } + } + + private void debugMatrix(final int[] matrix, final int dim1, final int dim2) { + for ( int i = 0; i < dim1; i++ ) { + for ( int j = 0; j < dim2; j++ ) + System.out.print(matrix[i*dim2 + j] + " "); + System.out.println(); + } + } + + /** + * Creates a CIGAR for the case where the prefix/suffix match combination encompasses an entire sequence + * + * @param reference the reference sequence + * @param alternate the alternate sequence + * @param matchingPrefix the prefix match size + * @param matchingSuffix the suffix match size + * 
@return non-null CIGAR + */ + private Cigar makeCigarForStrictPrefixAndSuffix(final byte[] reference, final byte[] alternate, final int matchingPrefix, final int matchingSuffix) { + + final List result = new ArrayList(); + + // edge case: no D or I element + if ( reference.length == alternate.length ) { + result.add(makeElement(State.MATCH, matchingPrefix + matchingSuffix)); + } else { + // add the first M element + if ( matchingPrefix > 0 ) + result.add(makeElement(State.MATCH, matchingPrefix)); + + // add the D or I element + if ( alternate.length > reference.length ) + result.add(makeElement(State.INSERTION, alternate.length - reference.length)); + else // if ( reference.length > alternate.length ) + result.add(makeElement(State.DELETION, reference.length - alternate.length)); + + // add the last M element + if ( matchingSuffix > 0 ) + result.add(makeElement(State.MATCH, matchingSuffix)); + } + + return new Cigar(result); + } + + /** + * Calculates the CIGAR for the alignment from the back track matrix + * + * @param matchingPrefix the prefix match size + * @param matchingSuffix the suffix match size + * @param refLength length of the reference sequence + * @param altLength length of the alternate sequence + * @param sw the Smith-Waterman matrix to use + * @param btrack the back track matrix to use + * @return non-null SWPairwiseAlignmentResult object + */ + protected SWPairwiseAlignmentResult calculateCigar(final int matchingPrefix, final int matchingSuffix, + final int refLength, final int altLength, + final double[] sw, final int[] btrack) { + + final SWPairwiseAlignmentResult SW_result = calculateCigar(refLength, altLength, sw, btrack, OVERHANG_STRATEGY.INDEL); + + final LinkedList lce = new LinkedList(SW_result.cigar.getCigarElements()); + if ( matchingPrefix > 0 ) + lce.addFirst(makeElement(State.MATCH, matchingPrefix)); + if ( matchingSuffix > 0 ) + lce.addLast(makeElement(State.MATCH, matchingSuffix)); + + return new 
SWPairwiseAlignmentResult(AlignmentUtils.consolidateCigar(new Cigar(lce)), 0); + } +} \ No newline at end of file diff --git a/public/java/src/org/broadinstitute/sting/utils/smithwaterman/SWPairwiseAlignment.java b/public/java/src/org/broadinstitute/sting/utils/smithwaterman/SWPairwiseAlignment.java index 890faa82a..1abf9f836 100644 --- a/public/java/src/org/broadinstitute/sting/utils/smithwaterman/SWPairwiseAlignment.java +++ b/public/java/src/org/broadinstitute/sting/utils/smithwaterman/SWPairwiseAlignment.java @@ -45,19 +45,43 @@ import java.util.*; * Date: Mar 23, 2009 * Time: 1:54:54 PM */ -public final class SWPairwiseAlignment { - private int alignment_offset; // offset of s2 w/respect to s1 - private Cigar alignmentCigar; +public class SWPairwiseAlignment implements SmithWaterman { - private final Parameters parameters; + protected SWPairwiseAlignmentResult alignmentResult; - private static final int MSTATE = 0; - private static final int ISTATE = 1; - private static final int DSTATE = 2; - private static final int CLIP = 3; + protected final Parameters parameters; + + /** + * The state of a trace step through the matrix + */ + protected enum State { + MATCH, + INSERTION, + DELETION, + CLIP + } + + /** + * What strategy should we use when the best path does not start/end at the corners of the matrix? 
+ */ + public enum OVERHANG_STRATEGY { + /* + * Add softclips for the overhangs + */ + SOFTCLIP, + /* + * Treat the overhangs as proper insertions/deletions + */ + INDEL, + /* + * Just ignore the overhangs + */ + IGNORE + } protected static boolean cutoff = false; - private static boolean DO_SOFTCLIP = true; + + protected OVERHANG_STRATEGY overhang_strategy = OVERHANG_STRATEGY.SOFTCLIP; /** * The SW scoring matrix, stored for debugging purposes if keepScoringMatrix is true @@ -90,10 +114,34 @@ public final class SWPairwiseAlignment { * @param parameters the SW parameters to use */ public SWPairwiseAlignment(byte[] seq1, byte[] seq2, Parameters parameters) { - this.parameters = parameters; + this(parameters); align(seq1,seq2); } + /** + * Create a new SW pairwise aligner + * + * After creating the object the two sequences are aligned with an internal call to align(seq1, seq2) + * + * @param seq1 the first sequence we want to align + * @param seq2 the second sequence we want to align + * @param strategy the overhang strategy to use + */ + public SWPairwiseAlignment(final byte[] seq1, final byte[] seq2, final OVERHANG_STRATEGY strategy) { + this(SWParameterSet.ORIGINAL_DEFAULT.parameters); + overhang_strategy = strategy; + align(seq1, seq2); + } + + /** + * Create a new SW pairwise aligner, without actually doing any alignment yet + * + * @param parameters the SW parameters to use + */ + protected SWPairwiseAlignment(Parameters parameters) { + this.parameters = parameters; + } + /** * Create a new SW pairwise aligner * @@ -111,41 +159,94 @@ public final class SWPairwiseAlignment { this(seq1,seq2,SWParameterSet.ORIGINAL_DEFAULT); } - public Cigar getCigar() { return alignmentCigar ; } + @Override + public Cigar getCigar() { return alignmentResult.cigar ; } - public int getAlignmentStart2wrt1() { return alignment_offset; } + @Override + public int getAlignmentStart2wrt1() { return alignmentResult.alignment_offset; } - public void align(final byte[] a, final byte[] b) { 
- final int n = a.length; - final int m = b.length; + /** + * Aligns the alternate sequence to the reference sequence + * + * @param reference ref sequence + * @param alternate alt sequence + */ + protected void align(final byte[] reference, final byte[] alternate) { + if ( reference == null || reference.length == 0 || alternate == null || alternate.length == 0 ) + throw new IllegalArgumentException("Non-null, non-empty sequences are required for the Smith-Waterman calculation"); + + final int n = reference.length; + final int m = alternate.length; double [] sw = new double[(n+1)*(m+1)]; if ( keepScoringMatrix ) SW = sw; int [] btrack = new int[(n+1)*(m+1)]; - calculateMatrix(a, b, sw, btrack); - calculateCigar(n, m, sw, btrack); // length of the segment (continuous matches, insertions or deletions) + calculateMatrix(reference, alternate, sw, btrack); + alignmentResult = calculateCigar(n, m, sw, btrack, overhang_strategy); // length of the segment (continuous matches, insertions or deletions) } + /** + * Calculates the SW matrices for the given sequences + * + * @param reference ref sequence + * @param alternate alt sequence + * @param sw the Smith-Waterman matrix to populate + * @param btrack the back track matrix to populate + */ + protected void calculateMatrix(final byte[] reference, final byte[] alternate, double[] sw, int[] btrack) { + calculateMatrix(reference, alternate, sw, btrack, overhang_strategy); + } - private void calculateMatrix(final byte[] a, final byte[] b, double [] sw, int [] btrack ) { - final int n = a.length+1; - final int m = b.length+1; + /** + * Calculates the SW matrices for the given sequences + * + * @param reference ref sequence + * @param alternate alt sequence + * @param sw the Smith-Waterman matrix to populate + * @param btrack the back track matrix to populate + * @param overhang_strategy the strategy to use for dealing with overhangs + */ + protected void calculateMatrix(final byte[] reference, final byte[] alternate, double[] 
sw, int[] btrack, final OVERHANG_STRATEGY overhang_strategy) { + if ( reference.length == 0 || alternate.length == 0 ) + throw new IllegalArgumentException("Non-null, non-empty sequences are required for the Smith-Waterman calculation"); + + final int n = reference.length+1; + final int m = alternate.length+1; //final double MATRIX_MIN_CUTOFF=-1e100; // never let matrix elements drop below this cutoff final double MATRIX_MIN_CUTOFF; // never let matrix elements drop below this cutoff if ( cutoff ) MATRIX_MIN_CUTOFF = 0.0; else MATRIX_MIN_CUTOFF = -1e100; - double [] best_gap_v = new double[m+1]; - Arrays.fill(best_gap_v,-1.0e40); - int [] gap_size_v = new int[m+1]; - double [] best_gap_h = new double[n+1]; + final double[] best_gap_v = new double[m+1]; + Arrays.fill(best_gap_v, -1.0e40); + final int[] gap_size_v = new int[m+1]; + final double[] best_gap_h = new double[n+1]; Arrays.fill(best_gap_h,-1.0e40); - int [] gap_size_h = new int[n+1]; + final int[] gap_size_h = new int[n+1]; + + // we need to initialize the SW matrix with gap penalties if we want to keep track of indels at the edges of alignments + if ( overhang_strategy == OVERHANG_STRATEGY.INDEL ) { + // initialize the first row + sw[1] = parameters.w_open; + double currentValue = parameters.w_open; + for ( int i = 2; i < m; i++ ) { + currentValue += parameters.w_extend; + sw[i] = currentValue; + } + + // initialize the first column + sw[m] = parameters.w_open; + currentValue = parameters.w_open; + for ( int i = 2; i < n; i++ ) { + currentValue += parameters.w_extend; + sw[i*m] = currentValue; + } + } // build smith-waterman matrix and keep backtrack info: for ( int i = 1, row_offset_1 = 0 ; i < n ; i++ ) { // we do NOT update row_offset_1 here, see comment at the end of this outer loop - byte a_base = a[i-1]; // letter in a at the current pos + byte a_base = reference[i-1]; // letter in a at the current pos final int row_offset = row_offset_1 + m; @@ -157,10 +258,10 @@ public final class 
SWPairwiseAlignment { // data_offset_1 is linearized offset of element [i-1][j-1] - final byte b_base = b[j-1]; // letter in b at the current pos + final byte b_base = alternate[j-1]; // letter in b at the current pos // in other words, step_diag = sw[i-1][j-1] + wd(a_base,b_base); - double step_diag = sw[data_offset_1] + wd(a_base,b_base); + final double step_diag = sw[data_offset_1] + wd(a_base,b_base); // optimized "traversal" of all the matrix cells above the current one (i.e. traversing // all 'step down' events that would end in the current cell. The optimized code @@ -236,65 +337,92 @@ public final class SWPairwiseAlignment { } } + /* + * Class to store the result of calculating the CIGAR from the back track matrix + */ + protected final class SWPairwiseAlignmentResult { + public final Cigar cigar; + public final int alignment_offset; + public SWPairwiseAlignmentResult(final Cigar cigar, final int alignment_offset) { + this.cigar = cigar; + this.alignment_offset = alignment_offset; + } + } - private void calculateCigar(int n, int m, double [] sw, int [] btrack) { + /** + * Calculates the CIGAR for the alignment from the back track matrix + * + * @param refLength length of the reference sequence + * @param altLength length of the alternate sequence + * @param sw the Smith-Waterman matrix to use + * @param btrack the back track matrix to use + * @param overhang_strategy the strategy to use for dealing with overhangs + * @return non-null SWPairwiseAlignmentResult object + */ + protected SWPairwiseAlignmentResult calculateCigar(final int refLength, final int altLength, final double[] sw, final int[] btrack, final OVERHANG_STRATEGY overhang_strategy) { // p holds the position we start backtracking from; we will be assembling a cigar in the backwards order int p1 = 0, p2 = 0; double maxscore = Double.NEGATIVE_INFINITY; // sw scores are allowed to be negative int segment_length = 0; // length of the segment (continuous matches, insertions or deletions) - // look 
for largest score. we use >= combined with the traversal direction - // to ensure that if two scores are equal, the one closer to diagonal gets picked - for ( int i = 1, data_offset = m+1+m ; i < n+1 ; i++, data_offset += (m+1) ) { - // data_offset is the offset of [i][m] - if ( sw[data_offset] >= maxscore ) { - p1 = i; p2 = m ; maxscore = sw[data_offset]; + // if we want to consider overhangs as legitimate operators, then just start from the corner of the matrix + if ( overhang_strategy == OVERHANG_STRATEGY.INDEL ) { + p1 = refLength; + p2 = altLength; + } else { + // look for largest score. we use >= combined with the traversal direction + // to ensure that if two scores are equal, the one closer to diagonal gets picked + for ( int i = 1, data_offset = altLength+1+altLength ; i < refLength+1 ; i++, data_offset += (altLength+1) ) { + // data_offset is the offset of [i][m] + if ( sw[data_offset] >= maxscore ) { + p1 = i; p2 = altLength ; maxscore = sw[data_offset]; + } } - } - for ( int j = 1, data_offset = n*(m+1)+1 ; j < m+1 ; j++, data_offset++ ) { - // data_offset is the offset of [n][j] - if ( sw[data_offset] > maxscore || sw[data_offset] == maxscore && Math.abs(n-j) < Math.abs(p1 - p2)) { - p1 = n; - p2 = j ; - maxscore = sw[data_offset]; - segment_length = m - j ; // end of sequence 2 is overhanging; we will just record it as 'M' segment + for ( int j = 1, data_offset = refLength*(altLength+1)+1 ; j < altLength+1 ; j++, data_offset++ ) { + // data_offset is the offset of [n][j] + if ( sw[data_offset] > maxscore || sw[data_offset] == maxscore && Math.abs(refLength-j) < Math.abs(p1 - p2)) { + p1 = refLength; + p2 = j ; + maxscore = sw[data_offset]; + segment_length = altLength - j ; // end of sequence 2 is overhanging; we will just record it as 'M' segment + } } } List lce = new ArrayList(5); - if ( segment_length > 0 && DO_SOFTCLIP ) { - lce.add(makeElement(CLIP, segment_length)); + if ( segment_length > 0 && overhang_strategy == OVERHANG_STRATEGY.SOFTCLIP ) 
{ + lce.add(makeElement(State.CLIP, segment_length)); segment_length = 0; } // we will be placing all insertions and deletions into sequence b, so the states are named w/regard // to that sequence - int state = MSTATE; + State state = State.MATCH; - int data_offset = p1*(m+1)+p2; // offset of element [p1][p2] + int data_offset = p1*(altLength+1)+p2; // offset of element [p1][p2] do { int btr = btrack[data_offset]; - int new_state; + State new_state; int step_length = 1; if ( btr > 0 ) { - new_state = DSTATE; + new_state = State.DELETION; step_length = btr; } else if ( btr < 0 ) { - new_state = ISTATE; + new_state = State.INSERTION; step_length = (-btr); - } else new_state = MSTATE; // and step_length =1, already set above + } else new_state = State.MATCH; // and step_length =1, already set above // move to next best location in the sw matrix: switch( new_state ) { - case MSTATE: data_offset -= (m+2); p1--; p2--; break; // move back along the diag in the sw matrix - case ISTATE: data_offset -= step_length; p2 -= step_length; break; // move left - case DSTATE: data_offset -= (m+1)*step_length; p1 -= step_length; break; // move up + case MATCH: data_offset -= (altLength+2); p1--; p2--; break; // move back along the diag in the sw matrix + case INSERTION: data_offset -= step_length; p2 -= step_length; break; // move left + case DELETION: data_offset -= (altLength+1)*step_length; p1 -= step_length; break; // move up } // now let's see if the state actually changed: @@ -305,7 +433,7 @@ public final class SWPairwiseAlignment { segment_length = step_length; state = new_state; } -// next condition is equivalent to while ( sw[p1][p2] != 0 ) (with modified p1 and/or p2: + // next condition is equivalent to while ( sw[p1][p2] != 0 ) (with modified p1 and/or p2: } while ( p1 > 0 && p2 > 0 ); // post-process the last segment we are still keeping; @@ -316,28 +444,41 @@ public final class SWPairwiseAlignment { // last 3 bases of the read overlap with/align to the ref), the cigar 
will be still 5M if // DO_SOFTCLIP is false or 2S3M if DO_SOFTCLIP is true. // The consumers need to check for the alignment offset and deal with it properly. - if (DO_SOFTCLIP ) { + final int alignment_offset; + if ( overhang_strategy == OVERHANG_STRATEGY.SOFTCLIP ) { lce.add(makeElement(state, segment_length)); - if ( p2> 0 ) lce.add(makeElement(CLIP, p2)); - alignment_offset = p1 ; - } else { + if ( p2 > 0 ) lce.add(makeElement(State.CLIP, p2)); + alignment_offset = p1; + } else if ( overhang_strategy == OVERHANG_STRATEGY.IGNORE ) { lce.add(makeElement(state, segment_length + p2)); alignment_offset = p1 - p2; + } else { // overhang_strategy == OVERHANG_STRATEGY.INDEL + + // take care of the actual alignment + lce.add(makeElement(state, segment_length)); + + // take care of overhangs at the beginning of the alignment + if ( p1 > 0 ) + lce.add(makeElement(State.DELETION, p1)); + else if ( p2 > 0 ) + lce.add(makeElement(State.INSERTION, p2)); + + alignment_offset = 0; } Collections.reverse(lce); - alignmentCigar = AlignmentUtils.consolidateCigar(new Cigar(lce)); + return new SWPairwiseAlignmentResult(AlignmentUtils.consolidateCigar(new Cigar(lce)), alignment_offset); } - private CigarElement makeElement(int state, int segment_length) { - CigarOperator o = null; - switch(state) { - case MSTATE: o = CigarOperator.M; break; - case ISTATE: o = CigarOperator.I; break; - case DSTATE: o = CigarOperator.D; break; - case CLIP: o = CigarOperator.S; break; + protected CigarElement makeElement(final State state, final int length) { + CigarOperator op = null; + switch (state) { + case MATCH: op = CigarOperator.M; break; + case INSERTION: op = CigarOperator.I; break; + case DELETION: op = CigarOperator.D; break; + case CLIP: op = CigarOperator.S; break; } - return new CigarElement(segment_length,o); + return new CigarElement(length, op); } private double wd(byte x, byte y) { @@ -360,7 +501,7 @@ public final class SWPairwiseAlignment { Cigar cigar = getCigar(); - if ( ! 
DO_SOFTCLIP ) { + if ( overhang_strategy != OVERHANG_STRATEGY.SOFTCLIP ) { // we need to go through all the hassle below only if we do not do softclipping; // otherwise offset is never negative diff --git a/public/java/src/org/broadinstitute/sting/utils/smithwaterman/SmithWaterman.java b/public/java/src/org/broadinstitute/sting/utils/smithwaterman/SmithWaterman.java new file mode 100644 index 000000000..3a8afca8c --- /dev/null +++ b/public/java/src/org/broadinstitute/sting/utils/smithwaterman/SmithWaterman.java @@ -0,0 +1,57 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+*/ + +package org.broadinstitute.sting.utils.smithwaterman; + +import net.sf.samtools.Cigar; + +/** + * Generic interface for SmithWaterman calculations + * + * This interface allows clients to use a generic SmithWaterman variable, without propogating the specific + * implementation of SmithWaterman throughout their code: + * + * SmithWaterman sw = new SpecificSmithWatermanImplementation(ref, read, params) + * sw.getCigar() + * sw.getAlignmentStart2wrt1() + * + * User: depristo + * Date: 4/26/13 + * Time: 8:24 AM + */ +public interface SmithWaterman { + + /** + * Get the cigar string for the alignment of this SmithWaterman class + * @return a non-null cigar + */ + public Cigar getCigar(); + + /** + * Get the starting position of the read sequence in the reference sequence + * @return a positive integer >= 0 + */ + public int getAlignmentStart2wrt1(); +} diff --git a/public/java/src/org/broadinstitute/sting/utils/variant/GATKVCFUtils.java b/public/java/src/org/broadinstitute/sting/utils/variant/GATKVCFUtils.java index 0fba432e7..09db585a6 100644 --- a/public/java/src/org/broadinstitute/sting/utils/variant/GATKVCFUtils.java +++ b/public/java/src/org/broadinstitute/sting/utils/variant/GATKVCFUtils.java @@ -30,10 +30,10 @@ import org.broad.tribble.FeatureCodec; import org.broad.tribble.FeatureCodecHeader; import org.broad.tribble.readers.PositionalBufferedStream; import org.broadinstitute.sting.commandline.RodBinding; +import org.broadinstitute.sting.gatk.CommandLineGATK; import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; import org.broadinstitute.sting.gatk.datasources.rmd.ReferenceOrderedDataSource; import org.broadinstitute.sting.utils.collections.Pair; -import org.broadinstitute.variant.bcf2.BCF2Codec; import org.broadinstitute.variant.variantcontext.VariantContext; import org.broadinstitute.variant.vcf.*; @@ -52,6 +52,31 @@ public class GATKVCFUtils { */ private GATKVCFUtils() { } + public final static String GATK_COMMAND_LINE_KEY = "GATKCommandLine"; + + 
/** + * Gets the appropriately formatted header for a VCF file describing this GATK run + * + * @param engine the GATK engine that holds the walker name, GATK version, and other information + * @param argumentSources contains information on the argument values provided to the GATK for converting to a + * command line string. Should be provided from the data in the parsing engine. Can be + * empty in which case the command line will be the empty string. + * @return VCF header line describing this run of the GATK. + */ + public static VCFHeaderLine getCommandLineArgumentHeaderLine(final GenomeAnalysisEngine engine, final Collection argumentSources) { + if ( engine == null ) throw new IllegalArgumentException("engine cannot be null"); + if ( argumentSources == null ) throw new IllegalArgumentException("argumentSources cannot be null"); + + final Map attributes = new LinkedHashMap<>(); + attributes.put("ID", engine.getWalkerName()); + attributes.put("Version", CommandLineGATK.getVersionNumber()); + final Date date = new Date(); + attributes.put("Date", date.toString()); + attributes.put("Epoch", Long.toString(date.getTime())); + attributes.put("CommandLineOptions", engine.createApproximateCommandLineArgumentString(argumentSources.toArray())); + return new VCFSimpleHeaderLine(GATK_COMMAND_LINE_KEY, attributes, Collections.emptyList()); + } + public static Map getVCFHeadersFromRods(GenomeAnalysisEngine toolkit, List> rodBindings) { // Collect the eval rod names final Set names = new TreeSet(); @@ -149,21 +174,6 @@ public class GATKVCFUtils { return VCFUtils.withUpdatedContigs(header, engine.getArguments().referenceFile, engine.getMasterSequenceDictionary()); } - public static String rsIDOfFirstRealVariant(List VCs, VariantContext.Type type) { - if ( VCs == null ) - return null; - - String rsID = null; - for ( VariantContext vc : VCs ) { - if ( vc.getType() == type ) { - rsID = vc.getID(); - break; - } - } - - return rsID; - } - /** * Utility class to read all of the VC 
records from a file * diff --git a/public/java/src/org/broadinstitute/sting/utils/variant/GATKVariantContextUtils.java b/public/java/src/org/broadinstitute/sting/utils/variant/GATKVariantContextUtils.java index 4565402b9..3bc5da82f 100644 --- a/public/java/src/org/broadinstitute/sting/utils/variant/GATKVariantContextUtils.java +++ b/public/java/src/org/broadinstitute/sting/utils/variant/GATKVariantContextUtils.java @@ -45,7 +45,7 @@ public class GATKVariantContextUtils { public static final int DEFAULT_PLOIDY = 2; public static final double SUM_GL_THRESH_NOCALL = -0.1; // if sum(gl) is bigger than this threshold, we treat GL's as non-informative and will force a no-call. - private static final List NO_CALL_ALLELES = Arrays.asList(Allele.NO_CALL, Allele.NO_CALL); + protected static final List NO_CALL_ALLELES = Arrays.asList(Allele.NO_CALL, Allele.NO_CALL); public final static String MERGE_FILTER_PREFIX = "filterIn"; public final static String MERGE_REF_IN_ALL = "ReferenceInAll"; public final static String MERGE_FILTER_IN_ALL = "FilteredInAll"; @@ -421,6 +421,37 @@ public class GATKVariantContextUtils { return true; // we passed all tests, we matched } + public enum GenotypeAssignmentMethod { + /** + * set all of the genotype GT values to NO_CALL + */ + SET_TO_NO_CALL, + + /** + * Use the subsetted PLs to greedily assigned genotypes + */ + USE_PLS_TO_ASSIGN, + + /** + * Try to match the original GT calls, if at all possible + * + * Suppose I have 3 alleles: A/B/C and the following samples: + * + * original_GT best_match to A/B best_match to A/C + * S1 => A/A A/A A/A + * S2 => A/B A/B A/A + * S3 => B/B B/B A/A + * S4 => B/C A/B A/C + * S5 => C/C A/A C/C + * + * Basically, all alleles not in the subset map to ref. 
It means that het-alt genotypes + * when split into 2 bi-allelic variants will be het in each, which is good in some cases, + * rather than the undetermined behavior when using the PLs to assign, which could result + * in hom-var or hom-ref for each, depending on the exact PL values. + */ + BEST_MATCH_TO_ORIGINAL + } + /** * subset the Variant Context to the specific set of alleles passed in (pruning the PLs appropriately) * @@ -430,22 +461,23 @@ public class GATKVariantContextUtils { * @return genotypes */ public static GenotypesContext subsetDiploidAlleles(final VariantContext vc, - final List allelesToUse, - final boolean assignGenotypes) { + final List allelesToUse, + final GenotypeAssignmentMethod assignGenotypes) { + if ( allelesToUse.get(0).isNonReference() ) throw new IllegalArgumentException("First allele must be the reference allele"); + if ( allelesToUse.size() == 1 ) throw new IllegalArgumentException("Cannot subset to only 1 alt allele"); // the genotypes with PLs final GenotypesContext oldGTs = vc.getGenotypes(); // the new genotypes to create final GenotypesContext newGTs = GenotypesContext.create(); + // optimization: if no input genotypes, just exit - if (oldGTs.isEmpty()) - return newGTs; + if (oldGTs.isEmpty()) return newGTs; // samples final List sampleIndices = oldGTs.getSampleNamesOrderedByName(); - // we need to determine which of the alternate alleles (and hence the likelihoods) to use and carry forward final int numOriginalAltAlleles = vc.getAlternateAlleles().size(); final int expectedNumLikelihoods = GenotypeLikelihoods.numLikelihoods(vc.getNAlleles(), 2); @@ -456,8 +488,8 @@ public class GATKVariantContextUtils { // an optimization: if we are supposed to use all (or none in the case of a ref call) of the alleles, // then we can keep the PLs as is; otherwise, we determine which ones to keep - if ( numNewAltAlleles != numOriginalAltAlleles && numNewAltAlleles > 0 ) { - likelihoodIndexesToUse = new ArrayList(30); + if ( numNewAltAlleles != 
numOriginalAltAlleles ) { + likelihoodIndexesToUse = new ArrayList<>(30); final boolean[] altAlleleIndexToUse = new boolean[numOriginalAltAlleles]; for ( int i = 0; i < numOriginalAltAlleles; i++ ) { @@ -478,55 +510,127 @@ public class GATKVariantContextUtils { // create the new genotypes for ( int k = 0; k < oldGTs.size(); k++ ) { final Genotype g = oldGTs.get(sampleIndices.get(k)); - if ( !g.hasLikelihoods() ) { - newGTs.add(GenotypeBuilder.create(g.getSampleName(), NO_CALL_ALLELES)); - continue; - } + final GenotypeBuilder gb = new GenotypeBuilder(g); // create the new likelihoods array from the alleles we are allowed to use - final double[] originalLikelihoods = g.getLikelihoods().getAsVector(); double[] newLikelihoods; - if ( likelihoodIndexesToUse == null ) { - newLikelihoods = originalLikelihoods; - } else if ( originalLikelihoods.length != expectedNumLikelihoods ) { - logger.warn("Wrong number of likelihoods in sample " + g.getSampleName() + " at " + vc + " got " + g.getLikelihoodsString() + " but expected " + expectedNumLikelihoods); + if ( !g.hasLikelihoods() ) { + // we don't have any likelihoods, so we null out PLs and make G ./. 
newLikelihoods = null; + gb.noPL(); } else { - newLikelihoods = new double[likelihoodIndexesToUse.size()]; - int newIndex = 0; - for ( int oldIndex : likelihoodIndexesToUse ) - newLikelihoods[newIndex++] = originalLikelihoods[oldIndex]; + final double[] originalLikelihoods = g.getLikelihoods().getAsVector(); + if ( likelihoodIndexesToUse == null ) { + newLikelihoods = originalLikelihoods; + } else if ( originalLikelihoods.length != expectedNumLikelihoods ) { + logger.warn("Wrong number of likelihoods in sample " + g.getSampleName() + " at " + vc + " got " + g.getLikelihoodsString() + " but expected " + expectedNumLikelihoods); + newLikelihoods = null; + } else { + newLikelihoods = new double[likelihoodIndexesToUse.size()]; + int newIndex = 0; + for ( int oldIndex : likelihoodIndexesToUse ) + newLikelihoods[newIndex++] = originalLikelihoods[oldIndex]; - // might need to re-normalize - newLikelihoods = MathUtils.normalizeFromLog10(newLikelihoods, false, true); - } + // might need to re-normalize + newLikelihoods = MathUtils.normalizeFromLog10(newLikelihoods, false, true); + } - // if there is no mass on the (new) likelihoods, then just no-call the sample - if ( newLikelihoods != null && MathUtils.sum(newLikelihoods) > SUM_GL_THRESH_NOCALL ) { - newGTs.add(GenotypeBuilder.create(g.getSampleName(), NO_CALL_ALLELES)); - } - else { - final GenotypeBuilder gb = new GenotypeBuilder(g); - - if ( newLikelihoods == null || numNewAltAlleles == 0 ) + if ( newLikelihoods == null || likelihoodsAreUninformative(newLikelihoods) ) gb.noPL(); else gb.PL(newLikelihoods); - - // if we weren't asked to assign a genotype, then just no-call the sample - if ( !assignGenotypes || MathUtils.sum(newLikelihoods) > SUM_GL_THRESH_NOCALL ) { - gb.alleles(NO_CALL_ALLELES); - } - else { - // find the genotype with maximum likelihoods - int PLindex = numNewAltAlleles == 0 ? 
0 : MathUtils.maxElementIndex(newLikelihoods); - GenotypeLikelihoods.GenotypeLikelihoodsAllelePair alleles = GenotypeLikelihoods.getAllelePair(PLindex); - - gb.alleles(Arrays.asList(allelesToUse.get(alleles.alleleIndex1), allelesToUse.get(alleles.alleleIndex2))); - if ( numNewAltAlleles != 0 ) gb.log10PError(GenotypeLikelihoods.getGQLog10FromLikelihoods(PLindex, newLikelihoods)); - } - newGTs.add(gb.make()); } + + updateGenotypeAfterSubsetting(g.getAlleles(), gb, assignGenotypes, newLikelihoods, allelesToUse); + newGTs.add(gb.make()); + } + + return newGTs; + } + + private static boolean likelihoodsAreUninformative(final double[] likelihoods) { + return MathUtils.sum(likelihoods) > SUM_GL_THRESH_NOCALL; + } + + /** + * Add the genotype call (GT) field to GenotypeBuilder using the requested algorithm assignmentMethod + * + * @param originalGT the original genotype calls, cannot be null + * @param gb the builder where we should put our newly called alleles, cannot be null + * @param assignmentMethod the method to use to do the assignment, cannot be null + * @param newLikelihoods a vector of likelihoods to use if the method requires PLs, should be log10 likelihoods, cannot be null + * @param allelesToUse the alleles we are using for our subsetting + */ + protected static void updateGenotypeAfterSubsetting(final List originalGT, + final GenotypeBuilder gb, + final GenotypeAssignmentMethod assignmentMethod, + final double[] newLikelihoods, + final List allelesToUse) { + gb.noAD(); + switch ( assignmentMethod ) { + case SET_TO_NO_CALL: + gb.alleles(NO_CALL_ALLELES); + gb.noGQ(); + break; + case USE_PLS_TO_ASSIGN: + if ( newLikelihoods == null || likelihoodsAreUninformative(newLikelihoods) ) { + // if there is no mass on the (new) likelihoods, then just no-call the sample + gb.alleles(NO_CALL_ALLELES); + gb.noGQ(); + } else { + // find the genotype with maximum likelihoods + final int PLindex = MathUtils.maxElementIndex(newLikelihoods); + 
GenotypeLikelihoods.GenotypeLikelihoodsAllelePair alleles = GenotypeLikelihoods.getAllelePair(PLindex); + gb.alleles(Arrays.asList(allelesToUse.get(alleles.alleleIndex1), allelesToUse.get(alleles.alleleIndex2))); + gb.log10PError(GenotypeLikelihoods.getGQLog10FromLikelihoods(PLindex, newLikelihoods)); + } + break; + case BEST_MATCH_TO_ORIGINAL: + final List best = new LinkedList<>(); + final Allele ref = allelesToUse.get(0); // WARNING -- should be checked in input argument + for ( final Allele originalAllele : originalGT ) { + best.add(allelesToUse.contains(originalAllele) ? originalAllele : ref); + } + gb.noGQ(); + gb.noPL(); + gb.alleles(best); + break; + } + } + + /** + * Subset the samples in VC to reference only information with ref call alleles + * + * Preserves DP if present + * + * @param vc the variant context to subset down to + * @param ploidy ploidy to use if a genotype doesn't have any alleles + * @return a GenotypesContext + */ + public static GenotypesContext subsetToRefOnly(final VariantContext vc, final int ploidy) { + if ( vc == null ) throw new IllegalArgumentException("vc cannot be null"); + if ( ploidy < 1 ) throw new IllegalArgumentException("ploidy must be >= 1 but got " + ploidy); + + // the genotypes with PLs + final GenotypesContext oldGTs = vc.getGenotypes(); + + // optimization: if no input genotypes, just exit + if (oldGTs.isEmpty()) return oldGTs; + + // the new genotypes to create + final GenotypesContext newGTs = GenotypesContext.create(); + + final Allele ref = vc.getReference(); + final List diploidRefAlleles = Arrays.asList(ref, ref); + + // create the new genotypes + for ( final Genotype g : vc.getGenotypes() ) { + final int gPloidy = g.getPloidy() == 0 ? ploidy : g.getPloidy(); + final List refAlleles = gPloidy == 2 ? 
diploidRefAlleles : Collections.nCopies(gPloidy, ref); + final GenotypeBuilder gb = new GenotypeBuilder(g.getSampleName(), refAlleles); + if ( g.hasDP() ) gb.DP(g.getDP()); + if ( g.hasGQ() ) gb.GQ(g.getGQ()); + newGTs.add(gb.make()); } return newGTs; @@ -539,7 +643,7 @@ public class GATKVariantContextUtils { * @return genotypes context */ public static GenotypesContext assignDiploidGenotypes(final VariantContext vc) { - return subsetDiploidAlleles(vc, vc.getAlleles(), true); + return subsetDiploidAlleles(vc, vc.getAlleles(), GenotypeAssignmentMethod.USE_PLS_TO_ASSIGN); } /** @@ -557,7 +661,7 @@ public class GATKVariantContextUtils { * @return a list of bi-allelic (or monomorphic) variant context */ public static List splitVariantContextToBiallelics(final VariantContext vc) { - return splitVariantContextToBiallelics(vc, false); + return splitVariantContextToBiallelics(vc, false, GenotypeAssignmentMethod.SET_TO_NO_CALL); } /** @@ -575,18 +679,18 @@ public class GATKVariantContextUtils { * @param trimLeft if true, we will also left trim alleles, potentially moving the resulting vcs forward on the genome * @return a list of bi-allelic (or monomorphic) variant context */ - public static List splitVariantContextToBiallelics(final VariantContext vc, final boolean trimLeft) { + public static List splitVariantContextToBiallelics(final VariantContext vc, final boolean trimLeft, final GenotypeAssignmentMethod genotypeAssignmentMethod) { if ( ! 
vc.isVariant() || vc.isBiallelic() ) // non variant or biallelics already satisfy the contract return Collections.singletonList(vc); else { - final List biallelics = new LinkedList(); + final List biallelics = new LinkedList<>(); for ( final Allele alt : vc.getAlternateAlleles() ) { VariantContextBuilder builder = new VariantContextBuilder(vc); final List alleles = Arrays.asList(vc.getReference(), alt); builder.alleles(alleles); - builder.genotypes(subsetDiploidAlleles(vc, alleles, false)); + builder.genotypes(subsetDiploidAlleles(vc, alleles, genotypeAssignmentMethod)); VariantContextUtils.calculateChromosomeCounts(builder, true); final VariantContext trimmed = trimAlleles(builder.make(), trimLeft, true); biallelics.add(trimmed); @@ -697,6 +801,7 @@ public class GATKVariantContextUtils { int maxAC = -1; final Map attributesWithMaxAC = new LinkedHashMap(); double log10PError = CommonInfo.NO_LOG10_PERROR; + boolean anyVCHadFiltersApplied = false; VariantContext vcWithMaxAC = null; GenotypesContext genotypes = GenotypesContext.create(); @@ -729,6 +834,7 @@ public class GATKVariantContextUtils { log10PError = vc.getLog10PError(); filters.addAll(vc.getFilters()); + anyVCHadFiltersApplied |= vc.filtersWereApplied(); // // add attributes @@ -841,7 +947,9 @@ public class GATKVariantContextUtils { builder.alleles(alleles); builder.genotypes(genotypes); builder.log10PError(log10PError); - builder.filters(filters.isEmpty() ? filters : new TreeSet(filters)); + if ( anyVCHadFiltersApplied ) { + builder.filters(filters.isEmpty() ? filters : new TreeSet<>(filters)); + } builder.attributes(new TreeMap(mergeInfoWithMaxAC ? 
attributesWithMaxAC : attributes)); // Trim the padded bases of all alleles if necessary diff --git a/public/java/test/org/broadinstitute/sting/MD5DB.java b/public/java/test/org/broadinstitute/sting/MD5DB.java index 2b0d52a11..7bd6f7bc4 100644 --- a/public/java/test/org/broadinstitute/sting/MD5DB.java +++ b/public/java/test/org/broadinstitute/sting/MD5DB.java @@ -97,7 +97,12 @@ public class MD5DB { if ( ! dir.exists() ) { System.out.printf("##### Creating MD5 db %s%n", LOCAL_MD5_DB_DIR); if ( ! dir.mkdir() ) { - throw new ReviewedStingException("Infrastructure failure: failed to create md5 directory " + LOCAL_MD5_DB_DIR); + // Need to check AGAIN whether the dir exists, because we might be doing multi-process parallelism + // within the same working directory, and another GATK instance may have come along and created the + // directory between the calls to exists() and mkdir() above. + if ( ! dir.exists() ) { + throw new ReviewedStingException("Infrastructure failure: failed to create md5 directory " + LOCAL_MD5_DB_DIR); + } } } } @@ -203,98 +208,106 @@ public class MD5DB { } public static class MD5Match { - final String actualMD5, expectedMD5; - final String failMessage; - boolean failed; + public final String actualMD5, expectedMD5; + public final String failMessage; + public final String diffEngineOutput; + public final boolean failed; - public MD5Match(final String actualMD5, final String expectedMD5, final String failMessage, final boolean failed) { + public MD5Match(final String actualMD5, final String expectedMD5, final String failMessage, final String diffEngineOutput, final boolean failed) { this.actualMD5 = actualMD5; this.expectedMD5 = expectedMD5; this.failMessage = failMessage; + this.diffEngineOutput = diffEngineOutput; this.failed = failed; } } /** - * Tests a file MD5 against an expected value, returning the MD5. NOTE: This function WILL throw an exception if the MD5s are different. - * @param name Name of the test. 
+ * Tests a file MD5 against an expected value, returning an MD5Match object containing a description of the + * match or mismatch. In case of a mismatch, outputs a description of the mismatch to various log files/streams. + * + * NOTE: This function WILL NOT throw an exception if the MD5s are different. + * + * @param testName Name of the test. + * @param testClassName Name of the class that contains the test. * @param resultsFile File to MD5. * @param expectedMD5 Expected MD5 value. * @param parameterize If true or if expectedMD5 is an empty string, will print out the calculated MD5 instead of error text. - * @return The calculated MD5. + * @return an MD5Match object containing a description of the match/mismatch. Will have its "failed" field set + * to true if there was a mismatch (unless we're using the "parameterize" argument) */ - public MD5Match assertMatchingMD5(final String name, final File resultsFile, final String expectedMD5, final boolean parameterize) { - final String actualMD5 = testFileMD5(name, resultsFile, expectedMD5, parameterize); - String failMessage = null; + public MD5Match testFileMD5(final String testName, final String testClassName, final File resultsFile, final String expectedMD5, final boolean parameterize) { + final String actualMD5 = calculateFileMD5(resultsFile); + String diffEngineOutput = ""; + String failMessage = ""; boolean failed = false; + // copy md5 to integrationtests + updateMD5Db(actualMD5, resultsFile); + if (parameterize || expectedMD5.equals("")) { - // Don't assert - } else if ( actualMD5.equals(expectedMD5) ) { - //BaseTest.log(String.format(" => %s PASSED (expected=%s)", name, expectedMD5)); - } else { + BaseTest.log(String.format("PARAMETERIZATION: file %s has md5 = %s", resultsFile, actualMD5)); + } else if ( ! 
expectedMD5.equals(actualMD5) ) { failed = true; - failMessage = String.format("%s has mismatching MD5s: expected=%s observed=%s", name, expectedMD5, actualMD5); + failMessage = String.format("%s:%s has mismatching MD5s: expected=%s observed=%s", testClassName, testName, expectedMD5, actualMD5); + diffEngineOutput = logMD5MismatchAndGetDiffEngineOutput(testName, testClassName, expectedMD5, actualMD5); } - return new MD5Match(actualMD5, expectedMD5, failMessage, failed); + return new MD5Match(actualMD5, expectedMD5, failMessage, diffEngineOutput, failed); } - /** - * Tests a file MD5 against an expected value, returning the MD5. NOTE: This function WILL NOT throw an exception if the MD5s are different. - * @param name Name of the test. - * @param resultsFile File to MD5. - * @param expectedMD5 Expected MD5 value. - * @param parameterize If true or if expectedMD5 is an empty string, will print out the calculated MD5 instead of error text. - * @return The calculated MD5. + * Calculates the MD5 for the specified file and returns it as a String + * + * @param file file whose MD5 to calculate + * @return file's MD5 in String form + * @throws RuntimeException if the file could not be read */ - public String testFileMD5(final String name, final File resultsFile, final String expectedMD5, final boolean parameterize) { + public String calculateFileMD5( final File file ) { try { - final String filemd5sum = Utils.calcMD5(getBytesFromFile(resultsFile)); - - // - // copy md5 to integrationtests - // - updateMD5Db(filemd5sum, resultsFile); - - if (parameterize || expectedMD5.equals("")) { - BaseTest.log(String.format("PARAMETERIZATION: file %s has md5 = %s", resultsFile, filemd5sum)); - } else { - //System.out.println(String.format("Checking MD5 for %s [calculated=%s, expected=%s]", resultsFile, filemd5sum, expectedMD5)); - //System.out.flush(); - - if ( ! 
expectedMD5.equals(filemd5sum) ) { - // we are going to fail for real in assertEquals (so we are counted by the testing framework). - // prepare ourselves for the comparison - System.out.printf("##### Test %s is going to fail #####%n", name); - String pathToExpectedMD5File = getMD5FilePath(expectedMD5, "[No DB file found]"); - String pathToFileMD5File = getMD5FilePath(filemd5sum, "[No DB file found]"); - BaseTest.log(String.format("expected %s", expectedMD5)); - BaseTest.log(String.format("calculated %s", filemd5sum)); - BaseTest.log(String.format("diff %s %s", pathToExpectedMD5File, pathToFileMD5File)); - - md5MismatchStream.printf("%s\t%s\t%s%n", expectedMD5, filemd5sum, name); - md5MismatchStream.flush(); - - // inline differences - final ByteArrayOutputStream baos = new ByteArrayOutputStream(); - final PrintStream ps = new PrintStream(baos); - DiffEngine.SummaryReportParams params = new DiffEngine.SummaryReportParams(ps, 20, 10, 0, MAX_RAW_DIFFS_TO_SUMMARIZE, false); - boolean success = DiffEngine.simpleDiffFiles(new File(pathToExpectedMD5File), new File(pathToFileMD5File), MAX_RECORDS_TO_READ, params); - if ( success ) { - final String content = baos.toString(); - BaseTest.log(content); - System.out.printf("Note that the above list is not comprehensive. At most 20 lines of output, and 10 specific differences will be listed. 
Please use -T DiffObjects -R public/testdata/exampleFASTA.fasta -m %s -t %s to explore the differences more freely%n", - pathToExpectedMD5File, pathToFileMD5File); - } - ps.close(); - } - } - - return filemd5sum; - } catch (Exception e) { - throw new RuntimeException("Failed to read bytes from calls file: " + resultsFile, e); + return Utils.calcMD5(getBytesFromFile(file)); + } + catch ( Exception e ) { + throw new RuntimeException("Failed to read bytes from file: " + file + " for MD5 calculation", e); } } + + /** + * Logs a description (including diff engine output) of the MD5 mismatch between the expectedMD5 + * and actualMD5 to a combination of BaseTest.log(), the md5MismatchStream, and stdout, then returns + * the diff engine output. + * + * @param testName name of the test that generated the mismatch + * @param testClassName name of the class containing the test that generated the mismatch + * @param expectedMD5 the MD5 we were expecting from this test + * @param actualMD5 the MD5 we actually calculated from the test output + * @return the diff engine output produced while logging the description of the mismatch + */ + private String logMD5MismatchAndGetDiffEngineOutput(final String testName, final String testClassName, final String expectedMD5, final String actualMD5) { + System.out.printf("##### Test %s:%s is going to fail #####%n", testClassName, testName); + String pathToExpectedMD5File = getMD5FilePath(expectedMD5, "[No DB file found]"); + String pathToFileMD5File = getMD5FilePath(actualMD5, "[No DB file found]"); + BaseTest.log(String.format("expected %s", expectedMD5)); + BaseTest.log(String.format("calculated %s", actualMD5)); + BaseTest.log(String.format("diff %s %s", pathToExpectedMD5File, pathToFileMD5File)); + + md5MismatchStream.printf("%s\t%s\t%s%n", expectedMD5, actualMD5, testName); + md5MismatchStream.flush(); + + // inline differences + String diffEngineOutput = ""; + final ByteArrayOutputStream baos = new ByteArrayOutputStream(); + final 
PrintStream ps = new PrintStream(baos); + DiffEngine.SummaryReportParams params = new DiffEngine.SummaryReportParams(ps, 20, 10, 0, MAX_RAW_DIFFS_TO_SUMMARIZE, false); + boolean success = DiffEngine.simpleDiffFiles(new File(pathToExpectedMD5File), new File(pathToFileMD5File), MAX_RECORDS_TO_READ, params); + if ( success ) { + diffEngineOutput = baos.toString(); + BaseTest.log(diffEngineOutput); + System.out.printf("Note that the above list is not comprehensive. At most 20 lines of output, and 10 specific differences will be listed. Please use -T DiffObjects -R public/testdata/exampleFASTA.fasta -m %s -t %s to explore the differences more freely%n", + pathToExpectedMD5File, pathToFileMD5File); + } + ps.close(); + + return diffEngineOutput; + } } diff --git a/public/java/test/org/broadinstitute/sting/MD5Mismatch.java b/public/java/test/org/broadinstitute/sting/MD5Mismatch.java index e459a24ce..56acedaf0 100644 --- a/public/java/test/org/broadinstitute/sting/MD5Mismatch.java +++ b/public/java/test/org/broadinstitute/sting/MD5Mismatch.java @@ -35,29 +35,32 @@ import java.util.List; * @since Date created */ public class MD5Mismatch extends Exception { - final List actuals, expecteds; + final List actuals, expecteds, diffEngineOutputs; - public MD5Mismatch(final String actual, final String expected) { - this(Collections.singletonList(actual), Collections.singletonList(expected)); + public MD5Mismatch(final String actual, final String expected, final String diffEngineOutput) { + this(Collections.singletonList(actual), Collections.singletonList(expected), Collections.singletonList(diffEngineOutput)); } - public MD5Mismatch(final List actuals, final List expecteds) { - super(formatMessage(actuals, expecteds)); + public MD5Mismatch(final List actuals, final List expecteds, final List diffEngineOutputs) { + super(formatMessage(actuals, expecteds, diffEngineOutputs)); this.actuals = actuals; this.expecteds = expecteds; + this.diffEngineOutputs = diffEngineOutputs; } @Override 
public String toString() { - return formatMessage(actuals, expecteds); + return formatMessage(actuals, expecteds, diffEngineOutputs); } - private final static String formatMessage(final List actuals, final List expecteds) { + private static String formatMessage(final List actuals, final List expecteds, final List diffEngineOutputs) { final StringBuilder b = new StringBuilder("MD5 mismatch: "); for ( int i = 0; i < actuals.size(); i++ ) { - if ( i > 1 ) b.append("\t\t\n"); + if ( i >= 1 ) b.append("\t\t\n\n"); b.append("actual ").append(actuals.get(i)); b.append(" expected ").append(expecteds.get(i)); + b.append("\nDiff Engine Output:\n"); + b.append(diffEngineOutputs.get(i)); } return b.toString(); } diff --git a/public/java/test/org/broadinstitute/sting/WalkerTest.java b/public/java/test/org/broadinstitute/sting/WalkerTest.java index dd5a2b0a7..78f67967b 100644 --- a/public/java/test/org/broadinstitute/sting/WalkerTest.java +++ b/public/java/test/org/broadinstitute/sting/WalkerTest.java @@ -34,6 +34,7 @@ import org.broadinstitute.sting.gatk.CommandLineGATK; import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; import org.broadinstitute.sting.gatk.phonehome.GATKRunReport; import org.broadinstitute.sting.utils.Utils; +import org.broadinstitute.sting.utils.classloader.JVMUtils; import org.broadinstitute.variant.bcf2.BCF2Utils; import org.broadinstitute.sting.utils.collections.Pair; import org.broadinstitute.variant.vcf.VCFCodec; @@ -73,10 +74,6 @@ public class WalkerTest extends BaseTest { return md5DB; } - public MD5DB.MD5Match assertMatchingMD5(final String name, final File resultsFile, final String expectedMD5) { - return getMd5DB().assertMatchingMD5(name, resultsFile, expectedMD5, parameterize()); - } - public void validateOutputBCFIfPossible(final String name, final File resultFile) { final File bcfFile = BCF2Utils.shadowBCF(resultFile); if ( bcfFile != null && bcfFile.exists() ) { @@ -114,15 +111,15 @@ public class WalkerTest extends BaseTest { } } - 
public List assertMatchingMD5s(final String name, List resultFiles, List expectedMD5s) { + public List assertMatchingMD5s(final String testName, final String testClassName, List resultFiles, List expectedMD5s) { List md5s = new ArrayList(); List fails = new ArrayList(); for (int i = 0; i < resultFiles.size(); i++) { - MD5DB.MD5Match result = assertMatchingMD5(name, resultFiles.get(i), expectedMD5s.get(i)); - validateOutputBCFIfPossible(name, resultFiles.get(i)); + MD5DB.MD5Match result = getMd5DB().testFileMD5(testName, testClassName, resultFiles.get(i), expectedMD5s.get(i), parameterize()); + validateOutputBCFIfPossible(testName, resultFiles.get(i)); if ( ! result.failed ) { - validateOutputIndex(name, resultFiles.get(i)); + validateOutputIndex(testName, resultFiles.get(i)); md5s.add(result.expectedMD5); } else { fails.add(result); @@ -132,14 +129,17 @@ public class WalkerTest extends BaseTest { if ( ! fails.isEmpty() ) { List actuals = new ArrayList(); List expecteds = new ArrayList(); + List diffEngineOutputs = new ArrayList(); + for ( final MD5DB.MD5Match fail : fails ) { actuals.add(fail.actualMD5); expecteds.add(fail.expectedMD5); + diffEngineOutputs.add(fail.diffEngineOutput); logger.warn("Fail: " + fail.failMessage); } - final MD5Mismatch failure = new MD5Mismatch(actuals, expecteds); - Assert.fail(failure.toString(), failure); + final MD5Mismatch failure = new MD5Mismatch(actuals, expecteds, diffEngineOutputs); + Assert.fail(failure.toString()); } return md5s; @@ -170,6 +170,9 @@ public class WalkerTest extends BaseTest { boolean includeImplicitArgs = true; boolean includeShadowBCF = true; + // Name of the test class that created this test case + private Class testClass; + // the default output path for the integration test private File outputFileLocation = null; @@ -183,6 +186,7 @@ public class WalkerTest extends BaseTest { this.args = args; this.nOutputFiles = md5s.size(); this.md5s = md5s; + this.testClass = getCallingTestClass(); } public 
WalkerTestSpec(String args, List exts, List md5s) { @@ -194,19 +198,29 @@ public class WalkerTest extends BaseTest { this.nOutputFiles = md5s.size(); this.md5s = md5s; this.exts = exts; + this.testClass = getCallingTestClass(); } public WalkerTestSpec(String args, int nOutputFiles, Class expectedException) { this.args = args; this.nOutputFiles = nOutputFiles; this.expectedException = expectedException; + this.testClass = getCallingTestClass(); + } + + private Class getCallingTestClass() { + return JVMUtils.getCallingClass(getClass()); + } + + public String getTestClassName() { + return testClass.getSimpleName(); } public String getArgsWithImplicitArgs() { String args = this.args; if ( includeImplicitArgs ) { args = args + (ENABLE_PHONE_HOME_FOR_TESTS ? - String.format(" -et %s ", GATKRunReport.PhoneHomeOption.STANDARD) : + String.format(" -et %s ", GATKRunReport.PhoneHomeOption.AWS) : String.format(" -et %s -K %s ", GATKRunReport.PhoneHomeOption.NO_ET, gatkKeyFile)); if ( includeShadowBCF && GENERATE_SHADOW_BCF ) args = args + " --generateShadowBCF "; @@ -298,6 +312,10 @@ public class WalkerTest extends BaseTest { for (int i = 0; i < spec.nOutputFiles; i++) { String ext = spec.exts == null ? ".tmp" : "." 
+ spec.exts.get(i); File fl = createTempFile(String.format("walktest.tmp_param.%d", i), ext); + + // Mark corresponding *.idx for deletion on exit as well just in case an index is created for the temp file: + new File(fl.getAbsolutePath() + ".idx").deleteOnExit(); + tmpFiles.add(fl); } @@ -306,7 +324,7 @@ public class WalkerTest extends BaseTest { if ( spec.expectsException() ) { // this branch handles the case were we are testing that a walker will fail as expected - return executeTest(name, spec.getOutputFileLocation(), null, tmpFiles, args, spec.getExpectedException()); + return executeTest(name, spec.getTestClassName(), spec.getOutputFileLocation(), null, tmpFiles, args, spec.getExpectedException()); } else { List md5s = new LinkedList(); md5s.addAll(spec.md5s); @@ -316,7 +334,7 @@ public class WalkerTest extends BaseTest { md5s.add(md5); tmpFiles.add(spec.auxillaryFiles.get(md5)); } - return executeTest(name, spec.getOutputFileLocation(), md5s, tmpFiles, args, null); + return executeTest(name, spec.getTestClassName(), spec.getOutputFileLocation(), md5s, tmpFiles, args, null); } } @@ -337,35 +355,37 @@ public class WalkerTest extends BaseTest { /** * execute the test, given the following: - * @param name the name of the test + * @param testName the name of the test + * @param testClassName the name of the class that contains the test * @param md5s the list of md5s * @param tmpFiles the temp file corresponding to the md5 list * @param args the argument list * @param expectedException the expected exception or null * @return a pair of file and string lists */ - private Pair, List> executeTest(String name, File outputFileLocation, List md5s, List tmpFiles, String args, Class expectedException) { - if ( md5s != null ) qcMD5s(name, md5s); + private Pair, List> executeTest(String testName, String testClassName, File outputFileLocation, List md5s, List tmpFiles, String args, Class expectedException) { + if ( md5s != null ) qcMD5s(testName, md5s); if 
(outputFileLocation != null) args += " -o " + outputFileLocation.getAbsolutePath(); - executeTest(name, args, expectedException); + executeTest(testName, testClassName, args, expectedException); if ( expectedException != null ) { return null; } else { // we need to check MD5s - return new Pair, List>(tmpFiles, assertMatchingMD5s(name, tmpFiles, md5s)); + return new Pair, List>(tmpFiles, assertMatchingMD5s(testName, testClassName, tmpFiles, md5s)); } } /** * execute the test, given the following: - * @param name the name of the test - * @param args the argument list + * @param testName the name of the test + * @param testClassName the name of the class that contains the test + * @param args the argument list * @param expectedException the expected exception or null */ - private void executeTest(String name, String args, Class expectedException) { + private void executeTest(String testName, String testClassName, String args, Class expectedException) { CommandLineGATK instance = new CommandLineGATK(); String[] command = Utils.escapeExpressions(args); @@ -374,7 +394,7 @@ public class WalkerTest extends BaseTest { try { final String now = new SimpleDateFormat("HH:mm:ss").format(new Date()); final String cmdline = Utils.join(" ",command); - System.out.println(String.format("[%s] Executing test %s with GATK arguments: %s", now, name, cmdline)); + System.out.println(String.format("[%s] Executing test %s:%s with GATK arguments: %s", now, testClassName, testName, cmdline)); // also write the command line to the HTML log for convenient follow-up // do the replaceAll so paths become relative to the current BaseTest.log(cmdline.replaceAll(publicTestDirRoot, "").replaceAll(privateTestDirRoot, "")); @@ -388,8 +408,8 @@ public class WalkerTest extends BaseTest { // it's the type we expected //System.out.println(String.format(" => %s PASSED", name)); } else { - final String message = String.format("Test %s expected exception %s but instead got %s with error message %s", - name, 
expectedException, e.getClass(), e.getMessage()); + final String message = String.format("Test %s:%s expected exception %s but instead got %s with error message %s", + testClassName, testName, expectedException, e.getClass(), e.getMessage()); if ( e.getCause() != null ) { final ByteArrayOutputStream baos = new ByteArrayOutputStream(); final PrintStream ps = new PrintStream(baos); @@ -409,7 +429,7 @@ public class WalkerTest extends BaseTest { if ( expectedException != null ) { if ( ! gotAnException ) // we expected an exception but didn't see it - Assert.fail(String.format("Test %s expected exception %s but none was thrown", name, expectedException.toString())); + Assert.fail(String.format("Test %s:%s expected exception %s but none was thrown", testClassName, testName, expectedException.toString())); } else { if ( CommandLineExecutable.result != 0) { throw new RuntimeException("Error running the GATK with arguments: " + args); diff --git a/public/java/test/org/broadinstitute/sting/gatk/EngineFeaturesIntegrationTest.java b/public/java/test/org/broadinstitute/sting/gatk/EngineFeaturesIntegrationTest.java index 8d0874ea1..aca6cf984 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/EngineFeaturesIntegrationTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/EngineFeaturesIntegrationTest.java @@ -25,13 +25,32 @@ package org.broadinstitute.sting.gatk; +import net.sf.samtools.SAMFileReader; +import net.sf.samtools.SAMRecord; +import org.broad.tribble.readers.AsciiLineReader; import org.broadinstitute.sting.WalkerTest; +import org.broadinstitute.sting.commandline.Output; +import org.broadinstitute.sting.gatk.contexts.ReferenceContext; +import org.broadinstitute.sting.gatk.filters.MappingQualityUnavailableFilter; +import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; +import org.broadinstitute.sting.gatk.walkers.ReadFilters; +import org.broadinstitute.sting.gatk.walkers.ReadWalker; import org.broadinstitute.sting.gatk.walkers.qc.ErrorThrowing; 
import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; import org.broadinstitute.sting.utils.exceptions.UserException; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; +import org.broadinstitute.sting.utils.sam.GATKSamRecordFactory; +import org.broadinstitute.sting.utils.variant.GATKVCFUtils; +import org.broadinstitute.variant.vcf.VCFCodec; +import org.broadinstitute.variant.vcf.VCFHeader; +import org.broadinstitute.variant.vcf.VCFHeaderLine; +import org.testng.Assert; import org.testng.annotations.DataProvider; import org.testng.annotations.Test; +import java.io.File; +import java.io.FileInputStream; +import java.io.PrintStream; import java.util.Arrays; /** @@ -123,7 +142,159 @@ public class EngineFeaturesIntegrationTest extends WalkerTest { final String root = "-T ErrorThrowing -R " + exampleFASTA; final String args = root + cfg.args + " -E " + cfg.expectedException.getSimpleName(); WalkerTestSpec spec = new WalkerTestSpec(args, 0, cfg.expectedException); + executeTest(cfg.toString(), spec); } } + + // -------------------------------------------------------------------------------- + // + // Test that read filters are being applied in the order we expect + // + // -------------------------------------------------------------------------------- + + @ReadFilters({MappingQualityUnavailableFilter.class}) + public static class DummyReadWalkerWithMapqUnavailableFilter extends ReadWalker { + @Output + PrintStream out; + + @Override + public Integer map(ReferenceContext ref, GATKSAMRecord read, RefMetaDataTracker metaDataTracker) { + return 1; + } + + @Override + public Integer reduceInit() { + return 0; + } + + @Override + public Integer reduce(Integer value, Integer sum) { + return value + sum; + } + + @Override + public void onTraversalDone(Integer result) { + out.println(result); + } + } + + @Test(enabled = true) + public void testUserReadFilterAppliedBeforeWalker() { + WalkerTestSpec spec = new WalkerTestSpec("-R " + b37KGReference + " -I 
" + privateTestDir + "allMAPQ255.bam" + + " -T DummyReadWalkerWithMapqUnavailableFilter -o %s -L MT -rf ReassignMappingQuality", + 1, Arrays.asList("ecf27a776cdfc771defab1c5d19de9ab")); + executeTest("testUserReadFilterAppliedBeforeWalker", spec); + } + + @Test + public void testNegativeCompress() { + testBadCompressArgument(-1); + } + + @Test + public void testTooBigCompress() { + testBadCompressArgument(100); + } + + private void testBadCompressArgument(final int compress) { + WalkerTestSpec spec = new WalkerTestSpec("-T PrintReads -R " + b37KGReference + " -I private/testdata/NA12878.1_10mb_2_10mb.bam -o %s -compress " + compress, + 1, UserException.class); + executeTest("badCompress " + compress, spec); + } + + // -------------------------------------------------------------------------------- + // + // Test that the VCF version key is what we expect + // + // -------------------------------------------------------------------------------- + @Test(enabled = true) + public void testGATKVersionInVCF() throws Exception { + WalkerTestSpec spec = new WalkerTestSpec("-T SelectVariants -R " + b37KGReference + + " -V " + privateTestDir + "NA12878.WGS.b37.chr20.firstMB.vcf" + + " -o %s -L 20:61098", + 1, Arrays.asList("")); + spec.disableShadowBCF(); + final File vcf = executeTest("testGATKVersionInVCF", spec).first.get(0); + final VCFHeader header = (VCFHeader)new VCFCodec().readHeader(new AsciiLineReader(new FileInputStream(vcf))); + final VCFHeaderLine versionLine = header.getMetaDataLine(GATKVCFUtils.GATK_COMMAND_LINE_KEY); + Assert.assertNotNull(versionLine); + Assert.assertTrue(versionLine.toString().contains("SelectVariants")); + } + + @Test(enabled = true) + public void testMultipleGATKVersionsInVCF() throws Exception { + WalkerTestSpec spec = new WalkerTestSpec("-T SelectVariants -R " + b37KGReference + + " -V " + privateTestDir + "gatkCommandLineInHeader.vcf" + + " -o %s", + 1, Arrays.asList("")); + spec.disableShadowBCF(); + final File vcf = 
executeTest("testMultipleGATKVersionsInVCF", spec).first.get(0); + final VCFHeader header = (VCFHeader)new VCFCodec().readHeader(new AsciiLineReader(new FileInputStream(vcf))); + + boolean foundHC = false; + boolean foundSV = false; + for ( final VCFHeaderLine line : header.getMetaDataInInputOrder() ) { + if ( line.getKey().equals(GATKVCFUtils.GATK_COMMAND_LINE_KEY) ) { + if ( line.toString().contains("HaplotypeCaller") ) { + Assert.assertFalse(foundHC); + foundHC = true; + } + if ( line.toString().contains("SelectVariants") ) { + Assert.assertFalse(foundSV); + foundSV = true; + } + } + } + + Assert.assertTrue(foundHC, "Didn't find HaplotypeCaller command line header field"); + Assert.assertTrue(foundSV, "Didn't find SelectVariants command line header field"); + } + + // -------------------------------------------------------------------------------- + // + // Test that defaultBaseQualities actually works + // + // -------------------------------------------------------------------------------- + + public WalkerTestSpec testDefaultBaseQualities(final Integer value, final String md5) { + return new WalkerTestSpec("-T PrintReads -R " + b37KGReference + " -I " + privateTestDir + "/baseQualitiesToFix.bam -o %s" + + (value != null ? 
" --defaultBaseQualities " + value : ""), + 1, Arrays.asList(md5)); + } + + @Test() + public void testDefaultBaseQualities20() { + executeTest("testDefaultBaseQualities20", testDefaultBaseQualities(20, "7d254a9d0ec59c66ee3e137f56f4c78f")); + } + + @Test() + public void testDefaultBaseQualities30() { + executeTest("testDefaultBaseQualities30", testDefaultBaseQualities(30, "0f50def6cbbbd8ccd4739e2b3998e503")); + } + + @Test(expectedExceptions = Exception.class) + public void testDefaultBaseQualitiesNoneProvided() { + executeTest("testDefaultBaseQualitiesNoneProvided", testDefaultBaseQualities(null, "")); + } + + @Test + public void testGATKEngineConsolidatesCigars() { + final WalkerTestSpec spec = new WalkerTestSpec(" -T PrintReads" + + " -R " + b37KGReference + + " -I " + privateTestDir + "zero_length_cigar_elements.bam" + + " -o %s", + 1, Arrays.asList("")); // No MD5s; we only want to check the cigar + + final File outputBam = executeTest("testGATKEngineConsolidatesCigars", spec).first.get(0); + final SAMFileReader reader = new SAMFileReader(outputBam); + reader.setValidationStringency(SAMFileReader.ValidationStringency.SILENT); + reader.setSAMRecordFactory(new GATKSamRecordFactory()); + + final SAMRecord read = reader.iterator().next(); + reader.close(); + + // Original cigar was 0M3M0M8M. 
Check that it's been consolidated after running through the GATK engine: + Assert.assertEquals(read.getCigarString(), "11M", "Cigar 0M3M0M8M not consolidated correctly by the engine"); + } } \ No newline at end of file diff --git a/public/java/test/org/broadinstitute/sting/gatk/MaxRuntimeIntegrationTest.java b/public/java/test/org/broadinstitute/sting/gatk/MaxRuntimeIntegrationTest.java index e6176dbe8..5b3f1e790 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/MaxRuntimeIntegrationTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/MaxRuntimeIntegrationTest.java @@ -26,19 +26,52 @@ package org.broadinstitute.sting.gatk; import org.broadinstitute.sting.WalkerTest; +import org.broadinstitute.sting.commandline.Argument; +import org.broadinstitute.sting.commandline.Output; +import org.broadinstitute.sting.gatk.contexts.AlignmentContext; +import org.broadinstitute.sting.gatk.contexts.ReferenceContext; +import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; +import org.broadinstitute.sting.gatk.walkers.LocusWalker; import org.broadinstitute.sting.utils.SimpleTimer; import org.testng.Assert; import org.testng.annotations.DataProvider; import org.testng.annotations.Test; +import java.io.BufferedReader; +import java.io.File; +import java.io.FileReader; +import java.io.PrintStream; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.List; import java.util.concurrent.TimeUnit; /** * */ public class MaxRuntimeIntegrationTest extends WalkerTest { + public static class SleepingWalker extends LocusWalker { + @Output PrintStream out; + + @Argument(fullName="sleepTime",shortName="sleepTime",doc="x", required=false) + public int sleepTime = 100; + + @Override + public Integer map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) { + try {Thread.sleep(sleepTime);} catch (InterruptedException e) {}; + return 1; + } + + @Override public Integer reduceInit() { return 0; } + 
@Override public Integer reduce(Integer value, Integer sum) { return sum + value; } + + @Override + public void onTraversalDone(Integer result) { + out.println(result); + } + } + private static final long STARTUP_TIME = TimeUnit.NANOSECONDS.convert(60, TimeUnit.SECONDS); private class MaxRuntimeTestProvider extends TestDataProvider { @@ -84,4 +117,35 @@ public class MaxRuntimeIntegrationTest extends WalkerTest { + " exceeded max. tolerated runtime " + TimeUnit.SECONDS.convert(cfg.expectedMaxRuntimeNano(), TimeUnit.NANOSECONDS) + " given requested runtime " + cfg.maxRuntime + " " + cfg.unit); } + + @DataProvider(name = "SubshardProvider") + public Object[][] makeSubshardProvider() { + List tests = new ArrayList(); + + // this functionality can be adapted to provide input data for whatever you might want in your data + tests.add(new Object[]{10}); + tests.add(new Object[]{100}); + tests.add(new Object[]{500}); + tests.add(new Object[]{1000}); + tests.add(new Object[]{2000}); + + return tests.toArray(new Object[][]{}); + } + + @Test(enabled = true, dataProvider = "SubshardProvider", timeOut = 120 * 1000) + public void testSubshardTimeout(final int sleepTime) throws Exception { + final int maxRuntime = 5000; + + WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( + "-T SleepingWalker -R " + b37KGReference + + " -I " + privateTestDir + "NA12878.100kb.BQSRv2.example.bam -o %s" + + " -maxRuntime " + maxRuntime + " -maxRuntimeUnits MILLISECONDS -sleepTime " + sleepTime, 1, + Collections.singletonList("")); + final File result = executeTest("Subshard max runtime ", spec).getFirst().get(0); + final int cycle = Integer.valueOf(new BufferedReader(new FileReader(result)).readLine()); + + final int maxCycles = (int)Math.ceil((maxRuntime * 5) / sleepTime); + logger.warn(String.format("Max cycles %d saw %d in file %s with sleepTime %d and maxRuntime %d", maxCycles, cycle, result, sleepTime, maxRuntime)); + Assert.assertTrue(cycle < maxCycles, "Too many cycles seen -- 
saw " + cycle + " in file " + result + " but max should have been " + maxCycles); + } } \ No newline at end of file diff --git a/public/java/test/org/broadinstitute/sting/gatk/ReadMetricsUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/ReadMetricsUnitTest.java new file mode 100644 index 000000000..56725147e --- /dev/null +++ b/public/java/test/org/broadinstitute/sting/gatk/ReadMetricsUnitTest.java @@ -0,0 +1,371 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+*/ + +package org.broadinstitute.sting.gatk; + +import net.sf.picard.reference.IndexedFastaSequenceFile; +import net.sf.samtools.*; +import org.broadinstitute.sting.BaseTest; +import org.broadinstitute.sting.commandline.Tags; +import org.broadinstitute.sting.gatk.arguments.ValidationExclusion; +import org.broadinstitute.sting.gatk.contexts.AlignmentContext; +import org.broadinstitute.sting.gatk.contexts.ReferenceContext; +import org.broadinstitute.sting.gatk.datasources.providers.LocusShardDataProvider; +import org.broadinstitute.sting.gatk.datasources.providers.ReadShardDataProvider; +import org.broadinstitute.sting.gatk.datasources.reads.*; +import org.broadinstitute.sting.gatk.datasources.rmd.ReferenceOrderedDataSource; +import org.broadinstitute.sting.gatk.executive.WindowMaker; +import org.broadinstitute.sting.gatk.filters.ReadFilter; +import org.broadinstitute.sting.gatk.iterators.ReadTransformer; +import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; +import org.broadinstitute.sting.gatk.resourcemanagement.ThreadAllocation; +import org.broadinstitute.sting.gatk.traversals.*; +import org.broadinstitute.sting.gatk.walkers.*; +import org.broadinstitute.sting.utils.GenomeLoc; +import org.broadinstitute.sting.utils.GenomeLocParser; +import org.broadinstitute.sting.utils.GenomeLocSortedSet; +import org.broadinstitute.sting.utils.SampleUtils; +import org.broadinstitute.sting.utils.activeregion.ActiveRegion; +import org.broadinstitute.sting.utils.activeregion.ActivityProfileState; +import org.broadinstitute.sting.utils.fasta.CachingIndexedFastaSequenceFile; +import org.broadinstitute.sting.utils.sam.*; +import org.testng.Assert; +import org.testng.annotations.BeforeClass; +import org.testng.annotations.Test; + +import java.io.File; +import java.io.IOException; +import java.util.*; + +public class ReadMetricsUnitTest extends BaseTest { + + @Test + public void testReadsSeenDoNotOverflowInt() { + + final ReadMetrics metrics = new ReadMetrics(); + + final 
long moreThanMaxInt = ((long)Integer.MAX_VALUE) + 1L; + + for ( long i = 0L; i < moreThanMaxInt; i++ ) { + metrics.incrementNumReadsSeen(); + } + + Assert.assertEquals(metrics.getNumReadsSeen(), moreThanMaxInt); + Assert.assertTrue(metrics.getNumReadsSeen() > (long) Integer.MAX_VALUE); + + logger.warn(String.format("%d %d %d", Integer.MAX_VALUE, moreThanMaxInt, Long.MAX_VALUE)); + } + + + // Test the accuracy of the read metrics + + private IndexedFastaSequenceFile reference; + private SAMSequenceDictionary dictionary; + private SAMFileHeader header; + private GATKSAMReadGroupRecord readGroup; + private GenomeLocParser genomeLocParser; + private File testBAM; + + private static final int numReadsPerContig = 250000; + private static final List contigs = Arrays.asList("1", "2", "3"); + + @BeforeClass + private void init() throws IOException { + reference = new CachingIndexedFastaSequenceFile(new File(b37KGReference)); + dictionary = reference.getSequenceDictionary(); + genomeLocParser = new GenomeLocParser(dictionary); + header = ArtificialSAMUtils.createDefaultReadGroup(new SAMFileHeader(), "test", "test"); + header.setSequenceDictionary(dictionary); + header.setSortOrder(SAMFileHeader.SortOrder.coordinate); + readGroup = new GATKSAMReadGroupRecord(header.getReadGroup("test")); + + final List reads = new ArrayList<>(); + for ( final String contig : contigs ) { + for ( int i = 1; i <= numReadsPerContig; i++ ) { + reads.add(buildSAMRecord("read" + contig + "_" + i, contig, i)); + } + } + + createBAM(reads); + } + + private void createBAM(final List reads) throws IOException { + testBAM = File.createTempFile("TraverseActiveRegionsUnitTest", ".bam"); + testBAM.deleteOnExit(); + + SAMFileWriter out = new SAMFileWriterFactory().setCreateIndex(true).makeBAMWriter(reads.get(0).getHeader(), true, testBAM); + for (GATKSAMRecord read : reads ) { + out.addAlignment(read); + } + out.close(); + + new File(testBAM.getAbsolutePath().replace(".bam", ".bai")).deleteOnExit(); + new 
File(testBAM.getAbsolutePath() + ".bai").deleteOnExit(); + } + + // copied from LocusViewTemplate + protected GATKSAMRecord buildSAMRecord(final String readName, final String contig, final int alignmentStart) { + GATKSAMRecord record = new GATKSAMRecord(header); + + record.setReadName(readName); + record.setReferenceIndex(dictionary.getSequenceIndex(contig)); + record.setAlignmentStart(alignmentStart); + + record.setCigarString("1M"); + record.setReadString("A"); + record.setBaseQualityString("A"); + record.setReadGroup(readGroup); + + return record; + } + + @Test + public void testCountsFromReadTraversal() { + final GenomeAnalysisEngine engine = new GenomeAnalysisEngine(); + engine.setGenomeLocParser(genomeLocParser); + + final Collection samFiles = new ArrayList<>(); + final SAMReaderID readerID = new SAMReaderID(testBAM, new Tags()); + samFiles.add(readerID); + + final SAMDataSource dataSource = new SAMDataSource(samFiles, new ThreadAllocation(), null, genomeLocParser, + false, + SAMFileReader.ValidationStringency.STRICT, + null, + null, + new ValidationExclusion(), + new ArrayList(), + new ArrayList(), + false, (byte)30, false, true); + + engine.setReadsDataSource(dataSource); + + final TraverseReadsNano traverseReadsNano = new TraverseReadsNano(1); + final DummyReadWalker walker = new DummyReadWalker(); + traverseReadsNano.initialize(engine, walker, null); + + for ( final Shard shard : dataSource.createShardIteratorOverAllReads(new ReadShardBalancer()) ) { + final ReadShardDataProvider dataProvider = new ReadShardDataProvider(shard, engine.getGenomeLocParser(), dataSource.seek(shard), reference, new ArrayList()); + traverseReadsNano.traverse(walker, dataProvider, 0); + dataProvider.close(); + } + + Assert.assertEquals(engine.getCumulativeMetrics().getNumReadsSeen(), contigs.size() * numReadsPerContig); + Assert.assertEquals(engine.getCumulativeMetrics().getNumIterations(), contigs.size() * numReadsPerContig); + } + + @Test + public void 
testCountsFromLocusTraversal() { + final GenomeAnalysisEngine engine = new GenomeAnalysisEngine(); + engine.setGenomeLocParser(genomeLocParser); + + final Collection samFiles = new ArrayList<>(); + final SAMReaderID readerID = new SAMReaderID(testBAM, new Tags()); + samFiles.add(readerID); + + final SAMDataSource dataSource = new SAMDataSource(samFiles, new ThreadAllocation(), null, genomeLocParser, + false, + SAMFileReader.ValidationStringency.STRICT, + null, + null, + new ValidationExclusion(), + new ArrayList(), + new ArrayList(), + false, (byte)30, false, true); + + engine.setReadsDataSource(dataSource); + final Set samples = SampleUtils.getSAMFileSamples(dataSource.getHeader()); + + final TraverseLociNano traverseLociNano = new TraverseLociNano(1); + final DummyLocusWalker walker = new DummyLocusWalker(); + traverseLociNano.initialize(engine, walker, null); + + for ( final Shard shard : dataSource.createShardIteratorOverAllReads(new LocusShardBalancer()) ) { + final WindowMaker windowMaker = new WindowMaker(shard, genomeLocParser, dataSource.seek(shard), shard.getGenomeLocs(), samples); + for ( WindowMaker.WindowMakerIterator window : windowMaker ) { + final LocusShardDataProvider dataProvider = new LocusShardDataProvider(shard, shard.getReadProperties(), genomeLocParser, window.getLocus(), window, reference, new ArrayList()); + traverseLociNano.traverse(walker, dataProvider, 0); + dataProvider.close(); + } + windowMaker.close(); + } + + //dataSource.close(); + Assert.assertEquals(engine.getCumulativeMetrics().getNumReadsSeen(), contigs.size() * numReadsPerContig); + Assert.assertEquals(engine.getCumulativeMetrics().getNumIterations(), contigs.size() * numReadsPerContig); + } + + @Test + public void testCountsFromActiveRegionTraversal() { + final GenomeAnalysisEngine engine = new GenomeAnalysisEngine(); + engine.setGenomeLocParser(genomeLocParser); + + final Collection samFiles = new ArrayList<>(); + final SAMReaderID readerID = new SAMReaderID(testBAM, new 
Tags()); + samFiles.add(readerID); + + final SAMDataSource dataSource = new SAMDataSource(samFiles, new ThreadAllocation(), null, genomeLocParser, + false, + SAMFileReader.ValidationStringency.STRICT, + null, + null, + new ValidationExclusion(), + new ArrayList(), + new ArrayList(), + false, (byte)30, false, true); + + engine.setReadsDataSource(dataSource); + final Set samples = SampleUtils.getSAMFileSamples(dataSource.getHeader()); + + final List intervals = new ArrayList<>(contigs.size()); + for ( final String contig : contigs ) + intervals.add(genomeLocParser.createGenomeLoc(contig, 1, numReadsPerContig)); + + final TraverseActiveRegions traverseActiveRegions = new TraverseActiveRegions(); + final DummyActiveRegionWalker walker = new DummyActiveRegionWalker(); + traverseActiveRegions.initialize(engine, walker, null); + + for ( final Shard shard : dataSource.createShardIteratorOverIntervals(new GenomeLocSortedSet(genomeLocParser, intervals), new ActiveRegionShardBalancer()) ) { + final WindowMaker windowMaker = new WindowMaker(shard, genomeLocParser, dataSource.seek(shard), shard.getGenomeLocs(), samples); + for ( WindowMaker.WindowMakerIterator window : windowMaker ) { + final LocusShardDataProvider dataProvider = new LocusShardDataProvider(shard, shard.getReadProperties(), genomeLocParser, window.getLocus(), window, reference, new ArrayList()); + traverseActiveRegions.traverse(walker, dataProvider, 0); + dataProvider.close(); + } + windowMaker.close(); + } + + Assert.assertEquals(engine.getCumulativeMetrics().getNumReadsSeen(), contigs.size() * numReadsPerContig); + Assert.assertEquals(engine.getCumulativeMetrics().getNumIterations(), contigs.size() * numReadsPerContig); + } + + @Test + public void testFilteredCounts() { + final GenomeAnalysisEngine engine = new GenomeAnalysisEngine(); + engine.setGenomeLocParser(genomeLocParser); + + final Collection samFiles = new ArrayList<>(); + final SAMReaderID readerID = new SAMReaderID(testBAM, new Tags()); + 
samFiles.add(readerID); + + final List filters = new ArrayList<>(); + filters.add(new EveryTenthReadFilter()); + + final SAMDataSource dataSource = new SAMDataSource(samFiles, new ThreadAllocation(), null, genomeLocParser, + false, + SAMFileReader.ValidationStringency.STRICT, + null, + null, + new ValidationExclusion(), + filters, + new ArrayList(), + false, (byte)30, false, true); + + engine.setReadsDataSource(dataSource); + + final TraverseReadsNano traverseReadsNano = new TraverseReadsNano(1); + final DummyReadWalker walker = new DummyReadWalker(); + traverseReadsNano.initialize(engine, walker, null); + + for ( final Shard shard : dataSource.createShardIteratorOverAllReads(new ReadShardBalancer()) ) { + final ReadShardDataProvider dataProvider = new ReadShardDataProvider(shard, engine.getGenomeLocParser(), dataSource.seek(shard), reference, new ArrayList()); + traverseReadsNano.traverse(walker, dataProvider, 0); + dataProvider.close(); + } + + Assert.assertEquals((long)engine.getCumulativeMetrics().getCountsByFilter().get(EveryTenthReadFilter.class.getSimpleName()), contigs.size() * numReadsPerContig / 10); + } + + class DummyLocusWalker extends LocusWalker { + @Override + public Integer map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) { + return 0; + } + + @Override + public Integer reduceInit() { + return 0; + } + + @Override + public Integer reduce(Integer value, Integer sum) { + return 0; + } + } + + class DummyReadWalker extends ReadWalker { + @Override + public Integer map(ReferenceContext ref, GATKSAMRecord read, RefMetaDataTracker metaDataTracker) { + return 0; + } + + @Override + public Integer reduceInit() { + return 0; + } + + @Override + public Integer reduce(Integer value, Integer sum) { + return 0; + } + } + + class DummyActiveRegionWalker extends ActiveRegionWalker { + @Override + public ActivityProfileState isActive(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) { + return new 
ActivityProfileState(ref.getLocus(), 0.0); + } + + @Override + public Integer map(ActiveRegion activeRegion, RefMetaDataTracker metaDataTracker) { + return 0; + } + + @Override + public Integer reduceInit() { + return 0; + } + + @Override + public Integer reduce(Integer value, Integer sum) { + return 0; + } + } + + private final class EveryTenthReadFilter extends ReadFilter { + + private int myCounter = 0; + + @Override + public boolean filterOut(final SAMRecord record) { + if ( ++myCounter == 10 ) { + myCounter = 0; + return true; + } + + return false; + } + } +} \ No newline at end of file diff --git a/public/java/test/org/broadinstitute/sting/gatk/datasources/providers/ReadBasedReferenceOrderedViewUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/datasources/providers/IntervalReferenceOrderedViewUnitTest.java similarity index 98% rename from public/java/test/org/broadinstitute/sting/gatk/datasources/providers/ReadBasedReferenceOrderedViewUnitTest.java rename to public/java/test/org/broadinstitute/sting/gatk/datasources/providers/IntervalReferenceOrderedViewUnitTest.java index bf4d36d92..784bd727e 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/datasources/providers/ReadBasedReferenceOrderedViewUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/datasources/providers/IntervalReferenceOrderedViewUnitTest.java @@ -49,7 +49,7 @@ import java.util.*; /** * @author depristo */ -public class ReadBasedReferenceOrderedViewUnitTest extends BaseTest { +public class IntervalReferenceOrderedViewUnitTest extends BaseTest { private static int startingChr = 1; private static int endingChr = 2; private static int readCount = 100; @@ -285,7 +285,7 @@ public class ReadBasedReferenceOrderedViewUnitTest extends BaseTest { Collections.sort(intervals); final GenomeLoc span = span(intervals); - final ReadBasedReferenceOrderedView view = new ReadBasedReferenceOrderedView(genomeLocParser, span, names, iterators); + final IntervalReferenceOrderedView 
view = new IntervalReferenceOrderedView(genomeLocParser, span, names, iterators); if ( testStateless ) { // test each tracker is well formed, as each is created diff --git a/public/java/test/org/broadinstitute/sting/gatk/datasources/providers/ReferenceOrderedViewUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/datasources/providers/ReferenceOrderedViewUnitTest.java index fad632cfd..1d39f43c6 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/datasources/providers/ReferenceOrderedViewUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/datasources/providers/ReferenceOrderedViewUnitTest.java @@ -97,7 +97,7 @@ public class ReferenceOrderedViewUnitTest extends BaseTest { LocusShardDataProvider provider = new LocusShardDataProvider(shard, null, genomeLocParser, shard.getGenomeLocs().get(0), null, seq, Collections.emptyList()); ReferenceOrderedView view = new ManagingReferenceOrderedView( provider ); - RefMetaDataTracker tracker = view.getReferenceOrderedDataAtLocus(genomeLocParser.createGenomeLoc("chrM",10), null); + RefMetaDataTracker tracker = view.getReferenceOrderedDataAtLocus(genomeLocParser.createGenomeLoc("chrM",10)); Assert.assertEquals(tracker.getValues(Feature.class).size(), 0, "The tracker should not have produced any data"); } @@ -115,7 +115,7 @@ public class ReferenceOrderedViewUnitTest extends BaseTest { LocusShardDataProvider provider = new LocusShardDataProvider(shard, null, genomeLocParser, shard.getGenomeLocs().get(0), null, seq, Collections.singletonList(dataSource)); ReferenceOrderedView view = new ManagingReferenceOrderedView( provider ); - RefMetaDataTracker tracker = view.getReferenceOrderedDataAtLocus(genomeLocParser.createGenomeLoc("chrM",20), null); + RefMetaDataTracker tracker = view.getReferenceOrderedDataAtLocus(genomeLocParser.createGenomeLoc("chrM",20)); TableFeature datum = tracker.getFirstValue(new RodBinding(TableFeature.class, "tableTest")); Assert.assertEquals(datum.get("COL1"),"C","datum parameter 
for COL1 is incorrect"); @@ -141,7 +141,7 @@ public class ReferenceOrderedViewUnitTest extends BaseTest { LocusShardDataProvider provider = new LocusShardDataProvider(shard, null, genomeLocParser, shard.getGenomeLocs().get(0), null, seq, Arrays.asList(dataSource1,dataSource2)); ReferenceOrderedView view = new ManagingReferenceOrderedView( provider ); - RefMetaDataTracker tracker = view.getReferenceOrderedDataAtLocus(genomeLocParser.createGenomeLoc("chrM",20), null); + RefMetaDataTracker tracker = view.getReferenceOrderedDataAtLocus(genomeLocParser.createGenomeLoc("chrM",20)); TableFeature datum1 = tracker.getFirstValue(new RodBinding(TableFeature.class, "tableTest1")); Assert.assertEquals(datum1.get("COL1"),"C","datum1 parameter for COL1 is incorrect"); diff --git a/public/java/test/org/broadinstitute/sting/gatk/datasources/reads/ActiveRegionShardBalancerUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/datasources/reads/ActiveRegionShardBalancerUnitTest.java new file mode 100644 index 000000000..e768faba4 --- /dev/null +++ b/public/java/test/org/broadinstitute/sting/gatk/datasources/reads/ActiveRegionShardBalancerUnitTest.java @@ -0,0 +1,101 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. 
+* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. +*/ + +package org.broadinstitute.sting.gatk.datasources.reads; + +import net.sf.samtools.SAMFileHeader; +import net.sf.samtools.SAMFileSpan; +import net.sf.samtools.SAMSequenceRecord; +import org.broadinstitute.sting.BaseTest; +import org.broadinstitute.sting.utils.GenomeLoc; +import org.broadinstitute.sting.utils.GenomeLocParser; +import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; +import org.testng.Assert; +import org.testng.annotations.BeforeClass; +import org.testng.annotations.Test; + +import java.io.FileNotFoundException; +import java.util.*; + +public class ActiveRegionShardBalancerUnitTest extends BaseTest { + // example genome loc parser for this test, can be deleted if you don't use the reference + private GenomeLocParser genomeLocParser; + protected SAMDataSource readsDataSource; + + @BeforeClass + public void setup() throws FileNotFoundException { + // sequence + final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(10, 0, 10000); + genomeLocParser = new GenomeLocParser(header.getSequenceDictionary()); + readsDataSource = null; + } + + @Test + public void testMergingManyContigs() { + executeTest(genomeLocParser.getContigs().getSequences()); + } + + @Test + public void testMergingAllPointersOnSingleContig() { + executeTest(Arrays.asList(genomeLocParser.getContigs().getSequences().get(1))); + } + + @Test + public void testMergingMultipleDiscontinuousContigs() { + final List all = genomeLocParser.getContigs().getSequences(); + 
executeTest(Arrays.asList(all.get(1), all.get(3))); + } + + private void executeTest(final Collection records) { + final ActiveRegionShardBalancer balancer = new ActiveRegionShardBalancer(); + + final List> expectedLocs = new LinkedList<>(); + final List pointers = new LinkedList<>(); + + for ( final SAMSequenceRecord record : records ) { + final int size = 10; + int end = 0; + for ( int i = 0; i < record.getSequenceLength(); i += size) { + final int myEnd = i + size - 1; + end = myEnd; + final GenomeLoc loc = genomeLocParser.createGenomeLoc(record.getSequenceName(), i, myEnd); + final Map fileSpans = Collections.emptyMap(); + final FilePointer fp = new FilePointer(fileSpans, Collections.singletonList(loc)); + pointers.add(fp); + } + expectedLocs.add(Collections.singleton(genomeLocParser.createGenomeLoc(record.getSequenceName(), 0, end))); + } + + balancer.initialize(readsDataSource, pointers.iterator(), genomeLocParser); + + int i = 0; + int nShardsFound = 0; + for ( final Shard shard : balancer ) { + nShardsFound++; + Assert.assertEquals(new HashSet<>(shard.getGenomeLocs()), expectedLocs.get(i++)); + } + Assert.assertEquals(nShardsFound, records.size(), "Didn't find exactly one shard for each contig in the sequence dictionary"); + } +} diff --git a/public/java/test/org/broadinstitute/sting/gatk/datasources/reads/DownsamplerBenchmark.java b/public/java/test/org/broadinstitute/sting/gatk/datasources/reads/DownsamplerBenchmark.java index 00389be97..25c71d570 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/datasources/reads/DownsamplerBenchmark.java +++ b/public/java/test/org/broadinstitute/sting/gatk/datasources/reads/DownsamplerBenchmark.java @@ -26,7 +26,9 @@ package org.broadinstitute.sting.gatk.datasources.reads; import com.google.caliper.Param; +import org.broadinstitute.sting.gatk.WalkerManager; import org.broadinstitute.sting.gatk.downsampling.DownsamplingMethod; +import org.broadinstitute.sting.gatk.walkers.LocusWalker; import 
org.broadinstitute.sting.gatk.walkers.qc.CountLoci; /** @@ -86,7 +88,7 @@ public class DownsamplerBenchmark extends ReadProcessingBenchmark { }, PER_SAMPLE { @Override - DownsamplingMethod create() { return DownsamplingMethod.getDefaultDownsamplingMethod(new CountLoci()); } + DownsamplingMethod create() { return WalkerManager.getDownsamplingMethod(LocusWalker.class); } }; abstract DownsamplingMethod create(); } diff --git a/public/java/test/org/broadinstitute/sting/gatk/downsampling/DownsamplingIntegrationTest.java b/public/java/test/org/broadinstitute/sting/gatk/downsampling/DownsamplingIntegrationTest.java new file mode 100644 index 000000000..85f9169da --- /dev/null +++ b/public/java/test/org/broadinstitute/sting/gatk/downsampling/DownsamplingIntegrationTest.java @@ -0,0 +1,44 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+*/ + +package org.broadinstitute.sting.gatk.downsampling; + +import org.broadinstitute.sting.WalkerTest; +import org.broadinstitute.sting.utils.exceptions.UserException; +import org.testng.annotations.Test; + +public class DownsamplingIntegrationTest extends WalkerTest { + + @Test + public void testDetectLowDcovValueWithLocusTraversal() { + final WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( + "-T CountLoci -R " + publicTestDir + "exampleFASTA.fasta -I " + publicTestDir + "exampleBAM.bam -o %s " + + "-dcov " + (DownsamplingMethod.MINIMUM_SAFE_COVERAGE_TARGET_FOR_LOCUS_BASED_TRAVERSALS - 1), + 1, + UserException.class + ); + executeTest("testDetectLowDcovValueWithLocusTraversal", spec); + } +} diff --git a/public/java/test/org/broadinstitute/sting/gatk/downsampling/FractionalDownsamplerUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/downsampling/FractionalDownsamplerUnitTest.java index 6f18d794f..8f0eee069 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/downsampling/FractionalDownsamplerUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/downsampling/FractionalDownsamplerUnitTest.java @@ -30,6 +30,7 @@ import net.sf.samtools.SAMRecord; import org.broadinstitute.sting.BaseTest; import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import org.testng.annotations.DataProvider; import org.testng.annotations.Test; import org.testng.Assert; @@ -152,7 +153,39 @@ public class FractionalDownsamplerUnitTest extends BaseTest { Assert.assertEquals(downsampler.getNumberOfDiscardedItems(), test.totalReads - downsampledReads.size()); - downsampler.reset(); + downsampler.resetStats(); Assert.assertEquals(downsampler.getNumberOfDiscardedItems(), 0); } + + @Test + public void testDoNotDiscardReducedReads() { + GenomeAnalysisEngine.resetRandomGenerator(); + final ReadsDownsampler downsampler = new 
FractionalDownsampler(0.0); + + final Collection reads = new ArrayList(); + final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000000); + final int[] baseCounts = { 10, 10, 10, 10, 10 }; + + for ( int i = 1; i <= 10; i++ ) { + reads.add(ArtificialSAMUtils.createArtificialReducedRead(header, "foo", 0, 1, 5, baseCounts)); + } + for ( int i = 1; i <= 5; i++ ) { + reads.add(ArtificialSAMUtils.createArtificialRead(header, "foo", 0, 1, 5)); + } + + downsampler.submit(reads); + downsampler.signalEndOfInput(); + + Assert.assertEquals(downsampler.getNumberOfDiscardedItems(), 5, "wrong number of items discarded by the downsampler"); + Assert.assertTrue(downsampler.hasFinalizedItems(), "downsampler should have finalized items but doesn't"); + Assert.assertEquals(downsampler.size(), 10, "downsampler size() reports wrong number of items"); + + final Collection readsReturned = downsampler.consumeFinalizedItems(); + + Assert.assertEquals(readsReturned.size(), 10, "wrong number of items returned by the downsampler"); + + for ( GATKSAMRecord readReturned : readsReturned ) { + Assert.assertTrue(readReturned.isReducedRead(), "non-reduced read survived the downsampling process, but shouldn't have"); + } + } } diff --git a/public/java/test/org/broadinstitute/sting/gatk/downsampling/LevelingDownsamplerUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/downsampling/LevelingDownsamplerUnitTest.java index 972e51dcd..8cf0fd2a1 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/downsampling/LevelingDownsamplerUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/downsampling/LevelingDownsamplerUnitTest.java @@ -25,16 +25,17 @@ package org.broadinstitute.sting.gatk.downsampling; +import net.sf.samtools.SAMFileHeader; import org.broadinstitute.sting.BaseTest; import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; +import org.broadinstitute.sting.utils.locusiterator.AlignmentStateMachine; +import 
org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import org.testng.annotations.Test; import org.testng.annotations.DataProvider; import org.testng.Assert; -import java.util.ArrayList; -import java.util.Collection; -import java.util.LinkedList; -import java.util.List; +import java.util.*; public class LevelingDownsamplerUnitTest extends BaseTest { @@ -158,9 +159,46 @@ public class LevelingDownsamplerUnitTest extends BaseTest { Assert.assertEquals(numItemsReportedDiscarded, numItemsActuallyDiscarded); - downsampler.reset(); + downsampler.resetStats(); Assert.assertEquals(downsampler.getNumberOfDiscardedItems(), 0); Assert.assertTrue(totalRemainingItems <= Math.max(test.targetSize, test.numStacks)); } + + @Test + public void testDoNotDiscardReducedReads() { + GenomeAnalysisEngine.resetRandomGenerator(); + final Downsampler> downsampler = new LevelingDownsampler, AlignmentStateMachine>(1); + + final Collection> groups = new LinkedList>(); + final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000000); + final int[] baseCounts = { 10, 10, 10, 10, 10 }; + + for ( int alignmentStart : Arrays.asList(1, 2, 3) ) { + final LinkedList group = new LinkedList(); + for ( int i = 1; i <= 10; i++ ) { + group.add(new AlignmentStateMachine(ArtificialSAMUtils.createArtificialReducedRead(header, "foo", 0, alignmentStart, 5, baseCounts))); + } + groups.add(group); + } + + downsampler.submit(groups); + downsampler.signalEndOfInput(); + + Assert.assertEquals(downsampler.getNumberOfDiscardedItems(), 0, "wrong number of items discarded by the downsampler"); + Assert.assertTrue(downsampler.hasFinalizedItems(), "downsampler should have finalized items but doesn't"); + Assert.assertEquals(downsampler.size(), 30, "downsampler size() reports wrong number of items"); + + final Collection> groupsReturned = downsampler.consumeFinalizedItems(); + + Assert.assertEquals(groupsReturned.size(), 3, "wrong number 
of groups returned by the downsampler"); + + for ( LinkedList group : groupsReturned ) { + Assert.assertEquals(group.size(), 10, "group has wrong size after downsampling"); + + for ( AlignmentStateMachine state : group ) { + Assert.assertTrue(state.isReducedRead()); + } + } + } } diff --git a/public/java/test/org/broadinstitute/sting/gatk/downsampling/ReservoirDownsamplerUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/downsampling/ReservoirDownsamplerUnitTest.java index 022eb02d2..a50201efd 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/downsampling/ReservoirDownsamplerUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/downsampling/ReservoirDownsamplerUnitTest.java @@ -30,6 +30,7 @@ import net.sf.samtools.SAMRecord; import org.broadinstitute.sting.BaseTest; import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import org.testng.annotations.DataProvider; import org.testng.annotations.Test; import org.testng.Assert; @@ -125,7 +126,49 @@ public class ReservoirDownsamplerUnitTest extends BaseTest { Assert.assertEquals(downsampler.getNumberOfDiscardedItems(), test.expectedNumDiscardedItems); Assert.assertEquals(test.totalReads - downsampledReads.size(), test.expectedNumDiscardedItems); - downsampler.reset(); + downsampler.resetStats(); Assert.assertEquals(downsampler.getNumberOfDiscardedItems(), 0); } + + @Test + public void testDoNotDiscardReducedReads() { + GenomeAnalysisEngine.resetRandomGenerator(); + final ReadsDownsampler downsampler = new ReservoirDownsampler(1); + + final Collection reads = new ArrayList(); + final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000000); + final int[] baseCounts = { 10, 10, 10, 10, 10 }; + + for ( int i = 1; i <= 10; i++ ) { + reads.add(ArtificialSAMUtils.createArtificialReducedRead(header, "foo", 0, 1, 5, baseCounts)); + } + for ( int 
i = 1; i <= 5; i++ ) { + reads.add(ArtificialSAMUtils.createArtificialRead(header, "foo", 0, 1, 5)); + } + + downsampler.submit(reads); + downsampler.signalEndOfInput(); + + Assert.assertEquals(downsampler.getNumberOfDiscardedItems(), 4, "wrong number of items discarded by the downsampler"); + Assert.assertTrue(downsampler.hasFinalizedItems(), "downsampler should have finalized items but doesn't"); + Assert.assertEquals(downsampler.size(), 11, "downsampler size() reports wrong number of items"); + + final Collection readsReturned = downsampler.consumeFinalizedItems(); + + Assert.assertEquals(readsReturned.size(), 11, "wrong number of items returned by the downsampler"); + + int numReducedReadsReturned = 0; + int numNormalReadsReturned = 0; + for ( GATKSAMRecord readReturned : readsReturned ) { + if ( readReturned.isReducedRead() ) { + numReducedReadsReturned++; + } + else { + numNormalReadsReturned++; + } + } + + Assert.assertEquals(numReducedReadsReturned, 10, "wrong number of reduced reads returned by the downsampler"); + Assert.assertEquals(numNormalReadsReturned, 1, "wrong number of non-reduced reads returned by the downsampler"); + } } diff --git a/public/java/test/org/broadinstitute/sting/gatk/downsampling/SimplePositionalDownsamplerUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/downsampling/SimplePositionalDownsamplerUnitTest.java index c6b0dea29..bec0030d0 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/downsampling/SimplePositionalDownsamplerUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/downsampling/SimplePositionalDownsamplerUnitTest.java @@ -177,7 +177,7 @@ public class SimplePositionalDownsamplerUnitTest extends BaseTest { Assert.assertEquals(numReadsActuallyEliminated, numReadsReportedEliminated); } - downsampler.reset(); + downsampler.resetStats(); Assert.assertEquals(downsampler.getNumberOfDiscardedItems(), 0); } @@ -328,4 +328,48 @@ public class SimplePositionalDownsamplerUnitTest extends BaseTest { 
Assert.assertEquals(downsampledReads.size(), 10); } + + @Test + public void testDoNotDiscardReducedReads() { + GenomeAnalysisEngine.resetRandomGenerator(); + final ReadsDownsampler downsampler = new SimplePositionalDownsampler(1); + + final Collection reads = new ArrayList(); + final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000000); + final int[] baseCounts = { 10, 10, 10, 10, 10 }; + + for ( int alignmentStart : Arrays.asList(1, 2, 3) ) { + for ( int i = 1; i <= 10; i++ ) { + reads.add(ArtificialSAMUtils.createArtificialReducedRead(header, "foo", 0, alignmentStart, 5, baseCounts)); + } + for ( int i = 1; i <= 5; i++ ) { + reads.add(ArtificialSAMUtils.createArtificialRead(header, "foo", 0, alignmentStart, 5)); + } + } + + downsampler.submit(reads); + downsampler.signalEndOfInput(); + + Assert.assertEquals(downsampler.getNumberOfDiscardedItems(), 12, "wrong number of items discarded by the downsampler"); + Assert.assertTrue(downsampler.hasFinalizedItems(), "downsampler should have finalized items but doesn't"); + Assert.assertEquals(downsampler.size(), 33, "downsampler size() reports wrong number of items"); + + final Collection readsReturned = downsampler.consumeFinalizedItems(); + + Assert.assertEquals(readsReturned.size(), 33, "wrong number of items returned by the downsampler"); + + int numReducedReadsReturned = 0; + int numNormalReadsReturned = 0; + for ( GATKSAMRecord readReturned : readsReturned ) { + if ( readReturned.isReducedRead() ) { + numReducedReadsReturned++; + } + else { + numNormalReadsReturned++; + } + } + + Assert.assertEquals(numReducedReadsReturned, 30, "wrong number of reduced reads returned by the downsampler"); + Assert.assertEquals(numNormalReadsReturned, 3, "wrong number of non-reduced reads returned by the downsampler"); + } } diff --git a/public/java/test/org/broadinstitute/sting/gatk/filters/AllowNCigarMalformedReadFilterUnitTest.java 
b/public/java/test/org/broadinstitute/sting/gatk/filters/AllowNCigarMalformedReadFilterUnitTest.java new file mode 100644 index 000000000..d169bf7e9 --- /dev/null +++ b/public/java/test/org/broadinstitute/sting/gatk/filters/AllowNCigarMalformedReadFilterUnitTest.java @@ -0,0 +1,77 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. +*/ + +package org.broadinstitute.sting.gatk.filters; + + +import net.sf.samtools.SAMRecord; +import org.broadinstitute.sting.gatk.arguments.ValidationExclusion; +import org.testng.Assert; +import org.testng.annotations.Test; + +import java.util.Collections; + + +/** + * Tests for the {@link MalformedReadFilter} when the unsafe flag + * {@link ValidationExclusion.TYPE#ALLOW_N_CIGAR_READS} is set. 
+ * + * @author Valentin Ruano-Rubio + * @since 6/6/13 + */ +public class AllowNCigarMalformedReadFilterUnitTest extends MalformedReadFilterUnitTest { + + + @Override + protected ValidationExclusion composeValidationExclusion() { + return new ValidationExclusion(Collections.singletonList(ValidationExclusion.TYPE.ALLOW_N_CIGAR_READS)); + } + + + @Test(enabled = true, + dataProvider= "UnsupportedCigarOperatorDataProvider") + @CigarOperatorTest(CigarOperatorTest.Outcome.IGNORE) + public void testCigarNOperatorFilterIgnore(final String cigarString) { + + final MalformedReadFilter filter = buildMalformedReadFilter(false); + final SAMRecord nContainingCigarRead = buildSAMRecord(cigarString); + Assert.assertFalse(filter.filterOut(nContainingCigarRead), + "filters out N containing Cigar when it should ignore the fact"); + } + + @Test(enabled = false) + @Override + public void testCigarNOperatorFilterException(final String cigarString) { + // Nothing to do here. + // Just deactivates the parents test case. 
+ } + + + + + + + +} diff --git a/public/java/test/org/broadinstitute/sting/gatk/filters/MalformedReadFilterUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/filters/MalformedReadFilterUnitTest.java index 981d54d54..0d8515dde 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/filters/MalformedReadFilterUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/filters/MalformedReadFilterUnitTest.java @@ -25,11 +25,25 @@ package org.broadinstitute.sting.gatk.filters; -import org.broadinstitute.sting.utils.exceptions.UserException; + +import net.sf.samtools.Cigar; +import net.sf.samtools.SAMFileHeader; +import net.sf.samtools.SAMRecord; +import net.sf.samtools.TextCigarCodec; +import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; +import org.broadinstitute.sting.gatk.arguments.ValidationExclusion; +import org.broadinstitute.sting.gatk.datasources.reads.SAMDataSource; import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import org.testng.Assert; +import org.testng.annotations.DataProvider; import org.testng.annotations.Test; +import org.broadinstitute.sting.utils.exceptions.UserException; +import org.broadinstitute.sting.utils.exceptions.UserException.UnsupportedCigarOperatorException; + +import java.lang.annotation.*; +import java.lang.reflect.Method; +import java.util.*; /** @@ -38,14 +52,14 @@ import org.testng.annotations.Test; * @author Eric Banks * @since 3/14/13 */ -public class MalformedReadFilterUnitTest { +public class MalformedReadFilterUnitTest extends ReadFilterTest { ////////////////////////////////////// // Test the checkSeqStored() method // ////////////////////////////////////// @Test(enabled = true) - public void testcheckSeqStored () { + public void testCheckSeqStored () { final GATKSAMRecord goodRead = ArtificialSAMUtils.createArtificialRead(new byte[]{(byte)'A'}, new byte[]{(byte)'A'}, "1M"); final GATKSAMRecord badRead = 
ArtificialSAMUtils.createArtificialRead(new byte[]{}, new byte[]{}, "1M"); @@ -59,4 +73,174 @@ public class MalformedReadFilterUnitTest { Assert.assertTrue(false, "We should have exceptioned out in the previous line"); } catch (UserException e) { } } + + @Test(enabled = true, dataProvider= "UnsupportedCigarOperatorDataProvider") + @CigarOperatorTest(CigarOperatorTest.Outcome.FILTER) + public void testCigarNOperatorFilterTruePositive(String cigarString) { + + final MalformedReadFilter filter = buildMalformedReadFilter(true); + final SAMRecord nContainingCigarRead = buildSAMRecord(cigarString); + Assert.assertTrue(filter.filterOut(nContainingCigarRead), + " Did not filtered out a N containing CIGAR read"); + } + + @Test(enabled = true, dataProvider= "UnsupportedCigarOperatorDataProvider") + @CigarOperatorTest(CigarOperatorTest.Outcome.ACCEPT) + public void testCigarNOperatorFilterTrueNegative(String cigarString) { + + final MalformedReadFilter filter = buildMalformedReadFilter(true); + final SAMRecord nonNContainingCigarRead = buildSAMRecord(cigarString); + Assert.assertFalse(filter.filterOut(nonNContainingCigarRead), + " Filtered out a non-N containing CIGAR read"); + } + + @Test(enabled = true, + expectedExceptions = UnsupportedCigarOperatorException.class, + dataProvider= "UnsupportedCigarOperatorDataProvider") + @CigarOperatorTest(CigarOperatorTest.Outcome.EXCEPTION) + public void testCigarNOperatorFilterException(final String cigarString) { + + final MalformedReadFilter filter = buildMalformedReadFilter(false); + final SAMRecord nContainingCigarRead = buildSAMRecord(cigarString); + + filter.filterOut(nContainingCigarRead); + } + + @Test(enabled = true, dataProvider="UnsupportedCigarOperatorDataProvider") + @CigarOperatorTest(CigarOperatorTest.Outcome.ACCEPT) + public void testCigarNOperatorFilterControl(final String cigarString) { + + final MalformedReadFilter filter = buildMalformedReadFilter(false); + final SAMRecord nonNContainingCigarRead = 
buildSAMRecord(cigarString); + + Assert.assertFalse(filter.filterOut(nonNContainingCigarRead)); + } + + protected SAMRecord buildSAMRecord(final String cigarString) { + final Cigar nContainingCigar = TextCigarCodec.getSingleton().decode(cigarString); + return this.createRead(nContainingCigar, 1, 0, 10); + } + + protected MalformedReadFilter buildMalformedReadFilter(final boolean filterRNO) { + return buildMalformedReadFiter(filterRNO,new ValidationExclusion.TYPE[] {}); + } + + protected MalformedReadFilter buildMalformedReadFiter(boolean filterRNO, final ValidationExclusion.TYPE... excl) { + final ValidationExclusion ve = new ValidationExclusion(Arrays.asList(excl)); + + final MalformedReadFilter filter = new MalformedReadFilter(); + + final SAMFileHeader h = getHeader(); + final SAMDataSource ds = getDataSource(); + + final GenomeAnalysisEngine gae = new GenomeAnalysisEngine() { + @Override + public SAMFileHeader getSAMFileHeader() { + return h; + } + + @Override + public SAMDataSource getReadsDataSource() { + return ds; + } + }; + filter.initialize(gae); + filter.filterReadsWithNCigar = filterRNO; + return filter; + } + + @Retention(RetentionPolicy.RUNTIME) + @Target(ElementType.METHOD) + @Inherited + protected @interface CigarOperatorTest { + + enum Outcome { + ANY,ACCEPT,FILTER,EXCEPTION,IGNORE; + + public boolean appliesTo (String cigar) { + boolean hasN = cigar.indexOf('N') != -1; + switch (this) { + case ANY: return true; + case ACCEPT: return !hasN; + case IGNORE: return hasN; + case FILTER: + case EXCEPTION: + default: + return hasN; + + } + } + } + + Outcome value() default Outcome.ANY; + } + + /** + * Cigar test data for unsupported operator test. + * Each element of this array corresponds to a test case. 
In turn the first element of the test case array is the + * Cigar string for that test case and the second indicates whether it should be filtered due to the presence of a + * unsupported operator + */ + private static final String[] TEST_CIGARS = { + "101M10D20I10M", + "6M14N5M", + "1N", + "101M", + "110N", + "2N4M", + "4M2N", + "3M1I1M", + "1M2I2M", + "1M10N1I1M", + "1M1I1D", + "11N12M1I34M12N" + }; + + @DataProvider(name= "UnsupportedCigarOperatorDataProvider") + public Iterator unsupportedOperatorDataProvider(final Method testMethod) { + final CigarOperatorTest a = resolveCigarOperatorTestAnnotation(testMethod); + final List result = new LinkedList(); + for (final String cigarString : TEST_CIGARS) { + if (a == null || a.value().appliesTo(cigarString)) { + result.add(new Object[] { cigarString }); + } + } + return result.iterator(); + } + + /** + * Gets the most specific {@link CigarOperatorTest} annotation for the + * signature of the test method provided. + *

+ * This in-house implementation is required due to the fact that method + * annotations do not have inheritance. + * + * @param m targeted test method. + * @return null if there is no {@link CigarOperatorTest} + * annotation in this or overridden methods. + */ + private CigarOperatorTest resolveCigarOperatorTestAnnotation(final Method m) { + CigarOperatorTest res = m.getAnnotation(CigarOperatorTest.class); + if (res != null) { + return res; + } + Class c = this.getClass(); + Class p = c.getSuperclass(); + while (p != null && p != Object.class) { + try { + final Method met = p.getDeclaredMethod(m.getName(), + m.getParameterTypes()); + res = met.getAnnotation(CigarOperatorTest.class); + if (res != null) { + break; + } + } catch (NoSuchMethodException e) { + // Its ok; nothing to do here, just keep looking. + } + c = p; + p = c.getSuperclass(); + } + return res; + } + } diff --git a/public/java/test/org/broadinstitute/sting/gatk/filters/ReadFilterTest.java b/public/java/test/org/broadinstitute/sting/gatk/filters/ReadFilterTest.java new file mode 100644 index 000000000..5b6f67c42 --- /dev/null +++ b/public/java/test/org/broadinstitute/sting/gatk/filters/ReadFilterTest.java @@ -0,0 +1,370 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. 
+* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. +*/ + +package org.broadinstitute.sting.gatk.filters; + +import net.sf.samtools.*; +import org.broadinstitute.sting.BaseTest; +import org.broadinstitute.sting.gatk.arguments.ValidationExclusion; +import org.broadinstitute.sting.gatk.datasources.reads.SAMDataSource; +import org.broadinstitute.sting.gatk.datasources.reads.SAMReaderID; +import org.broadinstitute.sting.gatk.downsampling.DownsamplingMethod; +import org.broadinstitute.sting.gatk.resourcemanagement.ThreadAllocation; +import org.broadinstitute.sting.utils.GenomeLocParser; +import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; +import org.testng.annotations.AfterClass; +import org.testng.annotations.BeforeClass; + +import java.util.*; + +/** + * Class ReadBaseTest + *

+ * This is the base test class for read filter test classes. All read + * filter test cases should extend from this + * class; it sets ups a header mock up to test read filtering. + * + * Feel free to override non-final method to modify the behavior + * (i.e. change how read group id are formatted, or complete a header). + * + *

+ * You can statically determine the number of read-group involved + * in the test by calling {@link #ReadFilterTest(int)} in you constructor. + *

+ * + * Notice that the same header object is shared by all test and + * it is initialized by Junit (calling {@link #beforeClass()}. + * + * @author Valentin Ruano Rubio + * @date May 23, 2013 + */ +public class ReadFilterTest extends BaseTest { + + private static final int DEFAULT_READ_GROUP_COUNT = 5; + private static final int DEFAULT_READER_COUNT = 1; + private static final String DEFAULT_READ_GROUP_PREFIX = "ReadGroup"; + private static final String DEFAULT_PLATFORM_UNIT_PREFIX = "Lane"; + private static final String DEFAULT_SAMPLE_NAME_PREFIX = "Sample"; + private static final String DEFAULT_PLATFORM_PREFIX = "Platform"; + private static final int DEFAULT_CHROMOSOME_COUNT = 1; + private static final int DEFAULT_CHROMOSOME_START_INDEX = 1; + private static final int DEFAULT_CHROMOSOME_SIZE = 1000; + private static final String DEFAULT_SAM_FILE_FORMAT = "readfile-%3d.bam"; + + private final int groupCount; + + private SAMFileHeader header; + + private SAMDataSource dataSource; + + /** + * Constructs a new read-filter test providing the number of read + * groups in the file. + * + * @param groupCount number of read-group in the fictional SAM file, + * must be equal or greater than 1. + */ + protected ReadFilterTest(final int groupCount) { + if (groupCount < 1) { + throw new IllegalArgumentException( + "the read group count must at least be 1"); + } + this.groupCount = groupCount; + } + + + /** + * Gets the data source. + * + * @throws IllegalStateException if the data source was not initialized + * invoking {@link #beforeClass()} + * @return never null + */ + protected final SAMDataSource getDataSource() { + checkDataSourceExists(); + return dataSource; + } + + /** + * Returns the mock-up SAM file header for testing. 
+ * + * @throws IllegalStateException if the header was not initialized + * invoking {@link #beforeClass()} + * @return never null + */ + protected final SAMFileHeader getHeader() { + checkHeaderExists(); + return header; + } + + /** + * Construct a read filter test with the default number of groups + * ({@link #DEFAULT_READ_GROUP_COUNT}. + */ + public ReadFilterTest() { + this(DEFAULT_READ_GROUP_COUNT); + } + + /** + * Return the number of read groups involved in the test + * @return 1 or greater. + */ + protected final int getReadGroupCount() { + return groupCount; + } + + /** + * Composes the Id for the read group given its index. + * + * This methods must return a unique distinct ID for each possible index and + * it must be the same value each time it is invoked. + * + * @param index the index of the targeted read group in the range + * [1,{@link #getReadGroupCount()}] + * @return never null and must be unique to each possible + * read group index. + */ + protected String composeReadGroupId(final int index) { + checkReadGroupIndex(index); + return DEFAULT_READ_GROUP_PREFIX + index; + } + + /** + * Composes the Platform name for the read group given its index. + * + * This method must always return the same value give an index. + * + * @param index the index of the targeted read group in the range + * [1,{@link #getReadGroupCount()}] + * @return never null. + */ + protected String composePlatformName(final int index) { + checkReadGroupIndex(index); + return DEFAULT_PLATFORM_PREFIX + (((index-1)%2)+1); + } + + + /** + * Composes the Platform unit name for the read group given its index. + * + * @param index the index of the targeted read group in the range + * [1,{@link #getReadGroupCount()}] + * @return never null. + */ + protected String composePlatformUnitName(final int index) { + checkReadGroupIndex(index); + return DEFAULT_PLATFORM_UNIT_PREFIX + (((index-1)%3)+1); + } + + + + /** + * Checks the correctness of a given read group index. 
+ * + * A correct index is any value in the range [1,{@link #getReadGroupCount()}]. + * + * @param index the target index. + * @throws IllegalArgumentException if the input index is not correct. + */ + protected final void checkReadGroupIndex(final int index) { + checkIndex(index,groupCount,"read group"); + } + + + private void checkIndex(final int index, final int max, CharSequence name) { + if (index < 1 || index > max) { + throw new IllegalArgumentException( + name + " index (" + + index + + ") is out of bounds [1," + max + "]"); + } + } + + + /** + * Checks whether the header was initialized. + * + * @throws IllegalStateException if the header was not yet initialized. + */ + protected final void checkHeaderExists() { + if (header == null) { + throw new IllegalArgumentException( + "header has not been initialized;" + + " beforeClass() was not invoked"); + } + } + + /** + * Checks whether the data source was initialized. + * + * @throws IllegalStateException if the data source was not yet initialized. + */ + protected final void checkDataSourceExists() { + if (header == null) { + throw new IllegalArgumentException( + "data source has not been initialized;" + + " beforeClass() was not invoked"); + } + } + + /** + * Returns the ID for a read group given its index. + * + * @param index the index of the targeted read group in the range + * [1,{@link #getReadGroupCount()}] + * @return never null and must be unique to each + * possible read group index. + */ + protected final String getReadGroupId(final int index) { + checkReadGroupIndex(index); + return getHeader().getReadGroups().get(index - 1).getReadGroupId(); + } + + /** + * Returns the platform name for a read group given its index. + * + * @param group the index of the targeted read group in the range + * [1,{@link #getReadGroupCount()}] + * @return never null. 
+ */ + protected final String getPlatformName(final int group) { + checkReadGroupIndex(group); + return getHeader().getReadGroups().get(group - 1).getPlatform(); + } + + /** + * Returns the platform unit for a read group given its index. + * + * @param group the index of the targeted read group in the range + * [1,{@link #getReadGroupCount()}] + * @return never null. + */ + protected final String getPlatformUnit(final int group) { + checkReadGroupIndex(group); + return getHeader().getReadGroups().get(group - 1).getPlatformUnit(); + } + + + /** + * Composes the mock up SAM file header. + * + * It must return an equivalent (equal) value each time it is invoked. + * + * @return never null. + */ + protected SAMFileHeader composeHeader() { + + return ArtificialSAMUtils.createArtificialSamHeader( + DEFAULT_CHROMOSOME_COUNT, DEFAULT_CHROMOSOME_START_INDEX, + DEFAULT_CHROMOSOME_SIZE); + } + + @BeforeClass + public void beforeClass() { + + header = composeHeader(); + dataSource = composeDataSource(); + final List readGroupIDs = new ArrayList(); + final List sampleNames = new ArrayList(); + + for (int i = 1; i <= getReadGroupCount(); i++) { + final String readGroupId = composeReadGroupId(i); + readGroupIDs.add(readGroupId); + sampleNames.add(readGroupId); + } + + ArtificialSAMUtils.createEnumeratedReadGroups( + header, readGroupIDs, sampleNames); + + for (int i = 1; i <= getReadGroupCount(); i++) { + final String readGroupId = readGroupIDs.get(i-1); + final SAMReadGroupRecord groupRecord = header.getReadGroup(readGroupId); + groupRecord.setAttribute("PL", composePlatformName(i)); + groupRecord.setAttribute("PU", composePlatformUnitName(i)); + } + + } + + protected ValidationExclusion composeValidationExclusion() { + return new ValidationExclusion(); + } + + protected SAMDataSource composeDataSource() { + checkHeaderExists(); + final Set readerIDs = new HashSet<>(1); + final ThreadAllocation ta = new ThreadAllocation(); + final Integer numFileHandles = 1; // I believe that 
any value would do but need to confirm. + final boolean useOriginalBaseQualities = true; + final SAMFileReader.ValidationStringency strictness = SAMFileReader.ValidationStringency.LENIENT; + final Integer readBufferSize = 1; // not relevant. + final DownsamplingMethod downsamplingMethod = DownsamplingMethod.NONE; + final ValidationExclusion exclusionList = composeValidationExclusion(); + final Collection supplementalFilters = Collections.EMPTY_SET; + final boolean includeReadsWithDeletionAtLoci = true; + + final GenomeLocParser glp = new GenomeLocParser(header.getSequenceDictionary()); + final SAMDataSource res = new SAMDataSource( + readerIDs, + ta, + numFileHandles, + glp, + useOriginalBaseQualities, + strictness, + readBufferSize, + downsamplingMethod, + exclusionList, + supplementalFilters, + includeReadsWithDeletionAtLoci); + + return res; + } + + @AfterClass + public void afterClass() { + header = null; + dataSource = null; + } + + /** + * Creates a read record. + * + * @param cigar the new record CIGAR. 
+ * @param group the new record group index that must be in the range \ + * [1,{@link #getReadGroupCount()}] + * @param reference the reference sequence index (0-based) + * @param start the start position of the read alignment in the reference + * (1-based) + * @return never null + */ + protected SAMRecord createRead(final Cigar cigar, final int group, final int reference, final int start) { + final SAMRecord record = ArtificialSAMUtils.createArtificialRead(cigar); + record.setHeader(getHeader()); + record.setAlignmentStart(start); + record.setReferenceIndex(reference); + record.setAttribute(SAMTag.RG.toString(), getReadGroupId(group)); + return record; + + } +} diff --git a/public/java/test/org/broadinstitute/sting/gatk/filters/ReadGroupBlackListFilterUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/filters/ReadGroupBlackListFilterUnitTest.java index 1370aeb50..1be31b293 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/filters/ReadGroupBlackListFilterUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/filters/ReadGroupBlackListFilterUnitTest.java @@ -26,13 +26,10 @@ package org.broadinstitute.sting.gatk.filters; import org.testng.Assert; -import org.broadinstitute.sting.BaseTest; import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; -import org.testng.annotations.BeforeClass; import org.testng.annotations.Test; -import net.sf.samtools.SAMFileHeader; import net.sf.samtools.SAMRecord; import net.sf.samtools.SAMReadGroupRecord; @@ -40,34 +37,7 @@ import java.util.List; import java.util.ArrayList; import java.util.Collections; -public class ReadGroupBlackListFilterUnitTest extends BaseTest { - private static final int READ_GROUP_COUNT = 5; - private static final String READ_GROUP_PREFIX = "ReadGroup"; - private static final String SAMPLE_NAME_PREFIX = "Sample"; - private static final String PLATFORM_PREFIX = "Platform"; - private static final String 
PLATFORM_UNIT_PREFIX = "Lane"; - private static SAMFileHeader header; - - @BeforeClass - public void beforeClass() { - header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000); - - List readGroupIDs = new ArrayList(); - List sampleNames = new ArrayList(); - - for (int i = 1; i <= READ_GROUP_COUNT; i++) { - readGroupIDs.add(READ_GROUP_PREFIX + i); - sampleNames.add(SAMPLE_NAME_PREFIX + i); - } - - ArtificialSAMUtils.createEnumeratedReadGroups(header, readGroupIDs, sampleNames); - - for (int i = 1; i <= READ_GROUP_COUNT; i++) { - SAMReadGroupRecord groupRecord = header.getReadGroup(READ_GROUP_PREFIX + i); - groupRecord.setAttribute("PL", PLATFORM_PREFIX + (((i-1)%2)+1)); - groupRecord.setAttribute("PU", PLATFORM_UNIT_PREFIX + (((i-1)%3)+1)); - } - } +public class ReadGroupBlackListFilterUnitTest extends ReadFilterTest { @Test(expectedExceptions=ReviewedStingException.class) public void testBadFilter() { @@ -88,14 +58,14 @@ public class ReadGroupBlackListFilterUnitTest extends BaseTest { @Test public void testFilterReadGroup() { - SAMRecord filteredRecord = ArtificialSAMUtils.createArtificialRead(header, "readUno", 0, 1, 20); - filteredRecord.setAttribute("RG", READ_GROUP_PREFIX + "1"); + SAMRecord filteredRecord = ArtificialSAMUtils.createArtificialRead(getHeader(), "readUno", 0, 1, 20); + filteredRecord.setAttribute("RG", getReadGroupId(1)); - SAMRecord unfilteredRecord = ArtificialSAMUtils.createArtificialRead(header, "readDos", 0, 2, 20); - unfilteredRecord.setAttribute("RG", READ_GROUP_PREFIX + "2"); + SAMRecord unfilteredRecord = ArtificialSAMUtils.createArtificialRead(getHeader(), "readDos", 0, 2, 20); + unfilteredRecord.setAttribute("RG", getReadGroupId(2)); List filterList = new ArrayList(); - filterList.add("RG:" + READ_GROUP_PREFIX + "1"); + filterList.add("RG:" + getReadGroupId(1)); ReadGroupBlackListFilter filter = new ReadGroupBlackListFilter(filterList); Assert.assertTrue(filter.filterOut(filteredRecord)); @@ -104,14 +74,14 @@ public class 
ReadGroupBlackListFilterUnitTest extends BaseTest { @Test public void testFilterPlatformUnit() { - SAMRecord filteredRecord = ArtificialSAMUtils.createArtificialRead(header, "readUno", 0, 1, 20); - filteredRecord.setAttribute("RG", READ_GROUP_PREFIX + "1"); + SAMRecord filteredRecord = ArtificialSAMUtils.createArtificialRead(getHeader(), "readUno", 0, 1, 20); + filteredRecord.setAttribute("RG", getReadGroupId(1)); - SAMRecord unfilteredRecord = ArtificialSAMUtils.createArtificialRead(header, "readDos", 0, 2, 20); - unfilteredRecord.setAttribute("RG", READ_GROUP_PREFIX + "2"); + SAMRecord unfilteredRecord = ArtificialSAMUtils.createArtificialRead(getHeader(), "readDos", 0, 2, 20); + unfilteredRecord.setAttribute("RG", getReadGroupId(2)); List filterList = new ArrayList(); - filterList.add("PU:" + PLATFORM_UNIT_PREFIX + "1"); + filterList.add("PU:" + getPlatformUnit(1)); ReadGroupBlackListFilter filter = new ReadGroupBlackListFilter(filterList); Assert.assertTrue(filter.filterOut(filteredRecord)); @@ -123,18 +93,18 @@ public class ReadGroupBlackListFilterUnitTest extends BaseTest { int recordsPerGroup = 3; List records = new ArrayList(); int alignmentStart = 0; - for (int x = 1; x <= READ_GROUP_COUNT; x++) { - SAMReadGroupRecord groupRecord = header.getReadGroup(READ_GROUP_PREFIX + x); + for (int x = 1; x <= getReadGroupCount(); x++) { + SAMReadGroupRecord groupRecord = getHeader().getReadGroup(getReadGroupId(x)); for (int y = 1; y <= recordsPerGroup; y++) { - SAMRecord record = ArtificialSAMUtils.createArtificialRead(header, "readUno", 0, ++alignmentStart, 20); + SAMRecord record = ArtificialSAMUtils.createArtificialRead(getHeader(), "readUno", 0, ++alignmentStart, 20); record.setAttribute("RG", groupRecord.getReadGroupId()); records.add(record); } } List filterList = new ArrayList(); - filterList.add("RG:" + READ_GROUP_PREFIX + "1"); - filterList.add("RG:" + READ_GROUP_PREFIX + "3"); + filterList.add("RG:" + getReadGroupId(1)); + filterList.add("RG:" + 
getReadGroupId(3)); ReadGroupBlackListFilter filter = new ReadGroupBlackListFilter(filterList); int filtered = 0; @@ -153,7 +123,7 @@ public class ReadGroupBlackListFilterUnitTest extends BaseTest { } int filteredExpected = recordsPerGroup * 2; - int unfilteredExpected = recordsPerGroup * (READ_GROUP_COUNT - 2); + int unfilteredExpected = recordsPerGroup * (getReadGroupCount() - 2); Assert.assertEquals(filtered, filteredExpected, "Filtered"); Assert.assertEquals(unfiltered, unfilteredExpected, "Uniltered"); } @@ -163,17 +133,17 @@ public class ReadGroupBlackListFilterUnitTest extends BaseTest { int recordsPerGroup = 3; List records = new ArrayList(); int alignmentStart = 0; - for (int x = 1; x <= READ_GROUP_COUNT; x++) { - SAMReadGroupRecord groupRecord = header.getReadGroup(READ_GROUP_PREFIX + x); + for (int x = 1; x <= getReadGroupCount(); x++) { + SAMReadGroupRecord groupRecord = getHeader().getReadGroup(getReadGroupId(x)); for (int y = 1; y <= recordsPerGroup; y++) { - SAMRecord record = ArtificialSAMUtils.createArtificialRead(header, "readUno", 0, ++alignmentStart, 20); + SAMRecord record = ArtificialSAMUtils.createArtificialRead(getHeader(), "readUno", 0, ++alignmentStart, 20); record.setAttribute("RG", groupRecord.getReadGroupId()); records.add(record); } } List filterList = new ArrayList(); - filterList.add("PU:" + PLATFORM_UNIT_PREFIX + "1"); + filterList.add("PU:" + getPlatformUnit(1)); ReadGroupBlackListFilter filter = new ReadGroupBlackListFilter(filterList); int filtered = 0; @@ -202,10 +172,10 @@ public class ReadGroupBlackListFilterUnitTest extends BaseTest { int recordsPerGroup = 3; List records = new ArrayList(); int alignmentStart = 0; - for (int x = 1; x <= READ_GROUP_COUNT; x++) { - SAMReadGroupRecord groupRecord = header.getReadGroup(READ_GROUP_PREFIX + x); + for (int x = 1; x <= getReadGroupCount(); x++) { + SAMReadGroupRecord groupRecord = getHeader().getReadGroup(getReadGroupId(x)); for (int y = 1; y <= recordsPerGroup; y++) { - SAMRecord 
record = ArtificialSAMUtils.createArtificialRead(header, "readUno", 0, ++alignmentStart, 20); + SAMRecord record = ArtificialSAMUtils.createArtificialRead(getHeader(), "readUno", 0, ++alignmentStart, 20); record.setAttribute("RG", groupRecord.getReadGroupId()); records.add(record); } @@ -231,7 +201,7 @@ public class ReadGroupBlackListFilterUnitTest extends BaseTest { } int filteredExpected = recordsPerGroup * 2; - int unfilteredExpected = recordsPerGroup * (READ_GROUP_COUNT - 2); + int unfilteredExpected = recordsPerGroup * (getReadGroupCount() - 2); Assert.assertEquals(filtered, filteredExpected, "Filtered"); Assert.assertEquals(unfiltered, unfilteredExpected, "Uniltered"); } @@ -241,10 +211,10 @@ public class ReadGroupBlackListFilterUnitTest extends BaseTest { int recordsPerGroup = 3; List records = new ArrayList(); int alignmentStart = 0; - for (int x = 1; x <= READ_GROUP_COUNT; x++) { - SAMReadGroupRecord groupRecord = header.getReadGroup(READ_GROUP_PREFIX + x); + for (int x = 1; x <= getReadGroupCount(); x++) { + SAMReadGroupRecord groupRecord = getHeader().getReadGroup(getReadGroupId(x)); for (int y = 1; y <= recordsPerGroup; y++) { - SAMRecord record = ArtificialSAMUtils.createArtificialRead(header, "readUno", 0, ++alignmentStart, 20); + SAMRecord record = ArtificialSAMUtils.createArtificialRead(getHeader(), "readUno", 0, ++alignmentStart, 20); record.setAttribute("RG", groupRecord.getReadGroupId()); records.add(record); } @@ -270,7 +240,7 @@ public class ReadGroupBlackListFilterUnitTest extends BaseTest { } int filteredExpected = recordsPerGroup * 2; - int unfilteredExpected = recordsPerGroup * (READ_GROUP_COUNT - 2); + int unfilteredExpected = recordsPerGroup * (getReadGroupCount() - 2); Assert.assertEquals(filtered, filteredExpected, "Filtered"); Assert.assertEquals(unfiltered, unfilteredExpected, "Uniltered"); } diff --git a/public/java/test/org/broadinstitute/sting/gatk/filters/UnsafeMalformedReadFilterUnitTest.java 
b/public/java/test/org/broadinstitute/sting/gatk/filters/UnsafeMalformedReadFilterUnitTest.java new file mode 100644 index 000000000..30e2f0f1b --- /dev/null +++ b/public/java/test/org/broadinstitute/sting/gatk/filters/UnsafeMalformedReadFilterUnitTest.java @@ -0,0 +1,50 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. +*/ + +package org.broadinstitute.sting.gatk.filters; + + +import org.broadinstitute.sting.gatk.arguments.ValidationExclusion; + +import java.util.Collections; + + +/** + * Tests for the {@link MalformedReadFilter} when the unsafe flag + * {@link ValidationExclusion.TYPE#ALL} is set. 
+ * + * @author Valentin Ruano-Rubio + * @since 6/6/13 + */ +public class UnsafeMalformedReadFilterUnitTest extends AllowNCigarMalformedReadFilterUnitTest { + + + @Override + protected ValidationExclusion composeValidationExclusion() { + return new ValidationExclusion(Collections.singletonList(ValidationExclusion.TYPE.ALL)); + } + + +} diff --git a/public/java/test/org/broadinstitute/sting/gatk/iterators/ReadFormattingIteratorUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/iterators/ReadFormattingIteratorUnitTest.java new file mode 100644 index 000000000..5d037bc4b --- /dev/null +++ b/public/java/test/org/broadinstitute/sting/gatk/iterators/ReadFormattingIteratorUnitTest.java @@ -0,0 +1,50 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+*/ + +package org.broadinstitute.sting.gatk.iterators; + +import net.sf.samtools.*; +import org.broadinstitute.sting.BaseTest; +import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; +import org.testng.Assert; +import org.testng.annotations.Test; + +import java.util.Arrays; + + +public class ReadFormattingIteratorUnitTest extends BaseTest { + + @Test + public void testIteratorConsolidatesCigars() { + final Cigar unconsolidatedCigar = TextCigarCodec.getSingleton().decode("3M0M5M0M"); + final SAMRecord unconsolidatedRead = ArtificialSAMUtils.createArtificialRead(unconsolidatedCigar); + + final StingSAMIterator readIterator = StingSAMIteratorAdapter.adapt(Arrays.asList(unconsolidatedRead).iterator()); + final ReadFormattingIterator formattingIterator = new ReadFormattingIterator(readIterator, false, (byte)-1); + final SAMRecord postIterationRead = formattingIterator.next(); + + Assert.assertEquals(postIterationRead.getCigarString(), "8M", "Cigar 3M0M5M0M not consolidated correctly by ReadFormattingIterator"); + } +} diff --git a/public/java/test/org/broadinstitute/sting/gatk/samples/SampleDBUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/samples/SampleDBUnitTest.java index 295b31203..23f8bc1f7 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/samples/SampleDBUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/samples/SampleDBUnitTest.java @@ -45,7 +45,7 @@ import java.util.*; public class SampleDBUnitTest extends BaseTest { private static SampleDBBuilder builder; // all the test sample files are located here - private File testPED = new File(privateTestDir + "ceutrio.ped"); + private File testPED = new File(privateTestDir + "testtrio.ped"); private static final Set testPEDSamples = new HashSet(Arrays.asList( new Sample("kid", "fam1", "dad", "mom", Gender.MALE, Affection.AFFECTED), diff --git a/public/java/test/org/broadinstitute/sting/gatk/traversals/TAROrderedReadCacheUnitTest.java 
b/public/java/test/org/broadinstitute/sting/gatk/traversals/TAROrderedReadCacheUnitTest.java index f3e1ce44b..4d85997b3 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/traversals/TAROrderedReadCacheUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/traversals/TAROrderedReadCacheUnitTest.java @@ -26,9 +26,11 @@ package org.broadinstitute.sting.gatk.traversals; import net.sf.picard.reference.IndexedFastaSequenceFile; +import net.sf.samtools.SAMFileHeader; import org.broadinstitute.sting.BaseTest; import org.broadinstitute.sting.utils.fasta.CachingIndexedFastaSequenceFile; import org.broadinstitute.sting.utils.sam.ArtificialBAMBuilder; +import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import org.testng.Assert; import org.testng.annotations.BeforeClass; @@ -39,6 +41,7 @@ import java.io.File; import java.io.FileNotFoundException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.List; public class TAROrderedReadCacheUnitTest extends BaseTest { @@ -98,8 +101,53 @@ public class TAROrderedReadCacheUnitTest extends BaseTest { Assert.assertEquals(cache.getNumDiscarded(), 0, "should have reset stats"); Assert.assertEquals(cacheReads.size(), nExpectedToKeep, "should have 1 read for every read we expected to keep"); + verifySortednessOfReads(cacheReads); + } + + @Test + public void testReadCacheWithReducedReads() { + final List reads = new ArrayList(); + final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000000); + final int[] baseCounts = { 10, 10, 10, 10, 10 }; + + for ( int i = 1; i <= 100; i++ ) { + reads.add(ArtificialSAMUtils.createArtificialReducedRead(header, "foo", 0, i, 5, baseCounts)); + reads.add(ArtificialSAMUtils.createArtificialRead(header, "foo", 0, i, 5)); + } + + final TAROrderedReadCache cache = new TAROrderedReadCache(50); + + cache.addAll(reads); + + // Our cache should have kept 
all of the reduced reads (which are retained unconditionally and do not count + // towards the capacity limit), and discarded half of the 100 non-reduced reads due to the cache capacity + // limit of 50. + Assert.assertEquals(cache.size(), 150, "wrong number of reads in the cache at the end"); + Assert.assertEquals(cache.getNumDiscarded(), 50, "wrong number of reads discarded from the cache"); + + final List cacheReads = cache.popCurrentReads(); + + int numReducedReadsRetained = 0; + int numNormalReadsRetained = 0; + + for ( GATKSAMRecord read : cacheReads ) { + if ( read.isReducedRead() ) { + numReducedReadsRetained++; + } + else { + numNormalReadsRetained++; + } + } + + Assert.assertEquals(numReducedReadsRetained, 100, "wrong number of reduced reads retained in the cache"); + Assert.assertEquals(numNormalReadsRetained, 50, "wrong number of non-reduced reads retained in the cache"); + + verifySortednessOfReads(cacheReads); + } + + private void verifySortednessOfReads( final List reads) { int lastStart = -1; - for ( final GATKSAMRecord read : cacheReads ) { + for ( GATKSAMRecord read : reads ) { Assert.assertTrue(lastStart <= read.getAlignmentStart(), "Reads should be sorted but weren't. 
Found read with start " + read.getAlignmentStart() + " while last was " + lastStart); lastStart = read.getAlignmentStart(); } diff --git a/public/java/test/org/broadinstitute/sting/gatk/traversals/TraverseActiveRegionsUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/traversals/TraverseActiveRegionsUnitTest.java index b6106d4bc..e4b6c37cc 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/traversals/TraverseActiveRegionsUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/traversals/TraverseActiveRegionsUnitTest.java @@ -77,7 +77,7 @@ public class TraverseActiveRegionsUnitTest extends BaseTest { @DataProvider(name = "TraversalEngineProvider") public Object[][] makeTraversals() { final List traversals = new LinkedList(); - traversals.add(new Object[]{new TraverseActiveRegions()}); + traversals.add(new Object[]{new TraverseActiveRegions<>()}); return traversals.toArray(new Object[][]{}); } @@ -405,8 +405,6 @@ public class TraverseActiveRegionsUnitTest extends BaseTest { for (LocusShardDataProvider dataProvider : createDataProviders(t, walker, intervals, bam)) t.traverse(walker, dataProvider, 0); - t.endTraversal(walker, 0); - return walker.mappedActiveRegions; } @@ -490,7 +488,7 @@ public class TraverseActiveRegionsUnitTest extends BaseTest { traverseActiveRegions.initialize(engine, walker); List providers = new ArrayList(); - for (Shard shard : dataSource.createShardIteratorOverIntervals(new GenomeLocSortedSet(genomeLocParser, intervals), new LocusShardBalancer())) { + for (Shard shard : dataSource.createShardIteratorOverIntervals(new GenomeLocSortedSet(genomeLocParser, intervals), new ActiveRegionShardBalancer())) { for (WindowMaker.WindowMakerIterator window : new WindowMaker(shard, genomeLocParser, dataSource.seek(shard), shard.getGenomeLocs(), samples)) { providers.add(new LocusShardDataProvider(shard, shard.getReadProperties(), genomeLocParser, window.getLocus(), window, reference, new ArrayList())); } @@ -523,8 +521,8 @@ 
public class TraverseActiveRegionsUnitTest extends BaseTest { final int maxTests = Integer.MAX_VALUE; int nTests = 0; - for ( final int readLength : Arrays.asList(10, 100) ) { - for ( final int skips : Arrays.asList(0, 1, 10) ) { + for ( final int readLength : Arrays.asList(100) ) { + for ( final int skips : Arrays.asList(0, 10) ) { for ( final int start : starts ) { for ( final int nReadsPerLocus : Arrays.asList(1, 2) ) { for ( final int nLoci : Arrays.asList(1, 1000) ) { @@ -536,7 +534,7 @@ public class TraverseActiveRegionsUnitTest extends BaseTest { for ( final GenomeLocSortedSet activeRegions : enumerateActiveRegions(bamBuilder.getAlignmentStart(), bamBuilder.getAlignmentEnd())) { nTests++; if ( nTests < maxTests ) // && nTests == 1238 ) - tests.add(new Object[]{nTests, activeRegions, readStates, bamBuilder}); + tests.add(new Object[]{new TraverseActiveRegions<>(), nTests, activeRegions, readStates, bamBuilder}); } } } @@ -586,7 +584,7 @@ public class TraverseActiveRegionsUnitTest extends BaseTest { @Test(enabled = true && ! 
DEBUG, dataProvider = "CombinatorialARTTilingProvider") - public void testARTReadsInActiveRegions(final int id, final GenomeLocSortedSet activeRegions, final EnumSet readStates, final ArtificialBAMBuilder bamBuilder) { + public void testARTReadsInActiveRegions(final TraverseActiveRegions traversal, final int id, final GenomeLocSortedSet activeRegions, final EnumSet readStates, final ArtificialBAMBuilder bamBuilder) { logger.warn("Running testARTReadsInActiveRegions id=" + id + " locs " + activeRegions + " against bam " + bamBuilder); final List intervals = Arrays.asList( genomeLocParser.createGenomeLoc("1", bamBuilder.getAlignmentStart(), bamBuilder.getAlignmentEnd()) @@ -595,7 +593,6 @@ public class TraverseActiveRegionsUnitTest extends BaseTest { final DummyActiveRegionWalker walker = new DummyActiveRegionWalker(activeRegions, false); walker.setStates(readStates); - final TraverseActiveRegions traversal = new TraverseActiveRegions(); final Map activeRegionsMap = getActiveRegions(traversal, walker, intervals, bamBuilder.makeTemporarilyBAMFile()); final Set alreadySeenReads = new HashSet(); // for use with the primary / non-primary @@ -640,8 +637,8 @@ public class TraverseActiveRegionsUnitTest extends BaseTest { // // --------------------------------------------------------------------------------------------------------- - @Test(enabled = true && ! DEBUG) - public void ensureAllInsertionReadsAreInActiveRegions() { + @Test(dataProvider = "TraversalEngineProvider", enabled = true && ! 
DEBUG) + public void ensureAllInsertionReadsAreInActiveRegions(final TraverseActiveRegions traversal) { final int readLength = 10; final int start = 20; @@ -667,7 +664,6 @@ public class TraverseActiveRegionsUnitTest extends BaseTest { final DummyActiveRegionWalker walker = new DummyActiveRegionWalker(activeRegions, false); - final TraverseActiveRegions traversal = new TraverseActiveRegions(); final Map activeRegionsMap = getActiveRegions(traversal, walker, intervals, bamBuilder.makeTemporarilyBAMFile()); final ActiveRegion region = activeRegionsMap.values().iterator().next(); diff --git a/public/java/test/org/broadinstitute/sting/gatk/traversals/TraverseReadsUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/traversals/TraverseReadsUnitTest.java index 8bc373fe8..5b52d4e33 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/traversals/TraverseReadsUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/traversals/TraverseReadsUnitTest.java @@ -32,6 +32,7 @@ import org.broadinstitute.sting.commandline.Tags; import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; import org.broadinstitute.sting.gatk.datasources.providers.ReadShardDataProvider; import org.broadinstitute.sting.gatk.datasources.reads.*; +import org.broadinstitute.sting.gatk.datasources.rmd.ReferenceOrderedDataSource; import org.broadinstitute.sting.gatk.resourcemanagement.ThreadAllocation; import org.broadinstitute.sting.gatk.walkers.ReadWalker; import org.broadinstitute.sting.gatk.walkers.qc.CountReads; @@ -47,6 +48,7 @@ import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.PrintStream; import java.util.ArrayList; +import java.util.Collections; import java.util.List; import static org.testng.Assert.fail; @@ -146,18 +148,18 @@ public class TraverseReadsUnitTest extends BaseTest { fail("Shard == null"); } - ReadShardDataProvider dataProvider = new ReadShardDataProvider(shard,genomeLocParser,dataSource.seek(shard),null,null); + 
ReadShardDataProvider dataProvider = new ReadShardDataProvider(shard,genomeLocParser,dataSource.seek(shard),null, Collections.emptyList()); accumulator = traversalEngine.traverse(countReadWalker, dataProvider, accumulator); dataProvider.close(); } countReadWalker.onTraversalDone(accumulator); - if (!(accumulator instanceof Integer)) { - fail("Count read walker should return an interger."); + if (!(accumulator instanceof Long)) { + fail("Count read walker should return a Long."); } - if (((Integer) accumulator) != 10000) { - fail("there should be 10000 mapped reads in the index file, there was " + ((Integer) accumulator)); + if (!accumulator.equals(new Long(10000))) { + fail("there should be 10000 mapped reads in the index file, there was " + (accumulator)); } } diff --git a/public/java/test/org/broadinstitute/sting/gatk/walkers/BAQIntegrationTest.java b/public/java/test/org/broadinstitute/sting/gatk/walkers/BAQIntegrationTest.java index 6b0422c6a..604c0e377 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/walkers/BAQIntegrationTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/walkers/BAQIntegrationTest.java @@ -43,7 +43,7 @@ public class BAQIntegrationTest extends WalkerTest { // -------------------------------------------------------------------------------------------------------------- @Test public void testPrintReadsNoBAQ() { - WalkerTestSpec spec = new WalkerTestSpec( baseCommand +" -baq OFF", 1, Arrays.asList("11af64ba020262d06b490bae2c5e08f8")); + WalkerTestSpec spec = new WalkerTestSpec( baseCommand +" -baq OFF", 1, Arrays.asList("d1f74074e718c82810512bf40dbc7f72")); executeTest(String.format("testPrintReadsNoBAQ"), spec); } diff --git a/public/java/test/org/broadinstitute/sting/gatk/walkers/CNV/SymbolicAllelesIntegrationTest.java b/public/java/test/org/broadinstitute/sting/gatk/walkers/CNV/SymbolicAllelesIntegrationTest.java index 4aaba0d70..bfabe2bc1 100644 --- 
a/public/java/test/org/broadinstitute/sting/gatk/walkers/CNV/SymbolicAllelesIntegrationTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/walkers/CNV/SymbolicAllelesIntegrationTest.java @@ -57,7 +57,7 @@ public class SymbolicAllelesIntegrationTest extends WalkerTest { WalkerTestSpec spec = new WalkerTestSpec( baseTestString(b36KGReference, "symbolic_alleles_2.vcf"), 1, - Arrays.asList("bf5a09f783ab1fa44774c81f91d10921")); + Arrays.asList("30f66a097987330d42e87da8bcd6be21")); executeTest("Test symbolic alleles mixed in with non-symbolic alleles", spec); } } diff --git a/public/java/test/org/broadinstitute/sting/gatk/walkers/coverage/CallableLociIntegrationTest.java b/public/java/test/org/broadinstitute/sting/gatk/walkers/coverage/CallableLociIntegrationTest.java index c07bf171a..336c15ccc 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/walkers/coverage/CallableLociIntegrationTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/walkers/coverage/CallableLociIntegrationTest.java @@ -34,13 +34,13 @@ public class CallableLociIntegrationTest extends WalkerTest { final static String commonArgs = "-R " + b36KGReference + " -T CallableLoci -I " + validationDataLocation + "/NA12878.1kg.p2.chr1_10mb_11_mb.SLX.bam -o %s"; final static String reduceReadArgs = "-R " + b37KGReference + " -T CallableLoci -I " + " private/testdata/NA12878.HiSeq.b37.chr20.10_11mb.reduced.bam -o %s"; - final static String SUMMARY_MD5 = "ffdbd9cdcb4169ebed5ae4bec797260f"; + final static String SUMMARY_MD5 = "a6f5963669f19d9d137ced87d65834b0"; @Test public void testCallableLociWalkerBed() { String gatk_args = commonArgs + " -format BED -L 1:10,000,000-11,000,000 -summary %s"; WalkerTestSpec spec = new WalkerTestSpec(gatk_args, 2, - Arrays.asList("42e86c06c167246a28bffdacaca75ffb", SUMMARY_MD5)); + Arrays.asList("9b4ffea1dbcfefadeb1c9fa74b0e0e59", SUMMARY_MD5)); executeTest("formatBed", spec); } @@ -48,7 +48,7 @@ public class CallableLociIntegrationTest extends 
WalkerTest { public void testCallableLociWalkerPerBase() { String gatk_args = commonArgs + " -format STATE_PER_BASE -L 1:10,000,000-11,000,000 -summary %s"; WalkerTestSpec spec = new WalkerTestSpec(gatk_args, 2, - Arrays.asList("d66c525d9c70f62df8156261d3e535ad", SUMMARY_MD5)); + Arrays.asList("d6505e489899e80c08a7168777f6e07b", SUMMARY_MD5)); executeTest("format_state_per_base", spec); } @@ -64,7 +64,7 @@ public class CallableLociIntegrationTest extends WalkerTest { public void testCallableLociWalker3() { String gatk_args = commonArgs + " -format BED -L 1:10,000,000-11,000,000 -minDepth 10 -maxDepth 100 --minBaseQuality 10 --minMappingQuality 20 -summary %s"; WalkerTestSpec spec = new WalkerTestSpec(gatk_args, 2, - Arrays.asList("46a53379aaaf9803276a0a34b234f6ab", "da431d393f7c2b2b3e27556b86c1dbc7")); + Arrays.asList("7f79ad8195c4161060463eeb21d2bb11", "7ee269e5f4581a924529a356cc806e55")); executeTest("formatBed lots of arguments", spec); } diff --git a/public/java/test/org/broadinstitute/sting/gatk/walkers/coverage/DepthOfCoverageIntegrationTest.java b/public/java/test/org/broadinstitute/sting/gatk/walkers/coverage/DepthOfCoverageIntegrationTest.java index 7171edf20..003ab6cf9 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/walkers/coverage/DepthOfCoverageIntegrationTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/walkers/coverage/DepthOfCoverageIntegrationTest.java @@ -82,23 +82,23 @@ public class DepthOfCoverageIntegrationTest extends WalkerTest { // now add the expected files that get generated spec.addAuxFile("0f9603eb1ca4a26828e82d8c8f4991f6", baseOutputFile); spec.addAuxFile("51e6c09a307654f43811af35238fb179", createTempFileFromBase(baseOutputFile.getAbsolutePath()+".library_cumulative_coverage_counts")); - spec.addAuxFile("229b9b5bc2141c86dbc69c8acc9eba6a", createTempFileFromBase(baseOutputFile.getAbsolutePath()+".library_cumulative_coverage_proportions")); + spec.addAuxFile("520720a88ae7608257af51bc41c06b87", 
createTempFileFromBase(baseOutputFile.getAbsolutePath()+".library_cumulative_coverage_proportions")); spec.addAuxFile("9cd395f47b329b9dd00ad024fcac9929", createTempFileFromBase(baseOutputFile.getAbsolutePath()+".library_interval_statistics")); - spec.addAuxFile("e69ee59f447816c025c09a56e321cef8", createTempFileFromBase(baseOutputFile.getAbsolutePath()+".library_interval_summary")); - spec.addAuxFile("fa054b665d1ae537ada719da7713e11b", createTempFileFromBase(baseOutputFile.getAbsolutePath()+".library_statistics")); - spec.addAuxFile("28dec9383b3a323a5ce7d96d62712917", createTempFileFromBase(baseOutputFile.getAbsolutePath()+".library_summary")); + spec.addAuxFile("6958004a8156f3f267caa6b04cf90f5f", createTempFileFromBase(baseOutputFile.getAbsolutePath()+".library_interval_summary")); + spec.addAuxFile("ebbfc9b9f4e12ac989c127061948c565", createTempFileFromBase(baseOutputFile.getAbsolutePath()+".library_statistics")); + spec.addAuxFile("e003bef6762833a5cebca25d94194616", createTempFileFromBase(baseOutputFile.getAbsolutePath()+".library_summary")); spec.addAuxFile("a836b92ac17b8ff9788e2aaa9116b5d4", createTempFileFromBase(baseOutputFile.getAbsolutePath()+".read_group_cumulative_coverage_counts")); - spec.addAuxFile("d32a8c425fadcc4c048bd8b48d0f61e5", createTempFileFromBase(baseOutputFile.getAbsolutePath()+".read_group_cumulative_coverage_proportions")); + spec.addAuxFile("0732b6d2db9c94b0fcf18ca1f19772a8", createTempFileFromBase(baseOutputFile.getAbsolutePath()+".read_group_cumulative_coverage_proportions")); spec.addAuxFile("7b9d0e93bf5b5313995be7010ef1f528", createTempFileFromBase(baseOutputFile.getAbsolutePath()+".read_group_interval_statistics")); - spec.addAuxFile("4656c8797696cf5ef0cdc5971271236a", createTempFileFromBase(baseOutputFile.getAbsolutePath()+".read_group_interval_summary")); - spec.addAuxFile("6f1d7f2120a4ac524c6026498f45295a", createTempFileFromBase(baseOutputFile.getAbsolutePath()+".read_group_statistics")); - 
spec.addAuxFile("69c424bca013159942337b67fdf31ff8", createTempFileFromBase(baseOutputFile.getAbsolutePath()+".read_group_summary")); + spec.addAuxFile("3522f7380554b926c71a7258250c1d63", createTempFileFromBase(baseOutputFile.getAbsolutePath()+".read_group_interval_summary")); + spec.addAuxFile("2cd9d8c5e37584edd62ca6938659cf59", createTempFileFromBase(baseOutputFile.getAbsolutePath()+".read_group_statistics")); + spec.addAuxFile("78fdd35a63a7a4c6b3a043b946b04730", createTempFileFromBase(baseOutputFile.getAbsolutePath()+".read_group_summary")); spec.addAuxFile("6909d50a7da337cd294828b32b945eb8", createTempFileFromBase(baseOutputFile.getAbsolutePath()+".sample_cumulative_coverage_counts")); - spec.addAuxFile("a395dafde101971d2b9e5ddb6cd4b7d0", createTempFileFromBase(baseOutputFile.getAbsolutePath()+".sample_cumulative_coverage_proportions")); + spec.addAuxFile("aa00e3652dd518ccbae2caa00171835b", createTempFileFromBase(baseOutputFile.getAbsolutePath()+".sample_cumulative_coverage_proportions")); spec.addAuxFile("df0ba76e0e6082c0d29fcfd68efc6b77", createTempFileFromBase(baseOutputFile.getAbsolutePath()+".sample_interval_statistics")); - spec.addAuxFile("185b910e499c08a8b88dd3ed1ac9e8ec", createTempFileFromBase(baseOutputFile.getAbsolutePath()+".sample_interval_summary")); - spec.addAuxFile("d5d11b686689467b5a8836f0a07f447d", createTempFileFromBase(baseOutputFile.getAbsolutePath()+".sample_statistics")); - spec.addAuxFile("ad1a2775a31b1634daf64e691676bb96", createTempFileFromBase(baseOutputFile.getAbsolutePath()+".sample_summary")); + spec.addAuxFile("0ce5ebfa46b081820d013bdbbfe42d34", createTempFileFromBase(baseOutputFile.getAbsolutePath()+".sample_interval_summary")); + spec.addAuxFile("c7c5bad6c6818995c634f350aa66fde9", createTempFileFromBase(baseOutputFile.getAbsolutePath()+".sample_statistics")); + spec.addAuxFile("949c9ce745753cd98f337600d3931d09", createTempFileFromBase(baseOutputFile.getAbsolutePath()+".sample_summary")); 
execute("testBaseOutputNoFiltering",spec); } @@ -115,7 +115,7 @@ public class DepthOfCoverageIntegrationTest extends WalkerTest { spec.setOutputFileLocation(baseOutputFile); spec.addAuxFile("6ccd7d8970ba98cb95fe41636a070c1c",baseOutputFile); - spec.addAuxFile("7d87783b3d98b928cac16d383ceca807",createTempFileFromBase(baseOutputFile.getAbsolutePath()+".library_interval_summary")); + spec.addAuxFile("4429d33ce8836c09ba2b5ddfae2f998e",createTempFileFromBase(baseOutputFile.getAbsolutePath()+".library_interval_summary")); execute("testNoCoverageDueToFiltering",spec); } diff --git a/public/java/test/org/broadinstitute/sting/gatk/walkers/qc/CountReadsUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/walkers/qc/CountReadsUnitTest.java new file mode 100644 index 000000000..8f5541c41 --- /dev/null +++ b/public/java/test/org/broadinstitute/sting/gatk/walkers/qc/CountReadsUnitTest.java @@ -0,0 +1,51 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. +*/ + +package org.broadinstitute.sting.gatk.walkers.qc; + +import org.broadinstitute.sting.BaseTest; +import org.testng.Assert; +import org.testng.annotations.Test; + +public class CountReadsUnitTest extends BaseTest { + + @Test + public void testReadsDoNotOverflowInt() { + + final CountReads walker = new CountReads(); + + final long moreThanMaxInt = ((long)Integer.MAX_VALUE) + 1L; + + Long sum = walker.reduceInit(); + + for ( long i = 0L; i < moreThanMaxInt; i++ ) { + final Integer x = walker.map(null, null, null); + sum = walker.reduce(x, sum); + } + + Assert.assertEquals(sum.longValue(), moreThanMaxInt); + Assert.assertTrue(sum.longValue() > (long) Integer.MAX_VALUE); + } +} diff --git a/public/java/test/org/broadinstitute/sting/gatk/walkers/qc/PileupWalkerIntegrationTest.java b/public/java/test/org/broadinstitute/sting/gatk/walkers/qc/PileupWalkerIntegrationTest.java index 76654fb74..6141a484c 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/walkers/qc/PileupWalkerIntegrationTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/walkers/qc/PileupWalkerIntegrationTest.java @@ -26,13 +26,14 @@ package org.broadinstitute.sting.gatk.walkers.qc; import org.broadinstitute.sting.WalkerTest; +import org.testng.annotations.DataProvider; import org.testng.annotations.Test; +import java.util.ArrayList; import java.util.Arrays; +import java.util.List; public class PileupWalkerIntegrationTest extends WalkerTest { - String gatkSpeedupArgs="-T Pileup -I " + validationDataLocation + "NA12878.HiSeq.WGS.bwa.cleaned.recal.hg19.20.bam " - + "-R " + hg19Reference + " -o %s "; @Test public void testGnarleyFHSPileup() { @@ -73,25 +74,50 @@ public class PileupWalkerIntegrationTest extends WalkerTest 
{ //testing speedup to GATKBAMIndex - @Test - public void testPileupOnLargeBamChr20(){ - WalkerTestSpec spec = new WalkerTestSpec(gatkSpeedupArgs + "-L 20:1-76,050", 1, Arrays.asList("8702701350de11a6d28204acefdc4775")); - executeTest("Testing single on big BAM at start of chromosome 20", spec); + @DataProvider(name="GATKBAMIndexTest") + public Object[][] makeMyDataProvider() { + List tests = new ArrayList(); + tests.add(new Object[]{"-L 20:1-76,050","8702701350de11a6d28204acefdc4775"}); + tests.add(new Object[]{"-L 20:10,000,000-10,001,100","818cf5a8229efe6f89fc1cd8145ccbe3"}); + tests.add(new Object[]{"-L 20:62,954,114-63,025,520","22471ea4a12e5139aef62bf8ff2a5b63"}); + tests.add(new Object[]{"-L 20:1-76,050 -L 20:20,000,000-20,000,100 -L 20:40,000,000-40,000,100 -L 20:30,000,000-30,000,100 -L 20:50,000,000-50,000,100 -L 20:62,954,114-63,025,520 ","08d899ed7c5a76ef3947bf67338acda1"}); + return tests.toArray(new Object[][]{}); } - @Test - public void testPileupOnLargeBamMid20(){ - WalkerTestSpec spec = new WalkerTestSpec(gatkSpeedupArgs + "-L 20:10,000,000-10,001,100", 1, Arrays.asList("818cf5a8229efe6f89fc1cd8145ccbe3")); - executeTest("Testing single on big BAM somewhere in chromosome 20", spec); + + @Test(dataProvider = "GATKBAMIndexTest") + public void testGATKBAMIndexSpeedup(final String intervals, final String md5){ + final String gatkArgs="-T Pileup -I " + validationDataLocation + "NA12878.HiSeq.WGS.bwa.cleaned.recal.hg19.20.bam " + + "-R " + hg19Reference + " -o %s "; + + WalkerTestSpec spec = new WalkerTestSpec(gatkArgs + intervals, 1, Arrays.asList(md5)); + executeTest("Testing with intervals="+intervals, spec); } + + + /***********************/ + + // testing hidden option -outputInsertLength + private final static String SingleReadAligningOffChromosome1withInsertLengthMD5 = "279e2ec8832e540f47a6e2bdf4cef5ea"; @Test - public void testPileupOnLargeBamEnd20(){ - WalkerTestSpec spec = new WalkerTestSpec(gatkSpeedupArgs + "-L 20:62,954,114-63,025,520", 1, 
Arrays.asList("22471ea4a12e5139aef62bf8ff2a5b63")); - executeTest("Testing single at end of chromosome 20", spec); + public void testSingleReadAligningOffChromosome1withInsertLength() { + String gatk_args = "-T Pileup " + + " -I " + privateTestDir + "readOffb37contig1.bam" + + " -R " + b37KGReference + + " -outputInsertLength" + + " -o %s"; + WalkerTestSpec spec = new WalkerTestSpec(gatk_args, 1, Arrays.asList(SingleReadAligningOffChromosome1withInsertLengthMD5)); + executeTest("Testing single read spanning off chromosome 1 (with insert length)", spec); } + @Test - public void testPileupOnLargeBam20Many(){ - WalkerTestSpec spec = new WalkerTestSpec(gatkSpeedupArgs + "-L 20:1-76,050 -L 20:20,000,000-20,000,100 -L 20:40,000,000-40,000,100 -L 20:30,000,000-30,000,100 -L 20:50,000,000-50,000,100 -L 20:62,954,114-63,025,520 ", - 1, Arrays.asList("08d899ed7c5a76ef3947bf67338acda1")); - executeTest("Testing single on big BAM many places", spec); + public void testGnarleyFHSPileupwithInsertLength() { + String gatk_args = "-T Pileup -I " + validationDataLocation + "FHS_Pileup_Test.bam " + + "-R " + hg18Reference + + " -outputInsertLength" + + " -L chr15:46,347,148 -o %s"; + String expected_md5 = "53ced173768f3d4d90b8a8206e72eae5"; + WalkerTestSpec spec = new WalkerTestSpec(gatk_args, 1, Arrays.asList(expected_md5)); + executeTest("Testing the standard (no-indel) pileup on three merged FHS pools with 27 deletions in 969 bases (with insert length)", spec); } + } diff --git a/public/java/test/org/broadinstitute/sting/gatk/walkers/readutils/PrintReadsIntegrationTest.java b/public/java/test/org/broadinstitute/sting/gatk/walkers/readutils/PrintReadsIntegrationTest.java index 7482eae60..adc7ad765 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/walkers/readutils/PrintReadsIntegrationTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/walkers/readutils/PrintReadsIntegrationTest.java @@ -59,10 +59,10 @@ public class PrintReadsIntegrationTest extends WalkerTest 
{ {new PRTest(hg18Reference, "HiSeq.1mb.bam", " -simplifyBAM", "1510dc4429f3ed49caf96da41e8ed396")}, {new PRTest(hg18Reference, "HiSeq.1mb.bam", " -n 10", "0e3d1748ad1cb523e3295cab9d09d8fc")}, // See: GATKBAMIndex.getStartOfLastLinearBin(), BAMScheduler.advance(), IntervalOverlapFilteringIterator.advance() - {new PRTest(b37KGReference, "unmappedFlagReadsInLastLinearBin.bam", "", "e1cac555f3d720f611c47eec93e84bd9")}, - {new PRTest(b37KGReference, "unmappedFlagReadsInLastLinearBin.bam", " -L 1", "6e2558317d409195eab3006dc9e5524c")}, + {new PRTest(b37KGReference, "unmappedFlagReadsInLastLinearBin.bam", "", "d7f23fd77d7dc7cb50d3397f644c6d8a")}, + {new PRTest(b37KGReference, "unmappedFlagReadsInLastLinearBin.bam", " -L 1", "c601db95b20248d012b0085347fcb6d1")}, {new PRTest(b37KGReference, "unmappedFlagReadsInLastLinearBin.bam", " -L unmapped", "2d32440e47e8d9d329902fe573ad94ce")}, - {new PRTest(b37KGReference, "unmappedFlagReadsInLastLinearBin.bam", " -L 1 -L unmapped", "6e2558317d409195eab3006dc9e5524c")}, + {new PRTest(b37KGReference, "unmappedFlagReadsInLastLinearBin.bam", " -L 1 -L unmapped", "c601db95b20248d012b0085347fcb6d1")}, {new PRTest(b37KGReference, "oneReadAllInsertion.bam", "", "349650b6aa9e574b48a2a62627f37c7d")}, {new PRTest(b37KGReference, "NA12878.1_10mb_2_10mb.bam", "", "0c1cbe67296637a85e80e7a182f828ab")} }; diff --git a/public/java/test/org/broadinstitute/sting/utils/MathUtilsUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/MathUtilsUnitTest.java index 27af8ec68..3933b3830 100644 --- a/public/java/test/org/broadinstitute/sting/utils/MathUtilsUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/utils/MathUtilsUnitTest.java @@ -25,6 +25,7 @@ package org.broadinstitute.sting.utils; +import cern.jet.random.Normal; import org.broadinstitute.sting.BaseTest; import org.testng.Assert; import org.testng.annotations.BeforeClass; @@ -40,6 +41,35 @@ public class MathUtilsUnitTest extends BaseTest { public void init() { } + /** + * Tests 
that we get unqiue values for the valid (non-null-producing) input space for {@link MathUtils#fastGenerateUniqueHashFromThreeIntegers(int, int, int)}. + */ + @Test + public void testGenerateUniqueHashFromThreePositiveIntegers() { + logger.warn("Executing testGenerateUniqueHashFromThreePositiveIntegers"); + + final Set observedLongs = new HashSet(); + for (short i = 0; i < Byte.MAX_VALUE; i++) { + for (short j = 0; j < Byte.MAX_VALUE; j++) { + for (short k = 0; k < Byte.MAX_VALUE; k++) { + final Long aLong = MathUtils.fastGenerateUniqueHashFromThreeIntegers(i, j, k); + //System.out.println(String.format("%s, %s, %s: %s", i, j, k, aLong)); + Assert.assertTrue(observedLongs.add(aLong)); + } + } + } + + for (short i = Byte.MAX_VALUE; i <= Short.MAX_VALUE && i > 0; i += 128) { + for (short j = Byte.MAX_VALUE; j <= Short.MAX_VALUE && j > 0; j += 128) { + for (short k = Byte.MAX_VALUE; k <= Short.MAX_VALUE && k > 0; k += 128) { + final Long aLong = MathUtils.fastGenerateUniqueHashFromThreeIntegers(i, j, k); + // System.out.println(String.format("%s, %s, %s: %s", i, j, k, aLong)); + Assert.assertTrue(observedLongs.add(aLong)); + } + } + } + } + /** * Tests that we get the right values from the binomial distribution */ @@ -63,13 +93,15 @@ public class MathUtilsUnitTest extends BaseTest { public void testCumulativeBinomialProbability() { logger.warn("Executing testCumulativeBinomialProbability"); - final int numTrials = 10; - for ( int i = 0; i < numTrials; i++ ) - Assert.assertEquals(MathUtils.binomialCumulativeProbability(numTrials, i, i), MathUtils.binomialProbability(numTrials, i), 1e-10, String.format("k=%d, n=%d", i, numTrials)); - - Assert.assertEquals(MathUtils.binomialCumulativeProbability(10, 0, 2), 0.05468750, 1e-7); - Assert.assertEquals(MathUtils.binomialCumulativeProbability(10, 0, 5), 0.62304687, 1e-7); - Assert.assertEquals(MathUtils.binomialCumulativeProbability(10, 0, 10), 1.0, 1e-7); + for (int j = 0; j < 2; j++) { // Test memoizing functionality, as well. 
+ final int numTrials = 10; + for ( int i = 0; i < numTrials; i++ ) + Assert.assertEquals(MathUtils.binomialCumulativeProbability(numTrials, i, i), MathUtils.binomialProbability(numTrials, i), 1e-10, String.format("k=%d, n=%d", i, numTrials)); + + Assert.assertEquals(MathUtils.binomialCumulativeProbability(10, 0, 2), 0.05468750, 1e-7); + Assert.assertEquals(MathUtils.binomialCumulativeProbability(10, 0, 5), 0.62304687, 1e-7); + Assert.assertEquals(MathUtils.binomialCumulativeProbability(10, 0, 10), 1.0, 1e-7); + } } /** @@ -398,4 +430,20 @@ public class MathUtilsUnitTest extends BaseTest { Assert.assertEquals(MathUtils.logDotProduct(new double[]{-5.0,-3.0,2.0}, new double[]{6.0,7.0,8.0}),10.0,1e-3); Assert.assertEquals(MathUtils.logDotProduct(new double[]{-5.0}, new double[]{6.0}),1.0,1e-3); } + + @Test + public void testNormalDistribution() { + final double requiredPrecision = 1E-10; + + final Normal n = new Normal(0.0, 1.0, null); + for( final double mu : new double[]{-5.0, -3.2, -1.5, 0.0, 1.2, 3.0, 5.8977} ) { + for( final double sigma : new double[]{1.2, 3.0, 5.8977} ) { + for( final double x : new double[]{-5.0, -3.2, -1.5, 0.0, 1.2, 3.0, 5.8977} ) { + n.setState(mu, sigma); + Assert.assertEquals(n.pdf(x), MathUtils.normalDistribution(mu, sigma, x), requiredPrecision); + Assert.assertEquals(Math.log10(n.pdf(x)), MathUtils.normalDistributionLog10(mu, sigma, x), requiredPrecision); + } + } + } + } } diff --git a/public/java/test/org/broadinstitute/sting/utils/UtilsUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/UtilsUnitTest.java index 154b000ce..0a6f9898e 100644 --- a/public/java/test/org/broadinstitute/sting/utils/UtilsUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/utils/UtilsUnitTest.java @@ -29,6 +29,7 @@ import org.apache.commons.io.FileUtils; import org.broadinstitute.sting.utils.io.IOUtils; import org.testng.Assert; import org.broadinstitute.sting.BaseTest; +import org.testng.annotations.DataProvider; import 
org.testng.annotations.Test; import java.io.File; @@ -189,4 +190,50 @@ public class UtilsUnitTest extends BaseTest { final String sourceString = FileUtils.readFileToString(source); Assert.assertEquals(Utils.calcMD5(sourceString), sourceMD5); } + + @Test + public void testLongestCommonOps() { + for ( int prefixLen = 0; prefixLen < 20; prefixLen++ ) { + for ( int extraSeq1Len = 0; extraSeq1Len < 10; extraSeq1Len++ ) { + for ( int extraSeq2Len = 0; extraSeq2Len < 10; extraSeq2Len++ ) { + for ( int max = 0; max < 50; max++ ) { + final String prefix = Utils.dupString("A", prefixLen); + final int expected = Math.min(prefixLen, max); + + { + final String seq1 = prefix + Utils.dupString("C", extraSeq1Len); + final String seq2 = prefix + Utils.dupString("G", extraSeq1Len); + Assert.assertEquals(Utils.longestCommonPrefix(seq1.getBytes(), seq2.getBytes(), max), expected, "LongestCommonPrefix failed: seq1 " + seq1 + " seq2 " + seq2 + " max " + max); + } + + { + final String seq1 = Utils.dupString("C", extraSeq1Len) + prefix; + final String seq2 = Utils.dupString("G", extraSeq1Len) + prefix; + Assert.assertEquals(Utils.longestCommonSuffix(seq1.getBytes(), seq2.getBytes(), max), expected, "longestCommonSuffix failed: seq1 " + seq1 + " seq2 " + seq2 + " max " + max); + } + } + } + } + } + } + + @DataProvider(name = "trim") + public Object[][] createTrimTestData() { + List tests = new ArrayList(); + + final String s = "AAAA"; + for ( int front = 0; front < s.length(); front++ ) { + for ( int back = 0; back < s.length(); back++ ) { + if ( front + back <= s.length() ) + tests.add(new Object[]{s, front, back}); + } + } + + return tests.toArray(new Object[][]{}); + } + + @Test(dataProvider = "trim", enabled = true) + public void testTrim(final String s, final int frontTrim, final int backTrim) { + Assert.assertEquals(s.length() - frontTrim - backTrim, Utils.trimArray(s.getBytes(), frontTrim, backTrim).length); + } } diff --git 
a/public/java/test/org/broadinstitute/sting/utils/activeregion/ActiveRegionUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/activeregion/ActiveRegionUnitTest.java index ad5fd3642..0f9b8531a 100644 --- a/public/java/test/org/broadinstitute/sting/utils/activeregion/ActiveRegionUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/utils/activeregion/ActiveRegionUnitTest.java @@ -144,7 +144,7 @@ public class ActiveRegionUnitTest extends BaseTest { } @Test(enabled = !DEBUG, dataProvider = "ActiveRegionReads") - public void testActiveRegionReads(final GenomeLoc loc, final GATKSAMRecord read) { + public void testActiveRegionReads(final GenomeLoc loc, final GATKSAMRecord read) throws Exception { final GenomeLoc expectedSpan = loc.union(genomeLocParser.createGenomeLoc(read)); final ActiveRegion region = new ActiveRegion(loc, null, true, genomeLocParser, 0); @@ -176,19 +176,31 @@ public class ActiveRegionUnitTest extends BaseTest { Assert.assertEquals(region.getReadSpanLoc(), expectedSpan); Assert.assertTrue(region.equalExceptReads(region2)); - region.removeAll(Collections.emptyList()); + region.removeAll(Collections.emptySet()); Assert.assertEquals(region.getReads(), Collections.singletonList(read)); Assert.assertEquals(region.size(), 1); Assert.assertEquals(region.getExtendedLoc(), loc); Assert.assertEquals(region.getReadSpanLoc(), expectedSpan); Assert.assertTrue(region.equalExceptReads(region2)); - region.removeAll(Collections.singletonList(read)); + region.removeAll(Collections.singleton(read)); Assert.assertEquals(region.getReads(), Collections.emptyList()); Assert.assertEquals(region.size(), 0); Assert.assertEquals(region.getExtendedLoc(), loc); Assert.assertEquals(region.getReadSpanLoc(), loc); Assert.assertTrue(region.equalExceptReads(region2)); + + final GATKSAMRecord read2 = (GATKSAMRecord)read.clone(); + read2.setReadName(read.getReadName() + ".clone"); + + for ( final GATKSAMRecord readToKeep : Arrays.asList(read, read2)) { + 
region.addAll(Arrays.asList(read, read2)); + final GATKSAMRecord readToDiscard = readToKeep == read ? read2 : read; + region.removeAll(Collections.singleton(readToDiscard)); + Assert.assertEquals(region.getReads(), Arrays.asList(readToKeep)); + Assert.assertEquals(region.size(), 1); + Assert.assertEquals(region.getExtendedLoc(), loc); + } } // ----------------------------------------------------------------------------------------------- diff --git a/public/java/test/org/broadinstitute/sting/utils/activeregion/ActivityProfileUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/activeregion/ActivityProfileUnitTest.java index 9be250b8e..f208815f7 100644 --- a/public/java/test/org/broadinstitute/sting/utils/activeregion/ActivityProfileUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/utils/activeregion/ActivityProfileUnitTest.java @@ -450,7 +450,7 @@ public class ActivityProfileUnitTest extends BaseTest { private double[] makeGaussian(final int mean, final int range, final double sigma) { final double[] gauss = new double[range]; for( int iii = 0; iii < range; iii++ ) { - gauss[iii] = MathUtils.NormalDistribution(mean, sigma, iii) + ActivityProfile.ACTIVE_PROB_THRESHOLD; + gauss[iii] = MathUtils.normalDistribution(mean, sigma, iii) + ActivityProfile.ACTIVE_PROB_THRESHOLD; } return gauss; } diff --git a/public/java/test/org/broadinstitute/sting/utils/classloader/JVMUtilsUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/classloader/JVMUtilsUnitTest.java new file mode 100644 index 000000000..6ffd47f37 --- /dev/null +++ b/public/java/test/org/broadinstitute/sting/utils/classloader/JVMUtilsUnitTest.java @@ -0,0 +1,75 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, 
merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. +*/ + +package org.broadinstitute.sting.utils.classloader; + +import org.testng.Assert; +import org.testng.annotations.DataProvider; +import org.testng.annotations.Test; + +public class JVMUtilsUnitTest { + + // Test classes used by the tests for JVMUtils.getCallingClass(): + private static class DummyTestClass1 { + public static Class getCaller( final Class callee ) { + return DummyTestClass2.getCaller(callee); + } + } + + private static class DummyTestClass2 { + public static Class getCaller( final Class callee ) { + return DummyTestClass3.getCaller(callee); + } + } + + private static class DummyTestClass3 { + public static Class getCaller( final Class callee ) { + return JVMUtils.getCallingClass(callee); + } + } + + @DataProvider( name = "TestGetCallingClassDataProvider" ) + public Object[][] getTestCallingClassTestData() { + return new Object[][] { + { DummyTestClass1.class, JVMUtilsUnitTest.class }, + { DummyTestClass2.class, DummyTestClass1.class }, + { DummyTestClass3.class, DummyTestClass2.class } + }; + } + + @Test( dataProvider = "TestGetCallingClassDataProvider" ) + public void testGetCallingClass( final Class callee, final Class expectedCaller ) { + final 
Class reportedCaller = DummyTestClass1.getCaller(callee); + + Assert.assertEquals(reportedCaller, expectedCaller, + String.format("Wrong calling class returned from DummyTestClass1.getCaller(%s)", callee.getSimpleName())); + } + + @Test( expectedExceptions = IllegalArgumentException.class ) + public void testGetCallingClassCalleeNotFound() { + // Trying to get the calling class of a class not on the runtime stack should produce an exception. + JVMUtils.getCallingClass(DummyTestClass1.class); + } +} diff --git a/public/java/test/org/broadinstitute/sting/utils/clipping/ReadClipperTestUtils.java b/public/java/test/org/broadinstitute/sting/utils/clipping/ReadClipperTestUtils.java index 0e0f6322e..cbbc8252b 100644 --- a/public/java/test/org/broadinstitute/sting/utils/clipping/ReadClipperTestUtils.java +++ b/public/java/test/org/broadinstitute/sting/utils/clipping/ReadClipperTestUtils.java @@ -28,8 +28,8 @@ package org.broadinstitute.sting.utils.clipping; import net.sf.samtools.Cigar; import net.sf.samtools.CigarElement; import net.sf.samtools.CigarOperator; +import net.sf.samtools.TextCigarCodec; import org.broadinstitute.sting.utils.Utils; -import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import org.testng.Assert; @@ -38,13 +38,6 @@ import java.util.LinkedList; import java.util.List; import java.util.Stack; -/** - * Created by IntelliJ IDEA. - * User: roger - * Date: 11/27/11 - * Time: 6:45 AM - * To change this template use File | Settings | File Templates. 
- */ public class ReadClipperTestUtils { //Should contain all the utils needed for tests to mass produce //reads, cigars, and other needed classes @@ -236,78 +229,6 @@ public class ReadClipperTestUtils { } public static Cigar cigarFromString(String cigarString) { - Cigar cigar = new Cigar(); - - boolean isNumber = false; - int number = 0; - for (int i = 0; i < cigarString.length(); i++) { - char x = cigarString.charAt(i); - - if (x >= '0' && x <='9') { - if (isNumber) { - number *= 10; - } - else { - isNumber = true; - } - number += x - '0'; - } - - else { - CigarElement e; - switch (x) { - case 'M': - case 'm': - e = new CigarElement(number, CigarOperator.M); - break; - - case 'I': - case 'i': - e = new CigarElement(number, CigarOperator.I); - break; - - case 'D': - case 'd': - e = new CigarElement(number, CigarOperator.D); - break; - - case 'S': - case 's': - e = new CigarElement(number, CigarOperator.S); - break; - - case 'N': - case 'n': - e = new CigarElement(number, CigarOperator.N); - break; - - case 'H': - case 'h': - e = new CigarElement(number, CigarOperator.H); - break; - - case 'P': - case 'p': - e = new CigarElement(number, CigarOperator.P); - break; - - case '=': - e = new CigarElement(number, CigarOperator.EQ); - break; - - case 'X': - case 'x': - e = new CigarElement(number, CigarOperator.X); - break; - - default: - throw new ReviewedStingException("Unrecognized cigar operator: " + x + " (number: " + number + ")"); - } - cigar.add(e); - } - } - return cigar; + return TextCigarCodec.getSingleton().decode(cigarString); } - - } diff --git a/public/java/test/org/broadinstitute/sting/utils/clipping/ReadClipperUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/clipping/ReadClipperUnitTest.java index ae7c1e01c..d6bd0d4d2 100644 --- a/public/java/test/org/broadinstitute/sting/utils/clipping/ReadClipperUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/utils/clipping/ReadClipperUnitTest.java @@ -33,8 +33,10 @@ import 
org.broadinstitute.sting.utils.Utils; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import org.testng.Assert; import org.testng.annotations.BeforeClass; +import org.testng.annotations.DataProvider; import org.testng.annotations.Test; +import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; @@ -44,16 +46,17 @@ import java.util.List; * Date: 9/28/11 */ public class ReadClipperUnitTest extends BaseTest { + private final static boolean DEBUG = false; List cigarList; - int maximumCigarSize = 6; // 6 is the minimum necessary number to try all combinations of cigar types with guarantee of clipping an element with length = 2 + int maximumCigarSize = 10; // 6 is the minimum necessary number to try all combinations of cigar types with guarantee of clipping an element with length = 2 @BeforeClass public void init() { cigarList = ReadClipperTestUtils.generateCigarList(maximumCigarSize); } - @Test(enabled = true) + @Test(enabled = !DEBUG) public void testHardClipBothEndsByReferenceCoordinates() { for (Cigar cigar : cigarList) { GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(cigar); @@ -69,7 +72,7 @@ public class ReadClipperUnitTest extends BaseTest { } } - @Test(enabled = true) + @Test(enabled = !DEBUG) public void testHardClipByReadCoordinates() { for (Cigar cigar : cigarList) { GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(cigar); @@ -86,7 +89,31 @@ public class ReadClipperUnitTest extends BaseTest { } } - @Test(enabled = true) + @DataProvider(name = "ClippedReadLengthData") + public Object[][] makeClippedReadLengthData() { + List tests = new ArrayList(); + + // this functionality can be adapted to provide input data for whatever you might want in your data + final int originalReadLength = 50; + for ( int nToClip = 1; nToClip < originalReadLength - 1; nToClip++ ) { + tests.add(new Object[]{originalReadLength, nToClip}); + } + + return tests.toArray(new Object[][]{}); + } + + @Test(dataProvider 
= "ClippedReadLengthData", enabled = !DEBUG) + public void testHardClipReadLengthIsRight(final int originalReadLength, final int nToClip) { + GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(originalReadLength + "M"); + read.getReadLength(); // provoke the caching of the read length + final int expectedReadLength = originalReadLength - nToClip; + GATKSAMRecord clipped = ReadClipper.hardClipByReadCoordinates(read, 0, nToClip - 1); + Assert.assertEquals(clipped.getReadLength(), expectedReadLength, + String.format("Clipped read length %d with cigar %s not equal to the expected read length %d after clipping %d bases from the left from a %d bp read with cigar %s", + clipped.getReadLength(), clipped.getCigar(), expectedReadLength, nToClip, read.getReadLength(), read.getCigar())); + } + + @Test(enabled = !DEBUG) public void testHardClipByReferenceCoordinates() { for (Cigar cigar : cigarList) { GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(cigar); @@ -109,7 +136,7 @@ public class ReadClipperUnitTest extends BaseTest { } } - @Test(enabled = true) + @Test(enabled = !DEBUG) public void testHardClipByReferenceCoordinatesLeftTail() { for (Cigar cigar : cigarList) { GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(cigar); @@ -128,7 +155,7 @@ public class ReadClipperUnitTest extends BaseTest { } } - @Test(enabled = true) + @Test(enabled = !DEBUG) public void testHardClipByReferenceCoordinatesRightTail() { for (Cigar cigar : cigarList) { GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(cigar); @@ -146,7 +173,7 @@ public class ReadClipperUnitTest extends BaseTest { } } - @Test(enabled = true) + @Test(enabled = !DEBUG) public void testHardClipLowQualEnds() { final byte LOW_QUAL = 2; final byte HIGH_QUAL = 30; @@ -190,7 +217,7 @@ public class ReadClipperUnitTest extends BaseTest { } } - @Test(enabled = true) + @Test(enabled = !DEBUG) public void testHardClipSoftClippedBases() { for (Cigar cigar : cigarList) { GATKSAMRecord read = 
ReadClipperTestUtils.makeReadFromCigar(cigar); @@ -225,7 +252,7 @@ public class ReadClipperUnitTest extends BaseTest { } } - @Test(enabled = true) + @Test(enabled = !DEBUG) public void testRevertSoftClippedBases() { for (Cigar cigar : cigarList) { final int leadingSoftClips = leadingCigarElementLength(cigar, CigarOperator.SOFT_CLIP); @@ -247,7 +274,7 @@ public class ReadClipperUnitTest extends BaseTest { } } - @Test(enabled = true) + @Test(enabled = !DEBUG) public void testRevertSoftClippedBasesWithThreshold() { for (Cigar cigar : cigarList) { final int leadingSoftClips = leadingCigarElementLength(cigar, CigarOperator.SOFT_CLIP); @@ -266,6 +293,40 @@ public class ReadClipperUnitTest extends BaseTest { } } + @DataProvider(name = "RevertSoftClipsBeforeContig") + public Object[][] makeRevertSoftClipsBeforeContig() { + List tests = new ArrayList<>(); + + // this functionality can be adapted to provide input data for whatever you might want in your data + for ( int softStart : Arrays.asList(-10, -1, 0) ) { + for ( int alignmentStart : Arrays.asList(1, 10) ) { + tests.add(new Object[]{softStart, alignmentStart}); + } + } + + return tests.toArray(new Object[][]{}); + } + + @Test(enabled = true, dataProvider = "RevertSoftClipsBeforeContig") + public void testRevertSoftClippedBasesBeforeStartOfContig(final int softStart, final int alignmentStart) { + final int nMatches = 10; + final int nSoft = -1 * (softStart - alignmentStart); + final String cigar = nSoft + "S" + nMatches + "M"; + final GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(cigar); + read.setAlignmentStart(alignmentStart); + + Assert.assertEquals(read.getSoftStart(), softStart); + Assert.assertEquals(read.getAlignmentStart(), alignmentStart); + Assert.assertEquals(read.getCigarString(), cigar); + + final GATKSAMRecord reverted = ReadClipper.revertSoftClippedBases(read); + + final int expectedAlignmentStart = 1; + final String expectedCigar = (1 - softStart) + "H" + read.getAlignmentEnd() + "M"; + 
Assert.assertEquals(reverted.getSoftStart(), expectedAlignmentStart); + Assert.assertEquals(reverted.getAlignmentStart(), expectedAlignmentStart); + Assert.assertEquals(reverted.getCigarString(), expectedCigar); + } private void assertNoLowQualBases(GATKSAMRecord read, byte low_qual) { if (!read.isEmpty()) { @@ -349,7 +410,7 @@ public class ReadClipperUnitTest extends BaseTest { } - @Test(enabled = true) + @Test(enabled = !DEBUG) public void testHardClipReducedRead() { GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar("10M"); final int[] counts = new int[read.getReadLength()]; @@ -365,4 +426,11 @@ public class ReadClipperUnitTest extends BaseTest { } } + @Test(enabled = !DEBUG) + public void testRevertEntirelySoftclippedReads() { + GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar("2H1S3H"); + GATKSAMRecord clippedRead = ReadClipper.revertSoftClippedBases(read); + Assert.assertEquals(clippedRead.getAlignmentStart(), read.getSoftStart()); + } + } \ No newline at end of file diff --git a/public/java/test/org/broadinstitute/sting/utils/crypt/GATKKeyIntegrationTest.java b/public/java/test/org/broadinstitute/sting/utils/crypt/GATKKeyIntegrationTest.java index 9c9248669..ca7314ca9 100644 --- a/public/java/test/org/broadinstitute/sting/utils/crypt/GATKKeyIntegrationTest.java +++ b/public/java/test/org/broadinstitute/sting/utils/crypt/GATKKeyIntegrationTest.java @@ -130,7 +130,7 @@ public class GATKKeyIntegrationTest extends WalkerTest { { "corrupt_bad_isize_field.key", UserException.UnreadableKeyException.class }, { "corrupt_bad_crc.key", UserException.UnreadableKeyException.class }, { "corrupt_no_email_address.key", UserException.UnreadableKeyException.class }, - { "corrupt_no_sectional_delimiter.key", UserException.KeySignatureVerificationException.class }, + { "corrupt_no_sectional_delimiter.key", UserException.UnreadableKeyException.class }, { "corrupt_no_signature.key", UserException.UnreadableKeyException.class }, { "corrupt_bad_signature.key", 
UserException.KeySignatureVerificationException.class }, { "corrupt_non_gzipped_valid_key.key", UserException.UnreadableKeyException.class } diff --git a/public/java/test/org/broadinstitute/sting/utils/fragments/FragmentUtilsUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/fragments/FragmentUtilsUnitTest.java index e9600480a..0886427ca 100644 --- a/public/java/test/org/broadinstitute/sting/utils/fragments/FragmentUtilsUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/utils/fragments/FragmentUtilsUnitTest.java @@ -26,6 +26,7 @@ package org.broadinstitute.sting.utils.fragments; import net.sf.samtools.SAMFileHeader; +import net.sf.samtools.TextCigarCodec; import org.broadinstitute.sting.BaseTest; import org.broadinstitute.sting.utils.Utils; import org.broadinstitute.sting.utils.pileup.PileupElement; @@ -296,4 +297,51 @@ public class FragmentUtilsUnitTest extends BaseTest { final GATKSAMRecord actual = FragmentUtils.mergeOverlappingPairedFragments(read1, read2); Assert.assertNull(actual); } + + @DataProvider(name = "MergeFragmentsOffContig") + public Object[][] makeMergeFragmentsOffContig() throws Exception { + List tests = new ArrayList<>(); + + for ( final int pre1 : Arrays.asList(0, 50)) { + for ( final int post1 : Arrays.asList(0, 50)) { + for ( final int pre2 : Arrays.asList(0, 50)) { + for ( final int post2 : Arrays.asList(0, 50)) { + tests.add(new Object[]{pre1, post1, pre2, post2}); + } + } + } + } + + return tests.toArray(new Object[][]{}); + } + + @Test(dataProvider = "MergeFragmentsOffContig") + public void testMergeFragmentsOffContig(final int pre1, final int post1, final int pre2, final int post2) { + final int contigSize = 10; + final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 0, contigSize); + + final GATKSAMRecord read1 = createReadOffContig(header, false, pre1, post1); + final GATKSAMRecord read2 = createReadOffContig(header, true, pre2, post2); + + final GATKSAMRecord merged = 
FragmentUtils.mergeOverlappingPairedFragments(read1, read2); + } + + private GATKSAMRecord createReadOffContig(final SAMFileHeader header, final boolean negStrand, final int pre, final int post) { + final int contigLen = header.getSequence(0).getSequenceLength(); + final int readLen = pre + contigLen + post; + final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "read1", 0, 1, readLen); + read.setAlignmentStart(1); + read.setCigar(TextCigarCodec.getSingleton().decode(pre + "S" + contigLen + "M" + post + "S")); + read.setBaseQualities(Utils.dupBytes((byte) 30, readLen)); + read.setReadBases(Utils.dupBytes((byte)'A', readLen)); + read.setMappingQuality(60); + read.setMateAlignmentStart(1); + read.setProperPairFlag(true); + read.setReadPairedFlag(true); + read.setInferredInsertSize(30); + read.setReadNegativeStrandFlag(negStrand); + read.setMateNegativeStrandFlag(! negStrand); + read.setReadGroup(new GATKSAMReadGroupRecord("foo")); + return read; + } } diff --git a/public/java/test/org/broadinstitute/sting/utils/progressmeter/ProgressMeterDaemonUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/progressmeter/ProgressMeterDaemonUnitTest.java index d127a2937..767646963 100644 --- a/public/java/test/org/broadinstitute/sting/utils/progressmeter/ProgressMeterDaemonUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/utils/progressmeter/ProgressMeterDaemonUnitTest.java @@ -84,10 +84,19 @@ public class ProgressMeterDaemonUnitTest extends BaseTest { return tests.toArray(new Object[][]{}); } + @Test + public void testPeriodUpdateNano() { + final ProgressMeter meter = new TestingProgressMeter(10); + final long currentTime = meter.getRuntimeInNanoseconds(); + meter.updateElapsedTimeInNanoseconds(); + Assert.assertTrue( meter.getRuntimeInNanosecondsUpdatedPeriodically() > currentTime, "Updating the periodic runtime failed" ); + } + @Test(dataProvider = "PollingData", invocationCount = 10, successPercentage = 90) public void 
testProgressMeterDaemon(final long poll, final int ticks) throws InterruptedException { final TestingProgressMeter meter = new TestingProgressMeter(poll); final ProgressMeterDaemon daemon = meter.getProgressMeterDaemon(); + Assert.assertTrue(daemon.isDaemon()); Assert.assertFalse(daemon.isDone()); @@ -106,5 +115,7 @@ public class ProgressMeterDaemonUnitTest extends BaseTest { final int tolerance = (int)Math.ceil(0.8 * meter.progressCalls.size()); Assert.assertTrue(Math.abs(meter.progressCalls.size() - ticks) <= tolerance, "Expected " + ticks + " progress calls from daemon thread, but got " + meter.progressCalls.size() + " and a tolerance of only " + tolerance); + + Assert.assertTrue(meter.getRuntimeInNanosecondsUpdatedPeriodically() > 0, "Daemon should have updated our periodic runtime"); } } diff --git a/public/java/test/org/broadinstitute/sting/utils/sam/AlignmentUtilsUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/sam/AlignmentUtilsUnitTest.java index 2a2d80206..fbf0242a3 100644 --- a/public/java/test/org/broadinstitute/sting/utils/sam/AlignmentUtilsUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/utils/sam/AlignmentUtilsUnitTest.java @@ -792,7 +792,8 @@ public class AlignmentUtilsUnitTest { tests.add(new Object[]{"2M2D2I", 3, 3, "1I"}); tests.add(new Object[]{"2M2D2I", 2, 2, "2D1I"}); tests.add(new Object[]{"2M2D2I", 1, 2, "1M2D1I"}); - tests.add(new Object[]{"2M2D2I", 1, 1, "1M"}); + tests.add(new Object[]{"2M2D2I", 0, 1, "2M2D"}); + tests.add(new Object[]{"2M2D2I", 1, 1, "1M2D"}); return tests.toArray(new Object[][]{}); } @@ -1032,5 +1033,12 @@ public class AlignmentUtilsUnitTest { Assert.assertEquals(AlignmentUtils.startsOrEndsWithInsertionOrDeletion(TextCigarCodec.getSingleton().decode(cigar)), expected); } + @Test(dataProvider = "StartsOrEndsWithInsertionOrDeletionData", enabled = true) + public void testRemoveTrailingDeletions(final String cigar, final boolean expected) { + final Cigar originalCigar = 
TextCigarCodec.getSingleton().decode(cigar); + final Cigar newCigar = AlignmentUtils.removeTrailingDeletions(originalCigar); + + Assert.assertEquals(originalCigar.equals(newCigar), !cigar.endsWith("D")); + } } diff --git a/public/java/test/org/broadinstitute/sting/utils/sam/GATKSAMRecordUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/sam/GATKSAMRecordUnitTest.java index eefc92799..e9af685a6 100644 --- a/public/java/test/org/broadinstitute/sting/utils/sam/GATKSAMRecordUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/utils/sam/GATKSAMRecordUnitTest.java @@ -42,7 +42,7 @@ public class GATKSAMRecordUnitTest extends BaseTest { GATKSAMRecord read, reducedRead; final static String BASES = "ACTG"; final static String QUALS = "!+5?"; - final private static int[] REDUCED_READ_COUNTS = new int[]{10, 20, 30, 40, 1}; + final private static int[] REDUCED_READ_COUNTS = new int[]{10, 20, 30, 40}; @BeforeClass public void init() { @@ -200,6 +200,7 @@ public class GATKSAMRecordUnitTest extends BaseTest { @Test public void testGetReducedCountsIsCorrect() { + reducedRead.setReducedReadCountsTag(REDUCED_READ_COUNTS); final int[] counts = reducedRead.getReducedReadCounts(); Assert.assertNotSame(counts, reducedRead.getAttribute(GATKSAMRecord.REDUCED_READ_CONSENSUS_TAG)); for ( int i = 0; i < counts.length; i++ ) diff --git a/public/java/test/org/broadinstitute/sting/utils/sam/ReadUtilsUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/sam/ReadUtilsUnitTest.java index 331121c55..abe0c394b 100644 --- a/public/java/test/org/broadinstitute/sting/utils/sam/ReadUtilsUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/utils/sam/ReadUtilsUnitTest.java @@ -151,6 +151,31 @@ public class ReadUtilsUnitTest extends BaseTest { read.setReadNegativeStrandFlag(false); boundary = get.getAdaptor(read); Assert.assertEquals(boundary, ReadUtils.CANNOT_COMPUTE_ADAPTOR_BOUNDARY); + + // Test case 8: read doesn't have proper pair flag set + read = 
makeRead(fragmentSize, mateStart); + read.setReadPairedFlag(true); + read.setProperPairFlag(false); + Assert.assertEquals(get.getAdaptor(read), ReadUtils.CANNOT_COMPUTE_ADAPTOR_BOUNDARY); + + // Test case 9: read and mate have same negative flag setting + for ( final boolean negFlag: Arrays.asList(true, false) ) { + read = makeRead(fragmentSize, mateStart); + read.setAlignmentStart(BEFORE); + read.setReadPairedFlag(true); + read.setProperPairFlag(true); + read.setReadNegativeStrandFlag(negFlag); + read.setMateNegativeStrandFlag(!negFlag); + Assert.assertTrue(get.getAdaptor(read) != ReadUtils.CANNOT_COMPUTE_ADAPTOR_BOUNDARY, "Get adaptor should have succeeded"); + + read = makeRead(fragmentSize, mateStart); + read.setAlignmentStart(BEFORE); + read.setReadPairedFlag(true); + read.setProperPairFlag(true); + read.setReadNegativeStrandFlag(negFlag); + read.setMateNegativeStrandFlag(negFlag); + Assert.assertEquals(get.getAdaptor(read), ReadUtils.CANNOT_COMPUTE_ADAPTOR_BOUNDARY, "Get adaptor should have failed for reads with bad alignment orientation"); + } } @Test (enabled = true) diff --git a/public/java/test/org/broadinstitute/sting/utils/smithwaterman/SmithWatermanBenchmark.java b/public/java/test/org/broadinstitute/sting/utils/smithwaterman/SmithWatermanBenchmark.java new file mode 100644 index 000000000..ee8f411bf --- /dev/null +++ b/public/java/test/org/broadinstitute/sting/utils/smithwaterman/SmithWatermanBenchmark.java @@ -0,0 +1,88 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright 
notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. +*/ + +package org.broadinstitute.sting.utils.smithwaterman; + +import com.google.caliper.Param; +import com.google.caliper.SimpleBenchmark; +import org.broadinstitute.sting.utils.Utils; + +/** + * Caliper microbenchmark of parsing a VCF file + */ +public class SmithWatermanBenchmark extends SimpleBenchmark { + + @Param({"Original", "Greedy"}) + String version; // set automatically by framework + + @Param({"10", "50", "100", "500"}) + int sizeOfMiddleRegion; // set automatically by framework + + @Param({"10", "50", "100", "500"}) + int sizeOfEndRegions; // set automatically by framework + + String refString; + String hapString; + + @Override protected void setUp() { + final StringBuilder ref = new StringBuilder(); + final StringBuilder hap = new StringBuilder(); + + ref.append(Utils.dupString('A', sizeOfEndRegions)); + hap.append(Utils.dupString('A', sizeOfEndRegions)); + + // introduce a SNP + ref.append("X"); + hap.append("Y"); + + ref.append(Utils.dupString('A', sizeOfMiddleRegion)); + hap.append(Utils.dupString('A', sizeOfMiddleRegion)); + + // introduce a SNP + ref.append("X"); + hap.append("Y"); + + ref.append(Utils.dupString('A', sizeOfEndRegions)); + hap.append(Utils.dupString('A', sizeOfEndRegions)); + + refString = ref.toString(); + hapString = hap.toString(); + } + + public void timeSW(int rep) { + for ( int i = 0; i < rep; i++ ) { + final SmithWaterman sw; + if ( 
version.equals("Greedy") ) + sw = new GlobalEdgeGreedySWPairwiseAlignment(refString.getBytes(), hapString.getBytes()); + else + sw = new SWPairwiseAlignment(refString.getBytes(), hapString.getBytes()); + sw.getCigar(); + } + } + + public static void main(String[] args) { + com.google.caliper.Runner.main(SmithWatermanBenchmark.class, args); + } +} diff --git a/public/java/test/org/broadinstitute/sting/utils/variant/GATKVCFUtilsUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/variant/GATKVCFUtilsUnitTest.java new file mode 100644 index 000000000..051d0bcec --- /dev/null +++ b/public/java/test/org/broadinstitute/sting/utils/variant/GATKVCFUtilsUnitTest.java @@ -0,0 +1,86 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+*/ + +package org.broadinstitute.sting.utils.variant; + +import org.broadinstitute.sting.BaseTest; +import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; +import org.broadinstitute.sting.gatk.contexts.AlignmentContext; +import org.broadinstitute.sting.gatk.contexts.ReferenceContext; +import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; +import org.broadinstitute.sting.gatk.walkers.RodWalker; +import org.broadinstitute.sting.gatk.walkers.Walker; +import org.broadinstitute.variant.vcf.VCFHeader; +import org.broadinstitute.variant.vcf.VCFHeaderLine; +import org.testng.Assert; +import org.testng.annotations.Test; + +import java.util.Arrays; +import java.util.Collections; +import java.util.Set; + +public class GATKVCFUtilsUnitTest extends BaseTest { + public static class VCFHeaderTestWalker extends RodWalker { + public Integer map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) { return null; } + public Integer reduceInit() { return 0; } + public Integer reduce(Integer value, Integer sum) { return value + sum; } + } + + public static class VCFHeaderTest2Walker extends VCFHeaderTestWalker {} + + @Test + public void testAddingVCFHeaderInfo() { + final VCFHeader header = new VCFHeader(); + + final Walker walker1 = new VCFHeaderTestWalker(); + final Walker walker2 = new VCFHeaderTest2Walker(); + + final GenomeAnalysisEngine testEngine1 = new GenomeAnalysisEngine(); + testEngine1.setWalker(walker1); + + final GenomeAnalysisEngine testEngine2 = new GenomeAnalysisEngine(); + testEngine2.setWalker(walker2); + + final VCFHeaderLine line1 = GATKVCFUtils.getCommandLineArgumentHeaderLine(testEngine1, Collections.EMPTY_LIST); + logger.warn(line1); + Assert.assertNotNull(line1); + Assert.assertEquals(line1.getKey(), GATKVCFUtils.GATK_COMMAND_LINE_KEY); + for ( final String field : Arrays.asList("Version", "ID", "Date", "CommandLineOptions")) + Assert.assertTrue(line1.toString().contains(field), "Couldn't find field " + field + " in " + 
line1.getValue()); + Assert.assertTrue(line1.toString().contains("ID=" + testEngine1.getWalkerName())); + + final VCFHeaderLine line2 = GATKVCFUtils.getCommandLineArgumentHeaderLine(testEngine2, Collections.EMPTY_LIST); + logger.warn(line2); + + header.addMetaDataLine(line1); + final Set lines1 = header.getMetaDataInInputOrder(); + Assert.assertTrue(lines1.contains(line1)); + + header.addMetaDataLine(line2); + final Set lines2 = header.getMetaDataInInputOrder(); + Assert.assertTrue(lines2.contains(line1)); + Assert.assertTrue(lines2.contains(line2)); + } +} \ No newline at end of file diff --git a/public/java/test/org/broadinstitute/sting/utils/variant/GATKVariantContextUtilsUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/variant/GATKVariantContextUtilsUnitTest.java index fcc7c7998..937698d82 100644 --- a/public/java/test/org/broadinstitute/sting/utils/variant/GATKVariantContextUtilsUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/utils/variant/GATKVariantContextUtilsUnitTest.java @@ -28,6 +28,7 @@ package org.broadinstitute.sting.utils.variant; import org.broadinstitute.sting.BaseTest; import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; import org.broadinstitute.sting.utils.BaseUtils; +import org.broadinstitute.sting.utils.MathUtils; import org.broadinstitute.sting.utils.Utils; import org.broadinstitute.sting.utils.collections.Pair; import org.broadinstitute.variant.variantcontext.*; @@ -39,6 +40,7 @@ import org.testng.annotations.Test; import java.util.*; public class GATKVariantContextUtilsUnitTest extends BaseTest { + private final static boolean DEBUG = false; Allele Aref, T, C, G, Cref, ATC, ATCATC; @@ -168,7 +170,7 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { return MergeAllelesTest.getTests(MergeAllelesTest.class); } - @Test(dataProvider = "mergeAlleles") + @Test(enabled = !DEBUG, dataProvider = "mergeAlleles") public void testMergeAlleles(MergeAllelesTest cfg) { final List inputs = new ArrayList(); 
@@ -229,7 +231,7 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { return SimpleMergeRSIDTest.getTests(SimpleMergeRSIDTest.class); } - @Test(dataProvider = "simplemergersiddata") + @Test(enabled = !DEBUG, dataProvider = "simplemergersiddata") public void testRSIDMerge(SimpleMergeRSIDTest cfg) { VariantContext snpVC1 = makeVC("snpvc1", Arrays.asList(Aref, T)); final List inputs = new ArrayList(); @@ -352,7 +354,7 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { return MergeFilteredTest.getTests(MergeFilteredTest.class); } - @Test(dataProvider = "mergeFiltered") + @Test(enabled = !DEBUG, dataProvider = "mergeFiltered") public void testMergeFiltered(MergeFilteredTest cfg) { final List priority = vcs2priority(cfg.inputs); final VariantContext merged = GATKVariantContextUtils.simpleMerge( @@ -479,7 +481,7 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { return MergeGenotypesTest.getTests(MergeGenotypesTest.class); } - @Test(dataProvider = "mergeGenotypes") + @Test(enabled = !DEBUG, dataProvider = "mergeGenotypes") public void testMergeGenotypes(MergeGenotypesTest cfg) { final VariantContext merged = GATKVariantContextUtils.simpleMerge( cfg.inputs, cfg.priority, GATKVariantContextUtils.FilteredRecordMergeType.KEEP_IF_ANY_UNFILTERED, @@ -517,7 +519,7 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { } } - @Test + @Test(enabled = !DEBUG) public void testMergeGenotypesUniquify() { final VariantContext vc1 = makeVC("1", Arrays.asList(Aref, T), makeG("s1", Aref, T, -1)); final VariantContext vc2 = makeVC("2", Arrays.asList(Aref, T), makeG("s1", Aref, T, -2)); @@ -547,7 +549,7 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { // // -------------------------------------------------------------------------------- - @Test + @Test(enabled = !DEBUG) public void testAnnotationSet() { for ( final boolean annotate : Arrays.asList(true, false)) { for ( final String set : Arrays.asList("set", 
"combine", "x")) { @@ -618,7 +620,7 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { return ReverseClippingPositionTestProvider.getTests(ReverseClippingPositionTestProvider.class); } - @Test(dataProvider = "ReverseClippingPositionTestProvider") + @Test(enabled = !DEBUG, dataProvider = "ReverseClippingPositionTestProvider") public void testReverseClippingPositionTestProvider(ReverseClippingPositionTestProvider cfg) { int result = GATKVariantContextUtils.computeReverseClipping(cfg.alleles, cfg.ref.getBytes()); Assert.assertEquals(result, cfg.expectedClip); @@ -706,7 +708,7 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { return tests.toArray(new Object[][]{}); } - @Test(dataProvider = "SplitBiallelics") + @Test(enabled = !DEBUG, dataProvider = "SplitBiallelics") public void testSplitBiallelicsNoGenotypes(final VariantContext vc, final List expectedBiallelics) { final List biallelics = GATKVariantContextUtils.splitVariantContextToBiallelics(vc); Assert.assertEquals(biallelics.size(), expectedBiallelics.size()); @@ -717,7 +719,7 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { } } - @Test(dataProvider = "SplitBiallelics", dependsOnMethods = "testSplitBiallelicsNoGenotypes") + @Test(enabled = !DEBUG, dataProvider = "SplitBiallelics", dependsOnMethods = "testSplitBiallelicsNoGenotypes") public void testSplitBiallelicsGenotypes(final VariantContext vc, final List expectedBiallelics) { final List genotypes = new ArrayList(); @@ -745,7 +747,6 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { } } - // -------------------------------------------------------------------------------- // // Test repeats @@ -810,14 +811,14 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { return RepeatDetectorTest.getTests(RepeatDetectorTest.class); } - @Test(dataProvider = "RepeatDetectorTest") + @Test(enabled = !DEBUG, dataProvider = "RepeatDetectorTest") public void testRepeatDetectorTest(RepeatDetectorTest 
cfg) { // test alleles are equal Assert.assertEquals(GATKVariantContextUtils.isTandemRepeat(cfg.vc, cfg.ref.getBytes()), cfg.isTrueRepeat); } - @Test + @Test(enabled = !DEBUG) public void testRepeatAllele() { Allele nullR = Allele.create("A", true); Allele nullA = Allele.create("A", false); @@ -940,7 +941,7 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { return tests.toArray(new Object[][]{}); } - @Test(dataProvider = "ForwardClippingData") + @Test(enabled = !DEBUG, dataProvider = "ForwardClippingData") public void testForwardClipping(final List alleleStrings, final int expectedClip) { final List alleles = new LinkedList(); for ( final String alleleString : alleleStrings ) @@ -975,7 +976,7 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { return tests.toArray(new Object[][]{}); } - @Test(dataProvider = "ClipAlleleTest") + @Test(enabled = !DEBUG, dataProvider = "ClipAlleleTest") public void testClipAlleles(final List alleleStrings, final List expected, final int numLeftClipped) { final int start = 10; final VariantContext unclipped = GATKVariantContextUtils.makeFromAlleles("test", "20", start, alleleStrings); @@ -1019,7 +1020,7 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { return tests.toArray(new Object[][]{}); } - @Test(dataProvider = "PrimitiveAlleleSplittingData") + @Test(enabled = !DEBUG, dataProvider = "PrimitiveAlleleSplittingData") public void testPrimitiveAlleleSplitting(final String ref, final String alt, final int expectedSplit, final List variantPositions) { final int start = 10; @@ -1066,7 +1067,7 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { return tests.toArray(new Object[][]{}); } - @Test(dataProvider = "AlleleRemappingData") + @Test(enabled = !DEBUG, dataProvider = "AlleleRemappingData") public void testAlleleRemapping(final Map alleleMap, final int numGenotypes) { final GATKVariantContextUtils.AlleleMapper alleleMapper = new 
GATKVariantContextUtils.AlleleMapper(alleleMap); @@ -1102,4 +1103,204 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { return gc; } + + // -------------------------------------------------------------------------------- + // + // Test subsetDiploidAlleles + // + // -------------------------------------------------------------------------------- + + @DataProvider(name = "subsetDiploidAllelesData") + public Object[][] makesubsetDiploidAllelesData() { + List tests = new ArrayList<>(); + + final Allele A = Allele.create("A", true); + final Allele C = Allele.create("C"); + final Allele G = Allele.create("G"); + + final List AA = Arrays.asList(A,A); + final List AC = Arrays.asList(A,C); + final List CC = Arrays.asList(C,C); + final List AG = Arrays.asList(A,G); + final List CG = Arrays.asList(C,G); + final List GG = Arrays.asList(G,G); + final List ACG = Arrays.asList(A,C,G); + + final VariantContext vcBase = new VariantContextBuilder("test", "20", 10, 10, AC).make(); + + final double[] homRefPL = MathUtils.normalizeFromRealSpace(new double[]{0.9, 0.09, 0.01}); + final double[] hetPL = MathUtils.normalizeFromRealSpace(new double[]{0.09, 0.9, 0.01}); + final double[] homVarPL = MathUtils.normalizeFromRealSpace(new double[]{0.01, 0.09, 0.9}); + final double[] uninformative = new double[]{0, 0, 0}; + + final Genotype base = new GenotypeBuilder("NA12878").DP(10).GQ(50).make(); + + // make sure we don't screw up the simple case + final Genotype aaGT = new GenotypeBuilder(base).alleles(AA).AD(new int[]{10,2}).PL(homRefPL).GQ(8).make(); + final Genotype acGT = new GenotypeBuilder(base).alleles(AC).AD(new int[]{10,2}).PL(hetPL).GQ(8).make(); + final Genotype ccGT = new GenotypeBuilder(base).alleles(CC).AD(new int[]{10,2}).PL(homVarPL).GQ(8).make(); + + tests.add(new Object[]{new VariantContextBuilder(vcBase).genotypes(aaGT).make(), AC, Arrays.asList(new GenotypeBuilder(aaGT).noAD().make())}); + tests.add(new Object[]{new 
VariantContextBuilder(vcBase).genotypes(acGT).make(), AC, Arrays.asList(new GenotypeBuilder(acGT).noAD().make())}); + tests.add(new Object[]{new VariantContextBuilder(vcBase).genotypes(ccGT).make(), AC, Arrays.asList(new GenotypeBuilder(ccGT).noAD().make())}); + + // uninformative test case + final Genotype uninformativeGT = new GenotypeBuilder(base).alleles(CC).noAD().PL(uninformative).GQ(0).make(); + final Genotype emptyGT = new GenotypeBuilder(base).alleles(GATKVariantContextUtils.NO_CALL_ALLELES).noAD().noPL().noGQ().make(); + tests.add(new Object[]{new VariantContextBuilder(vcBase).genotypes(uninformativeGT).make(), AC, Arrays.asList(emptyGT)}); + + // actually subsetting down from multiple alt values + final double[] homRef3AllelesPL = new double[]{0, -10, -20, -30, -40, -50}; + final double[] hetRefC3AllelesPL = new double[]{-10, 0, -20, -30, -40, -50}; + final double[] homC3AllelesPL = new double[]{-20, -10, 0, -30, -40, -50}; + final double[] hetRefG3AllelesPL = new double[]{-20, -10, -30, 0, -40, -50}; + final double[] hetCG3AllelesPL = new double[]{-20, -10, -30, -40, 0, -50}; // AA, AC, CC, AG, CG, GG + final double[] homG3AllelesPL = new double[]{-20, -10, -30, -40, -50, 0}; // AA, AC, CC, AG, CG, GG + tests.add(new Object[]{ + new VariantContextBuilder(vcBase).alleles(ACG).genotypes(new GenotypeBuilder(base).alleles(AA).noAD().PL(homRef3AllelesPL).make()).make(), + AC, + Arrays.asList(new GenotypeBuilder(base).alleles(AA).PL(new double[]{0, -10, -20}).noAD().GQ(100).make())}); + + tests.add(new Object[]{ + new VariantContextBuilder(vcBase).alleles(ACG).genotypes(new GenotypeBuilder(base).alleles(AA).noAD().PL(hetRefC3AllelesPL).make()).make(), + AC, + Arrays.asList(new GenotypeBuilder(base).alleles(AC).PL(new double[]{-10, 0, -20}).noAD().GQ(100).make())}); + + tests.add(new Object[]{ + new VariantContextBuilder(vcBase).alleles(ACG).genotypes(new GenotypeBuilder(base).alleles(AA).noAD().PL(homC3AllelesPL).make()).make(), + AC, + Arrays.asList(new 
GenotypeBuilder(base).alleles(CC).PL(new double[]{-20, -10, 0}).noAD().GQ(100).make())}); + tests.add(new Object[]{ + new VariantContextBuilder(vcBase).alleles(ACG).genotypes(new GenotypeBuilder(base).alleles(AA).noAD().PL(hetRefG3AllelesPL).make()).make(), + AG, + Arrays.asList(new GenotypeBuilder(base).alleles(AG).PL(new double[]{-20, 0, -50}).noAD().GQ(200).make())}); + + // wow, scary -- bad output but discussed with Eric and we think this is the only thing that can be done + tests.add(new Object[]{ + new VariantContextBuilder(vcBase).alleles(ACG).genotypes(new GenotypeBuilder(base).alleles(AA).noAD().PL(hetCG3AllelesPL).make()).make(), + AG, + Arrays.asList(new GenotypeBuilder(base).alleles(AA).PL(new double[]{0, -20, -30}).noAD().GQ(200).make())}); + + tests.add(new Object[]{ + new VariantContextBuilder(vcBase).alleles(ACG).genotypes(new GenotypeBuilder(base).alleles(AA).noAD().PL(homG3AllelesPL).make()).make(), + AG, + Arrays.asList(new GenotypeBuilder(base).alleles(GG).PL(new double[]{-20, -40, 0}).noAD().GQ(200).make())}); + + return tests.toArray(new Object[][]{}); + } + + @Test(dataProvider = "subsetDiploidAllelesData") + public void testsubsetDiploidAllelesData(final VariantContext inputVC, + final List allelesToUse, + final List expectedGenotypes) { + final GenotypesContext actual = GATKVariantContextUtils.subsetDiploidAlleles(inputVC, allelesToUse, GATKVariantContextUtils.GenotypeAssignmentMethod.USE_PLS_TO_ASSIGN); + + Assert.assertEquals(actual.size(), expectedGenotypes.size()); + for ( final Genotype expected : expectedGenotypes ) { + final Genotype actualGT = actual.get(expected.getSampleName()); + Assert.assertNotNull(actualGT); + assertGenotypesAreEqual(actualGT, expected); + } + } + + @DataProvider(name = "UpdateGenotypeAfterSubsettingData") + public Object[][] makeUpdateGenotypeAfterSubsettingData() { + List tests = new ArrayList(); + + final Allele A = Allele.create("A", true); + final Allele C = Allele.create("C"); + final Allele G = 
Allele.create("G"); + + final List AA = Arrays.asList(A,A); + final List AC = Arrays.asList(A,C); + final List CC = Arrays.asList(C,C); + final List AG = Arrays.asList(A,G); + final List CG = Arrays.asList(C,G); + final List GG = Arrays.asList(G,G); + final List ACG = Arrays.asList(A,C,G); + final List> allSubsetAlleles = Arrays.asList(AC,AG,ACG); + + final double[] homRefPL = new double[]{0.9, 0.09, 0.01}; + final double[] hetPL = new double[]{0.09, 0.9, 0.01}; + final double[] homVarPL = new double[]{0.01, 0.09, 0.9}; + final double[] uninformative = new double[]{0.33, 0.33, 0.33}; + final List allPLs = Arrays.asList(homRefPL, hetPL, homVarPL, uninformative); + + for ( final List alleles : allSubsetAlleles ) { + for ( final double[] pls : allPLs ) { + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.SET_TO_NO_CALL, pls, AA, alleles, GATKVariantContextUtils.NO_CALL_ALLELES}); + } + } + + for ( final List originalGT : Arrays.asList(AA, AC, CC, AG, CG, GG) ) { + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.USE_PLS_TO_ASSIGN, homRefPL, originalGT, AC, AA}); + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.USE_PLS_TO_ASSIGN, hetPL, originalGT, AC, AC}); + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.USE_PLS_TO_ASSIGN, homVarPL, originalGT, AC, CC}); +// tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.USE_PLS_TO_ASSIGN, uninformative, AA, AC, GATKVariantContextUtils.NO_CALL_ALLELES}); + } + + for ( final double[] pls : allPLs ) { + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, AA, AC, AA}); + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, AC, AC, AC}); + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, CC, AC, CC}); + tests.add(new 
Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, CG, AC, AC}); + + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, AA, AG, AA}); + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, AC, AG, AA}); + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, CC, AG, AA}); + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, CG, AG, AG}); + + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, AA, ACG, AA}); + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, AC, ACG, AC}); + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, CC, ACG, CC}); + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, AG, ACG, AG}); + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, CG, ACG, CG}); + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, GG, ACG, GG}); + } + + return tests.toArray(new Object[][]{}); + } + + @Test(enabled = !DEBUG, dataProvider = "UpdateGenotypeAfterSubsettingData") + public void testUpdateGenotypeAfterSubsetting(final GATKVariantContextUtils.GenotypeAssignmentMethod mode, + final double[] likelihoods, + final List originalGT, + final List allelesToUse, + final List expectedAlleles) { + final GenotypeBuilder gb = new GenotypeBuilder("test"); + final double[] log10Likelhoods = MathUtils.normalizeFromLog10(likelihoods, true, false); + GATKVariantContextUtils.updateGenotypeAfterSubsetting(originalGT, gb, mode, log10Likelhoods, allelesToUse); + final Genotype g = gb.make(); + Assert.assertEquals(new HashSet<>(g.getAlleles()), new 
HashSet<>(expectedAlleles)); + } + + @Test(enabled = !DEBUG) + public void testSubsetToRef() { + final Map tests = new LinkedHashMap<>(); + + for ( final List alleles : Arrays.asList(Arrays.asList(Aref), Arrays.asList(C), Arrays.asList(Aref, C), Arrays.asList(Aref, C, C) ) ) { + for ( final String name : Arrays.asList("test1", "test2") ) { + final GenotypeBuilder builder = new GenotypeBuilder(name, alleles); + builder.DP(10); + builder.GQ(30); + builder.AD(alleles.size() == 1 ? new int[]{1} : (alleles.size() == 2 ? new int[]{1, 2} : new int[]{1, 2, 3})); + builder.PL(alleles.size() == 1 ? new int[]{1} : (alleles.size() == 2 ? new int[]{1,2} : new int[]{1,2,3})); + final List refs = Collections.nCopies(alleles.size(), Aref); + tests.put(builder.make(), builder.alleles(refs).noAD().noPL().make()); + } + } + + for ( final int n : Arrays.asList(1, 2, 3) ) { + for ( final List genotypes : Utils.makePermutations(new ArrayList<>(tests.keySet()), n, false) ) { + final VariantContext vc = new VariantContextBuilder("test", "20", 1, 1, Arrays.asList(Aref, C)).genotypes(genotypes).make(); + final GenotypesContext gc = GATKVariantContextUtils.subsetToRefOnly(vc, 2); + + Assert.assertEquals(gc.size(), genotypes.size()); + for ( int i = 0; i < genotypes.size(); i++ ) { +// logger.warn("Testing " + genotypes.get(i) + " => " + gc.get(i) + " " + tests.get(genotypes.get(i))); + assertGenotypesAreEqual(gc.get(i), tests.get(genotypes.get(i))); + } + } + } + } } \ No newline at end of file diff --git a/public/scala/qscript/org/broadinstitute/sting/queue/qscripts/GATKResourcesBundle.scala b/public/scala/qscript/org/broadinstitute/sting/queue/qscripts/GATKResourcesBundle.scala index 55e56889a..1736adc17 100644 --- a/public/scala/qscript/org/broadinstitute/sting/queue/qscripts/GATKResourcesBundle.scala +++ b/public/scala/qscript/org/broadinstitute/sting/queue/qscripts/GATKResourcesBundle.scala @@ -40,8 +40,8 @@ class GATKResourcesBundle extends QScript { @Argument(doc="liftOverPerl", 
required=false) var liftOverPerl: File = new File("./public/perl/liftOverVCF.pl") - @Argument(shortName = "ver", doc="The SVN version of this release", required=true) - var VERSION: String = _ + @Argument(shortName = "ver", doc="The GIT version of this release", required=true) + var BUNDLE_VERSION: String = _ @Argument(shortName = "bundleDir", doc="Path to root where resource files will be placed", required=false) val BUNDLE_ROOT = new File("/humgen/gsa-hpprojects/GATK/bundle") @@ -57,8 +57,8 @@ class GATKResourcesBundle extends QScript { val SITES_EXT: String = "sites" - def BUNDLE_DIR: File = BUNDLE_ROOT + "/" + VERSION - def DOWNLOAD_DIR: File = DOWNLOAD_ROOT + "/" + VERSION + def BUNDLE_DIR: File = BUNDLE_ROOT + "/" + BUNDLE_VERSION + def DOWNLOAD_DIR: File = DOWNLOAD_ROOT + "/" + BUNDLE_VERSION // REFERENCES class Reference( val name: String, val file: File ) { } @@ -161,7 +161,7 @@ class GATKResourcesBundle extends QScript { "1000G_phase1.indels", b37, true, false)) addResource(new Resource("/humgen/1kg/processing/official_release/phase1/projectConsensus/phase1.wgs.projectConsensus.v2b.recal.highQuality.vcf", - "1000G_phase1.snps.high_confidence, b37, true, false)) + "1000G_phase1.snps.high_confidence", b37, true, false)) addResource(new Resource("/humgen/gsa-hpprojects/GATK/data/Comparisons/Unvalidated/GoldStandardIndel/gold.standard.indel.MillsAnd1000G.b37.vcf", "Mills_and_1000G_gold_standard.indels", b37, true, false)) @@ -182,7 +182,7 @@ class GATKResourcesBundle extends QScript { // // Test BAM file, specific to each reference // - addResource(new Resource("/humgen/gsa-hpprojects/NA12878Collection/bams/NA12878.HiSeq.WGS.bwa.cleaned.recal.b37.20.bam", + addResource(new Resource("/humgen/gsa-hpprojects/NA12878Collection/bams/CEUTrio.HiSeq.WGS.b37.NA12878.bam", "IGNORE", b37, false, false)) // @@ -234,8 +234,7 @@ class GATKResourcesBundle extends QScript { for ( resource: Resource <- RESOURCES ) { if ( isFASTA(resource.file) ) { - val f = 
copyBundleFile(resource, resource.ref) - add(new createDictandFAI(f)) + copyBundleFasta(resource, resource.ref) } else if ( isBAM(resource.file) ) { val f = copyBundleFile(resource, resource.ref) add(new IndexBAM(f)) @@ -312,6 +311,20 @@ class GATKResourcesBundle extends QScript { } } + def copyBundleFasta(res: Resource, ref: Reference) { + val out = destFile(BUNDLE_DIR, ref, res.destname(ref)) + add(new cpFile(res.file, out)) + + val oldRefDict = swapExt(res.file.getParent, res.file, ".fasta", ".dict") + val newRefDict = swapExt(out.getParent, out, ".fasta", ".dict") + + val oldRefFai = swapExt(res.file.getParent, res.file, ".fasta", ".fasta.fai") + val newRefFai = swapExt(out.getParent, out, ".fasta", ".fasta.fai") + + add(new cpFile(oldRefDict, newRefDict)) + add(new cpFile(oldRefFai, newRefFai)) + } + def copyBundleFile(res: Resource, ref: Reference): File = { val out = destFile(BUNDLE_DIR, ref, res.destname(ref)) add(new cpFile(res.file, out)) @@ -389,13 +402,5 @@ class GATKResourcesBundle extends QScript { else return ""; } - - class createDictandFAI (@Input ref: File) extends FastaStats with UNIVERSAL_GATK_ARGS { - @Output val outDict: File = swapExt(ref.getParent, ref, ".fasta", ".dict") - @Output val outFai: File = swapExt(ref.getParent, ref, ".fasta", ".fasta.fai") - @Output val outStats: File = swapExt(ref.getParent, ref, ".fasta", ".stats") - this.reference_sequence = ref - this.out = outStats - } } diff --git a/public/scala/src/org/broadinstitute/sting/queue/extensions/picard/CollectGcBiasMetrics.scala b/public/scala/src/org/broadinstitute/sting/queue/extensions/picard/CollectGcBiasMetrics.scala index 5d887016e..7c4c3f26a 100644 --- a/public/scala/src/org/broadinstitute/sting/queue/extensions/picard/CollectGcBiasMetrics.scala +++ b/public/scala/src/org/broadinstitute/sting/queue/extensions/picard/CollectGcBiasMetrics.scala @@ -52,6 +52,5 @@ class CollectGcBiasMetrics extends org.broadinstitute.sting.queue.function.JavaC override def commandLine = 
super.commandLine + required("SUMMARY_OUTPUT=" + output) + required("CHART_OUTPUT=" + output+".pdf") + - required("REFERENCE_SEQUENCE=" + reference) + - required("ASSUME_SORTED=true") + required("REFERENCE_SEQUENCE=" + reference) } diff --git a/public/scala/test/org/broadinstitute/sting/queue/pipeline/PipelineTest.scala b/public/scala/test/org/broadinstitute/sting/queue/pipeline/PipelineTest.scala index 03b38ffe9..6741e4107 100644 --- a/public/scala/test/org/broadinstitute/sting/queue/pipeline/PipelineTest.scala +++ b/public/scala/test/org/broadinstitute/sting/queue/pipeline/PipelineTest.scala @@ -113,7 +113,7 @@ object PipelineTest extends BaseTest with Logging { private def assertMatchingMD5s(name: String, fileMD5s: Traversable[(File, String)], parameterize: Boolean) { var failed = 0 for ((file, expectedMD5) <- fileMD5s) { - val calculatedMD5 = md5DB.testFileMD5(name, file, expectedMD5, parameterize) + val calculatedMD5 = md5DB.testFileMD5(name, "", file, expectedMD5, parameterize).actualMD5 if (!parameterize && expectedMD5 != "" && expectedMD5 != calculatedMD5) failed += 1 } diff --git a/public/testdata/exampleDBSNP.vcf.idx b/public/testdata/exampleDBSNP.vcf.idx new file mode 100644 index 000000000..7239e366f Binary files /dev/null and b/public/testdata/exampleDBSNP.vcf.idx differ diff --git a/settings/repository/net.sf/picard-1.90.1442.xml b/settings/repository/net.sf/picard-1.90.1442.xml deleted file mode 100644 index 4ec267817..000000000 --- a/settings/repository/net.sf/picard-1.90.1442.xml +++ /dev/null @@ -1,3 +0,0 @@ - - - diff --git a/settings/repository/net.sf/picard-1.90.1442.jar b/settings/repository/net.sf/picard-1.91.1453.jar similarity index 85% rename from settings/repository/net.sf/picard-1.90.1442.jar rename to settings/repository/net.sf/picard-1.91.1453.jar index caf2bc09d..f196ee5a4 100644 Binary files a/settings/repository/net.sf/picard-1.90.1442.jar and b/settings/repository/net.sf/picard-1.91.1453.jar differ diff --git 
a/settings/repository/net.sf/picard-1.91.1453.xml b/settings/repository/net.sf/picard-1.91.1453.xml new file mode 100644 index 000000000..5d1bf41e8 --- /dev/null +++ b/settings/repository/net.sf/picard-1.91.1453.xml @@ -0,0 +1,3 @@ + + + diff --git a/settings/repository/net.sf/sam-1.90.1442.xml b/settings/repository/net.sf/sam-1.90.1442.xml deleted file mode 100644 index 918ea6ff0..000000000 --- a/settings/repository/net.sf/sam-1.90.1442.xml +++ /dev/null @@ -1,3 +0,0 @@ - - - diff --git a/settings/repository/net.sf/sam-1.90.1442.jar b/settings/repository/net.sf/sam-1.91.1453.jar similarity index 85% rename from settings/repository/net.sf/sam-1.90.1442.jar rename to settings/repository/net.sf/sam-1.91.1453.jar index f6e003657..d25dc0f66 100644 Binary files a/settings/repository/net.sf/sam-1.90.1442.jar and b/settings/repository/net.sf/sam-1.91.1453.jar differ diff --git a/settings/repository/net.sf/sam-1.91.1453.xml b/settings/repository/net.sf/sam-1.91.1453.xml new file mode 100644 index 000000000..69f8b7ebf --- /dev/null +++ b/settings/repository/net.sf/sam-1.91.1453.xml @@ -0,0 +1,3 @@ + + + diff --git a/settings/repository/org.broad/tribble-1.90.1442.jar b/settings/repository/org.broad/tribble-1.91.1453.jar similarity index 82% rename from settings/repository/org.broad/tribble-1.90.1442.jar rename to settings/repository/org.broad/tribble-1.91.1453.jar index 75b4c2fc5..aad68d8dd 100644 Binary files a/settings/repository/org.broad/tribble-1.90.1442.jar and b/settings/repository/org.broad/tribble-1.91.1453.jar differ diff --git a/settings/repository/org.broad/tribble-1.90.1442.xml b/settings/repository/org.broad/tribble-1.91.1453.xml similarity index 76% rename from settings/repository/org.broad/tribble-1.90.1442.xml rename to settings/repository/org.broad/tribble-1.91.1453.xml index 01b944fe4..93c75edab 100644 --- a/settings/repository/org.broad/tribble-1.90.1442.xml +++ b/settings/repository/org.broad/tribble-1.91.1453.xml @@ -1,3 +1,3 @@ - + diff --git 
a/settings/repository/org.broadinstitute/variant-1.90.1442.jar b/settings/repository/org.broadinstitute/variant-1.91.1453.jar similarity index 80% rename from settings/repository/org.broadinstitute/variant-1.90.1442.jar rename to settings/repository/org.broadinstitute/variant-1.91.1453.jar index cf06f592e..d339781e9 100644 Binary files a/settings/repository/org.broadinstitute/variant-1.90.1442.jar and b/settings/repository/org.broadinstitute/variant-1.91.1453.jar differ diff --git a/settings/repository/org.broadinstitute/variant-1.90.1442.xml b/settings/repository/org.broadinstitute/variant-1.91.1453.xml similarity index 71% rename from settings/repository/org.broadinstitute/variant-1.90.1442.xml rename to settings/repository/org.broadinstitute/variant-1.91.1453.xml index 3838b8b6f..38f6fef3e 100644 --- a/settings/repository/org.broadinstitute/variant-1.90.1442.xml +++ b/settings/repository/org.broadinstitute/variant-1.91.1453.xml @@ -1,3 +1,3 @@ - +